Hibernate support added

- Hibernate-oriented repositories
- bootstrapping of Hibernate repositories if Hibernate support is enabled

Change-Id: I4af4815a46c92e2354147531fb2f6c5895531b43
Tomasz Trębski 2015-04-27 15:31:59 +02:00 committed by Deklan Dieterly
parent afb58f3ef8
commit 5936c05020
22 changed files with 3357 additions and 87 deletions
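A minimal bootstrap sketch of what the change enables, for orientation. It assumes a MonApiModule(environment, config) constructor (not shown in this diff), and the config/environment arguments stand in for whatever the Dropwizard startup already provides. When the optional hibernate configuration section reports support as enabled, the injector hands out the ORM-backed repositories and the @Named("orm") SessionFactory introduced below; otherwise the existing JDBI/MySQL bindings remain in effect.

import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.name.Names;

import org.hibernate.SessionFactory;

import io.dropwizard.setup.Environment;

import monasca.api.ApiConfig;
import monasca.api.MonApiModule;
import monasca.api.domain.model.alarm.AlarmRepo;

public class HibernateBootstrapSketch {

  // Illustration only: config and environment come from the normal Dropwizard startup.
  static void showBindings(ApiConfig config, Environment environment) {
    Injector injector = Guice.createInjector(new MonApiModule(environment, config));

    // Provided by MonApiModule#getSessionFactory() under the "orm" name;
    // provisioning fails if the hibernate configuration section is missing.
    SessionFactory sessionFactory =
        injector.getInstance(Key.get(SessionFactory.class, Names.named("orm")));

    // Resolves to AlarmSqlRepoImpl when hibernate support is enabled,
    // and to AlarmMySqlRepoImpl otherwise (see InfrastructureModule below).
    AlarmRepo alarmRepo = injector.getInstance(AlarmRepo.class);
  }
}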

View File

@ -69,11 +69,16 @@
<artifactId>monasca-common-util</artifactId>
<version>${mon.common.version}</version>
</dependency>
<dependency>
<groupId>commons-validator</groupId>
<artifactId>commons-validator</artifactId>
<version>1.4.0</version>
</dependency>
<dependency>
<groupId>monasca-common</groupId>
<artifactId>monasca-common-hibernate</artifactId>
<version>${mon.common.version}</version>
</dependency>
<dependency>
<groupId>commons-validator</groupId>
<artifactId>commons-validator</artifactId>
<version>1.4.0</version>
</dependency>
<dependency>
<groupId>monasca-common</groupId>
<artifactId>monasca-common-kafka</artifactId>
@ -154,7 +159,17 @@
<artifactId>influxdb-java</artifactId>
<version>1.0</version>
</dependency>
<dependency>
<groupId>postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>9.1-901.jdbc4</version>
</dependency>
<dependency>
<groupId>com.zaxxer</groupId>
<artifactId>HikariCP-java6</artifactId>
<version>2.3.9</version>
<scope>compile</scope>
</dependency>
<!-- Test dependencies -->
<dependency>
@ -271,7 +286,7 @@
<artifactId>maven-failsafe-plugin</artifactId>
<version>2.17</version>
<configuration>
<groups>performance,functional,integration,database,slow</groups>
<groups>performance,functional,integration,database,slow,orm</groups>
<skipTests>${skipITs}</skipTests>
<parallel>methods</parallel>
<threadCount>4</threadCount>

View File

@ -14,6 +14,7 @@
package monasca.api;
import com.fasterxml.jackson.annotation.JsonProperty;
import monasca.common.hibernate.configuration.HibernateDbConfiguration;
import monasca.common.messaging.kafka.KafkaConfiguration;
import monasca.api.infrastructure.middleware.MiddlewareConfiguration;
import monasca.common.configuration.DatabaseConfiguration;
@ -59,5 +60,6 @@ public class ApiConfig extends Configuration {
@Valid
@JsonProperty
public DatabaseConfiguration databaseConfiguration;
@Valid
public HibernateDbConfiguration hibernate;
}

View File

@ -13,10 +13,12 @@
*/
package monasca.api;
import ch.qos.logback.classic.Level;
import io.dropwizard.db.DataSourceFactory;
import io.dropwizard.jdbi.DBIFactory;
import io.dropwizard.setup.Environment;
import java.util.Arrays;
import java.util.Properties;
import javax.inject.Named;
@ -25,6 +27,10 @@ import javax.inject.Singleton;
import kafka.javaapi.producer.Producer;
import kafka.producer.ProducerConfig;
import org.hibernate.SessionFactory;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.Configuration;
import org.hibernate.service.ServiceRegistry;
import org.skife.jdbi.v2.DBI;
import com.codahale.metrics.MetricRegistry;
@ -33,14 +39,37 @@ import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.google.inject.ProvisionException;
import com.google.inject.name.Names;
import monasca.api.app.ApplicationModule;
import monasca.api.domain.DomainModule;
import monasca.api.infrastructure.InfrastructureModule;
import monasca.common.hibernate.db.AlarmActionDb;
import monasca.common.hibernate.db.AlarmActionId;
import monasca.common.hibernate.db.AlarmDb;
import monasca.common.hibernate.db.AlarmDefinitionDb;
import monasca.common.hibernate.db.AlarmMetricDb;
import monasca.common.hibernate.db.AlarmMetricId;
import monasca.common.hibernate.db.MetricDefinitionDb;
import monasca.common.hibernate.db.MetricDefinitionDimensionsDb;
import monasca.common.hibernate.db.MetricDimensionDb;
import monasca.common.hibernate.db.NotificationMethodDb;
import monasca.common.hibernate.db.SubAlarmDb;
import monasca.common.hibernate.db.SubAlarmDefinitionDb;
import monasca.common.hibernate.db.SubAlarmDefinitionDimensionDb;
/**
* Monitoring API server bindings.
*/
public class MonApiModule extends AbstractModule {
public class MonApiModule
extends AbstractModule {
/**
* <b>PostgreSQL</b> {@link javax.sql.DataSource} class name
*/
private static final String POSTGRES_DS_CLASS = "org.postgresql.ds.PGPoolingDataSource";
/**
* <b>MySQL</b> {@link javax.sql.DataSource} class name
*/
private static final String MYSQL_DS_CLASS = "com.mysql.jdbc.jdbc2.optional.MysqlDataSource";
private final ApiConfig config;
private final Environment environment;
@ -53,7 +82,9 @@ public class MonApiModule extends AbstractModule {
protected void configure() {
bind(ApiConfig.class).toInstance(config);
bind(MetricRegistry.class).toInstance(environment.metrics());
bind(DataSourceFactory.class).annotatedWith(Names.named("mysql")).toInstance(config.mysql);
if (!this.isHibernateEnabled()) {
bind(DataSourceFactory.class).annotatedWith(Names.named("mysql")).toInstance(config.mysql);
}
bind(DataSourceFactory.class).annotatedWith(Names.named("vertica")).toInstance(config.vertica);
install(new ApplicationModule());
@ -61,6 +92,42 @@ public class MonApiModule extends AbstractModule {
install(new InfrastructureModule(this.config));
}
@Provides
@Singleton
@Named("orm")
public SessionFactory getSessionFactory() {
if (config.hibernate == null) {
throw new ProvisionException("Unable to provision ORM DBI, couldn't locate hibernate configuration");
}
try {
Configuration configuration = new Configuration();
configuration.addAnnotatedClass(AlarmDb.class);
configuration.addAnnotatedClass(AlarmActionDb.class);
configuration.addAnnotatedClass(AlarmActionId.class);
configuration.addAnnotatedClass(AlarmDefinitionDb.class);
configuration.addAnnotatedClass(AlarmMetricDb.class);
configuration.addAnnotatedClass(AlarmMetricId.class);
configuration.addAnnotatedClass(MetricDefinitionDb.class);
configuration.addAnnotatedClass(MetricDefinitionDimensionsDb.class);
configuration.addAnnotatedClass(MetricDimensionDb.class);
configuration.addAnnotatedClass(SubAlarmDefinitionDb.class);
configuration.addAnnotatedClass(SubAlarmDefinitionDimensionDb.class);
configuration.addAnnotatedClass(SubAlarmDb.class);
configuration.addAnnotatedClass(NotificationMethodDb.class);
configuration.setProperties(this.getORMProperties(this.config.hibernate.getDataSourceClassName()));
ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder().applySettings(configuration.getProperties()).build();
// builds a session factory from the service registry
return configuration.buildSessionFactory(serviceRegistry);
} catch (Throwable ex) {
throw new ProvisionException("Failed to provision ORM DBI", ex);
}
}
@Provides
@Singleton
@Named("mysql")
@ -93,4 +160,61 @@ public class MonApiModule extends AbstractModule {
ProducerConfig config = new ProducerConfig(props);
return new Producer<String, String>(config);
}
private Properties getORMProperties(final String dataSourceClassName) {
final Properties properties = new Properties();
// different drivers require different sets of properties
switch (dataSourceClassName) {
case POSTGRES_DS_CLASS:
this.handlePostgresORMProperties(properties);
break;
case MYSQL_DS_CLASS:
this.handleMySQLORMProperties(properties);
break;
default:
throw new ProvisionException(
String.format(
"%s is not supported, valid data sources are %s",
dataSourceClassName,
Arrays.asList(POSTGRES_DS_CLASS, MYSQL_DS_CLASS)
)
);
}
// driver agnostic properties
this.handleCommonORMProperties(properties);
return properties;
}
private void handleCommonORMProperties(final Properties properties) {
properties.put("hibernate.connection.provider_class", this.config.hibernate.getProviderClass());
properties.put("hibernate.hbm2ddl.auto", this.config.hibernate.getAutoConfig());
properties.put("show_sql", this.config.getLoggingFactory().getLevel().equals(Level.DEBUG));
properties.put("hibernate.hikari.dataSource.user", this.config.hibernate.getUser());
properties.put("hibernate.hikari.dataSource.password", this.config.hibernate.getPassword());
properties.put("hibernate.hikari.dataSourceClassName", this.config.hibernate.getDataSourceClassName());
}
private void handleMySQLORMProperties(final Properties properties) {
properties.put("hibernate.hikari.dataSource.url", this.config.hibernate.getDataSourceUrl());
}
private void handlePostgresORMProperties(final Properties properties) {
properties.put("hibernate.hikari.dataSource.serverName", this.config.hibernate.getServerName());
properties.put("hibernate.hikari.dataSource.portNumber", this.config.hibernate.getPortNumber());
properties.put("hibernate.hikari.dataSource.databaseName", this.config.hibernate.getDatabaseName());
properties.put("hibernate.hikari.dataSource.initialConnections", this.config.hibernate.getInitialConnections());
properties.put("hibernate.hikari.dataSource.maxConnections", this.config.hibernate.getMaxConnections());
properties.put("hibernate.hikari.connectionTestQuery", "SELECT 1");
properties.put("hibernate.hikari.connectionTimeout", "5000");
properties.put("hibernate.hikari.initializationFailFast", "false");
}
private boolean isHibernateEnabled() {
return this.config.hibernate != null && this.config.hibernate.getSupportEnabled();
}
}
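Taken together, getORMProperties() and the three helpers above assemble a single java.util.Properties object; the hibernate.hikari.* keys are handed by Hibernate to the configured HikariCP connection provider. A sketch of roughly what that object contains for the PostgreSQL data source follows; every value is illustrative, since at runtime each one is read from the HibernateDbConfiguration getters used above.

import java.util.Properties;

// Illustrative values only; at runtime every value comes from HibernateDbConfiguration.
class PostgresOrmPropertiesSketch {
  static Properties example() {
    Properties p = new Properties();
    // driver-specific keys (handlePostgresORMProperties)
    p.put("hibernate.hikari.dataSource.serverName", "127.0.0.1");
    p.put("hibernate.hikari.dataSource.portNumber", "5432");
    p.put("hibernate.hikari.dataSource.databaseName", "mon");
    p.put("hibernate.hikari.dataSource.initialConnections", "25");
    p.put("hibernate.hikari.dataSource.maxConnections", "100");
    p.put("hibernate.hikari.connectionTestQuery", "SELECT 1");
    p.put("hibernate.hikari.connectionTimeout", "5000");
    p.put("hibernate.hikari.initializationFailFast", "false");
    // driver-agnostic keys (handleCommonORMProperties)
    p.put("hibernate.connection.provider_class", "com.zaxxer.hikari.hibernate.HikariConnectionProvider"); // example provider class
    p.put("hibernate.hbm2ddl.auto", "validate");
    p.put("show_sql", false);
    p.put("hibernate.hikari.dataSource.user", "monapi");
    p.put("hibernate.hikari.dataSource.password", "password");
    p.put("hibernate.hikari.dataSourceClassName", "org.postgresql.ds.PGPoolingDataSource");
    return p;
  }
}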

View File

@ -14,6 +14,7 @@
package monasca.api.domain.model.alarm;
import org.joda.time.DateTime;
import org.apache.commons.collections4.CollectionUtils;
import java.util.List;
@ -175,7 +176,8 @@ public class Alarm extends AbstractEntity implements Linked {
if (metrics == null) {
if (other.metrics != null)
return false;
} else if (!metrics.equals(other.metrics))
} else if (!CollectionUtils.isEqualCollection(metrics, other.metrics))
// order agnostic collection equality check
return false;
if (state != other.state)
return false;

View File

@ -27,6 +27,7 @@ import monasca.api.domain.model.metric.MetricDefinitionRepo;
import monasca.api.domain.model.notificationmethod.NotificationMethodRepo;
import monasca.api.domain.model.statistic.StatisticRepo;
import monasca.api.infrastructure.persistence.PersistUtils;
import monasca.api.infrastructure.persistence.Utils;
import monasca.api.infrastructure.persistence.influxdb.InfluxV9AlarmStateHistoryRepo;
import monasca.api.infrastructure.persistence.influxdb.InfluxV9MeasurementRepo;
import monasca.api.infrastructure.persistence.influxdb.InfluxV9MetricDefinitionRepo;
@ -35,8 +36,12 @@ import monasca.api.infrastructure.persistence.influxdb.InfluxV9StatisticRepo;
import monasca.api.infrastructure.persistence.influxdb.InfluxV9Utils;
import monasca.api.infrastructure.persistence.mysql.AlarmDefinitionMySqlRepoImpl;
import monasca.api.infrastructure.persistence.mysql.AlarmMySqlRepoImpl;
import monasca.api.infrastructure.persistence.mysql.NotificationMethodMySqlRepoImpl;
import monasca.api.infrastructure.persistence.mysql.MySQLUtils;
import monasca.api.infrastructure.persistence.mysql.NotificationMethodMySqlRepoImpl;
import monasca.api.infrastructure.persistence.hibernate.AlarmDefinitionSqlRepoImpl;
import monasca.api.infrastructure.persistence.hibernate.AlarmSqlRepoImpl;
import monasca.api.infrastructure.persistence.hibernate.NotificationMethodSqlRepoImpl;
import monasca.api.infrastructure.persistence.hibernate.AlarmHibernateUtils;
import monasca.api.infrastructure.persistence.vertica.AlarmStateHistoryVerticaRepoImpl;
import monasca.api.infrastructure.persistence.vertica.MeasurementVerticaRepoImpl;
import monasca.api.infrastructure.persistence.vertica.MetricDefinitionVerticaRepoImpl;
@ -59,19 +64,26 @@ public class InfrastructureModule extends AbstractModule {
@Override
protected void configure() {
final boolean hibernateEnabled = this.isHibernateEnabled();
this.bindUtils(hibernateEnabled);
// Bind repositories
bind(AlarmRepo.class).to(AlarmMySqlRepoImpl.class).in(Singleton.class);
bind(AlarmDefinitionRepo.class).to(AlarmDefinitionMySqlRepoImpl.class).in(Singleton.class);
bind(MySQLUtils.class);
bind(PersistUtils.class).in(Singleton.class);
if (hibernateEnabled) {
this.bind(AlarmRepo.class).to(AlarmSqlRepoImpl.class).in(Singleton.class);
this.bind(AlarmDefinitionRepo.class).to(AlarmDefinitionSqlRepoImpl.class).in(Singleton.class);
this.bind(NotificationMethodRepo.class).to(NotificationMethodSqlRepoImpl.class).in(Singleton.class);
} else {
bind(AlarmRepo.class).to(AlarmMySqlRepoImpl.class).in(Singleton.class);
bind(AlarmDefinitionRepo.class).to(AlarmDefinitionMySqlRepoImpl.class).in(Singleton.class);
bind(NotificationMethodRepo.class).to(NotificationMethodMySqlRepoImpl.class).in(Singleton.class);
bind(PersistUtils.class).in(Singleton.class);
}
if (config.databaseConfiguration.getDatabaseType().trim().equalsIgnoreCase(VERTICA)) {
bind(AlarmStateHistoryRepo.class).to(AlarmStateHistoryVerticaRepoImpl.class)
.in(Singleton.class);
bind(AlarmStateHistoryRepo.class).to(AlarmStateHistoryVerticaRepoImpl.class).in(Singleton.class);
bind(MetricDefinitionRepo.class).to(MetricDefinitionVerticaRepoImpl.class).in(Singleton.class);
bind(MeasurementRepo.class).to(MeasurementVerticaRepoImpl.class).in(Singleton.class);
bind(StatisticRepo.class).to(StatisticVerticaRepoImpl.class).in(Singleton.class);
@ -90,7 +102,6 @@ public class InfrastructureModule extends AbstractModule {
bind(InfluxV9Utils.class).in(Singleton.class);
bind(InfluxV9RepoReader.class).in(Singleton.class);
bind(AlarmStateHistoryRepo.class).to(InfluxV9AlarmStateHistoryRepo.class).in(Singleton.class);
bind(MetricDefinitionRepo.class).to(InfluxV9MetricDefinitionRepo.class).in(Singleton.class);
bind(MeasurementRepo.class).to(InfluxV9MeasurementRepo.class).in(Singleton.class);
@ -98,9 +109,18 @@ public class InfrastructureModule extends AbstractModule {
} else {
throw new ProvisionException("Failed to detect supported database. Supported databases are " + "'vertica' and 'influxdb'. Check your config file.");
throw new ProvisionException("Failed to detect supported database. Supported databases are "
+ "'vertica' and 'influxdb'. Check your config file.");
}
bind(NotificationMethodRepo.class).to(NotificationMethodMySqlRepoImpl.class).in(Singleton.class);
}
private boolean isHibernateEnabled() {
return this.config.hibernate != null && this.config.hibernate.getSupportEnabled();
}
private void bindUtils(final boolean hibernateEnabled) {
final Class<? extends Utils> implementation = hibernateEnabled ? AlarmHibernateUtils.class : MySQLUtils.class;
this.bind(Utils.class).to(implementation).in(Singleton.class);
}
}

View File

@ -0,0 +1,60 @@
/*
* Copyright 2015 FUJITSU LIMITED
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*
*/
package monasca.api.infrastructure.persistence;
import java.util.List;
import java.util.Map;
public abstract class Utils {
public abstract List<String> findAlarmIds(String tenantId,
Map<String, String> dimensions);
protected String buildJoinClauseFor(Map<String, String> dimensions) {
if ((dimensions == null) || dimensions.isEmpty()) {
return "";
}
final StringBuilder sb =
new StringBuilder(
"join alarm_metric as am on a.id=am.alarm_id "
+ "join metric_definition_dimensions as mdd on am.metric_definition_dimensions_id=mdd.id ");
for (int i = 0; i < dimensions.size(); i++) {
final String tableAlias = "md" + i;
sb.append(" inner join metric_dimension ")
.append(tableAlias)
.append(" on ")
.append(tableAlias)
.append(".name = :dname")
.append(i)
.append(" and ")
.append(tableAlias)
.append(".value = :dvalue")
.append(i)
.append(" and mdd.metric_dimension_set_id = ")
.append(tableAlias)
.append(".dimension_set_id");
}
return sb.toString();
}
}
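Because the clause is built positionally, a short usage sketch may help: for an n-entry dimension map, buildJoinClauseFor() emits one inner join per dimension with :dnameN/:dvalueN named parameters that the caller (for example the AlarmHibernateUtils further below) binds afterwards. The subclass here exists only to reach the protected helper, and hostname/service are made-up dimensions.

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import monasca.api.infrastructure.persistence.Utils;

class JoinClauseDemo extends Utils {

  @Override
  public List<String> findAlarmIds(String tenantId, Map<String, String> dimensions) {
    return Collections.emptyList(); // not exercised in this demo
  }

  public static void main(String[] args) {
    Map<String, String> dimensions = new LinkedHashMap<>();
    dimensions.put("hostname", "web-01");
    dimensions.put("service", "monitoring");

    // Prints, wrapped here for readability:
    //   join alarm_metric as am on a.id=am.alarm_id
    //   join metric_definition_dimensions as mdd on am.metric_definition_dimensions_id=mdd.id
    //   inner join metric_dimension md0 on md0.name = :dname0 and md0.value = :dvalue0
    //     and mdd.metric_dimension_set_id = md0.dimension_set_id
    //   inner join metric_dimension md1 on md1.name = :dname1 and md1.value = :dvalue1
    //     and mdd.metric_dimension_set_id = md1.dimension_set_id
    System.out.println(new JoinClauseDemo().buildJoinClauseFor(dimensions));
  }
}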

View File

@ -0,0 +1,771 @@
/*
* Copyright 2015 FUJITSU LIMITED
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.api.infrastructure.persistence.hibernate;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.inject.Inject;
import javax.inject.Named;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.MapUtils;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.StatelessSession;
import org.hibernate.Transaction;
import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Restrictions;
import org.hibernate.transform.AliasToEntityMapResultTransformer;
import org.hibernate.transform.ResultTransformer;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import monasca.api.domain.exception.EntityNotFoundException;
import monasca.api.domain.model.alarmdefinition.AlarmDefinition;
import monasca.api.domain.model.alarmdefinition.AlarmDefinitionRepo;
import monasca.api.infrastructure.persistence.SubAlarmDefinitionQueries;
import monasca.common.hibernate.db.AlarmActionDb;
import monasca.common.hibernate.db.AlarmDb;
import monasca.common.hibernate.db.AlarmDefinitionDb;
import monasca.common.hibernate.db.SubAlarmDefinitionDb;
import monasca.common.hibernate.db.SubAlarmDefinitionDimensionDb;
import monasca.common.hibernate.db.SubAlarmDefinitionDimensionId;
import monasca.common.model.alarm.AggregateFunction;
import monasca.common.model.alarm.AlarmOperator;
import monasca.common.model.alarm.AlarmSeverity;
import monasca.common.model.alarm.AlarmState;
import monasca.common.model.alarm.AlarmSubExpression;
import monasca.common.model.metric.MetricDefinition;
/**
* Alarm repository implementation.
*/
public class AlarmDefinitionSqlRepoImpl
extends BaseSqlRepo
implements AlarmDefinitionRepo {
private static final ResultTransformer ALARM_DEF_RESULT_TRANSFORMER = getAlarmDefResultTransformer();
private static final String ID = "ID";
private static final String NAME = "NAME";
private static final String DESCRIPTION = "DESCRIPTION";
private static final String EXPRESSION = "EXPRESSION";
private static final String SEVERITY = "SEVERITY";
private static final String MATCH_BY = "MATCH_BY";
private static final String ACTIONS_ENABLED = "ACTIONS_ENABLED";
private static final String STATE = "STATES";
private static final String NOTIFICATION_ID = "NOTIFICATIONIDS";
private static final Joiner COMMA_JOINER = Joiner.on(',');
private static final Splitter COMMA_SPLITTER = Splitter.on(',').omitEmptyStrings().trimResults();
private static final Logger logger = LoggerFactory.getLogger(AlarmDefinitionSqlRepoImpl.class);
@Inject
public AlarmDefinitionSqlRepoImpl(@Named("orm") SessionFactory sessionFactory) {
super(sessionFactory);
}
@Override
public AlarmDefinition create(String tenantId, String id, String name, String description, String severity, String expression,
Map<String, AlarmSubExpression> subExpressions, List<String> matchBy, List<String> alarmActions, List<String> okActions,
List<String> undeterminedActions) {
logger.trace(ORM_LOG_MARKER, "create(...) entering...");
Transaction tx = null;
Session session = null;
try {
session = sessionFactory.openSession();
tx = session.beginTransaction();
final DateTime now = DateTime.now();
final AlarmDefinitionDb alarmDefinition = new AlarmDefinitionDb(
id,
tenantId,
name,
description,
expression,
AlarmSeverity.valueOf(severity.toUpperCase()),
matchBy == null || Iterables.isEmpty(matchBy) ? null : COMMA_JOINER.join(matchBy),
true,
now,
now,
null
);
session.save(alarmDefinition);
this.createSubExpressions(session, alarmDefinition, subExpressions);
// Persist actions
this.persistActions(session, alarmDefinition, AlarmState.ALARM, alarmActions);
this.persistActions(session, alarmDefinition, AlarmState.OK, okActions);
this.persistActions(session, alarmDefinition, AlarmState.UNDETERMINED, undeterminedActions);
tx.commit();
tx = null;
logger.debug(ORM_LOG_MARKER, "AlarmDefinition [ {} ] has been committed to database", alarmDefinition);
return new AlarmDefinition(
id,
name,
description,
severity,
expression,
matchBy,
true,
alarmActions == null ? Collections.<String>emptyList() : alarmActions,
okActions == null ? Collections.<String>emptyList() : okActions,
undeterminedActions == null ? Collections.<String>emptyList() : undeterminedActions
);
} catch (RuntimeException e) {
this.rollbackIfNotNull(tx);
throw e;
} finally {
if (session != null) {
session.close();
}
}
}
@Override
public void deleteById(String tenantId, String alarmDefId) {
logger.trace(ORM_LOG_MARKER, "deleteById(...) entering...");
Session session = null;
Transaction tx = null;
try {
session = sessionFactory.openSession();
tx = session.beginTransaction();
final AlarmDefinitionDb result = (AlarmDefinitionDb) session
.getNamedQuery(AlarmDefinitionDb.Queries.FIND_BY_TENANT_AND_ID_NOT_DELETED)
.setString("tenant_id", tenantId)
.setString("id", alarmDefId)
.uniqueResult();
result.setDeletedAt(DateTime.now());
session.update(result);
// Cascade soft delete to alarms
session
.getNamedQuery(AlarmDb.Queries.DELETE_BY_ALARMDEFINITION_ID)
.setString("alarmDefinitionId", alarmDefId)
.executeUpdate();
tx.commit();
tx = null;
logger.debug(ORM_LOG_MARKER, "AlarmDefinition [ {} ] has been deleted from database", result);
} catch (Exception e) {
this.rollbackIfNotNull(tx);
throw e;
} finally {
if (session != null) {
session.close();
}
}
}
@Override
public String exists(final String tenantId,
final String name) {
logger.trace(ORM_LOG_MARKER, "exists(...) entering...");
StatelessSession session = null;
try {
session = sessionFactory.openStatelessSession();
List<?> ids = session
.createCriteria(AlarmDefinitionDb.class)
.add(Restrictions.eq("tenantId", tenantId))
.add(Restrictions.eq("name", name))
.add(Restrictions.isNull("deletedAt"))
.setProjection(Projections.property("id"))
.setMaxResults(1)
.list();
final String existingId = CollectionUtils.isEmpty(ids) ? null : (String) ids.get(0);
if (null == existingId) {
logger.debug(ORM_LOG_MARKER, "No AlarmDefinition matched tenantId={} and name={}", tenantId, name);
}
return existingId;
} finally {
if (session != null) {
session.close();
}
}
}
@Override
@SuppressWarnings("unchecked")
public List<AlarmDefinition> find(String tenantId, String name, Map<String, String> dimensions, String offset, int limit) {
logger.trace(ORM_LOG_MARKER, "find(...) entering...");
Session session = null;
List<AlarmDefinition> resultSet = Lists.newArrayList();
// TODO introduce criteria here, will make code significantly better
String query =
" SELECT t.id, t.tenant_id, t.name, t.description, t.expression, t.severity, t.match_by, "
+ "t.actions_enabled, aa.alarm_state AS states, aa.action_id AS notificationIds "
+ "FROM (SELECT distinct ad.id, ad.tenant_id, ad.name, ad.description, ad.expression, "
+ "ad.severity, ad.match_by, ad.actions_enabled, ad.created_at, ad.updated_at, ad.deleted_at "
+ "FROM alarm_definition AS ad LEFT OUTER JOIN sub_alarm_definition AS sad ON ad.id = sad.alarm_definition_id "
+ "LEFT OUTER JOIN sub_alarm_definition_dimension AS dim ON sad.id = dim.sub_alarm_definition_id %1$s "
+ "WHERE ad.tenant_id = :tenantId AND ad.deleted_at IS NULL %2$s %3$s) AS t "
+ "LEFT OUTER JOIN alarm_action AS aa ON t.id = aa.alarm_definition_id ORDER BY t.id, t.created_at";
StringBuilder sbWhere = new StringBuilder();
if (name != null) {
sbWhere.append(" and ad.name = :name");
}
if (offset != null) {
sbWhere.append(" and ad.id > :offset");
}
String limitPart = "";
if (limit > 0) {
limitPart = " limit :limit";
}
String sql = String.format(query, SubAlarmDefinitionQueries.buildJoinClauseFor(dimensions), sbWhere, limitPart);
try {
session = sessionFactory.openSession();
final Query qAlarmDefinition = session
.createSQLQuery(sql)
.setString("tenantId", tenantId)
.setResultTransformer(ALARM_DEF_RESULT_TRANSFORMER);
if (name != null) {
qAlarmDefinition.setString("name", name);
}
if (offset != null) {
qAlarmDefinition.setString("offset", offset);
}
if (limit > 0) {
qAlarmDefinition.setInteger("limit", limit + 1);
}
this.bindDimensionsToQuery(qAlarmDefinition, dimensions);
final List<Map<?,?>> alarmDefinitionDbList = qAlarmDefinition.list();
resultSet = CollectionUtils.isEmpty(alarmDefinitionDbList) ?
Lists.<AlarmDefinition>newArrayList() :
this.createAlarmDefinitions(alarmDefinitionDbList);
} finally {
if (session != null) {
session.close();
}
}
return resultSet;
}
@Override
@SuppressWarnings("unchecked")
public AlarmDefinition findById(String tenantId, String alarmDefId) {
logger.trace(ORM_LOG_MARKER, "findById(...) entering...");
Session session = null;
List<String> okActionIds = null;
List<String> alarmActionIds = null;
List<String> undeterminedActionIds = null;
try {
session = sessionFactory.openSession();
final AlarmDefinitionDb alarmDefinitionDb = (AlarmDefinitionDb) session
.getNamedQuery(AlarmDefinitionDb.Queries.FIND_BY_TENANT_AND_ID_NOT_DELETED)
.setString("tenant_id", tenantId)
.setString("id", alarmDefId).uniqueResult();
if (alarmDefinitionDb == null) {
throw new EntityNotFoundException("No alarm definition exists for tenantId=%s and id=%s", tenantId, alarmDefId);
}
final List<AlarmActionDb> alarmActionList = session
.getNamedQuery(AlarmActionDb.Queries.FIND_BY_TENANT_ID_AND_ALARMDEFINITION_ID_DISTINCT)
.setString("tenantId", tenantId)
.setString("alarmDefId", alarmDefId)
.list();
if(!CollectionUtils.isEmpty(alarmActionList)) {
logger.debug(ORM_LOG_MARKER, "Located {} AlarmActions for AlarmDefinition {}", alarmActionList.size(), alarmDefinitionDb);
okActionIds = Lists.newArrayList();
alarmActionIds = Lists.newArrayList();
undeterminedActionIds = Lists.newArrayList();
for (final AlarmActionDb alarmAction : alarmActionList) {
if (alarmAction.isInAlarmState(AlarmState.UNDETERMINED)) {
undeterminedActionIds.add(alarmAction.getAlarmActionId().getActionId());
} else if (alarmAction.isInAlarmState(AlarmState.OK)) {
okActionIds.add(alarmAction.getAlarmActionId().getActionId());
} else if (alarmAction.isInAlarmState(AlarmState.ALARM)) {
alarmActionIds.add(alarmAction.getAlarmActionId().getActionId());
}
}
}
return new AlarmDefinition(
alarmDefinitionDb.getId(),
alarmDefinitionDb.getName(),
alarmDefinitionDb.getDescription(),
alarmDefinitionDb.getSeverity().name(),
alarmDefinitionDb.getExpression(),
this.splitStringIntoList(alarmDefinitionDb.getMatchBy()),
alarmDefinitionDb.isActionsEnabled(),
alarmActionIds == null ? Collections.<String>emptyList() : alarmActionIds,
okActionIds == null ? Collections.<String>emptyList() : okActionIds,
undeterminedActionIds == null ? Collections.<String>emptyList() : undeterminedActionIds
);
} finally {
if (session != null) {
session.close();
}
}
}
@Override
@SuppressWarnings("unchecked")
public Map<String, MetricDefinition> findSubAlarmMetricDefinitions(String alarmDefId) {
logger.trace(ORM_LOG_MARKER, "findSubAlarmMetricDefinitions(...) entering...");
Session session = null;
Map<String, MetricDefinition> subAlarmMetricDefs = Maps.newHashMap();
try {
session = sessionFactory.openSession();
final List<SubAlarmDefinitionDb> subAlarmDefList = session
.getNamedQuery(SubAlarmDefinitionDb.Queries.BY_ALARMDEFINITION_ID)
.setString("id", alarmDefId)
.list();
final List<SubAlarmDefinitionDimensionDb> subAlarmDefDimensionList = session
.getNamedQuery(SubAlarmDefinitionDb.Queries.BY_ALARMDEFINITIONDIMENSION_SUBEXPRESSION_ID)
.setString("id", alarmDefId)
.list();
final Map<String, Map<String, String>> subAlarmDefDimensionMapExpression = this.mapAlarmDefDimensionExpression(
subAlarmDefDimensionList
);
for (SubAlarmDefinitionDb subAlarmDef : subAlarmDefList) {
String id = subAlarmDef.getId();
String metricName = subAlarmDef.getMetricName();
Map<String, String> dimensions = Collections.emptyMap();
if (subAlarmDefDimensionMapExpression.containsKey(id)) {
dimensions = subAlarmDefDimensionMapExpression.get(id);
}
subAlarmMetricDefs.put(id, new MetricDefinition(metricName, dimensions));
}
return subAlarmMetricDefs;
} finally {
if (session != null) {
session.close();
}
}
}
@Override
@SuppressWarnings("unchecked")
public Map<String, AlarmSubExpression> findSubExpressions(String alarmDefId) {
logger.trace(ORM_LOG_MARKER, "findSubExpressions(...) entering...");
Session session = null;
Map<String, AlarmSubExpression> subExpressions = Maps.newHashMap();
try {
session = sessionFactory.openSession();
List<SubAlarmDefinitionDb> subAlarmDefList = session
.getNamedQuery(SubAlarmDefinitionDb.Queries.BY_ALARMDEFINITION_ID)
.setString("id", alarmDefId)
.list();
Query querySubAlarmDefDimension = session
.getNamedQuery(SubAlarmDefinitionDb.Queries.BY_ALARMDEFINITIONDIMENSION_SUBEXPRESSION_ID)
.setString("id", alarmDefId);
List<SubAlarmDefinitionDimensionDb> subAlarmDefDimensionList = querySubAlarmDefDimension.list();
Map<String, Map<String, String>> subAlarmDefDimensionMapExpression = mapAlarmDefDimensionExpression(subAlarmDefDimensionList);
for (SubAlarmDefinitionDb subAlarmDef : subAlarmDefList) {
String id = subAlarmDef.getId();
AggregateFunction function = AggregateFunction.fromJson(subAlarmDef.getFunction());
String metricName = subAlarmDef.getMetricName();
AlarmOperator operator = AlarmOperator.fromJson(subAlarmDef.getOperator());
double threshold = subAlarmDef.getThreshold();
int period = subAlarmDef.getPeriod();
int periods = subAlarmDef.getPeriods();
Map<String, String> dimensions = Collections.emptyMap();
if (subAlarmDefDimensionMapExpression.containsKey(id)) {
dimensions = subAlarmDefDimensionMapExpression.get(id);
}
subExpressions.put(id, new AlarmSubExpression(function, new MetricDefinition(metricName, dimensions), operator, threshold, period, periods));
}
return subExpressions;
} finally {
if (session != null) {
session.close();
}
}
}
@Override
public void update(String tenantId, String id, boolean patch, String name, String description, String expression, List<String> matchBy,
String severity, boolean actionsEnabled, Collection<String> oldSubAlarmIds, Map<String, AlarmSubExpression> changedSubAlarms,
Map<String, AlarmSubExpression> newSubAlarms, List<String> alarmActions, List<String> okActions, List<String> undeterminedActions) {
logger.trace(ORM_LOG_MARKER, "update(...) entering...");
Transaction tx = null;
Session session = null;
try {
session = sessionFactory.openSession();
tx = session.beginTransaction();
final AlarmDefinitionDb alarmDefinitionDb = this.updateAlarmDefinition(
tenantId,
id,
name,
description,
expression,
matchBy,
severity,
actionsEnabled,
session
);
this.deleteOldSubAlarms(oldSubAlarmIds, session);
this.updateChangedSubAlarms(changedSubAlarms, session);
this.createSubExpressions(session, alarmDefinitionDb, newSubAlarms);
this.deleteOldAlarmActions(id, patch, alarmActions, okActions, undeterminedActions, session);
// Insert new actions
this.persistActions(session, alarmDefinitionDb, AlarmState.ALARM, alarmActions);
this.persistActions(session, alarmDefinitionDb, AlarmState.OK, okActions);
this.persistActions(session, alarmDefinitionDb, AlarmState.UNDETERMINED, undeterminedActions);
tx.commit();
tx = null;
} catch (RuntimeException e) {
this.rollbackIfNotNull(tx);
throw e;
} finally {
if (session != null) {
session.close();
}
}
}
private void deleteOldAlarmActions(final String id,
final boolean patch,
final List<String> alarmActions,
final List<String> okActions,
final List<String> undeterminedActions,
final Session session) {
if (patch) {
this.deleteActions(session, id, AlarmState.ALARM, alarmActions);
this.deleteActions(session, id, AlarmState.OK, okActions);
this.deleteActions(session, id, AlarmState.UNDETERMINED, undeterminedActions);
} else {
session
.getNamedQuery(AlarmActionDb.Queries.DELETE_BY_ALARMDEFINITION_ID)
.setString("id", id)
.executeUpdate();
}
}
private void updateChangedSubAlarms(final Map<String, AlarmSubExpression> changedSubAlarms,
final Session session) {
if (!MapUtils.isEmpty(changedSubAlarms))
for (Map.Entry<String, AlarmSubExpression> entry : changedSubAlarms.entrySet()) {
final AlarmSubExpression sa = entry.getValue();
final String subAlarmDefinitionId = entry.getKey();
SubAlarmDefinitionDb subAlarmDefinitionDb = (SubAlarmDefinitionDb) session.get(SubAlarmDefinitionDb.class, subAlarmDefinitionId);
subAlarmDefinitionDb.setOperator(sa.getOperator().name());
subAlarmDefinitionDb.setThreshold(sa.getThreshold());
subAlarmDefinitionDb.setUpdatedAt(new DateTime());
session.saveOrUpdate(subAlarmDefinitionDb);
}
}
private void deleteOldSubAlarms(final Collection<String> oldSubAlarmIds,
final Session session) {
if (!CollectionUtils.isEmpty(oldSubAlarmIds)) {
session
.getNamedQuery(SubAlarmDefinitionDb.Queries.DELETE_BY_IDS)
.setParameterList("ids", oldSubAlarmIds)
.executeUpdate();
}
}
private AlarmDefinitionDb updateAlarmDefinition(final String tenantId,
final String id,
final String name,
final String description,
final String expression,
final List<String> matchBy,
final String severity,
final boolean actionsEnabled,
final Session session) {
final AlarmDefinitionDb alarmDefinitionDb = (AlarmDefinitionDb) session
.getNamedQuery(AlarmDefinitionDb.Queries.FIND_BY_TENANT_ID_AND_ID)
.setString("tenantId", tenantId)
.setString("id", id)
.uniqueResult();
alarmDefinitionDb.setName(name);
alarmDefinitionDb.setDescription(description);
alarmDefinitionDb.setExpression(expression);
alarmDefinitionDb.setMatchBy(matchBy == null || Iterables.isEmpty(matchBy) ? null : COMMA_JOINER.join(matchBy));
alarmDefinitionDb.setSeverity(AlarmSeverity.valueOf(severity));
alarmDefinitionDb.setActionsEnabled(actionsEnabled);
session.saveOrUpdate(alarmDefinitionDb);
return alarmDefinitionDb;
}
private void deleteActions(final Session session,
final String id,
final AlarmState alarmState,
final List<String> actions) {
if (!CollectionUtils.isEmpty(actions))
session
.getNamedQuery(AlarmActionDb.Queries.DELETE_BY_ALARMDEFINITION_ID_AND_ALARMSTATE)
.setString("id", id)
.setString("alarmState", alarmState.name())
.executeUpdate();
}
private Map<String, Map<String, String>> mapAlarmDefDimensionExpression(List<SubAlarmDefinitionDimensionDb> subAlarmDefDimensionList) {
Map<String, Map<String, String>> subAlarmDefDimensionMapExpression = Maps.newHashMapWithExpectedSize(subAlarmDefDimensionList.size());
// Map expressions on sub_alarm_definition_dimension.sub_alarm_definition_id =
// sub_alarm_definition.id
for (SubAlarmDefinitionDimensionDb subAlarmDefDimension : subAlarmDefDimensionList) {
String subAlarmDefId = subAlarmDefDimension.getSubAlarmDefinitionDimensionId().getSubExpression().getId();
String name = subAlarmDefDimension.getSubAlarmDefinitionDimensionId().getDimensionName();
String value = subAlarmDefDimension.getValue();
if (subAlarmDefDimensionMapExpression.containsKey(subAlarmDefId)) {
subAlarmDefDimensionMapExpression.get(subAlarmDefId).put(name, value);
} else {
Map<String, String> expressionMap = Maps.newHashMap();
expressionMap.put(name, value);
subAlarmDefDimensionMapExpression.put(subAlarmDefId, expressionMap);
}
}
return subAlarmDefDimensionMapExpression;
}
private void bindDimensionsToQuery(Query query, Map<String, String> dimensions) {
if (dimensions != null) {
int i = 0;
for (Iterator<Map.Entry<String, String>> it = dimensions.entrySet().iterator(); it.hasNext(); i++) {
Map.Entry<String, String> entry = it.next();
query.setString("dname" + i, entry.getKey());
query.setString("dvalue" + i, entry.getValue());
}
}
}
private List<AlarmDefinition> createAlarmDefinitions(List<Map<?,?>> rows) {
final List<AlarmDefinition> result = new ArrayList<>();
Map<String, List<String>> okActionIdsMap = Maps.newHashMap();
Map<String, List<String>> alarmActionIdsMap = Maps.newHashMap();
Map<String, List<String>> undeterminedActionIdsMap = Maps.newHashMap();
Set<String> alarmDefinitionSet = Sets.newHashSet();
for (Map<?,?> row : rows) {
String alarmDefId = (String) row.get(ID);
String singleState = (String) row.get(STATE);
String notificationId = (String) row.get(NOTIFICATION_ID);
if (!okActionIdsMap.containsKey(alarmDefId)) {
okActionIdsMap.put(alarmDefId, Lists.<String>newArrayList());
}
if (!alarmActionIdsMap.containsKey(alarmDefId)) {
alarmActionIdsMap.put(alarmDefId, Lists.<String>newArrayList());
}
if (!undeterminedActionIdsMap.containsKey(alarmDefId)) {
undeterminedActionIdsMap.put(alarmDefId, Lists.<String>newArrayList());
}
if (singleState != null && notificationId != null) {
if (singleState.equals(AlarmState.UNDETERMINED.name())) {
undeterminedActionIdsMap.get(alarmDefId).add(notificationId);
}
if (singleState.equals(AlarmState.OK.name())) {
okActionIdsMap.get(alarmDefId).add(notificationId);
}
if (singleState.equals(AlarmState.ALARM.name())) {
alarmActionIdsMap.get(alarmDefId).add(notificationId);
}
}
}
for (Map<?,?> row : rows) {
String alarmDefId = (String) row.get(ID);
if (!alarmDefinitionSet.contains(alarmDefId)) {
String name = (String) row.get(NAME);
String description = (String) row.get(DESCRIPTION);
String severity = (String) row.get(SEVERITY);
String expression = (String) row.get(EXPRESSION);
List<String> match = this.splitStringIntoList((String) row.get(MATCH_BY));
Boolean actionEnabled = (Boolean) row.get(ACTIONS_ENABLED);
AlarmDefinition ad = new AlarmDefinition(
alarmDefId,
name,
description,
severity,
expression,
match,
actionEnabled,
alarmActionIdsMap.get(alarmDefId),
okActionIdsMap.get(alarmDefId),
undeterminedActionIdsMap.get(alarmDefId)
);
result.add(ad);
}
alarmDefinitionSet.add(alarmDefId);
}
return result;
}
private List<String> splitStringIntoList(String str) {
return str == null ? Lists.<String>newArrayList() : Lists.newArrayList(COMMA_SPLITTER.split(str));
}
private void createSubExpressions(Session session,
AlarmDefinitionDb alarmDefinition,
Map<String, AlarmSubExpression> alarmSubExpressions) {
if (alarmSubExpressions != null) {
for (Map.Entry<String, AlarmSubExpression> subEntry : alarmSubExpressions.entrySet()) {
String subAlarmId = subEntry.getKey();
AlarmSubExpression subExpr = subEntry.getValue();
MetricDefinition metricDef = subExpr.getMetricDefinition();
// Persist sub-alarm
final DateTime now = DateTime.now();
SubAlarmDefinitionDb subAlarmDefinitionDb = new SubAlarmDefinitionDb(
subAlarmId,
alarmDefinition,
subExpr.getFunction().name(),
metricDef.name,
subExpr.getOperator().name(),
subExpr.getThreshold(),
subExpr.getPeriod(),
subExpr.getPeriods(),
now,
now
);
session.save(subAlarmDefinitionDb);
// Persist sub-alarm dimensions
if (!MapUtils.isEmpty(metricDef.dimensions)) {
SubAlarmDefinitionDimensionDb definitionDimension;
SubAlarmDefinitionDimensionId definitionDimensionId;
for (Map.Entry<String, String> dimEntry : metricDef.dimensions.entrySet()) {
definitionDimensionId = new SubAlarmDefinitionDimensionId(subAlarmDefinitionDb, dimEntry.getKey());
definitionDimension = new SubAlarmDefinitionDimensionDb(definitionDimensionId, dimEntry.getValue());
session.save(definitionDimension);
}
}
}
}
}
private void persistActions(final Session session,
final AlarmDefinitionDb alarmDefinition,
final AlarmState alarmState,
final List<String> actions) {
if (actions != null) {
for (String action : actions) {
session.save(new AlarmActionDb(alarmDefinition, alarmState, action));
}
}
}
// method extracted for code-readability
private static ResultTransformer getAlarmDefResultTransformer() {
return new ResultTransformer() {
private static final long serialVersionUID = -3052468375925339521L;
@Override
public Object transformTuple(final Object[] tuple, final String[] aliases) {
for (int i = 0, length = aliases.length; i < length; i++) {
aliases[i] = aliases[i].toUpperCase();
}
return AliasToEntityMapResultTransformer
.INSTANCE
.transformTuple(tuple, aliases);
}
@Override
public List transformList(final List collection) {
return AliasToEntityMapResultTransformer
.INSTANCE
.transformList(collection);
}
};
}
}

View File

@ -0,0 +1,101 @@
/*
* Copyright 2015 FUJITSU LIMITED
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.api.infrastructure.persistence.hibernate;
import com.google.common.collect.Lists;
import org.hibernate.Query;
import org.hibernate.SessionFactory;
import org.hibernate.StatelessSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Named;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import monasca.api.infrastructure.persistence.Utils;
public class AlarmHibernateUtils
extends Utils {
private static final Logger logger = LoggerFactory.getLogger(AlarmHibernateUtils.class);
private static final String FIND_ALARM_IDS_SQL =
"select distinct a.id, ad.created_at "
+ "from alarm as a "
+ "join alarm_definition as ad on a.alarm_definition_id = ad.id "
+ "%s "
+ "where ad.tenant_id = :tenantId and ad.deleted_at is NULL "
+ "order by ad.created_at";
private final SessionFactory sessionFactory;
@Inject
public AlarmHibernateUtils(@Named("orm") SessionFactory sessionFactory) {
this.sessionFactory = sessionFactory;
}
public List<String> findAlarmIds(String tenantId, Map<String, String> dimensions) {
logger.trace(BaseSqlRepo.ORM_LOG_MARKER, "findAlarmIds(...) entering");
List<String> alarmIdList = null;
StatelessSession session = null;
try {
session = sessionFactory.openStatelessSession();
final String sql = this.findAlarmQueryString(dimensions);
final Query query = session
.createSQLQuery(sql)
.setString("tenantId", tenantId);
this.bindDimensionsToQuery(query, dimensions);
@SuppressWarnings("unchecked") List<Object[]> rows = query.list();
alarmIdList = Lists.newArrayListWithCapacity(rows.size());
for (Object[] row : rows) {
String id = (String) row[0];
alarmIdList.add(id);
}
} finally {
if (session != null) {
session.close();
}
}
// no need to check if alarmIdList != null: if an exception is thrown the method
// exits immediately, otherwise the list won't be null.
return alarmIdList;
}
private String findAlarmQueryString(final Map<String, String> dimensions) {
return String.format(FIND_ALARM_IDS_SQL, this.buildJoinClauseFor(dimensions));
}
/*
duplicated here because
monasca.api.infrastructure.persistence.DimensionQueries.bindDimensionsToQuery()
has an incompatible signature
*/
private void bindDimensionsToQuery(Query query, Map<String, String> dimensions) {
if (dimensions != null) {
int i = 0;
for (Iterator<Map.Entry<String, String>> it = dimensions.entrySet().iterator(); it.hasNext(); i++) {
Map.Entry<String, String> entry = it.next();
query.setString("dname" + i, entry.getKey());
query.setString("dvalue" + i, entry.getValue());
}
}
}
}

View File

@ -0,0 +1,558 @@
/*
* Copyright 2015 FUJITSU LIMITED
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.api.infrastructure.persistence.hibernate;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import javax.inject.Named;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Restrictions;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import monasca.api.domain.exception.EntityNotFoundException;
import monasca.api.domain.model.alarm.Alarm;
import monasca.api.domain.model.alarm.AlarmRepo;
import monasca.common.hibernate.db.AlarmDb;
import monasca.common.hibernate.db.SubAlarmDb;
import monasca.common.hibernate.type.BinaryId;
import monasca.common.model.alarm.AlarmSeverity;
import monasca.common.model.alarm.AlarmState;
import monasca.common.model.alarm.AlarmSubExpression;
import monasca.common.model.metric.MetricDefinition;
/**
* Alarmed metric repository implementation.
*/
public class AlarmSqlRepoImpl
extends BaseSqlRepo
implements AlarmRepo {
private static final Logger logger = LoggerFactory.getLogger(AlarmSqlRepoImpl.class);
private static final DateTimeFormatter ISO_8601_FORMATTER = ISODateTimeFormat.dateOptionalTimeParser().withZoneUTC();
private static final String FIND_ALARM_BY_ID_SQL =
"select distinct ad.id as alarm_definition_id, ad.severity, ad.name as alarm_definition_name, "
+ "a.id, a.state, a.updatedAt, a.createdAt as created_timestamp, "
+ "md.name as metric_name, mdg.id.name, mdg.value, a.lifecycleState, a.link, a.stateUpdatedAt, "
+ "mdg.id.dimensionSetId from AlarmDb as a "
+ ", AlarmDefinitionDb as ad "
+ ", AlarmMetricDb as am "
+ ", MetricDefinitionDimensionsDb as mdd "
+ ", MetricDefinitionDb as md "
+ ", MetricDimensionDb as mdg "
+ "where "
+ " ad.id = a.alarmDefinition.id "
+ " and am.alarmMetricId.alarm.id = a.id "
+ " and mdd.id = am.alarmMetricId.metricDefinitionDimensions.id "
+ " and md.id = mdd.metricDefinition.id "
+ " and mdg.id.dimensionSetId = mdd.metricDimensionSetId "
+ " and ad.tenantId = :tenantId "
+ " %s "
+ " and ad.deletedAt is null order by a.id, mdg.id.dimensionSetId %s";
private static final String ALARM_SQL =
"select distinct ad.id as alarm_definition_id, ad.severity, ad.name as alarm_definition_name, "
+ "a.id, a.state, a.updated_at, a.created_at as created_timestamp, "
+ "md.name as metric_name, mdg.name, mdg.value, a.lifecycle_state, a.link, a.state_updated_at, "
+ "mdg.dimension_set_id "
+ "from alarm as a "
+ "inner join alarm_definition ad on ad.id = a.alarm_definition_id "
+ "inner join alarm_metric as am on am.alarm_id = a.id "
+ "inner join metric_definition_dimensions as mdd on mdd.id = am.metric_definition_dimensions_id "
+ "inner join metric_definition as md on md.id = mdd.metric_definition_id "
+ "left join (select dimension_set_id, name, value "
+ "from metric_dimension group by dimension_set_id, name, value) as mdg on mdg.dimension_set_id = mdd.metric_dimension_set_id "
+ "where ad.tenant_id = :tenantId and ad.deleted_at is null %s order by a.id ASC, mdg.dimension_set_id %s ";
@Inject
public AlarmSqlRepoImpl(@Named("orm") SessionFactory sessionFactory) {
super(sessionFactory);
}
@Override
public void deleteById(String tenantId, String id) {
logger.trace(ORM_LOG_MARKER, "deleteById(...) entering");
Transaction tx = null;
Session session = null;
try {
session = sessionFactory.openSession();
tx = session.beginTransaction();
final long result = (Long) session
.createCriteria(AlarmDb.class, "a")
.createAlias("alarmDefinition", "ad")
.add(Restrictions.conjunction(
Restrictions.eq("a.id", id),
Restrictions.eq("ad.tenantId", tenantId),
Restrictions.eqProperty("a.alarmDefinition.id", "ad.id"),
Restrictions.isNull("ad.deletedAt")
))
.setProjection(Projections.count("a.id"))
.setReadOnly(true)
.uniqueResult();
// This will throw an EntityNotFoundException if Alarm doesn't exist or has a different tenant
// id
if (result < 1) {
throw new EntityNotFoundException("No alarm exists for %s", id);
}
// delete alarm
session
.getNamedQuery(AlarmDb.Queries.DELETE_BY_ID)
.setString("id", id)
.executeUpdate();
tx.commit();
tx = null;
} catch (Exception e) {
this.rollbackIfNotNull(tx);
throw e;
} finally {
if (session != null) {
session.close();
}
}
}
@Override
public List<Alarm> find(String tenantId, String alarmDefId, String metricName, Map<String, String> metricDimensions, AlarmState state,
String lifecycleState, String link, DateTime stateUpdatedStart, String offset, int limit, boolean enforceLimit) {
logger.trace(ORM_LOG_MARKER, "find(...) entering");
List<Alarm> alarms;
alarms =
findInternal(tenantId, alarmDefId, metricName, metricDimensions, state, lifecycleState, link, stateUpdatedStart, offset, (3 * limit / 2),
enforceLimit);
if (limit == 0 || !enforceLimit)
return alarms;
if (alarms.size() > limit) {
for (int i = alarms.size() - 1; i > limit; i--) {
alarms.remove(i);
}
} else if (alarms.size() > 0) {
while (alarms.size() < limit) {
List<Alarm> alarms2;
int diff = limit - alarms.size();
String offset2 = alarms.get(alarms.size() - 1).getId();
alarms2 =
findInternal(tenantId, alarmDefId, metricName, metricDimensions, state, lifecycleState, link, stateUpdatedStart, offset2, (2 * diff),
enforceLimit);
if (alarms2.size() == 0)
break;
for (int i = 0; i < alarms2.size() && i < diff; i++)
alarms.add(alarms2.get(i));
}
}
return alarms;
}
private List<Alarm> findInternal(String tenantId, String alarmDefId, String metricName, Map<String, String> metricDimensions, AlarmState state,
String lifecycleState, String link, DateTime stateUpdatedStart, String offset, int limit, boolean enforceLimit) {
Session session = null;
List<Alarm> alarms = new LinkedList<>();
try {
Query query;
session = sessionFactory.openSession();
StringBuilder sbWhere = new StringBuilder();
if (alarmDefId != null) {
sbWhere.append("and ad.id = :alarmDefId ");
}
if (metricName != null) {
sbWhere.append(" and a.id in (select distinct a.id from alarm as a "
+ "inner join alarm_metric as am on am.alarm_id = a.id "
+ "inner join metric_definition_dimensions as mdd "
+ " on mdd.id = am.metric_definition_dimensions_id "
+ "inner join (select distinct id from metric_definition "
+ " where name = :metricName) as md "
+ "on md.id = mdd.metric_definition_id ");
buildJoinClauseFor(metricDimensions, sbWhere);
sbWhere.append(")");
}
if (state != null) {
sbWhere.append(" and a.state = :state");
}
if (lifecycleState != null) {
sbWhere.append(" and a.lifecycle_state = :lifecycleState");
}
if (link != null) {
sbWhere.append(" and a.link = :link");
}
if (stateUpdatedStart != null) {
sbWhere.append(" and a.state_updated_at >= :stateUpdatedStart");
}
if (offset != null) {
sbWhere.append(" and a.id > :offset");
}
String limitPart = "";
if (enforceLimit && limit > 0) {
limitPart = " limit :limit";
}
String sql = String.format(ALARM_SQL, sbWhere, limitPart);
try {
query = session.createSQLQuery(sql);
} catch (Exception e) {
logger.error("Failed to bind query {}, error is {}", sql, e.getMessage());
throw new RuntimeException("Failed to bind query", e);
}
query.setString("tenantId", tenantId);
if (alarmDefId != null) {
query.setString("alarmDefId", alarmDefId);
}
if (offset != null) {
query.setString("offset", offset);
}
if (metricName != null) {
query.setString("metricName", metricName);
}
if (state != null) {
query.setString("state", state.name());
}
if (link != null) {
query.setString("link", link);
}
if (lifecycleState != null) {
query.setString("lifecycleState", lifecycleState);
}
if (stateUpdatedStart != null) {
query.setDate("stateUpdatedStart", stateUpdatedStart.toDate());
}
if (enforceLimit && limit > 0) {
query.setInteger("limit", limit + 1);
}
if (metricName != null) {
bindDimensionsToQuery(query, metricDimensions);
}
List<Object[]> alarmList = (List<Object[]>) query.list();
alarms = createAlarms(alarmList);
} finally {
if (session != null) {
session.close();
}
}
return alarms;
}
private List<Alarm> createAlarms(List<Object[]> alarmList) {
List<Alarm> alarms = Lists.newLinkedList();
Alarm alarm = null;
String previousAlarmId = null;
BinaryId previousDimensionSetId = null;
List<MetricDefinition> alarmedMetrics = null;
Map<String, String> dimensionMap = new HashMap<>();
for (Object[] alarmRow : alarmList) {
String alarm_definition_id = (String) alarmRow[0];
AlarmSeverity severity = null;
AlarmState alarmState = null;
DateTime updated_timestamp = null;
DateTime created_timestamp = null;
BinaryId dimension_set_id = null;
DateTime state_updated_timestamp = null;
if (alarmRow[1] instanceof String) {
severity = AlarmSeverity.valueOf((String) alarmRow[1]);
} else {
severity = (AlarmSeverity) alarmRow[1];
}
String alarm_definition_name = (String) alarmRow[2];
String id = (String) alarmRow[3];
if (alarmRow[4] instanceof String) {
alarmState = AlarmState.valueOf((String) alarmRow[4]);
} else {
alarmState = (AlarmState) alarmRow[4];
}
if (alarmRow[5] instanceof Timestamp) {
Timestamp ts = (Timestamp) alarmRow[5];
updated_timestamp = ISO_8601_FORMATTER.parseDateTime(ts.toString().replace(" ", "T"));
} else {
updated_timestamp = new DateTime(((DateTime) alarmRow[5]).getMillis(), DateTimeZone.forID("UTC"));
}
if (alarmRow[6] instanceof Timestamp) {
Timestamp ts = (Timestamp) alarmRow[6];
created_timestamp = ISO_8601_FORMATTER.parseDateTime(ts.toString().replace(" ", "T"));
} else {
created_timestamp = new DateTime(((DateTime) alarmRow[6]).getMillis(), DateTimeZone.forID("UTC"));
}
String lifecycle_state = (String) alarmRow[10];
String link = (String) alarmRow[11];
if (alarmRow[13] instanceof BinaryId) {
dimension_set_id = (BinaryId) alarmRow[13];
} else {
dimension_set_id = new BinaryId((byte[]) alarmRow[13]);
}
if (alarmRow[12] instanceof Timestamp) {
Timestamp ts = (Timestamp) alarmRow[12];
state_updated_timestamp = ISO_8601_FORMATTER.parseDateTime(ts.toString().replace(" ", "T"));
} else {
state_updated_timestamp = new DateTime(((DateTime) alarmRow[12]).getMillis(), DateTimeZone.forID("UTC"));
}
String metric_name = (String) alarmRow[7];
String dimension_name = (String) alarmRow[8];
String dimension_value = (String) alarmRow[9];
if (!id.equals(previousAlarmId)) {
alarmedMetrics = new ArrayList<>();
dimensionMap = Maps.newHashMap();
alarmedMetrics.add(new MetricDefinition(metric_name, dimensionMap));
alarm =
new Alarm(id, alarm_definition_id, alarm_definition_name, severity.name(), alarmedMetrics, alarmState, lifecycle_state, link,
state_updated_timestamp, updated_timestamp, created_timestamp);
alarms.add(alarm);
previousDimensionSetId = dimension_set_id;
}
if (!dimension_set_id.equals(previousDimensionSetId)) {
dimensionMap = Maps.newHashMap();
alarmedMetrics.add(new MetricDefinition(metric_name, dimensionMap));
}
dimensionMap.put(dimension_name, dimension_value);
previousDimensionSetId = dimension_set_id;
previousAlarmId = id;
}
return alarms;
}
private void bindDimensionsToQuery(
Query query,
Map<String, String> dimensions) {
if (dimensions != null) {
int i = 0;
for (Iterator<Map.Entry<String, String>> it = dimensions.entrySet().iterator(); it.hasNext(); i++) {
Map.Entry<String, String> entry = it.next();
query.setString("dname" + i, entry.getKey());
query.setString("dvalue" + i, entry.getValue());
}
}
}
private void buildJoinClauseFor(Map<String, String> dimensions, StringBuilder sbJoin) {
if (dimensions == null) {
return;
}
for (int i = 0; i < dimensions.size(); i++) {
final String indexStr = String.valueOf(i);
sbJoin.append(" inner join metric_dimension md").append(indexStr).append(" on md")
.append(indexStr)
.append(".name = :dname").append(indexStr).append(" and md").append(indexStr)
.append(".value = :dvalue").append(indexStr)
.append(" and mdd.metric_dimension_set_id = md")
.append(indexStr).append(".dimension_set_id");
}
}
@Override
@SuppressWarnings("unchecked")
public Alarm findById(String tenantId, String id) {
logger.trace(ORM_LOG_MARKER, "findById(...) entering");
Session session = null;
final String sql = String.format(FIND_ALARM_BY_ID_SQL, " and a.id = :id", "");
List<Alarm> alarms = new LinkedList<>();
try {
session = sessionFactory.openSession();
Query qAlarmDefinition =
session.createQuery(sql).setString("tenantId", tenantId)
.setString("id", id);
List<Object[]> alarmList = (List<Object[]>) qAlarmDefinition.list();
if (alarmList.isEmpty()) {
throw new EntityNotFoundException("No alarm exists for %s", id);
}
alarms = this.createAlarms(alarmList);
} finally {
if (session != null) {
session.close();
}
}
return alarms.get(0);
}
@Override
public Alarm update(String tenantId, String id, AlarmState state, String lifecycleState, String link) {
Session session = null;
Alarm originalAlarm = null;
Transaction tx = null;
try {
session = sessionFactory.openSession();
tx = session.beginTransaction();
originalAlarm = findById(tenantId, id);
AlarmDb result = (AlarmDb) session
.getNamedQuery(AlarmDb.Queries.FIND_BY_ID)
.setString("id", id)
.uniqueResult();
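// state_updated_at is bumped only when the alarm actually transitions to a new state;
// updated_at is refreshed on every call.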
if (!originalAlarm.getState().equals(state)) {
result.setStateUpdatedAt(DateTime.now());
result.setState(state);
}
result.setUpdatedAt(DateTime.now());
result.setLink(link);
result.setLifecycleState(lifecycleState);
session.update(result);
tx.commit();
tx = null;
} catch (Exception e) {
this.rollbackIfNotNull(tx);
throw e;
} finally {
if (session != null) {
session.close();
}
}
return originalAlarm;
}
@Override
@SuppressWarnings("unchecked")
public Map<String, AlarmSubExpression> findAlarmSubExpressions(String alarmId) {
Session session = null;
final Map<String, AlarmSubExpression> subAlarms = Maps.newHashMap();
logger.debug("AlarmSqlRepoImpl[findAlarmSubExpressions] called");
try {
session = sessionFactory.openSession();
final List<SubAlarmDb> result = session
.getNamedQuery(SubAlarmDb.Queries.BY_ALARM_ID)
.setString("id", alarmId)
.list();
if (result != null) {
for (SubAlarmDb row : result) {
subAlarms.put(row.getId(), AlarmSubExpression.of(row.getExpression()));
}
}
} finally {
if (session != null) {
session.close();
}
}
return subAlarms;
}
@Override
@SuppressWarnings("unchecked")
public Map<String, Map<String, AlarmSubExpression>> findAlarmSubExpressionsForAlarmDefinition(
String alarmDefinitionId) {
logger.trace(ORM_LOG_MARKER, "findAlarmSubExpressionsForAlarmDefinition(...) entering");
Session session = null;
Transaction tx = null;
Map<String, Map<String, AlarmSubExpression>> subAlarms = Maps.newHashMap();
try {
session = sessionFactory.openSession();
tx = session.beginTransaction();
final Iterator<SubAlarmDb> rows = session
.getNamedQuery(SubAlarmDb.Queries.BY_ALARMDEFINITION_ID)
.setString("id", alarmDefinitionId)
.setReadOnly(true)
.iterate();
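// group the sub-alarm expressions by the id of their parent alarm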
while (rows.hasNext()) {
final SubAlarmDb row = rows.next();
final String alarmId = (String) session.getIdentifier(row.getAlarm());
Map<String, AlarmSubExpression> alarmMap = subAlarms.get(alarmId);
if (alarmMap == null) {
alarmMap = Maps.newHashMap();
subAlarms.put(alarmId, alarmMap);
}
final String id = row.getId();
final String expression = row.getExpression();
alarmMap.put(id, AlarmSubExpression.of(expression));
}
tx.commit();
tx = null;
} catch (Exception exp) {
this.rollbackIfNotNull(tx);
throw exp;
} finally {
if (session != null) {
session.close();
}
}
return subAlarms;
}
}

View File

@ -0,0 +1,54 @@
/*
* Copyright 2015 FUJITSU LIMITED
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*
*/
package monasca.api.infrastructure.persistence.hibernate;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;
/**
* Abstract foundation for ORM repositories.
*/
abstract class BaseSqlRepo {
protected static final Marker ORM_LOG_MARKER = MarkerFactory.getMarker("ORM");
private static final Logger LOG = LoggerFactory.getLogger(BaseSqlRepo.class);
protected final SessionFactory sessionFactory;
protected BaseSqlRepo(final SessionFactory sessionFactory) {
this.sessionFactory = sessionFactory;
}
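/*
 * A minimal sketch of the session-per-operation pattern the concrete repositories in this
 * change follow (open session, commit or roll back, close); shown here for orientation only:
 *
 *   Session session = null;
 *   Transaction tx = null;
 *   try {
 *     session = sessionFactory.openSession();
 *     tx = session.beginTransaction();
 *     // ... persistence work ...
 *     tx.commit();
 *     tx = null;
 *   } catch (RuntimeException e) {
 *     this.rollbackIfNotNull(tx);
 *     throw e;
 *   } finally {
 *     if (session != null) {
 *       session.close();
 *     }
 *   }
 */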
/**
* Rolls back the passed {@code tx} transaction if it is not null.
* The assumption is that a null {@code tx} means the transaction
* has already been committed successfully.
*
* @param tx {@link Transaction} object
*/
protected void rollbackIfNotNull(final Transaction tx) {
if (tx != null) {
try {
tx.rollback();
} catch (RuntimeException rbe) {
LOG.error(ORM_LOG_MARKER, "Could not roll back transaction", rbe);
}
}
}
}

View File

@ -0,0 +1,268 @@
/*
* Copyright 2015 FUJITSU LIMITED
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.api.infrastructure.persistence.hibernate;
import java.util.List;
import java.util.UUID;
import javax.inject.Inject;
import javax.inject.Named;
import com.google.common.collect.Lists;
import org.apache.commons.collections4.CollectionUtils;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import monasca.api.domain.exception.EntityExistsException;
import monasca.api.domain.exception.EntityNotFoundException;
import monasca.api.domain.model.notificationmethod.NotificationMethod;
import monasca.api.domain.model.notificationmethod.NotificationMethodRepo;
import monasca.api.domain.model.notificationmethod.NotificationMethodType;
import monasca.common.hibernate.db.NotificationMethodDb;
import monasca.common.model.alarm.AlarmNotificationMethodType;
/**
* Notification method repository implementation.
*/
public class NotificationMethodSqlRepoImpl
extends BaseSqlRepo
implements NotificationMethodRepo {
private static final Logger LOG = LoggerFactory.getLogger(NotificationMethodSqlRepoImpl.class);
@Inject
public NotificationMethodSqlRepoImpl(@Named("orm") SessionFactory sessionFactory) {
super(sessionFactory);
}
@Override
public NotificationMethod create(String tenantId, String name, NotificationMethodType type,
String address) {
Transaction tx = null;
Session session = null;
try {
session = sessionFactory.openSession();
tx = session.beginTransaction();
if (byTenantIdAndName(session, tenantId, name) != null) {
throw new EntityExistsException("Notification method %s \"%s\" already exists.", tenantId,
name);
}
final String id = UUID.randomUUID().toString();
final DateTime now = DateTime.now();
final NotificationMethodDb db = new NotificationMethodDb(
id,
tenantId,
name,
AlarmNotificationMethodType.valueOf(type.name()),
address,
now,
now
);
session.save(db);
LOG.debug("Creating notification method {} for {}", name, tenantId);
tx.commit();
tx = null;
return this.convertToNotificationMethod(db);
} catch (RuntimeException e) {
this.rollbackIfNotNull(tx);
throw e;
} finally {
if (session != null) {
session.close();
}
}
}
@Override
public void deleteById(String tenantId, String notificationMethodId) {
Session session = null;
Transaction tx = null;
try {
if (!exists(tenantId, notificationMethodId)) {
throw new EntityNotFoundException("No notification exists for %s", notificationMethodId);
}
session = sessionFactory.openSession();
tx = session.beginTransaction();
// delete notification
session
.getNamedQuery(NotificationMethodDb.Queries.DELETE_BY_ID)
.setString("id", notificationMethodId)
.executeUpdate();
tx.commit();
tx = null;
} catch (RuntimeException e) {
this.rollbackIfNotNull(tx);
throw e;
} finally {
if (session != null) {
session.close();
}
}
}
@Override
public boolean exists(String tenantId, String notificationMethodId) {
Session session = null;
try {
session = sessionFactory.openSession();
return this.getByTenantIdAndId(session, tenantId, notificationMethodId) != null;
} finally {
if (session != null) {
session.close();
}
}
}
@Override
public NotificationMethod findById(String tenantId, String notificationMethodId) {
Session session = null;
try {
session = sessionFactory.openSession();
final NotificationMethodDb result = this.getByTenantIdAndId(session, tenantId, notificationMethodId);
if (result == null) {
throw new EntityNotFoundException("No notification method exists for %s",
notificationMethodId);
}
return this.convertToNotificationMethod(result);
} finally {
if (session != null) {
session.close();
}
}
}
@Override
public NotificationMethod update(String tenantId, String notificationMethodId, String name,
NotificationMethodType type, String address) {
Session session = null;
Transaction tx = null;
try {
session = sessionFactory.openSession();
final NotificationMethodDb result = this.byTenantIdAndName(session, tenantId, name);
if (result != null && !result.getId().equalsIgnoreCase(notificationMethodId)) {
throw new EntityExistsException("Notification method %s \"%s\" already exists.", tenantId,
name);
}
tx = session.beginTransaction();
NotificationMethodDb db;
if ((db = (NotificationMethodDb) session.get(NotificationMethodDb.class, notificationMethodId)) == null) {
throw new EntityNotFoundException("No notification method exists for %s",
notificationMethodId);
}
db.setName(name);
db.setType(AlarmNotificationMethodType.valueOf(type.name()));
db.setAddress(address);
session.save(db);
tx.commit();
tx = null;
return this.convertToNotificationMethod(db);
} catch (RuntimeException e) {
this.rollbackIfNotNull(tx);
throw e;
} finally {
if (session != null) {
session.close();
}
}
}
@Override
@SuppressWarnings("unchecked")
public List<NotificationMethod> find(String tenantId, String offset, int limit) {
Session session = null;
List<NotificationMethodDb> resultList;
List<NotificationMethod> notificationList = Lists.newArrayList();
final String rawQuery = "from NotificationMethodDb where tenant_id = :tenantId %1$s order by id";
try {
session = sessionFactory.openSession();
final String offsetPart = offset != null ? String.format("and id > '%s'", offset) : "";
final String queryHql = String.format(rawQuery, offsetPart);
final Query query = session.createQuery(queryHql).setString("tenantId", tenantId);
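// one extra row is fetched (limit + 1), presumably so callers can detect whether
// another page of results exists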
if (limit > 0) {
query.setMaxResults(limit + 1);
}
resultList = query.list();
if (CollectionUtils.isEmpty(resultList)) {
return notificationList;
}
for (NotificationMethodDb item : resultList) {
notificationList.add(this.convertToNotificationMethod(item));
}
return notificationList;
} finally {
if (session != null) {
session.close();
}
}
}
protected NotificationMethodDb byTenantIdAndName(final Session session,
final String tenantId,
final String name) {
return (NotificationMethodDb) session
.getNamedQuery(NotificationMethodDb.Queries.NOTIFICATION_BY_TENANT_ID_AND_NAME)
.setString("tenantId", tenantId)
.setString("name", name)
.uniqueResult();
}
protected NotificationMethodDb getByTenantIdAndId(final Session session,
final String tenantId,
final String id) {
return (NotificationMethodDb) session
.getNamedQuery(NotificationMethodDb.Queries.FIND_BY_TENANT_ID_AND_ID)
.setString("tenantId", tenantId)
.setString("id", id)
.uniqueResult();
}
protected NotificationMethod convertToNotificationMethod(final NotificationMethodDb db) {
return db == null ? null : new NotificationMethod(
db.getId(),
db.getName(),
NotificationMethodType.valueOf(db.getType().name()),
db.getAddress()
);
}
}

View File

@ -15,11 +15,10 @@ package monasca.api.infrastructure.persistence.influxdb;
import com.google.inject.Inject;
import monasca.api.ApiConfig;
import monasca.api.domain.model.alarmstatehistory.AlarmStateHistory;
import monasca.api.domain.model.alarmstatehistory.AlarmStateHistoryRepo;
import monasca.api.infrastructure.persistence.PersistUtils;
import monasca.api.infrastructure.persistence.mysql.MySQLUtils;
import monasca.api.infrastructure.persistence.Utils;
import monasca.common.model.alarm.AlarmState;
import monasca.common.model.alarm.AlarmTransitionSubAlarm;
import monasca.common.model.metric.MetricDefinition;
@ -30,7 +29,6 @@ import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.skife.jdbi.v2.DBI;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -43,17 +41,13 @@ import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import javax.inject.Named;
public class InfluxV9AlarmStateHistoryRepo implements AlarmStateHistoryRepo {
private static final Logger logger = LoggerFactory
.getLogger(InfluxV9AlarmStateHistoryRepo.class);
private final DBI mysql;
private final MySQLUtils mySQLUtils;
private final ApiConfig config;
private final String region;
private final Utils utils;
private final InfluxV9RepoReader influxV9RepoReader;
private final InfluxV9Utils influxV9Utils;
private final PersistUtils persistUtils;
@ -71,17 +65,12 @@ public class InfluxV9AlarmStateHistoryRepo implements AlarmStateHistoryRepo {
new TypeReference<List<AlarmTransitionSubAlarm>>() {};
@Inject
public InfluxV9AlarmStateHistoryRepo(@Named("mysql") DBI mysql,
MySQLUtils mySQLUtils,
ApiConfig config,
public InfluxV9AlarmStateHistoryRepo(Utils utils,
InfluxV9RepoReader influxV9RepoReader,
InfluxV9Utils influxV9Utils,
PersistUtils persistUtils) {
this.mysql = mysql;
this.mySQLUtils = mySQLUtils;
this.config = config;
this.region = config.region;
this.utils = utils;
this.influxV9RepoReader = influxV9RepoReader;
this.influxV9Utils = influxV9Utils;
this.persistUtils = persistUtils;
@ -120,7 +109,7 @@ public class InfluxV9AlarmStateHistoryRepo implements AlarmStateHistoryRepo {
DateTime startTime, @Nullable DateTime endTime,
@Nullable String offset, int limit) throws Exception {
List<String> alarmIdList = this.mySQLUtils.findAlarmIds(tenantId, dimensions);
List<String> alarmIdList = this.utils.findAlarmIds(tenantId, dimensions);
if (alarmIdList == null || alarmIdList.isEmpty()) {
return new ArrayList<>();

View File

@ -28,8 +28,10 @@ import java.util.Map;
import javax.inject.Named;
import monasca.api.infrastructure.persistence.DimensionQueries;
import monasca.api.infrastructure.persistence.Utils;
public class MySQLUtils {
public class MySQLUtils
extends Utils {
private static final Logger logger =
LoggerFactory.getLogger(MySQLUtils.class);
@ -38,9 +40,7 @@ public class MySQLUtils {
@Inject
public MySQLUtils(@Named("mysql") DBI mysql) {
this.mysql = mysql;
}
public List<String> findAlarmIds(String tenantId,
@ -48,17 +48,17 @@ public class MySQLUtils {
final String FIND_ALARM_IDS_SQL =
"select distinct a.id "
+ "from alarm as a "
+ "join alarm_definition as ad on a.alarm_definition_id = ad.id "
+ "%s "
+ "where ad.tenant_id = :tenantId and ad.deleted_at is NULL "
+ "order by ad.created_at";
+ "from alarm as a "
+ "join alarm_definition as ad on a.alarm_definition_id = ad.id "
+ "%s "
+ "where ad.tenant_id = :tenantId and ad.deleted_at is NULL "
+ "order by ad.created_at";
List<String> alarmIdList;
try (Handle h = this.mysql.open()) {
final String sql = String.format(FIND_ALARM_IDS_SQL, buildJoinClauseFor(dimensions));
final String sql = String.format(FIND_ALARM_IDS_SQL, this.buildJoinClauseFor(dimensions));
Query<Map<String, Object>> query = h.createQuery(sql).bind("tenantId", tenantId);
@ -72,38 +72,4 @@ public class MySQLUtils {
return alarmIdList;
}
private String buildJoinClauseFor(Map<String, String> dimensions) {
if ((dimensions == null) || dimensions.isEmpty()) {
return "";
}
final StringBuilder sb = new StringBuilder(
"join alarm_metric as am on a.id=am.alarm_id "
+ "join metric_definition_dimensions as mdd on am.metric_definition_dimensions_id=mdd.id ");
for (int i = 0; i < dimensions.size(); i++) {
final String tableAlias = "md" + i;
sb.append(" inner join metric_dimension ")
.append(tableAlias)
.append(" on ")
.append(tableAlias)
.append(".name = :dname")
.append(i)
.append(" and ")
.append(tableAlias)
.append(".value = :dvalue")
.append(i)
.append(" and mdd.metric_dimension_set_id = ")
.append(tableAlias)
.append(".dimension_set_id");
}
logger.debug("mysql dimension join clause: {}", sb.toString());
return sb.toString();
}
}

View File

@ -17,7 +17,7 @@ import monasca.api.domain.model.alarmstatehistory.AlarmStateHistory;
import monasca.api.domain.model.alarmstatehistory.AlarmStateHistoryRepo;
import monasca.api.infrastructure.persistence.DimensionQueries;
import monasca.api.infrastructure.persistence.PersistUtils;
import monasca.api.infrastructure.persistence.mysql.MySQLUtils;
import monasca.api.infrastructure.persistence.Utils;
import monasca.common.model.alarm.AlarmState;
import monasca.common.model.alarm.AlarmTransitionSubAlarm;
import monasca.common.model.metric.MetricDefinition;
@ -82,7 +82,7 @@ public class AlarmStateHistoryVerticaRepoImpl implements AlarmStateHistoryRepo {
new TypeReference<List<AlarmTransitionSubAlarm>>() {};
private final DBI vertica;
private final MySQLUtils mySQLUtils;
private final Utils utils;
private final PersistUtils persistUtils;
private final SimpleDateFormat simpleDateFormat;
@ -90,11 +90,11 @@ public class AlarmStateHistoryVerticaRepoImpl implements AlarmStateHistoryRepo {
@Inject
public AlarmStateHistoryVerticaRepoImpl(
@Named("vertica") DBI vertica,
MySQLUtils mySQLUtils,
Utils utils,
PersistUtils persistUtils) {
this.vertica = vertica;
this.mySQLUtils = mySQLUtils;
this.utils = utils;
this.persistUtils = persistUtils;
simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
@ -159,7 +159,7 @@ public class AlarmStateHistoryVerticaRepoImpl implements AlarmStateHistoryRepo {
@Nullable String offset,
int limit) {
List<String> alarmIds = this.mySQLUtils.findAlarmIds(tenantId, dimensions);
List<String> alarmIds = this.utils.findAlarmIds(tenantId, dimensions);
if (alarmIds == null || alarmIds.isEmpty()) {

View File

@ -122,3 +122,16 @@ logging:
facility: local0
threshold: ALL
logFormat: # TODO
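# Hibernate (ORM) repository settings: supportEnabled toggles the ORM-backed repositories,
# the remaining keys configure the Hikari/PostgreSQL data source (the credentials below
# appear to be local development defaults).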
hibernate:
supportEnabled: true
providerClass: com.zaxxer.hikari.hibernate.HikariConnectionProvider
dataSourceClassName: org.postgresql.ds.PGPoolingDataSource
serverName: localhost
portNumber: 5432
databaseName: mon
user: mon
password: mon
initialConnections: 25
maxConnections: 100
autoConfig: validate

View File

@ -0,0 +1,366 @@
/*
* Copyright 2015 FUJITSU LIMITED
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.api.infrastructure.persistence.hibernate;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.google.common.collect.ImmutableMap;
import monasca.api.domain.exception.EntityNotFoundException;
import monasca.api.domain.model.alarmdefinition.AlarmDefinition;
import monasca.api.domain.model.alarmdefinition.AlarmDefinitionRepo;
import monasca.common.hibernate.db.AlarmActionDb;
import monasca.common.hibernate.db.AlarmDefinitionDb;
import monasca.common.hibernate.db.SubAlarmDefinitionDb;
import monasca.common.hibernate.db.SubAlarmDefinitionDimensionDb;
import monasca.common.model.alarm.AggregateFunction;
import monasca.common.model.alarm.AlarmOperator;
import monasca.common.model.alarm.AlarmSeverity;
import monasca.common.model.alarm.AlarmState;
import monasca.common.model.alarm.AlarmSubExpression;
import monasca.common.model.metric.MetricDefinition;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Restrictions;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@Test(groups = "orm")
public class AlarmDefinitionSqlRepositoryImplTest {
private SessionFactory sessionFactory;
private AlarmDefinitionRepo repo;
private AlarmDefinition alarmDef_123;
private AlarmDefinition alarmDef_234;
private List<String> alarmActions;
@BeforeMethod
protected void beforeMethod() throws Exception {
this.sessionFactory = HibernateUtil.getSessionFactory();
this.repo = new AlarmDefinitionSqlRepoImpl(this.sessionFactory);
alarmActions = new ArrayList<>();
alarmActions.add("29387234");
alarmActions.add("77778687");
this.prepareData(this.sessionFactory);
}
@AfterMethod
protected void afterMethod() {
this.sessionFactory.close();
this.sessionFactory = null;
}
protected void prepareData(final SessionFactory sessionFactory) {
Session session = sessionFactory.openSession();
session.beginTransaction();
final AlarmDefinitionDb alarmDefinition123 = new AlarmDefinitionDb()
.setTenantId("bob")
.setName("90% CPU")
.setSeverity(AlarmSeverity.LOW)
.setExpression("avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=cpu, device=1}) > 10")
.setMatchBy("flavor_id,image_id")
.setActionsEnabled(true);
session.save(alarmDefinition123.setId("123"));
final SubAlarmDefinitionDb subAlarmDefinition111 = new SubAlarmDefinitionDb()
.setAlarmDefinition(alarmDefinition123)
.setFunction("avg")
.setMetricName("hpcs.compute")
.setOperator(AlarmOperator.GT)
.setThreshold(10d)
.setPeriod(60)
.setPeriods(1);
session.save(subAlarmDefinition111.setId("111"));
final SubAlarmDefinitionDimensionDb subAlarmDefinitionDimensionFlavor777 = new SubAlarmDefinitionDimensionDb()
.setDimensionName("flavor_id")
.setValue("777");
final SubAlarmDefinitionDimensionDb subAlarmDefinitionDimensionImageId888 = new SubAlarmDefinitionDimensionDb()
.setDimensionName("image_id")
.setValue("888");
final SubAlarmDefinitionDimensionDb subAlarmDefinitionDimensionFlavorMetricNameCpu = new SubAlarmDefinitionDimensionDb()
.setDimensionName("metric_name")
.setValue("cpu");
final SubAlarmDefinitionDimensionDb subAlarmDefinitionDimensionDevice1 = new SubAlarmDefinitionDimensionDb()
.setDimensionName("device")
.setValue("1");
session.save(subAlarmDefinitionDimensionFlavor777.setSubExpression(subAlarmDefinition111));
session.save(subAlarmDefinitionDimensionImageId888.setSubExpression(subAlarmDefinition111));
session.save(subAlarmDefinitionDimensionFlavorMetricNameCpu.setSubExpression(subAlarmDefinition111));
session.save(subAlarmDefinitionDimensionDevice1.setSubExpression(subAlarmDefinition111));
final AlarmActionDb alarmAction29387234 = new AlarmActionDb()
.setActionId("29387234")
.setAlarmDefinition(alarmDefinition123)
.setAlarmState(AlarmState.ALARM);
final AlarmActionDb alarmAction77778687 = new AlarmActionDb()
.setActionId("77778687")
.setAlarmDefinition(alarmDefinition123)
.setAlarmState(AlarmState.ALARM);
session.save(alarmAction29387234);
session.save(alarmAction77778687);
final AlarmDefinitionDb alarmDefinition234 = new AlarmDefinitionDb()
.setTenantId("bob")
.setName("50% CPU")
.setSeverity(AlarmSeverity.LOW)
.setExpression("avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=mem}) > 20 and avg(hpcs.compute) < 100")
.setMatchBy("flavor_id,image_id")
.setActionsEnabled(true);
session.save(alarmDefinition234.setId("234"));
final SubAlarmDefinitionDb subAlarmDefinition222 = new SubAlarmDefinitionDb()
.setAlarmDefinition(alarmDefinition234)
.setFunction("avg")
.setMetricName("hpcs.compute")
.setOperator(AlarmOperator.GT)
.setThreshold(20d)
.setPeriod(60)
.setPeriods(1);
final SubAlarmDefinitionDb subAlarmDefinition223 = new SubAlarmDefinitionDb()
.setAlarmDefinition(alarmDefinition234)
.setFunction("avg")
.setMetricName("hpcs.compute")
.setOperator(AlarmOperator.LT)
.setThreshold(100d)
.setPeriod(60)
.setPeriods(1);
session.save(subAlarmDefinition222.setId("222"));
session.save(subAlarmDefinition223.setId("223"));
session.save(
new SubAlarmDefinitionDimensionDb().setDimensionName("flavor_id").setValue("777").setSubExpression(subAlarmDefinition222)
);
session.save(
new SubAlarmDefinitionDimensionDb().setDimensionName("image_id").setValue("888").setSubExpression(subAlarmDefinition222)
);
session.save(
new SubAlarmDefinitionDimensionDb().setDimensionName("metric_name").setValue("mem").setSubExpression(subAlarmDefinition222)
);
session.save(
new AlarmActionDb().setAlarmDefinition(alarmDefinition234).setAlarmState(AlarmState.ALARM).setActionId("29387234")
);
session.save(
new AlarmActionDb().setAlarmDefinition(alarmDefinition234).setAlarmState(AlarmState.ALARM).setActionId("77778687")
);
session.getTransaction().commit();
session.close();
alarmDef_123 =
new AlarmDefinition("123", "90% CPU", null, "LOW", "avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=cpu, device=1}) > 10",
Arrays.asList("flavor_id", "image_id"), true, Arrays.asList("29387234", "77778687"), Collections.<String>emptyList(),
Collections.<String>emptyList());
alarmDef_234 =
new AlarmDefinition("234", "50% CPU", null, "LOW",
"avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=mem}) > 20 and avg(hpcs.compute) < 100",
Arrays.asList("flavor_id", "image_id"), true, Arrays.asList("29387234", "77778687"), Collections.<String>emptyList(),
Collections.<String>emptyList());
}
@Test(groups = "orm")
public void shouldCreate() {
Session session = null;
long subAlarmDimensionSize;
long subAlarmSize;
Map<String, AlarmSubExpression> subExpressions =
ImmutableMap.<String, AlarmSubExpression>builder()
.put("4433", AlarmSubExpression.of("avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=cpu}) > 10")).build();
AlarmDefinition alarmA =
repo.create("555", "2345", "90% CPU", null, "LOW", "avg(hpcs.compute{flavor_id=777, image_id=888, metric_name=cpu}) > 10", subExpressions,
Arrays.asList("flavor_id", "image_id"), alarmActions, null, null);
AlarmDefinition alarmB = repo.findById("555", alarmA.getId());
assertEquals(alarmA.getId(), alarmB.getId());
assertEquals(alarmA.getName(), alarmB.getName());
assertEquals(alarmA.getAlarmActions().size(), alarmB.getAlarmActions().size());
for (String alarmAction : alarmA.getAlarmActions()) {
assertTrue(alarmB.getAlarmActions().contains(alarmAction));
}
// Assert that sub-alarm and sub-alarm-dimensions made it to the db
try {
session = sessionFactory.openSession();
subAlarmSize = (Long) session
.createCriteria(SubAlarmDefinitionDb.class)
.add(Restrictions.eq("id", "4433"))
.setProjection(Projections.rowCount())
.uniqueResult();
subAlarmDimensionSize = (Long) session.createCriteria(SubAlarmDefinitionDimensionDb.class)
.add(Restrictions.eq("subAlarmDefinitionDimensionId.subExpression.id", "4433"))
.setProjection(Projections.rowCount())
.uniqueResult();
} finally {
if (session != null) {
session.close();
}
}
assertEquals(subAlarmSize, (long) 1);
assertEquals(subAlarmDimensionSize, (long) 3);
}
@Test(groups = "orm")
public void shouldUpdate() {
List<String> oldSubAlarmIds = Arrays.asList("222");
AlarmSubExpression changedSubExpression = AlarmSubExpression.of("avg(hpcs.compute) <= 200");
Map<String, AlarmSubExpression> changedSubExpressions =
ImmutableMap.<String, AlarmSubExpression>builder().put("223", changedSubExpression).build();
AlarmSubExpression newSubExpression = AlarmSubExpression.of("avg(foo{flavor_id=777}) > 333");
Map<String, AlarmSubExpression> newSubExpressions = ImmutableMap.<String, AlarmSubExpression>builder().put("555", newSubExpression).build();
repo.update("bob", "234", false, "90% CPU", null, "avg(foo{flavor_id=777}) > 333 and avg(hpcs.compute) <= 200",
Arrays.asList("flavor_id", "image_id"), "LOW", false, oldSubAlarmIds, changedSubExpressions, newSubExpressions, alarmActions, null, null);
AlarmDefinition alarm = repo.findById("bob", "234");
AlarmDefinition expected =
new AlarmDefinition("234", "90% CPU", null, "LOW", "avg(foo{flavor_id=777}) > 333 and avg(hpcs.compute) <= 200", Arrays.asList("flavor_id",
"image_id"), false, alarmActions, Collections.<String>emptyList(), Collections.<String>emptyList());
assertEquals(expected.getId(), alarm.getId());
assertEquals(expected.getName(), alarm.getName());
assertEquals(expected.getExpressionData(), alarm.getExpressionData());
assertEquals(expected.getAlarmActions().size(), alarm.getAlarmActions().size());
for (String alarmAction : expected.getAlarmActions()) {
assertTrue(alarm.getAlarmActions().contains(alarmAction));
}
Map<String, AlarmSubExpression> subExpressions = repo.findSubExpressions("234");
assertEquals(subExpressions.get("223"), changedSubExpression);
assertEquals(subExpressions.get("555"), newSubExpression);
}
@Test(groups = "orm")
public void shouldFindById() {
Session session = null;
AlarmDefinition alarmDef_123_repo = repo.findById("bob", "123");
assertEquals(alarmDef_123.getDescription(), alarmDef_123_repo.getDescription());
assertEquals(alarmDef_123.getExpression(), alarmDef_123_repo.getExpression());
assertEquals(alarmDef_123.getExpressionData(), alarmDef_123_repo.getExpressionData());
assertEquals(alarmDef_123.getName(), alarmDef_123_repo.getName());
// Make sure it still finds AlarmDefinitions with no notifications
try {
session = sessionFactory.openSession();
session.createQuery("delete from AlarmActionDb").executeUpdate();
} finally {
if (session != null) {
session.close();
}
}
alarmDef_123.setAlarmActions(new ArrayList<String>(0));
assertEquals(alarmDef_123, repo.findById("bob", "123"));
}
@Test(groups = "orm")
public void shouldFindSubAlarmMetricDefinitions() {
assertEquals(repo.findSubAlarmMetricDefinitions("123").get("111"), new MetricDefinition("hpcs.compute", ImmutableMap.<String, String>builder()
.put("flavor_id", "777").put("image_id", "888").put("metric_name", "cpu").put("device", "1").build()));
assertEquals(repo.findSubAlarmMetricDefinitions("234").get("222"), new MetricDefinition("hpcs.compute", ImmutableMap.<String, String>builder()
.put("flavor_id", "777").put("image_id", "888").put("metric_name", "mem").build()));
assertTrue(repo.findSubAlarmMetricDefinitions("asdfasdf").isEmpty());
}
@Test(groups = "orm")
public void shouldFindSubExpressions() {
assertEquals(repo.findSubExpressions("123").get("111"), new AlarmSubExpression(AggregateFunction.AVG, new MetricDefinition("hpcs.compute",
ImmutableMap.<String, String>builder().put("flavor_id", "777").put("image_id", "888").put("metric_name", "cpu").put("device", "1").build()),
AlarmOperator.GT, 10, 60, 1));
assertEquals(repo.findSubExpressions("234").get("223"), new AlarmSubExpression(AggregateFunction.AVG, new MetricDefinition("hpcs.compute",
new HashMap<String, String>()), AlarmOperator.LT, 100, 60, 1));
assertTrue(repo.findSubAlarmMetricDefinitions("asdfasdf").isEmpty());
}
@Test(groups = "orm")
public void testExists() {
assertEquals(repo.exists("bob", "90% CPU"), "123");
// Negative
assertNull(repo.exists("bob", "999% CPU"));
}
@Test(groups = "orm")
public void shouldDeleteById() {
repo.deleteById("bob", "123");
try {
assertNull(repo.findById("bob", "123"));
fail();
} catch (EntityNotFoundException expected) {
}
assertEquals(Arrays.asList(alarmDef_234), repo.find("bob", null, null, null, 1));
}
public void shouldFindByDimension() {
final Map<String, String> dimensions = new HashMap<>();
dimensions.put("image_id", "888");
List<AlarmDefinition> result = repo.find("bob", null, dimensions, null, 1);
assertEquals(Arrays.asList(alarmDef_123, alarmDef_234), result);
dimensions.clear();
dimensions.put("device", "1");
assertEquals(Arrays.asList(alarmDef_123), repo.find("bob", null, dimensions, null, 1));
dimensions.clear();
dimensions.put("Not real", "AA");
assertEquals(0, repo.find("bob", null, dimensions, null, 1).size());
}
public void shouldFindByName() {
final Map<String, String> dimensions = new HashMap<>();
dimensions.put("image_id", "888");
List<AlarmDefinition> result = repo.find("bob", "90% CPU", dimensions, null, 1);
assertEquals(Arrays.asList(alarmDef_123), result);
}
}

View File

@ -0,0 +1,163 @@
/*
* Copyright 2015 FUJITSU LIMITED
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.api.infrastructure.persistence.hibernate;
import static monasca.api.infrastructure.persistence.hibernate.TestHelper.randomByteArray;
import static org.testng.Assert.assertEquals;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.beust.jcommander.internal.Maps;
import monasca.common.hibernate.db.AlarmDb;
import monasca.common.hibernate.db.AlarmDefinitionDb;
import monasca.common.hibernate.db.AlarmMetricDb;
import monasca.common.hibernate.db.MetricDefinitionDb;
import monasca.common.hibernate.db.MetricDefinitionDimensionsDb;
import monasca.common.hibernate.db.MetricDimensionDb;
import monasca.common.model.alarm.AlarmSeverity;
import monasca.common.model.alarm.AlarmState;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@Test(groups = "orm")
public class AlarmHibernateUtilsTest {
private static final DateTimeFormatter ISO_8601_FORMATTER = ISODateTimeFormat.dateOptionalTimeParser().withZoneUTC();
private static final String LUK_TENANT_ID = "luk";
private static final String BOB_TENANT_ID = "bob";
private static final String ALARM_DEF_NAME = "90%";
private static final String ALARM_DEF_EXPRESSION = "avg(cpu.idle_perc{flavor_id=777, image_id=888, device=1}) > 10";
private static final String ALARM_MATCH_BY = "flavor_id,image_id";
private static final int BINARY_KEY_LENGTH = 20;
private AlarmHibernateUtils repo;
private SessionFactory sessionFactory;
@BeforeMethod
protected void beforeMethod() {
this.sessionFactory = HibernateUtil.getSessionFactory();
this.prepareData(this.sessionFactory);
this.repo = new AlarmHibernateUtils(sessionFactory);
}
@AfterMethod
protected void afterMethod() {
this.sessionFactory.close();
this.sessionFactory = null;
}
private void prepareData(final SessionFactory sessionFactory) {
Session session = sessionFactory.openSession();
session.beginTransaction();
DateTime timestamp1 = ISO_8601_FORMATTER.parseDateTime("2015-03-14T09:26:53");
final AlarmDefinitionDb alarmDefinitionBob = this.newAlarmDefinition(session, "1", BOB_TENANT_ID);
final AlarmDefinitionDb alarmDefinitionLuk = this.newAlarmDefinition(session, "2", LUK_TENANT_ID);
session.save(alarmDefinitionBob);
session.save(alarmDefinitionLuk);
final AlarmDb alarmDb1 = new AlarmDb("1", alarmDefinitionBob, AlarmState.OK, "OPEN", "http://somesite.com/this-alarm-info", timestamp1, timestamp1, timestamp1);
final AlarmDb alarmDb2 = new AlarmDb("2", alarmDefinitionLuk, AlarmState.OK, "OPEN", "http://somesite.com/this-alarm-info", timestamp1, timestamp1, timestamp1);
session.save(alarmDb1);
session.save(alarmDb2);
final MetricDefinitionDb md1 = new MetricDefinitionDb(new byte[]{1}, "metric", BOB_TENANT_ID, "eu");
session.save(md1);
final MetricDimensionDb mDim1Instance = new MetricDimensionDb(randomByteArray(BINARY_KEY_LENGTH), "instance_id", "123");
final MetricDimensionDb mDim1Service = new MetricDimensionDb(randomByteArray(BINARY_KEY_LENGTH), "service", "monitoring");
final MetricDimensionDb mDim2Flavor = new MetricDimensionDb(randomByteArray(BINARY_KEY_LENGTH), "flavor_id", "222");
session.save(mDim1Instance);
session.save(mDim1Service);
session.save(mDim2Flavor);
final MetricDefinitionDimensionsDb mdd11 = new MetricDefinitionDimensionsDb(randomByteArray(BINARY_KEY_LENGTH), md1, mDim1Instance.getId().getDimensionSetId());
final MetricDefinitionDimensionsDb mdd22 = new MetricDefinitionDimensionsDb(randomByteArray(BINARY_KEY_LENGTH), md1, mDim2Flavor.getId().getDimensionSetId());
session.save(mdd11);
session.save(mdd22);
session.save(new AlarmMetricDb(alarmDb1, mdd11));
session.save(new AlarmMetricDb(alarmDb1, mdd22));
session.save(new AlarmMetricDb(alarmDb2, mdd11));
session.getTransaction().commit();
session.close();
}
private AlarmDefinitionDb newAlarmDefinition(final Session session,
final String id,
final String tenantId) {
final String str = "AlarmDefinition" + 1;
final DateTime now = DateTime.now();
final AlarmDefinitionDb definition = new AlarmDefinitionDb(id, tenantId, ALARM_DEF_NAME, str, ALARM_DEF_EXPRESSION, AlarmSeverity.LOW, ALARM_MATCH_BY, true, now, now, null);
session.save(definition);
return definition;
}
public void testNullArguments() {
List<String> result = repo.findAlarmIds(null, null);
assertEquals(result.size(), 0, "No alarms");
}
public void testWithTenantIdNoExist() {
List<String> result = repo.findAlarmIds("fake_id", null);
assertEquals(result.size(), 0, "No alarms");
}
public void testWithTenantId() {
List<String> result = repo.findAlarmIds(BOB_TENANT_ID, new HashMap<String, String>());
assertEquals(result.size(), 1, "Alarm found");
assertEquals(result.get(0), "1", "Alarm with id 1 found");
result = repo.findAlarmIds(LUK_TENANT_ID, new HashMap<String, String>());
assertEquals(result.size(), 1, "Alarm found");
assertEquals(result.get(0), "2", "Alarm with id 2 found");
}
public void testWithDimensions() {
Map<String, String> dimensions = Maps.newHashMap();
dimensions.put("flavor_id", "222");
List<String> result = repo.findAlarmIds(BOB_TENANT_ID, dimensions);
assertEquals(result.size(), 1, "Alarm found");
assertEquals(result.get(0), "1", "Alarm with id 1 found");
}
public void testWithNotExistingDimensions() {
Map<String, String> dimensions = Maps.newHashMap();
dimensions.put("a", "b");
List<String> result = repo.findAlarmIds(BOB_TENANT_ID, dimensions);
assertEquals(result.size(), 0, "Alarm not found");
}
}

View File

@ -0,0 +1,477 @@
/*
* Copyright 2015 FUJITSU LIMITED
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.api.infrastructure.persistence.hibernate;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotEquals;
import static org.testng.Assert.assertTrue;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMap.Builder;
import com.google.common.collect.Lists;
import monasca.api.domain.exception.EntityNotFoundException;
import monasca.api.domain.model.alarm.Alarm;
import monasca.api.domain.model.alarm.AlarmRepo;
import monasca.common.hibernate.db.AlarmDb;
import monasca.common.hibernate.db.AlarmDefinitionDb;
import monasca.common.hibernate.db.AlarmMetricDb;
import monasca.common.hibernate.db.MetricDefinitionDb;
import monasca.common.hibernate.db.MetricDefinitionDimensionsDb;
import monasca.common.hibernate.db.MetricDimensionDb;
import monasca.common.hibernate.db.SubAlarmDb;
import monasca.common.hibernate.db.SubAlarmDefinitionDb;
import monasca.common.model.alarm.AlarmOperator;
import monasca.common.model.alarm.AlarmSeverity;
import monasca.common.model.alarm.AlarmState;
import monasca.common.model.alarm.AlarmSubExpression;
import monasca.common.model.metric.MetricDefinition;
import org.apache.commons.collections4.CollectionUtils;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.criterion.Restrictions;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@Test(groups = "orm")
public class AlarmSqlRepositoryImplTest {
private static final String TENANT_ID = "bob";
private static final String ALARM_ID = "234111";
private static final DateTimeFormatter ISO_8601_FORMATTER = ISODateTimeFormat.dateOptionalTimeParser().withZoneUTC();
private static final DateTimeZone UTC_TIMEZONE = DateTimeZone.forID("UTC");
private SessionFactory sessionFactory;
private AlarmRepo repo;
private Alarm compoundAlarm;
private Alarm alarm1;
private Alarm alarm2;
private Alarm alarm3;
@BeforeMethod
protected void setupClass() throws Exception {
this.sessionFactory = HibernateUtil.getSessionFactory();
this.repo = new AlarmSqlRepoImpl(this.sessionFactory);
this.prepareData(this.sessionFactory);
}
@AfterMethod
public void tearDown() throws Exception {
this.sessionFactory.close();
this.sessionFactory = null;
}
private void prepareData(final SessionFactory sessionFactory) {
final DateTime now = new DateTime();
Session session = null;
try {
session = sessionFactory.openSession();
session.beginTransaction();
DateTime timestamp1 = ISO_8601_FORMATTER.parseDateTime("2015-03-14T09:26:53").withZoneRetainFields(UTC_TIMEZONE);
DateTime timestamp2 = ISO_8601_FORMATTER.parseDateTime("2015-03-14T09:26:54").withZoneRetainFields(UTC_TIMEZONE);
DateTime timestamp3 = ISO_8601_FORMATTER.parseDateTime("2015-03-14T09:26:55").withZoneRetainFields(UTC_TIMEZONE);
DateTime timestamp4 = ISO_8601_FORMATTER.parseDateTime("2015-03-15T09:26:53").withZoneRetainFields(UTC_TIMEZONE);
final AlarmDefinitionDb alarmDefinition_90Percent = this.newAlarmDefinition(session,
"1",
TENANT_ID,
"90% CPU",
"avg(cpu.idle_perc{flavor_id=777, image_id=888, device=1}) > 10",
AlarmSeverity.LOW,
"flavor_id,image_id",
true
);
final AlarmDefinitionDb alarmDefinition_50Percent = this.newAlarmDefinition(session,
"234",
TENANT_ID,
"50% CPU",
"avg(cpu.sys_mem{service=monitoring}) > 20 and avg(cpu.idle_perc{service=monitoring}) < 10",
AlarmSeverity.LOW,
"hostname,region",
true
);
final AlarmDb alarmDb_234111 = new AlarmDb(ALARM_ID, alarmDefinition_50Percent, AlarmState.UNDETERMINED, null, null, timestamp4, timestamp4, timestamp4);
final AlarmDb alarmDb_1 = new AlarmDb("1", alarmDefinition_90Percent, AlarmState.OK, "OPEN", "http://somesite.com/this-alarm-info", timestamp1, timestamp1, timestamp1);
final AlarmDb alarmDb_2 = new AlarmDb("2", alarmDefinition_90Percent, AlarmState.UNDETERMINED, "OPEN", null, timestamp2, timestamp2, timestamp2);
final AlarmDb alarmDb_3 = new AlarmDb("3", alarmDefinition_90Percent, AlarmState.ALARM, null, "http://somesite.com/this-alarm-info", timestamp3, timestamp3, timestamp3);
session.save(alarmDb_1);
session.save(alarmDb_2);
session.save(alarmDb_3);
session.save(alarmDb_234111);
final List<AlarmDb> alarmDbs = Lists.newArrayList(alarmDb_1, alarmDb_2, alarmDb_3);
long subAlarmId = 42;
for (int alarmIndex = 0; alarmIndex < 3; alarmIndex++) {
final SubAlarmDefinitionDb subExpression = this.newSubAlarmDefinition(session, String.format("%d", alarmIndex + subAlarmId), alarmDefinition_50Percent);
session.save(
new SubAlarmDb(
String.valueOf(subAlarmId++),
alarmDbs.get(alarmIndex),
subExpression,
"avg(cpu.idle_perc{flavor_id=777, image_id=888, device=1}) > 10",
now,
now
)
);
}
final MetricDefinitionDb metricDefinition1 = new MetricDefinitionDb(new byte[]{1}, "cpu.idle_perc", "bob", "west");
session.save(metricDefinition1);
final MetricDimensionDb metricDimension1InstanceId = new MetricDimensionDb(new byte[]{1}, "instance_id", "123");
final MetricDimensionDb metricDimensionService = new MetricDimensionDb(new byte[]{1}, "service", "monitoring");
final MetricDimensionDb metricDimension2FlavorId = new MetricDimensionDb(new byte[]{2}, "flavor_id", "222");
session.save(metricDimension1InstanceId);
session.save(metricDimensionService);
session.save(metricDimension2FlavorId);
final MetricDefinitionDimensionsDb metricDefinitionDimensions11 = new MetricDefinitionDimensionsDb(
new byte[]{1, 1},
metricDefinition1,
metricDimension1InstanceId.getId().getDimensionSetId()
);
final MetricDefinitionDimensionsDb metricDefinitionDimensions22 = new MetricDefinitionDimensionsDb(
new byte[]{2, 2},
metricDefinition1,
metricDimension2FlavorId.getId().getDimensionSetId()
);
session.save(metricDefinitionDimensions11);
session.save(metricDefinitionDimensions22);
session.save(new AlarmMetricDb(alarmDbs.get(0), metricDefinitionDimensions11));
session.save(new AlarmMetricDb(alarmDbs.get(0), metricDefinitionDimensions22));
session.save(new AlarmMetricDb(alarmDbs.get(1), metricDefinitionDimensions11));
session.save(new AlarmMetricDb(alarmDbs.get(2), metricDefinitionDimensions22));
alarm1 =
new Alarm("1", "1", "90% CPU", "LOW",
buildAlarmMetrics(
buildMetricDefinition("cpu.idle_perc", "instance_id", "123", "service", "monitoring")
, buildMetricDefinition("cpu.idle_perc", "flavor_id", "222")
)
, AlarmState.OK, "OPEN", "http://somesite.com/this-alarm-info", timestamp1,
timestamp1, timestamp1);
alarm2 =
new Alarm("2", "1", "90% CPU", "LOW", buildAlarmMetrics(buildMetricDefinition("cpu.idle_perc", "instance_id", "123", "service",
"monitoring")), AlarmState.UNDETERMINED, "OPEN", null, timestamp2, timestamp2, timestamp2);
alarm3 =
new Alarm("3", "1", "90% CPU", "LOW", buildAlarmMetrics(buildMetricDefinition("cpu.idle_perc", "flavor_id", "222")), AlarmState.ALARM,
null, "http://somesite.com/this-alarm-info", timestamp3, timestamp3, timestamp3);
final SubAlarmDb subAlarmDb1 = new SubAlarmDb("4343", alarmDb_234111, "avg(cpu.sys_mem{service=monitoring}) > 20", now, now);
final SubAlarmDb subAlarmDb2 = new SubAlarmDb("4242", alarmDb_234111, "avg(cpu.idle_perc{service=monitoring}) < 10", now, now);
session.save(subAlarmDb1);
session.save(subAlarmDb2);
final MetricDefinitionDb metricDefinition111 = new MetricDefinitionDb(new byte[]{1, 1, 1}, "cpu.sys_mem", "bob", "west");
final MetricDefinitionDb metricDefinition112 = new MetricDefinitionDb(new byte[]{1, 1, 2}, "cpu.idle_perc", "bob", "west");
session.save(metricDefinition111);
session.save(metricDefinition112);
final MetricDefinitionDimensionsDb metricDefinitionDimension31 = new MetricDefinitionDimensionsDb(
new byte[]{3, 1},
metricDefinition111,
new byte[]{2, 1}
);
final MetricDefinitionDimensionsDb metricDefinitionDimension32 = new MetricDefinitionDimensionsDb(
new byte[]{3, 2},
metricDefinition112,
new byte[]{2, 2}
);
session.save(metricDefinitionDimension31);
session.save(metricDefinitionDimension32);
session.save(new AlarmMetricDb(alarmDb_234111, metricDefinitionDimension31));
session.save(new AlarmMetricDb(alarmDb_234111, metricDefinitionDimension32));
session.save(new MetricDimensionDb(new byte[]{2, 1}, "service", "monitoring"));
session.save(new MetricDimensionDb(new byte[]{2, 2}, "service", "monitoring"));
session.save(new MetricDimensionDb(new byte[]{2, 1}, "hostname", "roland"));
session.save(new MetricDimensionDb(new byte[]{2, 2}, "hostname", "roland"));
session.save(new MetricDimensionDb(new byte[]{2, 1}, "region", "colorado"));
session.save(new MetricDimensionDb(new byte[]{2, 2}, "region", "colorado"));
session.save(new MetricDimensionDb(new byte[]{2, 2}, "extra", "vivi"));
session.flush();
session.getTransaction().commit();
compoundAlarm =
new Alarm("234111", "234", "50% CPU", "LOW", buildAlarmMetrics(
buildMetricDefinition("cpu.sys_mem", "hostname", "roland", "region", "colorado", "service", "monitoring"),
buildMetricDefinition("cpu.idle_perc", "extra", "vivi", "hostname", "roland", "region", "colorado", "service", "monitoring")),
AlarmState.UNDETERMINED, null, null, timestamp4, timestamp4, timestamp4);
} finally {
if (session != null) {
session.close();
}
}
}
private SubAlarmDefinitionDb newSubAlarmDefinition(final Session session, final String id, final AlarmDefinitionDb alarmDefinition) {
final DateTime now = DateTime.now();
final SubAlarmDefinitionDb db = new SubAlarmDefinitionDb(
id,
alarmDefinition,
String.format("f_%s", id),
String.format("m_%s", id),
AlarmOperator.GT.toString(),
0.0,
1,
2,
now,
now
);
session.save(db);
return db;
}
private AlarmDefinitionDb newAlarmDefinition(final Session session,
final String id,
final String tenantId,
final String name,
final String expression,
final AlarmSeverity severity,
final String matchBy,
final boolean actionEnabled) {
final DateTime now = DateTime.now();
final AlarmDefinitionDb db = new AlarmDefinitionDb(id, tenantId, name, null, expression, severity, matchBy, actionEnabled, now, now, null);
session.save(db);
return db;
}
private List<MetricDefinition> buildAlarmMetrics(final MetricDefinition... metricDefinitions) {
return Arrays.asList(metricDefinitions);
}
private MetricDefinition buildMetricDefinition(final String metricName, final String... dimensions) {
final Builder<String, String> builder = ImmutableMap.builder();
for (int i = 0; i < dimensions.length; ) {
builder.put(dimensions[i], dimensions[i + 1]);
i += 2;
}
return new MetricDefinition(metricName, builder.build());
}
@Test(groups = "orm")
@SuppressWarnings("unchecked")
public void shouldDelete() {
Session session = null;
repo.deleteById(TENANT_ID, ALARM_ID);
try {
session = sessionFactory.openSession();
List<AlarmDefinitionDb> rows = session
.createCriteria(AlarmDefinitionDb.class, "ad")
.add(Restrictions.eq("ad.id", "234"))
.setReadOnly(true)
.list();
assertEquals(rows.size(), 1, "Alarm Definition was deleted as well");
} finally {
if (session != null) {
session.close();
}
}
}
@Test(groups = "orm", expectedExceptions = EntityNotFoundException.class)
public void shouldThrowExceptionOnDelete() {
repo.deleteById(TENANT_ID, "Not an alarm ID");
}
@Test(groups = "orm")
public void shouldFindAlarmSubExpressions() {
final Map<String, AlarmSubExpression> subExpressionMap = repo.findAlarmSubExpressions(ALARM_ID);
assertEquals(subExpressionMap.size(), 2);
assertEquals(subExpressionMap.get("4343"), AlarmSubExpression.of("avg(cpu.sys_mem{service=monitoring}) > 20"));
assertEquals(subExpressionMap.get("4242"), AlarmSubExpression.of("avg(cpu.idle_perc{service=monitoring}) < 10"));
}
@Test(groups = "orm")
public void shouldFindAlarmSubExpressionsForAlarmDefinition() {
final Map<String, Map<String, AlarmSubExpression>> alarmSubExpressionMap =
repo.findAlarmSubExpressionsForAlarmDefinition(alarm1.getAlarmDefinition().getId());
assertEquals(alarmSubExpressionMap.size(), 3);
long subAlarmId = 42;
for (int alarmId = 1; alarmId <= 3; alarmId++) {
final Map<String, AlarmSubExpression> subExpressionMap = alarmSubExpressionMap.get(String.valueOf(alarmId));
assertEquals(subExpressionMap.get(String.valueOf(subAlarmId)),
AlarmSubExpression.of("avg(cpu.idle_perc{flavor_id=777, image_id=888, device=1}) > 10"));
subAlarmId++;
}
}
@Test(groups = "orm")
public void shouldFind() {
checkList(repo.find("Not a tenant id", null, null, null, null, null, null, null, null, 1, false));
checkList(repo.find(TENANT_ID, null, null, null, null, null, null, null, null, 1, false), alarm1, alarm2, alarm3, compoundAlarm);
checkList(repo.find(TENANT_ID, compoundAlarm.getAlarmDefinition().getId(), null, null, null, null, null, null, null, 1, false), compoundAlarm);
checkList(repo.find(TENANT_ID, null, "cpu.sys_mem", null, null, null, null, null, null, 1, false), compoundAlarm);
checkList(repo.find(TENANT_ID, null, "cpu.idle_perc", null, null, null, null, null, null, 1, false), alarm1, alarm2, alarm3, compoundAlarm);
checkList(repo.find(TENANT_ID, null, "cpu.idle_perc", ImmutableMap.<String, String>builder().put("flavor_id", "222").build(), null, null, null,
null, null, 1, false), alarm1, alarm3);
checkList(
repo.find(TENANT_ID, null, "cpu.idle_perc", ImmutableMap.<String, String>builder().put("service", "monitoring").put("hostname", "roland")
.build(), null, null, null, null, null, 1, false), compoundAlarm);
checkList(repo.find(TENANT_ID, null, null, null, AlarmState.UNDETERMINED, null, null, null, null, 1, false), alarm2, compoundAlarm);
checkList(
repo.find(TENANT_ID, alarm1.getAlarmDefinition().getId(), "cpu.idle_perc", ImmutableMap.<String, String>builder()
.put("service", "monitoring").build(), null, null, null, null, null, 1, false), alarm1, alarm2);
checkList(repo.find(TENANT_ID, alarm1.getAlarmDefinition().getId(), "cpu.idle_perc", null, null, null, null, null, null, 1, false), alarm1,
alarm2, alarm3);
checkList(
repo.find(TENANT_ID, compoundAlarm.getAlarmDefinition().getId(), null, null, AlarmState.UNDETERMINED, null, null, null, null, 1, false),
compoundAlarm);
checkList(repo.find(TENANT_ID, null, "cpu.sys_mem", null, AlarmState.UNDETERMINED, null, null, null, null, 1, false), compoundAlarm);
checkList(repo.find(TENANT_ID, null, "cpu.idle_perc", ImmutableMap.<String, String>builder().put("service", "monitoring").build(),
AlarmState.UNDETERMINED, null, null, null, null, 1, false), alarm2, compoundAlarm);
checkList(
repo.find(TENANT_ID, alarm1.getAlarmDefinition().getId(), "cpu.idle_perc", ImmutableMap.<String, String>builder()
.put("service", "monitoring").build(), AlarmState.UNDETERMINED, null, null, null, null, 1, false), alarm2);
checkList(repo.find(TENANT_ID, null, null, null, null, null, null, DateTime.now(UTC_TIMEZONE), null, 0, false));
// This test is failing on a local dev host because the code seems to be time zone dependent.
// This seems to indicate incorrect handling of dates/times in the core logic.
// Fujitsu should make all of the code work regardless of time zone.
// checkList(repo.find(TENANT_ID, null, null, null, null, null, null, ISO_8601_FORMATTER.parseDateTime("2015-03-15T00:00:00Z"), null, 0, false),
// compoundAlarm);
checkList(repo.find(TENANT_ID, null, null, null, null, null, null, ISO_8601_FORMATTER.parseDateTime("2015-03-14T00:00:00Z"), null, 1, false),
alarm1, alarm2, alarm3, compoundAlarm);
}
@Test(groups = "orm")
public void shouldFindById() {
final Alarm alarm = repo.findById(TENANT_ID, compoundAlarm.getId());
assertEquals(alarm.getId(), compoundAlarm.getId());
assertEquals(alarm.getAlarmDefinition(), compoundAlarm.getAlarmDefinition());
assertEquals(alarm.getCreatedTimestamp(), compoundAlarm.getCreatedTimestamp());
assertEquals(alarm.getStateUpdatedTimestamp(), compoundAlarm.getStateUpdatedTimestamp());
assertEquals(alarm.getState(), compoundAlarm.getState());
assertEquals(alarm.getMetrics().size(), compoundAlarm.getMetrics().size());
assertTrue(CollectionUtils.isEqualCollection(alarm.getMetrics(), compoundAlarm.getMetrics()), "Metrics not equal");
}
@Test(groups = "orm", expectedExceptions = EntityNotFoundException.class)
public void shouldFindByIdThrowException() {
repo.findById(TENANT_ID, "Not a valid alarm id");
}
@Test(groups = "orm")
public void shouldUpdate() throws InterruptedException {
final Alarm originalAlarm = repo.findById(TENANT_ID, ALARM_ID);
final DateTime originalStateUpdatedAt = getAlarmStateUpdatedDate(ALARM_ID);
final DateTime originalUpdatedAt = getAlarmUpdatedDate(ALARM_ID);
assertEquals(originalAlarm.getState(), AlarmState.UNDETERMINED);
Thread.sleep(1000);
final Alarm newAlarm = repo.update(TENANT_ID, ALARM_ID, AlarmState.OK, null, null);
final DateTime newStateUpdatedAt = getAlarmStateUpdatedDate(ALARM_ID);
final DateTime newUpdatedAt = getAlarmUpdatedDate(ALARM_ID);
assertNotEquals(newStateUpdatedAt.getMillis(), originalStateUpdatedAt.getMillis(),
"state_updated_at did not change");
assertNotEquals(newUpdatedAt.getMillis(), originalUpdatedAt.getMillis(),
"updated_at did not change");
assertEquals(newAlarm, originalAlarm);
newAlarm.setState(AlarmState.OK);
newAlarm.setStateUpdatedTimestamp(newStateUpdatedAt);
newAlarm.setUpdatedTimestamp(newUpdatedAt);
// Make sure it was updated in the DB
assertEquals(repo.findById(TENANT_ID, ALARM_ID), newAlarm);
Thread.sleep(1000);
final Alarm unchangedAlarm = repo.update(TENANT_ID, ALARM_ID, AlarmState.OK, "OPEN", null);
assertTrue(getAlarmStateUpdatedDate(ALARM_ID).equals(newStateUpdatedAt), "state_updated_at did change");
assertNotEquals(getAlarmUpdatedDate(ALARM_ID).getMillis(), newUpdatedAt.getMillis(), "updated_at did not change");
assertEquals(unchangedAlarm, newAlarm);
}
@Test(groups = "orm", expectedExceptions = EntityNotFoundException.class)
public void shouldUpdateThrowException() {
repo.update(TENANT_ID, "Not a valid alarm id", AlarmState.UNDETERMINED, null, null);
}
private void checkList(List<Alarm> found, Alarm... expected) {
assertEquals(found.size(), expected.length);
for (Alarm alarm : expected) {
assertTrue(found.contains(alarm));
}
}
private DateTime getAlarmUpdatedDate(final String alarmId) {
return this.getDateField(alarmId, "updatedAt");
}
private DateTime getAlarmStateUpdatedDate(final String alarmId) {
return this.getDateField(alarmId, "stateUpdatedAt");
}
private DateTime getDateField(final String alarmId, final String fieldName) {
Session session = null;
DateTime time = null;
try {
session = sessionFactory.openSession();
      final List<?> rows = session
          .createQuery(String.format("select %s from AlarmDb where id = :alarmId", fieldName))
          .setString("alarmId", alarmId)
          .list();
time = (DateTime) rows.get(0);
} finally {
if (session != null) {
session.close();
}
}
return new DateTime(time.getMillis(), UTC_TIMEZONE);
}
}

View File

@ -0,0 +1,111 @@
/*
* Copyright 2015 FUJITSU LIMITED
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.api.infrastructure.persistence.hibernate;
import java.util.Properties;
import org.hibernate.HibernateException;
import org.hibernate.SessionFactory;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.Configuration;
import org.hibernate.service.ServiceRegistry;
import monasca.common.hibernate.db.AlarmActionDb;
import monasca.common.hibernate.db.AlarmDb;
import monasca.common.hibernate.db.AlarmDefinitionDb;
import monasca.common.hibernate.db.AlarmMetricDb;
import monasca.common.hibernate.db.MetricDefinitionDb;
import monasca.common.hibernate.db.MetricDefinitionDimensionsDb;
import monasca.common.hibernate.db.MetricDimensionDb;
import monasca.common.hibernate.db.NotificationMethodDb;
import monasca.common.hibernate.db.SubAlarmDb;
import monasca.common.hibernate.db.SubAlarmDefinitionDb;
import monasca.common.hibernate.db.SubAlarmDefinitionDimensionDb;
class HibernateUtil {
private static Configuration CONFIGURATION = null;
static {
try {
Configuration configuration = new Configuration();
configuration.addAnnotatedClass(AlarmDb.class);
configuration.addAnnotatedClass(AlarmDefinitionDb.class);
configuration.addAnnotatedClass(AlarmMetricDb.class);
configuration.addAnnotatedClass(MetricDefinitionDb.class);
configuration.addAnnotatedClass(MetricDefinitionDimensionsDb.class);
configuration.addAnnotatedClass(MetricDimensionDb.class);
configuration.addAnnotatedClass(SubAlarmDefinitionDb.class);
configuration.addAnnotatedClass(SubAlarmDefinitionDimensionDb.class);
configuration.addAnnotatedClass(SubAlarmDb.class);
configuration.addAnnotatedClass(AlarmActionDb.class);
configuration.addAnnotatedClass(NotificationMethodDb.class);
configuration.setProperties(getHikariH2Properties());
HibernateUtil.CONFIGURATION = configuration;
} catch (Throwable ex) {
// Make sure you log the exception, as it might be swallowed
System.err.println("Initial SessionFactory creation failed." + ex);
throw new ExceptionInInitializerError(ex);
}
}
private static Properties getHikariPostgresProperties() {
Properties properties = new Properties();
properties.put("hibernate.connection.provider_class", "com.zaxxer.hikari.hibernate.HikariConnectionProvider");
properties.put("hibernate.hbm2ddl.auto", "validate");
properties.put("show_sql", true);
properties.put("hibernate.hikari.dataSourceClassName", "org.postgresql.ds.PGPoolingDataSource");
properties.put("hibernate.hikari.dataSource.serverName", "localhost");
properties.put("hibernate.hikari.dataSource.portNumber", "5432");
properties.put("hibernate.hikari.dataSource.databaseName", "mon");
properties.put("hibernate.hikari.dataSource.user", "mon");
properties.put("hibernate.hikari.dataSource.password", "mon");
properties.put("hibernate.hikari.dataSource.initialConnections", "25");
properties.put("hibernate.hikari.dataSource.maxConnections", "100");
properties.put("hibernate.hikari.connectionTestQuery", "SELECT 1");
return properties;
}
private static Properties getHikariMySqlProperties() {
Properties properties = new Properties();
properties.put("hibernate.connection.provider_class", "com.zaxxer.hikari.hibernate.HikariConnectionProvider");
properties.put("hibernate.hbm2ddl.auto", "validate");
properties.put("show_sql", true);
properties.put("hibernate.hikari.dataSourceClassName", "com.mysql.jdbc.jdbc2.optional.MysqlDataSource");
properties.put("hibernate.hikari.dataSource.url", "jdbc:mysql://localhost:3306/mon");
properties.put("hibernate.hikari.dataSource.user", "root");
properties.put("hibernate.hikari.dataSource.password", "");
return properties;
}
private static Properties getHikariH2Properties() {
Properties properties = new Properties();
properties.put("hibernate.connection.provider_class", "com.zaxxer.hikari.hibernate.HikariConnectionProvider");
properties.put("hibernate.hbm2ddl.auto", "create-drop");
properties.put("show_sql", false);
properties.put("hibernate.hikari.dataSourceClassName", "org.h2.jdbcx.JdbcDataSource");
properties.put("hibernate.hikari.dataSource.url", "jdbc:h2:mem:mon;MODE=PostgreSQL");
properties.put("hibernate.hikari.dataSource.user", "sa");
properties.put("hibernate.hikari.dataSource.password", "");
return properties;
}
public static SessionFactory getSessionFactory() throws HibernateException {
ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder().applySettings(CONFIGURATION.getProperties()).build();
return CONFIGURATION.buildSessionFactory(serviceRegistry);
}
}
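
A minimal usage sketch (illustrative only, assuming the in-memory H2 profile above): repository tests can obtain a fresh SessionFactory per test and close it afterwards, so the create-drop schema is rebuilt each time. Names and the fixture step are placeholders, not part of this change.

    // Illustrative snippet; org.hibernate.Session and org.hibernate.SessionFactory imports assumed.
    SessionFactory sessionFactory = HibernateUtil.getSessionFactory();
    Session session = null;
    try {
      session = sessionFactory.openSession();
      session.beginTransaction();
      // persist fixtures / exercise a repository here
      session.getTransaction().commit();
    } finally {
      if (session != null) {
        session.close();
      }
      sessionFactory.close();
    }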

View File

@ -0,0 +1,150 @@
/*
* Copyright 2015 FUJITSU LIMITED
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.api.infrastructure.persistence.hibernate;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import monasca.api.domain.exception.EntityExistsException;
import monasca.api.domain.exception.EntityNotFoundException;
import monasca.api.domain.model.notificationmethod.NotificationMethod;
import monasca.api.domain.model.notificationmethod.NotificationMethodRepo;
import monasca.api.domain.model.notificationmethod.NotificationMethodType;
import monasca.common.hibernate.db.NotificationMethodDb;
import monasca.common.model.alarm.AlarmNotificationMethodType;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.joda.time.DateTime;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@Test(groups = "orm")
public class NotificationMethodSqlRepositoryImplTest {
NotificationMethodRepo repo = null;
private SessionFactory sessionFactory;
@BeforeMethod
protected void beforeMethod() throws Exception {
this.sessionFactory = HibernateUtil.getSessionFactory();
this.repo = new NotificationMethodSqlRepoImpl(sessionFactory);
this.prepareData(this.sessionFactory);
}
@AfterMethod
protected void afterMethod() {
this.sessionFactory.close();
this.sessionFactory = null;
}
protected void prepareData(final SessionFactory sessionFactory) {
Session session = null;
try {
session = sessionFactory.openSession();
session.beginTransaction();
NotificationMethodDb notificationMethodDb1 =
new NotificationMethodDb("123", "444", "MyEmail", AlarmNotificationMethodType.EMAIL, "a@b", new DateTime(), new DateTime());
NotificationMethodDb notificationMethodDb2 =
new NotificationMethodDb("124", "444", "OtherEmail", AlarmNotificationMethodType.EMAIL, "a@b", new DateTime(), new DateTime());
session.save(notificationMethodDb1);
session.save(notificationMethodDb2);
session.getTransaction().commit();
} finally {
if (session != null) {
session.close();
}
}
}
@Test(groups = "orm")
public void shouldCreate() {
NotificationMethod nmA = repo.create("555", "MyEmail", NotificationMethodType.EMAIL, "a@b");
NotificationMethod nmB = repo.findById("555", nmA.getId());
assertEquals(nmA, nmB);
}
@Test(groups = "orm")
public void shouldExistForTenantAndNotificationMethod() {
assertTrue(repo.exists("444", "123"));
assertFalse(repo.exists("444", "1234"));
assertFalse(repo.exists("333", "123"));
}
@Test(groups = "orm")
public void shouldFind() {
List<NotificationMethod> nms1 = repo.find("444", null, 1);
assertEquals(nms1, Arrays.asList(new NotificationMethod("123", "MyEmail", NotificationMethodType.EMAIL, "a@b"), new NotificationMethod("124",
"OtherEmail", NotificationMethodType.EMAIL, "a@b")));
List<NotificationMethod> nms2 = repo.find("444", "123", 1);
assertEquals(nms2, Collections.singletonList(new NotificationMethod("124", "OtherEmail", NotificationMethodType.EMAIL, "a@b")));
}
@Test(groups = "orm")
public void shouldUpdate() {
repo.update("444", "123", "Foo", NotificationMethodType.EMAIL, "abc");
NotificationMethod nm = repo.findById("444", "123");
assertEquals(nm, new NotificationMethod("123", "Foo", NotificationMethodType.EMAIL, "abc"));
}
@Test(groups = "orm")
public void shouldUpdateReturnValue() {
NotificationMethod nm = repo.update("444", "123", "Foo", NotificationMethodType.EMAIL, "abc");
NotificationMethod foundNotificationMethod = repo.findById("444", "123");
assertEquals(nm, foundNotificationMethod);
}
@Test(groups = "orm")
public void shouldDeleteById() {
repo.deleteById("444", "123");
try {
repo.findById("444", "123");
fail();
} catch (EntityNotFoundException ignore) {
}
}
@Test(groups = "orm")
public void shouldUpdateDuplicateWithSameValues() {
repo.update("444", "123", "Foo", NotificationMethodType.EMAIL, "abc");
NotificationMethod nm = repo.findById("444", "123");
assertEquals(nm, new NotificationMethod("123", "Foo", NotificationMethodType.EMAIL, "abc"));
}
@Test(groups = "orm", expectedExceptions = EntityExistsException.class)
public void shouldNotUpdateDuplicateWithSameName() {
repo.update("444", "124", "MyEmail", NotificationMethodType.EMAIL, "abc");
}
}

View File

@ -0,0 +1,47 @@
/*
* Copyright 2015 FUJITSU LIMITED
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*
*/
package monasca.api.infrastructure.persistence.hibernate;
import java.util.Random;
import org.joda.time.DateTime;
class TestHelper {
public static final TestHelper INSTANCE = new TestHelper();
private static final int SLEEP_TIME_RANDOM_BYTE_ARRAY = 30;
private TestHelper() {
}
static byte[] randomByteArray(final int length) {
return randomByteArray(length, true);
}
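  // Descriptive note: Random below is seeded with the current millisecond timestamp, so the
  // optional sleep keeps back-to-back calls from reusing the same seed and returning
  // identical "random" arrays.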
static byte[] randomByteArray(final int length, final boolean sleep) {
if (sleep) {
try {
Thread.sleep(SLEEP_TIME_RANDOM_BYTE_ARRAY);
} catch (InterruptedException e) {
System.err.println(e.getLocalizedMessage());
throw new RuntimeException(e);
}
}
byte[] b = new byte[length];
new Random(DateTime.now().getMillis()).nextBytes(b);
return b;
}
}

View File

@ -116,3 +116,16 @@ logging:
facility: local0
threshold: ALL
logFormat: # TODO
hibernate:
supportEnabled: true
providerClass: com.zaxxer.hikari.hibernate.HikariConnectionProvider
dataSourceClassName: org.postgresql.ds.PGPoolingDataSource
serverName: localhost
portNumber: 5432
databaseName: mon
user: mon
password: mon
initialConnections: 25
maxConnections: 100
autoConfig: validate
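# Hedged alternative (illustrative, not part of this change): the MySQL data source exercised in
# HibernateUtil could presumably be configured the same way; exact property support depends on
# HibernateDbConfiguration and MysqlDataSource.
#hibernate:
#  supportEnabled: true
#  providerClass: com.zaxxer.hikari.hibernate.HikariConnectionProvider
#  dataSourceClassName: com.mysql.jdbc.jdbc2.optional.MysqlDataSource
#  serverName: localhost
#  portNumber: 3306
#  databaseName: mon
#  user: root
#  password: ""
#  autoConfig: validate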