Hibernate support added

- added ORM support with Hibernate
- rewrote two MySQL repositories to use ORM

Change-Id: I22e342ca57b4cc62b12a44cdf503ce068b9b67b5
This commit is contained in:
Tomasz Trębski 2015-05-22 13:21:51 +02:00
parent 9aeec27d13
commit 622339f6bc
8 changed files with 1546 additions and 33 deletions

View File

@ -23,6 +23,11 @@
<computedName>${project.artifactId}-${computedVersion}</computedName>
<mon.common.version>1.1.0-SNAPSHOT</mon.common.version>
<storm.version>0.9.5</storm.version>
<hibernate.version>4.3.9.Final</hibernate.version>
<postgresql.version>9.1-901.jdbc4</postgresql.version>
<jadira.version>3.1.0.CR1</jadira.version>
<hikaricp.version>2.3.7</hikaricp.version>
<reflections.version>0.9.9-RC1</reflections.version>
<skipITs>false</skipITs>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
@ -119,6 +124,11 @@
<artifactId>monasca-common-streaming</artifactId>
<version>${mon.common.version}</version>
</dependency>
<dependency>
<groupId>monasca-common</groupId>
<artifactId>monasca-common-hibernate</artifactId>
<version>${mon.common.version}</version>
</dependency>
<dependency>
<groupId>monasca-common</groupId>
<artifactId>monasca-common-kafka</artifactId>
@ -153,6 +163,27 @@
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core</artifactId>
<version>${hibernate.version}</version>
</dependency>
<dependency>
<groupId>postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>${postgresql.version}</version>
</dependency>
<dependency>
<groupId>org.jadira.usertype</groupId>
<artifactId>usertype.core</artifactId>
<version>${jadira.version}</version>
</dependency>
<dependency>
<groupId>com.zaxxer</groupId>
<artifactId>HikariCP-java6</artifactId>
<version>${hikaricp.version}</version>
<scope>compile</scope>
</dependency>
<!-- Test dependencies -->
<dependency>
@ -186,7 +217,7 @@
<artifactId>maven-surefire-plugin</artifactId>
<version>2.16</version>
<configuration>
<excludedGroups>performance,functional,integration,database,slow
<excludedGroups>performance,functional,integration,database,slow,orm
</excludedGroups>
</configuration>
</plugin>
@ -195,7 +226,7 @@
<artifactId>maven-failsafe-plugin</artifactId>
<version>2.16</version>
<configuration>
<groups>performance,functional,integration,database,slow</groups>
<groups>performance,functional,integration,database,slow,orm</groups>
<skipTests>${skipITs}</skipTests>
</configuration>
<executions>

View File

@ -1,38 +1,64 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.thresh.infrastructure.persistence;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.google.inject.Scopes;
import monasca.thresh.domain.service.AlarmDAO;
import monasca.thresh.domain.service.AlarmDefinitionDAO;
import monasca.thresh.infrastructure.thresholding.DataSourceFactory;
import org.skife.jdbi.v2.DBI;
import java.util.Arrays;
import java.util.Properties;
import javax.inject.Singleton;
import org.hibernate.SessionFactory;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.Configuration;
import org.hibernate.service.ServiceRegistry;
import org.skife.jdbi.v2.DBI;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import com.google.inject.ProvisionException;
import com.google.inject.Scopes;
import monasca.common.hibernate.db.AlarmActionDb;
import monasca.common.hibernate.db.AlarmDb;
import monasca.common.hibernate.db.AlarmDefinitionDb;
import monasca.common.hibernate.db.AlarmMetricDb;
import monasca.common.hibernate.db.MetricDefinitionDb;
import monasca.common.hibernate.db.MetricDefinitionDimensionsDb;
import monasca.common.hibernate.db.MetricDimensionDb;
import monasca.common.hibernate.db.NotificationMethodDb;
import monasca.common.hibernate.db.SubAlarmDb;
import monasca.common.hibernate.db.SubAlarmDefinitionDb;
import monasca.common.hibernate.db.SubAlarmDefinitionDimensionDb;
import monasca.thresh.domain.service.AlarmDAO;
import monasca.thresh.domain.service.AlarmDefinitionDAO;
import monasca.thresh.infrastructure.persistence.hibernate.AlarmDefinitionSqlImpl;
import monasca.thresh.infrastructure.persistence.hibernate.AlarmSqlImpl;
import monasca.thresh.infrastructure.thresholding.DataSourceFactory;
/**
* Configures persistence related types.
*/
public class PersistenceModule extends AbstractModule {
/**
 * <b>PostgresSQL</b> {@link javax.sql.DataSource} class name
 */
private static final String POSTGRES_DS_CLASS = "org.postgresql.ds.PGPoolingDataSource";
/**
 * <b>MySQL</b> {@link javax.sql.DataSource} class name
 */
private static final String MYSQL_DS_CLASS = "com.mysql.jdbc.jdbc2.optional.MysqlDataSource";
// Database configuration supplied by the application; selects Hibernate vs JDBI
// (isHibernateSupport) and carries connection settings used below.
private final DataSourceFactory dbConfig;
public PersistenceModule(DataSourceFactory dbConfig) {
@ -41,8 +67,14 @@ public class PersistenceModule extends AbstractModule {
/**
 * Binds the DAO interfaces to either the Hibernate (ORM) or the JDBI implementations,
 * depending on {@link DataSourceFactory#isHibernateSupport()}.
 *
 * Each DAO is bound exactly once: the original text bound AlarmDAO/AlarmDefinitionDAO
 * unconditionally and then again inside the conditional, which Guice rejects as a
 * duplicate binding at injector creation time.
 */
@Override
protected void configure() {
  if (dbConfig.isHibernateSupport()) {
    bind(AlarmDAO.class).to(AlarmSqlImpl.class).in(Scopes.SINGLETON);
    bind(AlarmDefinitionDAO.class).to(AlarmDefinitionSqlImpl.class).in(Scopes.SINGLETON);
  } else {
    bind(AlarmDAO.class).to(AlarmDAOImpl.class).in(Scopes.SINGLETON);
    bind(AlarmDefinitionDAO.class).to(AlarmDefinitionDAOImpl.class).in(Scopes.SINGLETON);
  }
}
@Provides
@ -51,4 +83,90 @@ public class PersistenceModule extends AbstractModule {
Class.forName(dbConfig.getDriverClass());
return new DBI(dbConfig.getUrl(), dbConfig.getUser(), dbConfig.getPassword());
}
/**
 * Provides the singleton Hibernate {@link SessionFactory} used by the ORM-backed DAOs.
 *
 * Registers every mapped entity class, applies the HikariCP/Hibernate properties derived
 * from the configured driver, and builds the factory from the resulting service registry.
 *
 * @throws ProvisionException if any step of the Hibernate bootstrap fails
 */
@Provides
@Singleton
public SessionFactory sessionFactory() {
  try {
    Configuration configuration = new Configuration();
    // Register all entities shared via monasca-common-hibernate.
    configuration.addAnnotatedClass(AlarmDb.class);
    configuration.addAnnotatedClass(AlarmDefinitionDb.class);
    configuration.addAnnotatedClass(AlarmMetricDb.class);
    configuration.addAnnotatedClass(MetricDefinitionDb.class);
    configuration.addAnnotatedClass(MetricDefinitionDimensionsDb.class);
    configuration.addAnnotatedClass(MetricDimensionDb.class);
    configuration.addAnnotatedClass(SubAlarmDefinitionDb.class);
    configuration.addAnnotatedClass(SubAlarmDefinitionDimensionDb.class);
    configuration.addAnnotatedClass(SubAlarmDb.class);
    configuration.addAnnotatedClass(AlarmActionDb.class);
    configuration.addAnnotatedClass(NotificationMethodDb.class);
    // retrieve hikari properties for right driver
    configuration.setProperties(this.getHikariProperties(this.dbConfig.getDriverClass()));
    final ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder()
        .applySettings(configuration.getProperties())
        .build();
    // builds a session factory from the service registry
    return configuration.buildSessionFactory(serviceRegistry);
  } catch (Throwable ex) {
    // Deliberately broad: Hibernate bootstrap can throw Errors (e.g. linkage problems);
    // wrapping everything lets Guice report a single provisioning failure.
    throw new ProvisionException("Failed to provision Hibernate DB", ex);
  }
}
/**
 * Builds the Hibernate/HikariCP property set for the configured data source class.
 *
 * @param dataSourceClassName fully qualified {@link javax.sql.DataSource} class name
 * @return properties combining driver-specific and driver-agnostic settings
 * @throws ProvisionException if the class name is neither the PostgreSQL nor the MySQL
 *         data source supported by this module
 */
private Properties getHikariProperties(final String dataSourceClassName) {
  final Properties properties = new Properties();
  // Different drivers require different sets of properties.
  switch (dataSourceClassName) {
    case POSTGRES_DS_CLASS:
      this.handlePostgresORMProperties(properties);
      break;
    case MYSQL_DS_CLASS:
      this.handleMySQLORMProperties(properties);
      break;
    default:
      throw new ProvisionException(
          String.format(
              "%s is not supported, valid data sources are %s",
              dataSourceClassName,
              Arrays.asList(POSTGRES_DS_CLASS, MYSQL_DS_CLASS)
          )
      );
  }
  // Driver-agnostic properties shared by both back ends.
  this.handleCommonORMProperties(properties);
  return properties;
}
/**
 * Populates {@code properties} with the Hibernate/HikariCP settings shared by both
 * supported data sources: connection provider, schema handling, SQL echo, credentials
 * and the pool's validation query.
 */
private void handleCommonORMProperties(final Properties properties) {
  properties.put("hibernate.connection.provider_class", this.dbConfig.getProviderClass());
  properties.put("hibernate.hbm2ddl.auto", this.dbConfig.getAutoConfig());
  // Fixed: the original entry was put("show_sql", false) — the key lacks the
  // "hibernate." prefix Hibernate looks up, and java.util.Properties is specified
  // to hold String keys/values (getProperty returns null for non-String values),
  // so the setting was silently ignored.
  properties.put("hibernate.show_sql", "false");
  properties.put("hibernate.hikari.dataSource.user", this.dbConfig.getUser());
  properties.put("hibernate.hikari.dataSource.password", this.dbConfig.getPassword());
  properties.put("hibernate.hikari.dataSourceClassName", this.dbConfig.getDriverClass());
  properties.put("hibernate.hikari.connectionTestQuery", dbConfig.getValidationQuery());
}
/**
 * Adds the HikariCP properties that only the MySQL {@link javax.sql.DataSource}
 * understands: the full JDBC URL taken from the configuration.
 */
private void handleMySQLORMProperties(final Properties properties) {
  final String jdbcUrl = this.dbConfig.getUrl();
  properties.put("hibernate.hikari.dataSource.url", jdbcUrl);
}
/**
 * Adds the HikariCP properties specific to the PostgreSQL pooling
 * {@link javax.sql.DataSource}: host/port/database coordinates, pool sizing and
 * fixed timeout/fail-fast settings.
 */
private void handlePostgresORMProperties(final Properties properties) {
  properties.put("hibernate.hikari.dataSource.serverName", this.dbConfig.getServerName());
  properties.put("hibernate.hikari.dataSource.portNumber", this.dbConfig.getPortNumber());
  properties.put("hibernate.hikari.dataSource.databaseName", this.dbConfig.getDatabaseName());
  // NOTE(review): if getMinSize()/getMaxSize() (and getPortNumber()) return numeric types,
  // these entries are non-String values in a java.util.Properties — confirm HikariCP's
  // property binder accepts them; getProperty() would not see them.
  properties.put("hibernate.hikari.dataSource.initialConnections", this.dbConfig.getMinSize());
  properties.put("hibernate.hikari.dataSource.maxConnections", this.dbConfig.getMaxSize());
  properties.put("hibernate.hikari.connectionTimeout", "5000");
  properties.put("hibernate.hikari.initializationFailFast", "false");
}
}

View File

@ -0,0 +1,238 @@
/*
* Copyright 2015 FUJITSU LIMITED
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.thresh.infrastructure.persistence.hibernate;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import monasca.common.hibernate.db.AlarmDefinitionDb;
import monasca.common.hibernate.db.SubAlarmDefinitionDb;
import monasca.common.hibernate.db.SubAlarmDefinitionDimensionDb;
import monasca.common.hibernate.db.SubAlarmDefinitionDimensionId;
import monasca.common.model.alarm.AggregateFunction;
import monasca.common.model.alarm.AlarmExpression;
import monasca.common.model.alarm.AlarmOperator;
import monasca.common.model.alarm.AlarmSubExpression;
import monasca.common.model.metric.MetricDefinition;
import monasca.thresh.domain.model.AlarmDefinition;
import monasca.thresh.domain.model.SubExpression;
import monasca.thresh.domain.service.AlarmDefinitionDAO;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.criterion.DetachedCriteria;
import org.hibernate.criterion.Order;
import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Property;
import org.hibernate.criterion.Restrictions;
/**
* AlarmDefinitionDAO hibernate implementation.
*
* @author lukasz.zajaczkowski@ts.fujitsu.com
*/
public class AlarmDefinitionSqlImpl
    implements AlarmDefinitionDAO {

  // Factory for short-lived, read-mostly sessions; every public method opens and
  // closes its own session.
  private final SessionFactory sessionFactory;

  /**
   * Creates the DAO around the given Hibernate {@link SessionFactory}.
   */
  @Inject
  public AlarmDefinitionSqlImpl(SessionFactory sessionFactory) {
    this.sessionFactory = sessionFactory;
  }

  /**
   * Returns all non-deleted alarm definitions, ordered by creation time, each with its
   * sub-expressions and match-by list resolved.
   */
  @Override
  @SuppressWarnings("unchecked")
  public List<AlarmDefinition> listAll() {
    Session session = null;
    List<AlarmDefinition> alarmDefinitions = null;
    try {
      session = sessionFactory.openSession();
      // Soft-deleted rows carry a non-null deletedAt and are excluded.
      List<AlarmDefinitionDb> alarmDefDbList = session
          .createCriteria(AlarmDefinitionDb.class)
          .add(Restrictions.isNull("deletedAt"))
          .addOrder(Order.asc("createdAt"))
          .setReadOnly(true)
          .list();
      if (alarmDefDbList != null) {
        alarmDefinitions = Lists.newArrayListWithExpectedSize(alarmDefDbList.size());
        for (final AlarmDefinitionDb alarmDefDb : alarmDefDbList) {
          final Collection<String> matchBy = alarmDefDb.getMatchByAsCollection();
          final boolean actionEnable = alarmDefDb.isActionsEnabled();
          alarmDefinitions.add(new AlarmDefinition(
              alarmDefDb.getId(),
              alarmDefDb.getTenantId(),
              alarmDefDb.getName(),
              alarmDefDb.getDescription(),
              new AlarmExpression(alarmDefDb.getExpression()),
              alarmDefDb.getSeverity().name(),
              actionEnable,
              this.findSubExpressions(session, alarmDefDb.getId()), // TODO add reverse model assoc and use it here
              matchBy.isEmpty() ? Collections.<String>emptyList() : Lists.newArrayList(matchBy)
          ));
          // Detach each entity once converted to keep the session cache small.
          session.evict(alarmDefDb);
        }
      }
      return alarmDefinitions == null ? Collections.<AlarmDefinition>emptyList() : alarmDefinitions;
    } finally {
      if (session != null) {
        session.close();
      }
    }
  }

  /**
   * Looks up a single alarm definition by primary key.
   *
   * @param id alarm definition id
   * @return the definition with sub-expressions populated, or {@code null} if not found
   */
  @Override
  public AlarmDefinition findById(String id) {
    Session session = null;
    AlarmDefinition alarmDefinition = null;
    try {
      session = sessionFactory.openSession();
      AlarmDefinitionDb alarmDefDb = (AlarmDefinitionDb) session.get(AlarmDefinitionDb.class, id);
      if (alarmDefDb != null) {
        final Collection<String> matchBy = alarmDefDb.getMatchByAsCollection();
        boolean actionEnable = alarmDefDb.isActionsEnabled();
        // Sub-expressions are passed as null here and set right below via the setter.
        alarmDefinition = new AlarmDefinition(
            alarmDefDb.getId(),
            alarmDefDb.getTenantId(),
            alarmDefDb.getName(),
            alarmDefDb.getDescription(),
            new AlarmExpression(alarmDefDb.getExpression()),
            alarmDefDb.getSeverity().name(),
            actionEnable, null,
            matchBy.isEmpty() ? Collections.<String>emptyList() : Lists.newArrayList(matchBy)
        );
        alarmDefinition.setSubExpressions(findSubExpressions(session, alarmDefinition.getId()));
      }
      return alarmDefinition;
    } finally {
      if (session != null) {
        session.close();
      }
    }
  }

  /**
   * Loads all sub-expressions of one alarm definition, merging in their dimensions.
   *
   * Two forward-only scrolls are used: one over the dimension rows (grouped into a map
   * keyed by sub-expression id) and one over the sub-expression definitions themselves.
   *
   * @param session open session to query with (not closed here — owned by the caller)
   * @param alarmDefId id of the owning alarm definition
   */
  @SuppressWarnings("unchecked")
  private List<SubExpression> findSubExpressions(final Session session, final String alarmDefId) {
    final List<SubExpression> subExpressions = Lists.newArrayList();
    // sub-expression id -> (dimension name -> dimension value), sorted by name (TreeMap)
    Map<String, Map<String, String>> dimensionMap = Maps.newHashMap();
    // Subquery selecting the ids of all sub-expressions belonging to alarmDefId.
    final DetachedCriteria subAlarmDefinitionCriteria = DetachedCriteria
        .forClass(SubAlarmDefinitionDb.class, "sad")
        .createAlias("alarmDefinition", "ad")
        .add(
            Restrictions.conjunction(
                Restrictions.eqProperty("sad.alarmDefinition.id", "ad.id"),
                Restrictions.eq("sad.alarmDefinition.id", alarmDefId)
            )
        )
        .addOrder(Order.asc("sad.id"))
        .setProjection(Projections.property("sad.id"));
    final ScrollableResults subAlarmDefinitionDimensionResult = session
        .createCriteria(SubAlarmDefinitionDimensionDb.class)
        .add(
            Property
                .forName("subAlarmDefinitionDimensionId.subExpression.id")
                .in(subAlarmDefinitionCriteria)
        )
        .setReadOnly(true)
        .scroll(ScrollMode.FORWARD_ONLY);
    final ScrollableResults subAlarmDefinitionResult = session
        .getNamedQuery(SubAlarmDefinitionDb.Queries.BY_ALARMDEFINITION_ID)
        .setString("id", alarmDefId)
        .setReadOnly(true)
        .scroll(ScrollMode.FORWARD_ONLY);
    // Pass 1: collect dimensions per sub-expression id.
    while (subAlarmDefinitionDimensionResult.next()) {
      final SubAlarmDefinitionDimensionDb dim = (SubAlarmDefinitionDimensionDb) subAlarmDefinitionDimensionResult.get()[0];
      final SubAlarmDefinitionDimensionId id = dim.getSubAlarmDefinitionDimensionId();
      final String subAlarmId = (String) session.getIdentifier(id.getSubExpression());
      final String name = id.getDimensionName();
      final String value = dim.getValue();
      if (!dimensionMap.containsKey(subAlarmId)) {
        dimensionMap.put(subAlarmId, Maps.<String, String>newTreeMap());
      }
      dimensionMap.get(subAlarmId).put(name, value);
      session.evict(dim);
    }
    // Pass 2: build each SubExpression, attaching the dimensions collected above.
    while (subAlarmDefinitionResult.next()) {
      final SubAlarmDefinitionDb def = (SubAlarmDefinitionDb) subAlarmDefinitionResult.get()[0];
      final String id = def.getId();
      final AggregateFunction function = AggregateFunction.fromJson(def.getFunction());
      final String metricName = def.getMetricName();
      final AlarmOperator operator = AlarmOperator.fromJson(def.getOperator());
      final Double threshold = def.getThreshold();
      final Integer period = def.getPeriod();
      final Integer periods = def.getPeriods();
      Map<String, String> dimensions = dimensionMap.get(id);
      if (dimensions == null) {
        dimensions = Collections.emptyMap();
      }
      subExpressions.add(
          new SubExpression(id,
              new AlarmSubExpression(
                  function,
                  new MetricDefinition(metricName, dimensions),
                  operator,
                  threshold,
                  period,
                  periods
              )
          )
      );
      session.evict(def);
    }
    subAlarmDefinitionDimensionResult.close();
    subAlarmDefinitionResult.close();
    return subExpressions;
  }
}

View File

@ -0,0 +1,593 @@
/*
* Copyright 2015 FUJITSU LIMITED
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.thresh.infrastructure.persistence.hibernate;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.inject.Inject;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.commons.codec.digest.DigestUtils;
import org.hibernate.Criteria;
import org.hibernate.Query;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.StatelessSession;
import org.hibernate.Transaction;
import org.hibernate.criterion.Order;
import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Restrictions;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import monasca.common.hibernate.db.AlarmDb;
import monasca.common.hibernate.db.AlarmDefinitionDb;
import monasca.common.hibernate.db.AlarmMetricDb;
import monasca.common.hibernate.db.MetricDefinitionDb;
import monasca.common.hibernate.db.MetricDefinitionDimensionsDb;
import monasca.common.hibernate.db.MetricDimensionDb;
import monasca.common.hibernate.db.SubAlarmDb;
import monasca.common.hibernate.db.SubAlarmDefinitionDb;
import monasca.common.hibernate.type.BinaryId;
import monasca.common.model.alarm.AlarmState;
import monasca.common.model.alarm.AlarmSubExpression;
import monasca.common.model.metric.MetricDefinition;
import monasca.thresh.domain.model.Alarm;
import monasca.thresh.domain.model.MetricDefinitionAndTenantId;
import monasca.thresh.domain.model.SubAlarm;
import monasca.thresh.domain.model.SubExpression;
import monasca.thresh.domain.service.AlarmDAO;
/**
* AlarmDAO hibernate implementation.
*
* @author lukasz.zajaczkowski@ts.fujitsu.com
* @author tomasz.trebski@ts.fujitsu.com
*/
public class AlarmSqlImpl
implements AlarmDAO {
private static final Logger LOGGER = LoggerFactory.getLogger(AlarmSqlImpl.class);

// Column indices into the Object[] rows produced by the projection list in
// findAlarms(...) (a.id, ad.id, a.state, sa.id, sa.expression, sa.subExpression.id,
// ad.tenantId). ALARM_ID is also used for the first column of the HQL in
// getAlarmedMetrics(StatelessSession, ...).
private static final int ALARM_ID = 0;
private static final int ALARM_DEFINITION_ID = 1;
private static final int ALARM_STATE = 2;
private static final int SUB_ALARM_ID = 3;
private static final int ALARM_EXPRESSION = 4;
private static final int SUB_EXPRESSION_ID = 5;
private static final int TENANT_ID = 6;
// Maximum width of the strings fed into the metric-definition id hash (see truncateString).
private static final int MAX_COLUMN_LENGTH = 255;

private final SessionFactory sessionFactory;

/**
 * Creates the DAO around the given Hibernate {@link SessionFactory}.
 */
@Inject
public AlarmSqlImpl(SessionFactory sessionFactory) {
  this.sessionFactory = sessionFactory;
}
/**
 * Looks up a single alarm by its id.
 *
 * @param id alarm id
 * @return the alarm, or {@code null} if no alarm with that id exists
 */
@Override
public Alarm findById(final String id) {
  // Restrict the shared alarm lookup to exactly one alarm id.
  final LookupHelper byIdHelper = new LookupHelper() {
    @Override
    public Criteria apply(@Nonnull final Criteria criteria) {
      return criteria.add(Restrictions.eq("a.id", id));
    }

    @Override
    public Query apply(@Nonnull final Query query) {
      return query.setParameter("alarmId", id);
    }

    @Override
    public String formatHQL(@Nonnull final String hqlQuery) {
      return String.format(hqlQuery, "a.id=:alarmId");
    }
  };
  final List<Alarm> matching = this.findAlarms(byIdHelper);
  if (matching.isEmpty()) {
    return null;
  }
  return matching.get(0);
}
/**
 * Returns all alarms that belong to the given alarm definition.
 *
 * @param alarmDefinitionId id of the owning alarm definition
 */
@Override
public List<Alarm> findForAlarmDefinitionId(final String alarmDefinitionId) {
  // Restrict the shared alarm lookup to alarms of a single definition.
  final LookupHelper byDefinitionHelper = new LookupHelper() {
    @Override
    public Criteria apply(@Nonnull final Criteria criteria) {
      return criteria.add(Restrictions.eq("a.alarmDefinition.id", alarmDefinitionId));
    }

    @Override
    public Query apply(@Nonnull final Query query) {
      return query.setParameter("alarmDefinitionId", alarmDefinitionId);
    }

    @Override
    public String formatHQL(@Nonnull final String hqlQuery) {
      return String.format(hqlQuery, "a.alarmDefinition.id=:alarmDefinitionId");
    }
  };
  return this.findAlarms(byDefinitionHelper);
}
/**
 * Returns every alarm whose definition is not soft-deleted, with sub-alarms and
 * alarmed metrics populated (no extra restrictions — the no-op helper leaves the
 * base query in findAlarms untouched).
 */
@Override
public List<Alarm> listAll() {
  return this.findAlarms(LookupHelper.NOOP_HELPER);
}
/**
 * Sets the alarm's state and stamps both updatedAt and stateUpdatedAt with "now",
 * committing in a single transaction.
 *
 * @param id alarm id
 * @param state new state to persist
 */
@Override
public void updateState(String id, AlarmState state) {
  Transaction tx = null;
  Session session = null;
  try {
    session = sessionFactory.openSession();
    tx = session.beginTransaction();
    final DateTime now = DateTime.now();
    // NOTE(review): session.get returns null for an unknown id, which would NPE on
    // the setter below — confirm callers only pass ids of existing alarms.
    final AlarmDb alarm = (AlarmDb) session.get(AlarmDb.class, id);
    alarm.setState(state);
    alarm.setUpdatedAt(now);
    alarm.setStateUpdatedAt(now);
    session.update(alarm);
    tx.commit();
    // Cleared so the finally-block's rollback helper sees a committed transaction.
    tx = null;
  } finally {
    this.rollbackIfNotNull(tx);
    if (session != null) {
      session.close();
    }
  }
}
/**
 * Associates one more metric with an existing alarm, creating the metric
 * definition/dimension rows as needed, in a single transaction.
 *
 * @param id id of the existing alarm
 * @param metricDefinition metric (plus tenant) to attach
 */
@Override
public void addAlarmedMetric(String id, MetricDefinitionAndTenantId metricDefinition) {
  Transaction tx = null;
  Session session = null;
  try {
    session = sessionFactory.openSession();
    tx = session.beginTransaction();
    this.createAlarmedMetric(session, metricDefinition, id);
    tx.commit();
    // Cleared so the finally-block's rollback helper sees a committed transaction.
    tx = null;
  } finally {
    this.rollbackIfNotNull(tx);
    if (session != null) {
      session.close();
    }
  }
}
/**
 * Persists a new alarm together with its sub-alarms and alarmed-metric associations
 * in a single transaction.
 *
 * @param newAlarm domain alarm to store; its definition must already exist
 */
@Override
@SuppressWarnings("unchecked")
public void createAlarm(Alarm newAlarm) {
  Transaction tx = null;
  Session session = null;
  try {
    session = sessionFactory.openSession();
    tx = session.beginTransaction();
    final DateTime now = DateTime.now();
    // The two nulls are AlarmDb constructor slots not used here; created/updated/
    // state-updated timestamps are all set to "now".
    final AlarmDb alarm = new AlarmDb(
        newAlarm.getId(),
        (AlarmDefinitionDb) session.get(AlarmDefinitionDb.class, newAlarm.getAlarmDefinitionId()),
        newAlarm.getState(),
        null,
        null,
        now,
        now,
        now
    );
    session.save(alarm);
    // One SubAlarmDb row per sub-alarm, linked back to its sub-expression definition.
    for (final SubAlarm subAlarm : newAlarm.getSubAlarms()) {
      session.save(new SubAlarmDb()
          .setAlarm(alarm)
          .setSubExpression((SubAlarmDefinitionDb) session.get(SubAlarmDefinitionDb.class, subAlarm.getAlarmSubExpressionId()))
          .setExpression(subAlarm.getExpression().getExpression())
          .setUpdatedAt(now)
          .setCreatedAt(now)
          .setId(subAlarm.getId())
      );
    }
    for (final MetricDefinitionAndTenantId md : newAlarm.getAlarmedMetrics()) {
      this.createAlarmedMetric(session, md, newAlarm.getId());
    }
    tx.commit();
    // Cleared so the finally-block's rollback helper sees a committed transaction.
    tx = null;
  } finally {
    this.rollbackIfNotNull(tx);
    if (session != null) {
      session.close();
    }
  }
}
/**
 * Rewrites the expression of every sub-alarm referencing the given sub-expression id.
 *
 * @param alarmSubExpressionId id of the sub-expression whose sub-alarms are updated
 * @param alarmSubExpression source of the new expression text
 * @return number of sub-alarm rows updated
 */
@Override
public int updateSubAlarmExpressions(String alarmSubExpressionId, AlarmSubExpression alarmSubExpression) {
  Session session = null;
  Transaction transaction = null;
  try {
    session = sessionFactory.openSession();
    transaction = session.beginTransaction();
    final int affectedRows = session
        .getNamedQuery(SubAlarmDb.Queries.UPDATE_EXPRESSION_BY_SUBEXPRESSION_ID)
        .setString("expression", alarmSubExpression.getExpression())
        .setString("alarmSubExpressionId", alarmSubExpressionId)
        .executeUpdate();
    transaction.commit();
    // A null transaction tells the finally-block that commit succeeded.
    transaction = null;
    return affectedRows;
  } finally {
    this.rollbackIfNotNull(transaction);
    if (session != null) {
      session.close();
    }
  }
}
/**
 * Deletes every alarm belonging to the given alarm definition in one bulk statement.
 *
 * @param alarmDefinitionId id of the alarm definition whose alarms are removed
 */
@Override
public void deleteByDefinitionId(final String alarmDefinitionId) {
  Session session = null;
  Transaction transaction = null;
  try {
    session = sessionFactory.openSession();
    transaction = session.beginTransaction();
    session
        .getNamedQuery(AlarmDb.Queries.DELETE_BY_ALARMDEFINITION_ID)
        .setString("alarmDefinitionId", alarmDefinitionId)
        .executeUpdate();
    transaction.commit();
    // A null transaction tells the finally-block that commit succeeded.
    transaction = null;
  } finally {
    this.rollbackIfNotNull(transaction);
    if (session != null) {
      session.close();
    }
  }
}
/**
 * Shared lookup used by findById / findForAlarmDefinitionId / listAll.
 *
 * Runs a stateless-session criteria query joining alarms with their sub-alarms and
 * (non-deleted) alarm definitions, projecting exactly the columns indexed by the
 * ALARM_ID..TENANT_ID constants, then delegates row grouping to createAlarms.
 *
 * @param lookupHelper adds caller-specific restrictions (or NOOP_HELPER for none)
 */
@SuppressWarnings("unchecked")
private List<Alarm> findAlarms(@Nonnull final LookupHelper lookupHelper) {
  StatelessSession session = null;
  try {
    session = sessionFactory.openStatelessSession();
    final Criteria criteria = lookupHelper.apply(session
        .createCriteria(AlarmDb.class, "a")
        .createAlias("a.subAlarms", "sa")
        .createAlias("a.alarmDefinition", "ad")
        .add(Restrictions.isNull("ad.deletedAt"))
        // Ordering by alarm id lets createAlarms group consecutive rows per alarm.
        .addOrder(Order.asc("a.id"))
        .setProjection(
            Projections.projectionList()
                .add(Projections.property("a.id"))
                .add(Projections.property("a.alarmDefinition.id"))
                .add(Projections.property("a.state"))
                .add(Projections.alias(Projections.property("sa.id"), "sub_alarm_id"))
                .add(Projections.property("sa.expression"))
                .add(Projections.property("sa.subExpression.id"))
                .add(Projections.property("ad.tenantId"))
        )
        .setReadOnly(true)
    );
    assert criteria != null;
    return this.createAlarms(session, (List<Object[]>) criteria.list(), lookupHelper);
  } finally {
    if (session != null) {
      session.close();
    }
  }
}
/**
 * Groups raw metric rows into per-alarm metric definitions.
 *
 * Row layout (see the HQL in getAlarmedMetrics(StatelessSession, ...)):
 * [0]=alarm id, [1]=metric name, [2]=dimension name, [3]=dimension value,
 * [4]=dimension-set id.
 *
 * @param alarmList projected rows, one per (alarm, metric, dimension) triple
 * @return alarm id -> metric definitions for that alarm
 */
private Map<String, List<MetricDefinition>> getAlarmedMetrics(List<Object[]> alarmList) {
  final Map<String, List<MetricDefinition>> result = Maps.newHashMap();
  // dimension-set id -> (metric name -> (dimension name -> dimension value))
  final Map<BinaryId, Map<String, Map<String, String>>> metricList = Maps.newHashMap();
  // alarm id -> dimension-set ids referenced by that alarm
  final Map<String, Set<BinaryId>> mapAssociationIds = Maps.newHashMap();

  for (final Object[] alarmRow : alarmList) {
    final String alarmId = (String) alarmRow[0];
    final String metricName = (String) alarmRow[1];
    final String dimensionName = (String) alarmRow[2];
    final String dimensionValue = (String) alarmRow[3];
    final BinaryId dimensionSetId = (BinaryId) alarmRow[4];

    Map<String, Map<String, String>> dimensions = metricList.get(dimensionSetId);
    if (dimensions == null) {
      dimensions = new HashMap<String, Map<String, String>>();
      metricList.put(dimensionSetId, dimensions);
    }
    Map<String, String> dimensionValues = dimensions.get(metricName);
    if (dimensionValues == null) {
      dimensionValues = new HashMap<String, String>();
      dimensions.put(metricName, dimensionValues);
    }
    Set<BinaryId> dimensionSetIds = mapAssociationIds.get(alarmId);
    if (dimensionSetIds == null) {
      dimensionSetIds = new HashSet<BinaryId>();
      mapAssociationIds.put(alarmId, dimensionSetIds);
    }
    dimensionSetIds.add(dimensionSetId);
    dimensionValues.put(dimensionName, dimensionValue);
  }

  // Collapse each dimension set into its list of MetricDefinitions.
  // (entrySet iteration avoids the original keySet()+get() double lookups.)
  final Map<BinaryId, List<MetricDefinition>> metricDefinitionList =
      Maps.newHashMapWithExpectedSize(metricList.size());
  for (final Map.Entry<BinaryId, Map<String, Map<String, String>>> entry : metricList.entrySet()) {
    final Map<String, Map<String, String>> metrics = entry.getValue();
    final List<MetricDefinition> valueList = Lists.newArrayListWithExpectedSize(metrics.size());
    for (final Map.Entry<String, Map<String, String>> metric : metrics.entrySet()) {
      valueList.add(new MetricDefinition(metric.getKey(), metric.getValue()));
    }
    metricDefinitionList.put(entry.getKey(), valueList);
  }

  // Attach the definitions of every referenced dimension set to its alarm.
  for (final Map.Entry<String, Set<BinaryId>> entry : mapAssociationIds.entrySet()) {
    final String alarmId = entry.getKey();
    List<MetricDefinition> definitions = result.get(alarmId);
    if (definitions == null) {
      definitions = new LinkedList<MetricDefinition>();
      result.put(alarmId, definitions);
    }
    for (final BinaryId dimensionSetId : entry.getValue()) {
      definitions.addAll(metricDefinitionList.get(dimensionSetId));
    }
  }
  return result;
}
/**
 * Converts the projected alarm/sub-alarm rows (ordered by alarm id — see findAlarms)
 * into domain Alarms, grouping consecutive rows with the same alarm id and collecting
 * one SubAlarm per row. Afterwards fetches the alarmed metrics for all built alarms.
 *
 * @param session stateless session reused for the metrics query
 * @param alarmList projected rows indexed by the ALARM_ID..TENANT_ID constants
 * @param lookupHelper restriction/binder reused for the metrics HQL
 */
private List<Alarm> createAlarms(final StatelessSession session,
                                 final List<Object[]> alarmList,
                                 final LookupHelper lookupHelper) {
  final List<Alarm> alarms = Lists.newArrayListWithCapacity(alarmList.size());
  List<SubAlarm> subAlarms = null;
  String prevAlarmId = null;
  Alarm alarm = null;
  final Map<String, Alarm> alarmMap = Maps.newHashMapWithExpectedSize(alarmList.size());
  final Map<String, String> tenantIdMap = Maps.newHashMapWithExpectedSize(alarmList.size());
  for (Object[] alarmRow : alarmList) {
    final String alarmId = (String) alarmRow[ALARM_ID];
    if (!alarmId.equals(prevAlarmId)) {
      // New alarm id encountered: flush the previous alarm's sub-alarms and start a new one.
      if (alarm != null) {
        alarm.setSubAlarms(subAlarms);
      }
      alarm = new Alarm();
      alarm.setId(alarmId);
      alarm.setAlarmDefinitionId((String) alarmRow[ALARM_DEFINITION_ID]);
      alarm.setState((AlarmState) alarmRow[ALARM_STATE]);
      subAlarms = Lists.newArrayListWithExpectedSize(alarmList.size());
      alarms.add(alarm);
      alarmMap.put(alarmId, alarm);
      tenantIdMap.put(alarmId, (String) alarmRow[TENANT_ID]);
    }
    subAlarms.add(new SubAlarm(
        (String) alarmRow[SUB_ALARM_ID],
        alarmId,
        new SubExpression(
            (String) alarmRow[SUB_EXPRESSION_ID],
            AlarmSubExpression.of((String) alarmRow[ALARM_EXPRESSION])
        )
    ));
    prevAlarmId = alarmId;
  }
  // Flush the trailing alarm's sub-alarms (the loop only flushes on id change).
  if (alarm != null) {
    alarm.setSubAlarms(subAlarms);
  }
  if (!alarms.isEmpty()) {
    this.getAlarmedMetrics(session, alarmMap, tenantIdMap, lookupHelper);
  }
  return alarms;
}
/**
 * Loads the alarmed metrics for the alarms in {@code alarmMap} and attaches them to
 * each Alarm as MetricDefinitionAndTenantId instances.
 *
 * The HQL joins metric definition/dimension/association tables; its trailing %s is
 * filled by the helper's formatHQL with the caller-specific restriction, and the
 * helper also binds the corresponding query parameter.
 */
@SuppressWarnings("unchecked")
private void getAlarmedMetrics(final StatelessSession session,
                               final Map<String, Alarm> alarmMap,
                               final Map<String, String> tenantIdMap,
                               final LookupHelper binder) {
  String rawHQLQuery =
      "select a.id, md.name as metric_def_name, mdg.id.name, mdg.value, mdg.id.dimensionSetId from MetricDefinitionDb as md, "
          + "MetricDefinitionDimensionsDb as mdd, " + "AlarmMetricDb as am, " + "AlarmDb as a, "
          + "MetricDimensionDb as mdg where md.id = mdd.metricDefinition.id and mdd.id = am.alarmMetricId.metricDefinitionDimensions.id and "
          + "am.alarmMetricId.alarm.id = a.id and mdg.id.dimensionSetId = mdd.metricDimensionSetId and %s";
  final Query query = binder.apply(session.createQuery(binder.formatHQL(rawHQLQuery)));
  final List<Object[]> metricRows = query.list();
  final HashSet<String> existingAlarmId = Sets.newHashSet();
  final Map<String, List<MetricDefinition>> alarmMetrics = this.getAlarmedMetrics(metricRows);
  for (final Object[] row : metricRows) {
    final String alarmId = (String) row[ALARM_ID];
    final Alarm alarm = alarmMap.get(alarmId);
    // This shouldn't happen but it is possible an Alarm gets created after the AlarmDefinition is
    // marked deleted and any existing alarms are deleted but before the Threshold Engine gets the
    // AlarmDefinitionDeleted message
    if (alarm == null) {
      continue;
    }
    // Attach each alarm's metric list only once even though it appears on many rows.
    if (!existingAlarmId.contains(alarmId)) {
      List<MetricDefinition> mdList = alarmMetrics.get(alarmId);
      for (MetricDefinition md : mdList) {
        alarm.addAlarmedMetric(new MetricDefinitionAndTenantId(md, tenantIdMap.get(alarmId)));
      }
    }
    existingAlarmId.add(alarmId);
  }
}
/**
 * Links one metric (definition + dimensions) to an alarm.
 *
 * Persists/reuses the metric definition-dimension rows first, then creates the
 * AlarmMetricDb association row pointing at a lazy alarm proxy.
 *
 * @return the saved association row
 */
private AlarmMetricDb createAlarmedMetric(final Session session,
                                          final MetricDefinitionAndTenantId metricDefinition,
                                          final String alarmId) {
  final MetricDefinitionDimensionsDb definitionDimensions =
      this.insertMetricDefinitionDimension(session, metricDefinition);
  // load() returns a proxy without hitting the database; the alarm row must exist.
  final AlarmDb alarmProxy = (AlarmDb) session.load(AlarmDb.class, alarmId);
  final AlarmMetricDb association = new AlarmMetricDb(alarmProxy, definitionDimensions);
  session.save(association);
  return association;
}
/**
 * Persists (or merges into) the metric-definition-dimensions row for the given metric.
 *
 * Its id is a SHA-1 over the hex ids of the metric definition and of the dimension set,
 * so identical metric/dimension combinations map to the same row.
 *
 * @return the managed (merged) entity
 */
private MetricDefinitionDimensionsDb insertMetricDefinitionDimension(final Session session,
                                                                     final MetricDefinitionAndTenantId mdtId) {
  final MetricDefinitionDb metricDefinition = this.insertMetricDefinition(session, mdtId);
  final BinaryId metricDimensionSetId = this.insertMetricDimensionSet(session, mdtId.metricDefinition.dimensions);
  // NOTE(review): DigestUtils.sha is deprecated in newer commons-codec; sha1 is the
  // equivalent replacement — consider switching when the dependency allows.
  final byte[] definitionDimensionsIdSha1Hash = DigestUtils.sha(
      metricDefinition.getId().toHexString() + metricDimensionSetId.toHexString()
  );
  final MetricDefinitionDimensionsDb metricDefinitionDimensions = new MetricDefinitionDimensionsDb(
      definitionDimensionsIdSha1Hash,
      metricDefinition,
      metricDimensionSetId
  );
  return (MetricDefinitionDimensionsDb) session.merge(metricDefinitionDimensions);
}
/**
 * Persists one {@link MetricDimensionDb} row per dimension name/value pair,
 * all sharing the SHA-1 set id, skipping pairs that already exist.
 *
 * @param session active Hibernate session
 * @param dimensions dimension name/value pairs of the metric
 * @return the shared dimension-set id wrapped as a {@link BinaryId}
 */
private BinaryId insertMetricDimensionSet(Session session, Map<String, String> dimensions) {
  final byte[] setId = calculateDimensionSHA1(dimensions);
  for (final Map.Entry<String, String> dimension : dimensions.entrySet()) {
    final MetricDimensionDb row =
        new MetricDimensionDb(setId, dimension.getKey(), dimension.getValue());
    // Only write rows that are not present yet to avoid redundant SQL.
    if (session.get(MetricDimensionDb.class, row.getId()) == null) {
      session.merge(row);
    }
  }
  return new BinaryId(setId);
}
/**
 * Inserts the metric definition row if it does not exist yet, otherwise
 * merges it, and returns a managed instance in both cases.
 *
 * <p>Fix: the original returned the detached instance after {@code merge()},
 * discarding the managed copy that merge() produces; per the Hibernate
 * contract the returned instance is the one attached to the session
 * (consistent with how insertMetricDefinitionDimension() uses merge()).
 *
 * @param session active Hibernate session
 * @param mdtid metric definition plus tenant id
 * @return the managed {@link MetricDefinitionDb} row
 */
private MetricDefinitionDb insertMetricDefinition(final Session session,
                                                  final MetricDefinitionAndTenantId mdtid) {
  final String region = ""; // TODO We currently don't have region
  // The row id is the SHA-1 of name + tenant + region, each column-truncated.
  final String definitionIdStringToHash =
      truncateString(mdtid.metricDefinition.name, MAX_COLUMN_LENGTH) +
      truncateString(mdtid.tenantId, MAX_COLUMN_LENGTH) +
      truncateString(region, MAX_COLUMN_LENGTH);
  final byte[] id = DigestUtils.sha(definitionIdStringToHash);
  final MetricDefinitionDb metricDefinition =
      new MetricDefinitionDb(id, mdtid.metricDefinition.name, mdtid.tenantId, region);
  if (session.get(MetricDefinitionDb.class, metricDefinition.getId()) == null) {
    session.persist(metricDefinition);
    return metricDefinition;
  }
  // Return the managed copy produced by merge(); the local instance stays detached.
  return (MetricDefinitionDb) session.merge(metricDefinition);
}
/**
 * Truncates {@code s} to at most {@code l} characters; a null input maps to
 * the empty string. Logs a warning whenever truncation actually happens.
 *
 * @param s input string, may be null
 * @param l maximum allowed length
 * @return the (possibly truncated) non-null string
 */
private String truncateString(String s, int l) {
  // Guard clauses instead of an if/else-if/else ladder.
  if (s == null) {
    return "";
  }
  if (s.length() <= l) {
    return s;
  }
  final String truncated = s.substring(0, l);
  LOGGER.warn("Input string exceeded max column length. Truncating input string {} to {} chars", s, l);
  LOGGER.warn("Resulting string {}", truncated);
  return truncated;
}
/**
 * Computes the SHA-1 id of a dimension set. Dimensions are sorted by name so
 * the id is independent of map iteration order; null/empty names and values
 * are excluded from the hash, and each part is column-length truncated.
 *
 * <p>Fix: iterate {@code entrySet()} directly instead of {@code keySet()}
 * plus a {@code get()} lookup per key.
 *
 * @param dimensions dimension name/value pairs, may be null or empty
 * @return SHA-1 digest of the canonicalized dimension string
 */
private byte[] calculateDimensionSHA1(final Map<String, String> dimensions) {
  // Calculate dimensions sha1 hash id.
  final StringBuilder dimensionIdStringToHash = new StringBuilder();
  if (dimensions != null && !dimensions.isEmpty()) {
    // Sort the dimensions on name and value.
    final Map<String, String> dimensionTreeMap = Maps.newTreeMap(ImmutableSortedMap.copyOf(dimensions));
    for (final Map.Entry<String, String> dimension : dimensionTreeMap.entrySet()) {
      final String dimensionName = dimension.getKey();
      final String dimensionValue = dimension.getValue();
      // Skip blank names/values so they never contribute to the hash.
      if (dimensionName != null && !dimensionName.isEmpty()
          && dimensionValue != null && !dimensionValue.isEmpty()) {
        dimensionIdStringToHash
            .append(this.truncateString(dimensionName, MAX_COLUMN_LENGTH))
            .append(this.truncateString(dimensionValue, MAX_COLUMN_LENGTH));
      }
    }
  }
  return DigestUtils.sha(dimensionIdStringToHash.toString());
}
/**
 * Rolls back the passed {@code tx} transaction if it is not null.
 * The assumption is that a null {@code tx} means the transaction
 * has already been successfully committed.
 *
 * @param tx {@link Transaction} object, may be null
 */
private void rollbackIfNotNull(final Transaction tx) {
  if (tx != null) {
    try {
      tx.rollback();
    } catch (RuntimeException rbe) {
      // A failed rollback is deliberately logged, not rethrown, so the
      // original exception that triggered the rollback is not masked.
      LOGGER.error("Couldnt roll back transaction", rbe);
    }
  }
}
/**
 * Customization hook for query building: callers override one of the
 * {@code apply}/{@code formatHQL} methods to narrow a lookup, while the
 * shared {@link #NOOP_HELPER} leaves criteria and queries untouched.
 */
private static class LookupHelper {
  // Shared stateless instance used when no extra filtering is required.
  static final LookupHelper NOOP_HELPER = new LookupHelper();

  /** Returns the criteria unchanged; override to add restrictions. */
  public Criteria apply(@Nonnull final Criteria input) {
    return input; // by default we do nothing
  }

  /** Returns the query unchanged; override to bind parameters. */
  public Query apply(@Nonnull final Query query) {
    return query;
  }

  /** Substitutes the always-true predicate "1=1" into the HQL placeholder. */
  public String formatHQL(@Nonnull final String hqlQuery) {
    return String.format(hqlQuery, "1=1"); // by default no formatting
  }
}
}

View File

@ -1,18 +1,15 @@
/*
* Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.thresh.infrastructure.thresholding;
@ -47,6 +44,18 @@ public class DataSourceFactory implements Serializable {
private String maxSize;

// Toggles the ORM (Hibernate) persistence path — presumably read by the
// persistence module wiring; confirm against the module that consumes it.
private boolean hibernateSupport;
// Fully qualified connection-provider class name (e.g. a HikariCP provider).
private String providerClass;
// Connection coordinates handed to the pooled data source.
private String databaseName;
private String serverName;
private String portNumber;
// Schema auto-management mode — looks like an hbm2ddl.auto value; verify usage.
private String autoConfig;
/** @return the database user name used for connections. */
public String getUser() {
  return user;
}
@ -118,4 +127,52 @@ public class DataSourceFactory implements Serializable {
/** Sets the maximum connection-pool size. */
public void setMaxSize(String maxSize) {
  this.maxSize = maxSize;
}
/** @return true when the Hibernate (ORM) persistence path is enabled. */
public boolean isHibernateSupport() {
  return hibernateSupport;
}

/** Enables or disables the Hibernate (ORM) persistence path. */
public void setHibernateSupport(boolean hibernateSupport) {
  this.hibernateSupport = hibernateSupport;
}
/** @return fully qualified name of the connection-provider class. */
public String getProviderClass() {
  return providerClass;
}

/** Sets the fully qualified connection-provider class name. */
public void setProviderClass(String providerClass) {
  this.providerClass = providerClass;
}
/** @return name of the database to connect to. */
public String getDatabaseName() {
  return databaseName;
}

/** Sets the name of the database to connect to. */
public void setDatabaseName(String databaseName) {
  this.databaseName = databaseName;
}
/** @return host name of the database server. */
public String getServerName() {
  return serverName;
}

/** Sets the host name of the database server. */
public void setServerName(String serverName) {
  this.serverName = serverName;
}
/** @return port the database server listens on (kept as a string). */
public String getPortNumber() {
  return portNumber;
}

/** Sets the database server port (kept as a string). */
public void setPortNumber(String portNumber) {
  this.portNumber = portNumber;
}
/** @return schema auto-management setting — presumably an hbm2ddl.auto value; confirm. */
public String getAutoConfig() {
  return autoConfig;
}

/** Sets the schema auto-management setting. */
public void setAutoConfig(String autoConfig) {
  this.autoConfig = autoConfig;
}
}

View File

@ -0,0 +1,118 @@
/*
* Copyright 2015 FUJITSU LIMITED
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.thresh.infrastructure.persistence.hibernate;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import java.util.Arrays;
import java.util.List;
import monasca.common.model.alarm.AlarmExpression;
import monasca.thresh.domain.model.AlarmDefinition;
import monasca.thresh.domain.service.AlarmDefinitionDAO;
import org.hibernate.SessionFactory;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
/**
 * Test scenarios for AlarmDefinitionSqlImpl.
 *
 * @author lukasz.zajaczkowski@ts.fujitsu.com
 *
 */
@Test(groups = "orm")
public class AlarmDefinitionSqlImplTest {

  private static final String TENANT_ID = "bob";
  private static final String ALARM_DEFINITION_ID = "123";
  // Fix: these were mutable non-final statics; constants must be final.
  private static final String ALARM_NAME = "90% CPU";
  private static final String ALARM_DESCR = "Description for " + ALARM_NAME;

  private SessionFactory sessionFactory;
  private AlarmDefinitionDAO dao;

  /** Builds a fresh in-memory session factory and DAO before every test. */
  @BeforeMethod
  protected void setupClass() throws Exception {
    sessionFactory = HibernateUtil.getSessionFactory();
    dao = new AlarmDefinitionSqlImpl(sessionFactory);
  }

  /** Closes the factory so each test method starts from a clean schema. */
  @AfterMethod
  protected void afterMethod() {
    this.sessionFactory.close();
    this.sessionFactory = null;
  }

  public void testGetById() {
    // An unknown id must yield null rather than throwing.
    assertNull(dao.findById(ALARM_DEFINITION_ID));

    final AlarmExpression expression = new AlarmExpression("max(cpu{service=nova}) > 90");
    final AlarmDefinition alarmDefinition =
        new AlarmDefinition(TENANT_ID, ALARM_NAME, ALARM_DESCR, expression, "LOW",
            false, Arrays.asList("fred"));
    insertAndCheck(alarmDefinition);

    final AlarmExpression expression2 = new AlarmExpression("max(cpu{service=swift}) > 90");
    final AlarmDefinition alarmDefinition2 =
        new AlarmDefinition(TENANT_ID, ALARM_NAME, ALARM_DESCR, expression2, "LOW",
            false, Arrays.asList("hostname", "dev"));
    insertAndCheck(alarmDefinition2);

    // Make sure it works when there are no dimensions
    final AlarmExpression expression3 = new AlarmExpression("max(cpu) > 90");
    final AlarmDefinition alarmDefinition3 =
        new AlarmDefinition(TENANT_ID, ALARM_NAME, ALARM_DESCR, expression3, "LOW",
            false, Arrays.asList("hostname", "dev"));
    insertAndCheck(alarmDefinition3);
  }

  public void testListAll() {
    // Fix: TestNG assertEquals takes (actual, expected); arguments were reversed.
    assertEquals(dao.listAll().size(), 0);

    final AlarmExpression expression = new AlarmExpression("max(cpu{service=nova}) > 90");
    final AlarmDefinition alarmDefinition =
        new AlarmDefinition(TENANT_ID, ALARM_NAME, ALARM_DESCR, expression, "LOW",
            false, Arrays.asList("fred", "barney"));
    HibernateUtil.insertAlarmDefinition(sessionFactory.openSession(), alarmDefinition);

    verifyListAllMatches(alarmDefinition);

    final AlarmExpression expression2 = new AlarmExpression("max(cpu{service=swift}) > 90");
    final AlarmDefinition alarmDefinition2 =
        new AlarmDefinition(TENANT_ID, ALARM_NAME, ALARM_DESCR, expression2, "LOW",
            false, Arrays.asList("fred", "barney", "wilma", "betty"));
    HibernateUtil.insertAlarmDefinition(sessionFactory.openSession(), alarmDefinition2);

    verifyListAllMatches(alarmDefinition, alarmDefinition2);
  }

  /** Inserts the definition directly and verifies the DAO reads it back equal. */
  private void insertAndCheck(final AlarmDefinition alarmDefinition) {
    HibernateUtil.insertAlarmDefinition(sessionFactory.openSession(), alarmDefinition);

    AlarmDefinition fromDb = dao.findById(alarmDefinition.getId());
    assertEquals(fromDb, alarmDefinition);
  }

  /** Asserts listAll() returns exactly the given definitions, in any order. */
  private void verifyListAllMatches(final AlarmDefinition... alarmDefinitions) {
    List<AlarmDefinition> found = dao.listAll();
    // Fix: (actual, expected) argument order.
    assertEquals(found.size(), alarmDefinitions.length);

    for (AlarmDefinition alarmDef : alarmDefinitions) {
      assertTrue(found.contains(alarmDef));
    }
  }
}

View File

@ -0,0 +1,189 @@
/*
* Copyright 2015 FUJITSU LIMITED
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.thresh.infrastructure.persistence.hibernate;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import static org.testng.AssertJUnit.assertNull;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.beust.jcommander.internal.Maps;
import monasca.common.model.alarm.AggregateFunction;
import monasca.common.model.alarm.AlarmExpression;
import monasca.common.model.alarm.AlarmState;
import monasca.common.model.metric.MetricDefinition;
import monasca.thresh.domain.model.Alarm;
import monasca.thresh.domain.model.AlarmDefinition;
import monasca.thresh.domain.model.MetricDefinitionAndTenantId;
import monasca.thresh.domain.model.SubAlarm;
import monasca.thresh.domain.model.SubExpression;
import monasca.thresh.domain.service.AlarmDAO;
import org.hibernate.SessionFactory;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
/**
 * Test scenarios for AlarmSqlRepoImpl.
 *
 * @author lukasz.zajaczkowski@ts.fujitsu.com
 * @author tomasz.trebski@ts.fujitsu.com
 */
@Test(groups = "orm")
public class AlarmSqlImplTest {

  private static final String TENANT_ID = "bob";
  // Fix: these were mutable non-final statics; constants must be final.
  private static final String ALARM_NAME = "90% CPU";
  private static final String ALARM_DESCR = "Description for " + ALARM_NAME;
  private static final Boolean ALARM_ENABLED = Boolean.TRUE;

  private MetricDefinitionAndTenantId newMetric;
  private AlarmDefinition alarmDef;

  private SessionFactory sessionFactory;
  private AlarmDAO dao;

  /** Builds a fresh in-memory session factory, DAO and fixture data. */
  @BeforeMethod
  protected void setupClass() throws Exception {
    sessionFactory = HibernateUtil.getSessionFactory();
    dao = new AlarmSqlImpl(sessionFactory);
    this.prepareData();
  }

  /** Closes the factory so each test method starts from a clean schema. */
  @AfterMethod
  protected void afterMethod() {
    this.sessionFactory.close();
    this.sessionFactory = null;
  }

  /** Inserts one alarm definition and prepares a matching metric fixture. */
  protected void prepareData() {
    final String expr = "avg(load{first=first_value}) > 10 and max(cpu) < 90";
    alarmDef = new AlarmDefinition(TENANT_ID, ALARM_NAME, ALARM_DESCR, new AlarmExpression(expr), "LOW", ALARM_ENABLED, new ArrayList<String>());
    HibernateUtil.insertAlarmDefinition(this.sessionFactory.openSession(), alarmDef);

    final Map<String, String> dimensions = new HashMap<String, String>();
    dimensions.put("first", "first_value");
    dimensions.put("second", "second_value");
    final MetricDefinition md = new MetricDefinition("load", dimensions);
    newMetric = new MetricDefinitionAndTenantId(md, TENANT_ID);
  }

  @Test(groups = "orm")
  public void shouldFindForAlarmDefinitionId() {
    // No alarms yet for the definition.
    verifyAlarmList(dao.findForAlarmDefinitionId(alarmDef.getId()));

    final Alarm firstAlarm = new Alarm(alarmDef, AlarmState.OK);
    firstAlarm.addAlarmedMetric(newMetric);
    dao.createAlarm(firstAlarm);

    final Alarm secondAlarm = new Alarm(alarmDef, AlarmState.OK);
    secondAlarm.addAlarmedMetric(newMetric);
    dao.createAlarm(secondAlarm);

    final AlarmDefinition secondAlarmDef =
        new AlarmDefinition(TENANT_ID, "Second", null, new AlarmExpression("avg(cpu{disk=vda, instance_id=123}) > 10"), "LOW", true,
            Arrays.asList("dev"));
    HibernateUtil.insertAlarmDefinition(sessionFactory.openSession(), secondAlarmDef);

    final Alarm thirdAlarm = new Alarm(secondAlarmDef, AlarmState.OK);
    final Map<String, String> dimensionMap = new HashMap<>();
    dimensionMap.put("disk", "vda");
    dimensionMap.put("instance_id", "123");
    thirdAlarm.addAlarmedMetric(new MetricDefinitionAndTenantId(new MetricDefinition("cpu", dimensionMap), secondAlarmDef.getTenantId()));
    dao.createAlarm(thirdAlarm);

    // Alarms must be grouped by their owning definition.
    verifyAlarmList(dao.findForAlarmDefinitionId(alarmDef.getId()), firstAlarm, secondAlarm);
    verifyAlarmList(dao.findForAlarmDefinitionId(secondAlarmDef.getId()), thirdAlarm);
    verifyAlarmList(dao.listAll(), firstAlarm, secondAlarm, thirdAlarm);
  }

  @Test(groups = "orm")
  public void shouldUpdateState() {
    final Alarm newAlarm = new Alarm(alarmDef, AlarmState.OK);
    dao.createAlarm(newAlarm);
    dao.updateState(newAlarm.getId(), AlarmState.ALARM);
    assertEquals(dao.findById(newAlarm.getId()).getState(), AlarmState.ALARM);
  }

  /** Asserts the found list contains exactly the expected alarms, any order. */
  private void verifyAlarmList(final List<Alarm> found, Alarm... expected) {
    assertEquals(found.size(), expected.length);
    for (final Alarm alarm : expected) {
      assertTrue(found.contains(alarm));
    }
  }

  @Test(groups = "orm")
  public void checkComplexMetrics() {
    final Alarm newAlarm = new Alarm(alarmDef, AlarmState.ALARM);

    for (final String hostname : Arrays.asList("vivi", "eleanore")) {
      for (final String metricName : Arrays.asList("cpu", "load")) {
        final Map<String, String> dimensions = Maps.newHashMap();
        dimensions.put("first", "first_value");
        dimensions.put("second", "second_value");
        dimensions.put("hostname", hostname);
        final MetricDefinition md = new MetricDefinition(metricName, dimensions);
        newAlarm.addAlarmedMetric(new MetricDefinitionAndTenantId(md, TENANT_ID));
      }
    }
    dao.createAlarm(newAlarm);

    final Alarm found = dao.findById(newAlarm.getId());

    // Have to check both ways because there was a bug in AlarmDAOImpl and it showed up if both
    // ways were tested
    assertTrue(newAlarm.equals(found));
    assertTrue(found.equals(newAlarm));
  }

  @Test(groups = "orm")
  public void shouldUpdate() {
    final Alarm newAlarm = new Alarm(alarmDef, AlarmState.OK);
    dao.createAlarm(newAlarm);

    final SubExpression first = alarmDef.getSubExpressions().get(0);
    final AggregateFunction newFunction = AggregateFunction.COUNT;
    first.getAlarmSubExpression().setFunction(newFunction);
    // Fix: TestNG assertEquals takes (actual, expected); arguments were reversed.
    assertEquals(dao.updateSubAlarmExpressions(first.getId(), first.getAlarmSubExpression()), 1);
    // Find the SubAlarm that was created from the changed SubExpression
    boolean found = false;
    for (final SubAlarm subAlarm : newAlarm.getSubAlarms()) {
      if (subAlarm.getAlarmSubExpressionId().equals(first.getId())) {
        found = true;
        // This is what dao.updateSubAlarmExpressions() should have changed
        subAlarm.getExpression().setFunction(newFunction);
        break;
      }
    }
    assertTrue(found);
    assertEquals(dao.findById(newAlarm.getId()), newAlarm);
  }

  @Test(groups = "orm")
  public void shouldDelete() {
    final Alarm newAlarm = new Alarm(alarmDef, AlarmState.OK);
    dao.createAlarm(newAlarm);
    dao.deleteByDefinitionId(newAlarm.getAlarmDefinitionId());
    assertNull(dao.findById(newAlarm.getId()));
  }
}

View File

@ -0,0 +1,169 @@
/*
* Copyright 2015 FUJITSU LIMITED
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package monasca.thresh.infrastructure.persistence.hibernate;
import java.util.Map;
import java.util.Properties;
import com.google.common.base.Joiner;
import monasca.common.hibernate.db.AlarmActionDb;
import monasca.common.hibernate.db.AlarmDb;
import monasca.common.hibernate.db.AlarmDefinitionDb;
import monasca.common.hibernate.db.AlarmMetricDb;
import monasca.common.hibernate.db.MetricDefinitionDb;
import monasca.common.hibernate.db.MetricDefinitionDimensionsDb;
import monasca.common.hibernate.db.MetricDimensionDb;
import monasca.common.hibernate.db.NotificationMethodDb;
import monasca.common.hibernate.db.SubAlarmDb;
import monasca.common.hibernate.db.SubAlarmDefinitionDb;
import monasca.common.hibernate.db.SubAlarmDefinitionDimensionDb;
import monasca.common.model.alarm.AlarmSeverity;
import monasca.common.model.alarm.AlarmSubExpression;
import monasca.thresh.domain.model.AlarmDefinition;
import monasca.thresh.domain.model.SubExpression;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.Configuration;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Test helper building a Hibernate configuration over an in-memory H2
 * database (PostgreSQL mode) with the same entity mappings the thresholder
 * repositories use, plus a seeding helper for alarm definitions.
 */
class HibernateUtil {

  public static final Joiner COMMA_JOINER = Joiner.on(',');
  private static final Logger logger = LoggerFactory.getLogger(HibernateUtil.class);
  // Fix: blank final — assigned exactly once in the static initializer.
  private static final Configuration CONFIGURATION;

  static {
    try {
      Configuration configuration = new Configuration();
      // Register every entity the repositories touch.
      configuration.addAnnotatedClass(AlarmDb.class);
      configuration.addAnnotatedClass(AlarmDefinitionDb.class);
      configuration.addAnnotatedClass(AlarmMetricDb.class);
      configuration.addAnnotatedClass(MetricDefinitionDb.class);
      configuration.addAnnotatedClass(MetricDefinitionDimensionsDb.class);
      configuration.addAnnotatedClass(MetricDimensionDb.class);
      configuration.addAnnotatedClass(SubAlarmDefinitionDb.class);
      configuration.addAnnotatedClass(SubAlarmDefinitionDimensionDb.class);
      configuration.addAnnotatedClass(SubAlarmDb.class);
      configuration.addAnnotatedClass(AlarmActionDb.class);
      configuration.addAnnotatedClass(NotificationMethodDb.class);
      configuration.setProperties(getHikariH2Properties());
      CONFIGURATION = configuration;
    } catch (Throwable ex) {
      // Fix: log through SLF4J (was System.err.println) so the failure keeps
      // its stack trace and is not swallowed.
      logger.error("Initial SessionFactory creation failed.", ex);
      throw new ExceptionInInitializerError(ex);
    }
  }

  /** HikariCP-backed properties for the in-memory H2 database used in tests. */
  private static Properties getHikariH2Properties() {
    Properties properties = new Properties();
    properties.put("hibernate.connection.provider_class", "com.zaxxer.hikari.hibernate.HikariConnectionProvider");
    // create-drop: schema is rebuilt per SessionFactory, dropped on close().
    properties.put("hibernate.hbm2ddl.auto", "create-drop");
    // Fix: the documented property key is "hibernate.show_sql" (string-valued);
    // the bare "show_sql" key is not recognized by Hibernate.
    properties.put("hibernate.show_sql", "true");
    properties.put("hibernate.hikari.dataSourceClassName", "org.h2.jdbcx.JdbcDataSource");
    properties.put("hibernate.hikari.dataSource.url", "jdbc:h2:mem:mon;MODE=PostgreSQL");
    properties.put("hibernate.hikari.dataSource.user", "sa");
    properties.put("hibernate.hikari.dataSource.password", "");
    return properties;
  }

  /**
   * Alternative properties for running the tests against a local PostgreSQL
   * instance; currently unused but kept as a drop-in replacement for
   * {@link #getHikariH2Properties()}.
   */
  private static Properties getHikariPostgresProperties() {
    Properties properties = new Properties();
    properties.put("hibernate.connection.provider_class", "com.zaxxer.hikari.hibernate.HikariConnectionProvider");
    properties.put("hibernate.hbm2ddl.auto", "validate");
    // Fix: use the documented "hibernate.show_sql" key (string-valued).
    properties.put("hibernate.show_sql", "true");
    properties.put("hibernate.hikari.dataSourceClassName", "org.postgresql.ds.PGPoolingDataSource");
    properties.put("hibernate.hikari.dataSource.serverName", "localhost");
    properties.put("hibernate.hikari.dataSource.portNumber", "5432");
    properties.put("hibernate.hikari.dataSource.databaseName", "mon");
    properties.put("hibernate.hikari.dataSource.user", "mon");
    properties.put("hibernate.hikari.dataSource.password", "mon");
    properties.put("hibernate.hikari.dataSource.initialConnections", "25");
    properties.put("hibernate.hikari.dataSource.maxConnections", "100");
    properties.put("hibernate.hikari.connectionTestQuery", "SELECT 1");
    return properties;
  }

  /**
   * Persists an {@link AlarmDefinition} together with its sub-expressions and
   * their dimensions in one transaction; the passed session is always closed.
   * Severity is stored as LOW unconditionally — looks intentional for tests,
   * but confirm it should not come from {@code ad}.
   *
   * @param session open session, consumed and closed by this method
   * @param ad definition to persist
   */
  public static void insertAlarmDefinition(Session session, AlarmDefinition ad) {
    AlarmDefinitionDb def;
    try {
      session.beginTransaction();
      final DateTime now = DateTime.now();

      def = new AlarmDefinitionDb();
      def.setId(ad.getId());
      def.setTenantId(ad.getTenantId());
      def.setName(ad.getName());
      def.setDescription(ad.getDescription());
      def.setSeverity(AlarmSeverity.LOW);
      def.setExpression(ad.getAlarmExpression().getExpression());
      def.setMatchBy(ad.getMatchBy().isEmpty() ? null : COMMA_JOINER.join(ad.getMatchBy()));
      def.setActionsEnabled(ad.isActionsEnabled());
      def.setCreatedAt(now);
      def.setUpdatedAt(now);
      def.setDeletedAt(null);

      session.save(def);

      for (final SubExpression subExpression : ad.getSubExpressions()) {
        final AlarmSubExpression alarmSubExpr = subExpression.getAlarmSubExpression();
        final SubAlarmDefinitionDb subAlarmDef = new SubAlarmDefinitionDb(
            subExpression.getId(),
            def,
            alarmSubExpr.getFunction().name(),
            alarmSubExpr.getMetricDefinition().name,
            alarmSubExpr.getOperator().name(),
            alarmSubExpr.getThreshold(),
            alarmSubExpr.getPeriod(),
            alarmSubExpr.getPeriods(),
            now,
            now
        );
        session.save(subAlarmDef);

        for (final Map.Entry<String, String> entry : alarmSubExpr.getMetricDefinition().dimensions.entrySet()) {
          SubAlarmDefinitionDimensionDb dimension = new SubAlarmDefinitionDimensionDb(subAlarmDef, entry.getKey(), entry.getValue());
          session.save(dimension);
        }
      }

      session.getTransaction().commit();
    } catch (RuntimeException e) {
      try {
        session.getTransaction().rollback();
      } catch (RuntimeException rbe) {
        // Fix: corrected log-message typo ("Couldnt").
        logger.error("Couldn't roll back transaction", rbe);
      }
      throw e;
    } finally {
      if (session != null) {
        session.close();
      }
    }
  }

  /** Builds a new SessionFactory from the shared test configuration. */
  public static SessionFactory getSessionFactory() {
    return CONFIGURATION.buildSessionFactory(
        new StandardServiceRegistryBuilder()
            .applySettings(CONFIGURATION.getProperties())
            .build()
    );
  }
}