ORM (SQLAlchemy) implementation for Python

This is the SQLAlchemy part of oiskam's proposal for Python.

Change-Id: I699d87c1cd43fe0ab52aa8ca7044f4d90c8a1ba2
oiskam1 2015-12-02 10:23:34 +01:00
parent d94ecf28ec
commit 0462177fb7
31 changed files with 3975 additions and 43 deletions

View File

@@ -43,6 +43,7 @@ henriquetruta <henrique@lsd.ufcg.edu.br>
hochmuth <roland.hochmuth@hp.com>
kaiyan-sheng <kaiyan.sheng@hp.com>
liu-sheng <liusheng@huawei.com>
oiskam1 <oiskam1@yandex.ru>
raymondr <raymondr@users.noreply.github.com>
satsuki_fukazu <fukazu.satsuki@po.ntts.co.jp>
venkatamahesh <venkatamaheshkotha@gmail.com>

View File

@@ -42,13 +42,13 @@ driver = monasca_api.common.messaging.kafka_publisher:KafkaPublisher
metrics_driver = monasca_api.common.repositories.influxdb.metrics_repository:MetricsRepository
# The driver to use for the alarm definitions repository
alarm_definitions_driver = monasca_api.common.repositories.mysql.alarm_definitions_repository:AlarmDefinitionsRepository
alarm_definitions_driver = monasca_api.common.repositories.sqla.alarm_definitions_repository:AlarmDefinitionsRepository
# The driver to use for the alarms repository
alarms_driver = monasca_api.common.repositories.mysql.alarms_repository:AlarmsRepository
alarms_driver = monasca_api.common.repositories.sqla.alarms_repository:AlarmsRepository
# The driver to use for the notifications repository
notifications_driver = monasca_api.common.repositories.mysql.notifications_repository:NotificationsRepository
notifications_driver = monasca_api.common.repositories.sqla.notifications_repository:NotificationsRepository
[dispatcher]
driver = v2_reference
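The driver options above use the `module.path:ClassName` convention. A minimal sketch of how such a string can be resolved into a class (the helper `load_driver` is illustrative, not part of this commit):

    import importlib

    def load_driver(spec):
        # Split "package.module:ClassName" and import the named class.
        module_path, class_name = spec.split(':')
        return getattr(importlib.import_module(module_path), class_name)

    # Illustration only; requires monasca-api on the import path.
    repo_cls = load_driver(
        'monasca_api.common.repositories.sqla.alarms_repository'
        ':AlarmsRepository')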
@@ -100,11 +100,14 @@ password = password
# The name of the InfluxDB database to use.
database_name = mon
[mysql]
database_name = mon
hostname = 127.0.0.1
username = monapi
password = password
[database]
url = "mysql+pymysql://monapi:password@127.0.0.1/mon"
# [mysql]
# database_name = mon
# hostname = 127.0.0.1
# username = monapi
# password = password
[keystone_authtoken]
identity_uri = http://127.0.0.1:35357
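The `[database] url` value is a standard SQLAlchemy database URL. A quick sketch of how SQLAlchemy (1.x, as used by this commit) parses it into components:

    from sqlalchemy.engine.url import make_url

    url = make_url('mysql+pymysql://monapi:password@127.0.0.1/mon')
    print(url.drivername)  # mysql+pymysql
    print(url.host)        # 127.0.0.1
    print(url.database)    # mon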

View File

@@ -20,3 +20,8 @@ host = 127.0.0.1
port = 8070
workers = 1
proc_name = monasca_api
[logger_sqlalchemy]
level = DEBUG
handlers =
qualname = sqlalchemy.engine
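The new `[logger_sqlalchemy]` block turns on SQL statement logging. The programmatic equivalent with Python's standard logging module would be roughly:

    import logging

    # DEBUG on sqlalchemy.engine logs every emitted statement and its parameters.
    logging.basicConfig()
    logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG)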

View File

@@ -1,5 +1,6 @@
#
# (C) Copyright 2015 Hewlett Packard Enterprise Development Company LP
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -714,7 +715,6 @@ function install_monasca_api_python {
echo_summary "Install Monasca monasca_api_python"
sudo apt-get -y install python-dev
sudo apt-get -y install python-mysqldb
sudo apt-get -y install libmysqlclient-dev
sudo mkdir -p /opt/monasca-api
@@ -726,6 +726,7 @@ function install_monasca_api_python {
PIP_VIRTUAL_ENV=/opt/monasca-api
pip_install gunicorn
pip_install PyMySQL
(cd "${MONASCA_BASE}"/monasca-api ; sudo python setup.py sdist)

View File

@@ -15,22 +15,22 @@
# limitations under the License.
#
# (cd $BASE/new/tempest/; sudo virtualenv .venv)
# source $BASE/new/tempest/.venv/bin/activate
(cd $BASE/new/tempest/; sudo virtualenv .venv)
source $BASE/new/tempest/.venv/bin/activate
# (cd $BASE/new/tempest/; sudo pip install -r requirements.txt -r test-requirements.txt)
# sudo pip install nose
# sudo pip install numpy
(cd $BASE/new/tempest/; sudo pip install -r requirements.txt -r test-requirements.txt)
sudo pip install nose
sudo pip install numpy
# (cd $BASE/new/tempest/; sudo oslo-config-generator --config-file etc/config-generator.tempest.conf --output-file etc/tempest.conf)
# (cd $BASE/new/; sudo sh -c 'cat monasca-api/devstack/files/tempest/tempest.conf >> tempest/etc/tempest.conf')
(cd $BASE/new/tempest/; sudo oslo-config-generator --config-file etc/config-generator.tempest.conf --output-file etc/tempest.conf)
(cd $BASE/new/; sudo sh -c 'cat monasca-api/devstack/files/tempest/tempest.conf >> tempest/etc/tempest.conf')
# sudo cp $BASE/new/tempest/etc/logging.conf.sample $BASE/new/tempest/etc/logging.conf
sudo cp $BASE/new/tempest/etc/logging.conf.sample $BASE/new/tempest/etc/logging.conf
# (cd $BASE/new/monasca-api/; sudo pip install -r requirements.txt -r test-requirements.txt)
# (cd $BASE/new/monasca-api/; sudo python setup.py install)
(cd $BASE/new/monasca-api/; sudo pip install -r requirements.txt -r test-requirements.txt)
(cd $BASE/new/monasca-api/; sudo python setup.py install)
# (cd $BASE/new/tempest/; sudo testr init)
(cd $BASE/new/tempest/; sudo testr init)
# (cd $BASE/new/tempest/; sudo sh -c 'testr list-tests monasca_tempest_tests | grep gate > monasca_tempest_tests_gate')
# (cd $BASE/new/tempest/; sudo sh -c 'testr run --subunit --load-list=monasca_tempest_tests_gate | subunit-trace --fails')
(cd $BASE/new/tempest/; sudo sh -c 'testr list-tests monasca_tempest_tests | grep gate > monasca_tempest_tests_gate')
(cd $BASE/new/tempest/; sudo sh -c 'testr run --subunit --load-list=monasca_tempest_tests_gate | subunit-trace --fails')

View File

@@ -100,12 +100,35 @@ password = password
# The name of the InfluxDB database to use.
database_name = mon
# Below is configuration for database.
# The order of reading configuration for database is:
# 1) [mysql] section
# 2) [database]
# url
# 3) [database]
# host = 127.0.0.1
# username = monapi
# password = password
# drivername = mysql+pymysql
# port = 3306
# database = mon
# query = ""
[mysql]
database_name = mon
hostname = 192.168.10.4
username = monapi
password = password
# [database]
# url = "mysql+pymysql://monapi:password@127.0.0.1/mon"
# host = 127.0.0.1
# username = monapi
# password = password
# drivername = mysql+pymysql
# port = 3306
# database = mon
# query = ""
[keystone_authtoken]
identity_uri = http://192.168.10.5:35357
auth_uri = http://192.168.10.5:5000
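A sketch of the lookup order documented above, mirroring what the new `SQLRepository` does (simplified; the real values come from oslo.config, and the dict-based `conf` here is illustrative):

    from sqlalchemy.engine.url import URL, make_url

    def resolve_database_url(conf):
        # 1) The legacy [mysql] section wins when present.
        if conf['mysql'].get('database_name'):
            m = conf['mysql']
            return make_url('mysql+pymysql://{username}:{password}@{hostname}'
                            '/{database_name}'.format(**m))
        # 2) Next, a full [database] url.
        if conf['database'].get('url'):
            return make_url(conf['database']['url'])
        # 3) Otherwise assemble the URL from individual [database] keys.
        parts = {k: v for k, v in conf['database'].items() if k != 'url'}
        return URL(**parts)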

View File

@@ -1,5 +1,6 @@
# Copyright 2014 Hewlett-Packard
# (C) Copyright 2015 Hewlett Packard Enterprise Development Company LP
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain

View File

@@ -13,9 +13,10 @@
# under the License.
from datetime import datetime
from oslo_log import log
from time import time
from oslo_log import log
from monasca_api.common.repositories import alarms_repository
from monasca_api.common.repositories import exceptions
from monasca_common.repositories.mysql import mysql_repository

View File

@@ -1,4 +1,5 @@
# Copyright 2014 Hewlett-Packard
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@@ -21,7 +22,7 @@ import six
class NotificationsRepository(object):
@abc.abstractmethod
def create_notification(self, id, tenant_id, name, notification_type,
def create_notification(self, tenant_id, name, notification_type,
address):
return
@@ -38,6 +39,6 @@ class NotificationsRepository(object):
return
@abc.abstractmethod
def update_notification(self, id, tenant_id, name, notification_type,
def update_notification(self, notification_id, tenant_id, name, notification_type,
address):
return
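The rename from `id` to `notification_id` avoids shadowing Python's builtin `id()`. For reference, the abstract-base pattern this file relies on (with `six` for a Python 2/3-compatible metaclass) looks roughly like:

    import abc

    import six

    @six.add_metaclass(abc.ABCMeta)
    class NotificationsRepository(object):
        @abc.abstractmethod
        def update_notification(self, notification_id, tenant_id, name,
                                notification_type, address):
            return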

View File

@@ -0,0 +1,765 @@
# Copyright 2014 Hewlett-Packard
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from oslo_log import log
from oslo_utils import uuidutils
from monasca_api.common.repositories import alarm_definitions_repository as adr
from monasca_api.common.repositories import exceptions
from monasca_api.common.repositories.model import sub_alarm_definition
from monasca_api.common.repositories.sqla import models
from monasca_api.common.repositories.sqla import sql_repository
from sqlalchemy import MetaData, update, delete, insert
from sqlalchemy import select, text, bindparam, null, literal_column
LOG = log.getLogger(__name__)
class AlarmDefinitionsRepository(sql_repository.SQLRepository,
adr.AlarmDefinitionsRepository):
def __init__(self):
super(AlarmDefinitionsRepository, self).__init__()
metadata = MetaData()
self.a = models.create_a_model(metadata)
self.aa = models.create_aa_model(metadata)
self.ad = models.create_ad_model(metadata)
self.am = models.create_am_model(metadata)
self.nm = models.create_nm_model(metadata)
self.md = models.create_md_model(metadata)
self.mde = models.create_mde_model(metadata)
self.mdd = models.create_mdd_model(metadata)
self.sa = models.create_sa_model(metadata)
self.sad = models.create_sad_model(metadata)
self.sadd = models.create_sadd_model(metadata)
a = self.a
aa = self.aa
ad = self.ad
am = self.am
nm = self.nm
md = self.md
sa = self.sa
mdd = self.mdd
mde = self.mde
sad = self.sad
sadd = self.sadd
a_s = a.alias('a')
ad_s = ad.alias('ad')
self.ad_s = ad_s
am_s = am.alias('am')
nm_s = nm.alias('nm')
md_s = md.alias('md')
sa_s = sa.alias('sa')
mdd_s = mdd.alias('mdd')
mde_s = mde.alias('mde')
sad_s = sad.alias('sad')
sadd_s = sadd.alias('sadd')
aaa_aa = aa.alias('aaa_aa')
aaa = (select([aaa_aa.c.alarm_definition_id,
models.group_concat([aaa_aa.c.action_id]).label('alarm_actions')])
.select_from(aaa_aa)
.where(aaa_aa.c.alarm_state == text("'ALARM'"))
.group_by(aaa_aa.c.alarm_definition_id)
.alias('aaa'))
aao_aa = aa.alias('aao_aa')
aao = (select([aao_aa.c.alarm_definition_id,
models.group_concat([aao_aa.c.action_id]).label('ok_actions')])
.select_from(aao_aa)
.where(aao_aa.c.alarm_state == text("'OK'"))
.group_by(aao_aa.c.alarm_definition_id)
.alias('aao'))
aau_aa = aa.alias('aau_aa')
aau = (select([aau_aa.c.alarm_definition_id,
models.group_concat([aau_aa.c.action_id]).label('undetermined_actions')])
.select_from(aau_aa)
.where(aau_aa.c.alarm_state == text("'UNDETERMINED'"))
.group_by(aau_aa.c.alarm_definition_id)
.alias('aau'))
self.base_query_from = (ad_s.outerjoin(aaa, aaa.c.alarm_definition_id == ad_s.c.id)
.outerjoin(aao, aao.c.alarm_definition_id == ad_s.c.id)
.outerjoin(aau, aau.c.alarm_definition_id == ad_s.c.id))
self.base_query = (select([ad_s.c.id,
ad_s.c.name,
ad_s.c.description,
ad_s.c.expression,
ad_s.c.match_by,
ad_s.c.severity,
ad_s.c.actions_enabled,
aaa.c.alarm_actions,
aao.c.ok_actions,
aau.c.undetermined_actions]))
self.get_sub_alarms_query = (select([sa_s.c.id.label('sub_alarm_id'),
sa_s.c.alarm_id,
sa_s.c.expression])
.select_from(sa_s.join(a_s, a_s.c.id == sa_s.c.alarm_id)
.join(ad_s, ad_s.c.id == a_s.c.alarm_definition_id))
.where(ad_s.c.tenant_id == bindparam('b_tenant_id'))
.where(ad_s.c.id == bindparam('b_id'))
.distinct())
mdg = (select([md_s.c.dimension_set_id,
models.group_concat([md_s.c.name + text("'='") + md_s.c.value]).label('dimensions')])
.select_from(md_s)
.group_by(md_s.c.dimension_set_id)
.alias('mdg'))
self.get_alarm_metrics_query = (select([a_s.c.id.label('alarm_id'),
mde_s.c.name,
mdg.c.dimensions])
.select_from(a_s.join(ad_s, ad_s.c.id == a_s.c.alarm_definition_id)
.join(am_s, am_s.c.alarm_id == a_s.c.id)
.join(mdd_s, mdd_s.c.id
== am_s.c.metric_definition_dimensions_id)
.join(mde_s, mde_s.c.id == mdd_s.c.metric_definition_id)
.join(mdg, mdg.c.dimension_set_id
== mdd_s.c.metric_dimension_set_id))
.where(ad_s.c.tenant_id == bindparam('b_tenant_id'))
.where(ad_s.c.id == bindparam('b_id'))
.order_by(a_s.c.id)
.distinct())
self.soft_delete_ad_query = (update(ad)
.where(ad.c.tenant_id == bindparam('b_tenant_id'))
.where(ad.c.id == bindparam('b_id'))
.where(ad.c.deleted_at == null())
.values(deleted_at=datetime.datetime.utcnow()))
self.delete_a_query = (delete(a)
.where(a.c.alarm_definition_id == bindparam('b_id')))
columns_gc = [sadd_s.c.dimension_name + text("'='") + sadd_s.c.value]
saddg = (select([sadd_s.c.sub_alarm_definition_id,
models.group_concat(columns_gc).label('dimensions')])
.select_from(sadd_s)
.group_by(sadd_s.c.sub_alarm_definition_id)
.alias('saddg'))
self.get_sub_alarm_definitions_query = (select([sad_s, saddg.c.dimensions])
.select_from(sad_s.outerjoin(saddg,
saddg.c.sub_alarm_definition_id
== sad_s.c.id))
.where(sad_s.c.alarm_definition_id
== bindparam('b_alarm_definition_id')))
self.create_alarm_definition_insert_ad_query = (insert(ad)
.values(
id=bindparam('b_id'),
tenant_id=bindparam('b_tenant_id'),
name=bindparam('b_name'),
description=bindparam('b_description'),
expression=bindparam('b_expression'),
severity=bindparam('b_severity'),
match_by=bindparam('b_match_by'),
actions_enabled=bindparam('b_actions_enabled'),
created_at=bindparam('b_created_at'),
updated_at=bindparam('b_updated_at')))
self.create_alarm_definition_insert_sad_query = (insert(sad)
.values(
id=bindparam('b_id'),
alarm_definition_id=bindparam('b_alarm_definition_id'),
function=bindparam('b_function'),
metric_name=bindparam('b_metric_name'),
operator=bindparam('b_operator'),
threshold=bindparam('b_threshold'),
period=bindparam('b_period'),
periods=bindparam('b_periods'),
created_at=bindparam('b_created_at'),
updated_at=bindparam('b_updated_at')))
b_sad_id = bindparam('b_sub_alarm_definition_id')
self.create_alarm_definition_insert_sadd_query = (insert(sadd)
.values(
sub_alarm_definition_id=b_sad_id,
dimension_name=bindparam('b_dimension_name'),
value=bindparam('b_value')))
self.update_or_patch_alarm_definition_update_ad_query = (update(ad)
.where(ad.c.tenant_id == bindparam('b_tenant_id'))
.where(ad.c.id == bindparam('b_id')))
self.update_or_patch_alarm_definition_delete_sad_query = (delete(sad)
.where(sad.c.id == bindparam('b_id')))
self.update_or_patch_alarm_definition_update_sad_query = (update(sad)
.where(sad.c.id == bindparam('b_id'))
.values(
operator=bindparam('b_operator'),
threshold=bindparam('b_threshold'),
updated_at=bindparam('b_updated_at')))
b_ad_id = bindparam('b_alarm_definition_id')
self.update_or_patch_alarm_definition_insert_sad_query = (insert(sad)
.values(
id=bindparam('b_id'),
alarm_definition_id=b_ad_id,
function=bindparam('b_function'),
metric_name=bindparam('b_metric_name'),
operator=bindparam('b_operator'),
threshold=bindparam('b_threshold'),
period=bindparam('b_period'),
periods=bindparam('b_periods'),
created_at=bindparam('b_created_at'),
updated_at=bindparam('b_updated_at')))
self.update_or_patch_alarm_definition_insert_sadd_query = (insert(sadd)
.values(
sub_alarm_definition_id=b_sad_id,
dimension_name=bindparam('b_dimension_name'),
value=bindparam('b_value')))
self.delete_aa_query = (delete(aa)
.where(aa.c.alarm_definition_id
== bindparam('b_alarm_definition_id')))
self.delete_aa_state_query = (delete(aa)
.where(aa.c.alarm_definition_id == bindparam('b_alarm_definition_id'))
.where(aa.c.alarm_state == bindparam('b_alarm_state')))
self.select_nm_query = (select([nm_s.c.id])
.select_from(nm_s)
.where(nm_s.c.id == bindparam('b_id')))
self.insert_aa_query = (insert(aa)
.values(
alarm_definition_id=bindparam('b_alarm_definition_id'),
alarm_state=bindparam('b_alarm_state'),
action_id=bindparam('b_action_id')))
@sql_repository.sql_try_catch_block
def get_alarm_definition(self, tenant_id, _id):
with self._db_engine.connect() as conn:
return self._get_alarm_definition(conn, tenant_id, _id)
def _get_alarm_definition(self, conn, tenant_id, _id):
ad = self.ad_s
query = (self.base_query
.select_from(self.base_query_from)
.where(ad.c.tenant_id == bindparam('b_tenant_id'))
.where(ad.c.id == bindparam('b_id'))
.where(ad.c.deleted_at == null()))
row = conn.execute(query,
b_tenant_id=tenant_id,
b_id=_id).fetchone()
if row is not None:
return dict(row)
else:
raise exceptions.DoesNotExistException
@sql_repository.sql_try_catch_block
def get_alarm_definitions(self, tenant_id, name=None, dimensions=None, severity=None,
sort_by=None, offset=None, limit=1000):
with self._db_engine.connect() as conn:
ad = self.ad_s
sad = self.sad.alias('sad')
sadd = self.sadd.alias('sadd')
query_from = self.base_query_from
parms = {'b_tenant_id': tenant_id}
if dimensions:
sadi = sad.c.alarm_definition_id
query_from = query_from.join(sad, sadi == ad.c.id)
i = 0
for n, v in dimensions.iteritems():
bind_dimension_name = 'b_sadd_dimension_name_{}'.format(i)
bind_value = 'b_sadd_value_{}'.format(i)
sadd_ = (select([sadd.c.sub_alarm_definition_id])
.select_from(sadd)
.where(sadd.c.dimension_name == bindparam(bind_dimension_name))
.where(sadd.c.value == bindparam(bind_value))
.distinct().alias('sadd_{}'.format(i)))
sadd_id = sadd_.c.sub_alarm_definition_id
query_from = query_from.join(sadd_, sadd_id == sad.c.id)
parms[bind_dimension_name] = n.encode('utf8')
parms[bind_value] = v.encode('utf8')
i += 1
query = (self.base_query
.select_from(query_from)
.where(ad.c.tenant_id == bindparam('b_tenant_id'))
.where(ad.c.deleted_at == null()))
if name:
query = query.where(ad.c.name == bindparam('b_name'))
parms['b_name'] = name.encode('utf8')
if severity:
query = query.where(ad.c.severity == bindparam('b_severity'))
parms['b_severity'] = severity.encode('utf8')
order_columns = []
if sort_by is not None:
order_columns = [literal_column(col) for col in sort_by]
if 'id' not in sort_by:
order_columns.append(ad.c.id)
else:
order_columns = [ad.c.id]
if offset:
query = query.offset(bindparam('b_offset'))
parms['b_offset'] = offset
query = query.order_by(*order_columns)
query = query.limit(bindparam('b_limit'))
parms['b_limit'] = limit + 1
return [dict(row) for row in conn.execute(query, parms).fetchall()]
@sql_repository.sql_try_catch_block
def get_sub_alarms(self, tenant_id, alarm_definition_id):
with self._db_engine.connect() as conn:
return [dict(row) for row in conn.execute(self.get_sub_alarms_query,
b_tenant_id=tenant_id,
b_id=alarm_definition_id).fetchall()]
@sql_repository.sql_try_catch_block
def get_alarm_metrics(self, tenant_id, alarm_definition_id):
with self._db_engine.connect() as conn:
return [dict(row) for row in conn.execute(self.get_alarm_metrics_query,
b_tenant_id=tenant_id,
b_id=alarm_definition_id).fetchall()]
@sql_repository.sql_try_catch_block
def delete_alarm_definition(self, tenant_id, alarm_definition_id):
"""Soft delete the alarm definition.
Soft delete the alarm definition and hard delete any associated
alarms.
:param tenant_id:
:param alarm_definition_id:
:returns True: -- if alarm definition exists and was deleted.
:returns False: -- if the alarm definition does not exist.
:raises RepositoryException:
"""
with self._db_engine.begin() as conn:
cursor = conn.execute(self.soft_delete_ad_query,
b_tenant_id=tenant_id,
b_id=alarm_definition_id)
if cursor.rowcount < 1:
return False
conn.execute(self.delete_a_query,
b_tenant_id=tenant_id,
b_id=alarm_definition_id)
return True
@sql_repository.sql_try_catch_block
def get_sub_alarm_definitions(self, alarm_definition_id):
with self._db_engine.connect() as conn:
return self._get_sub_alarm_definitions(conn, alarm_definition_id)
def _get_sub_alarm_definitions(self, conn, alarm_definition_id):
return [dict(row) for row in conn.execute(self.get_sub_alarm_definitions_query,
b_alarm_definition_id=alarm_definition_id).fetchall()]
@sql_repository.sql_try_catch_block
def create_alarm_definition(self, tenant_id, name, expression,
sub_expr_list, description, severity, match_by,
alarm_actions, undetermined_actions,
ok_actions):
with self._db_engine.begin() as conn:
now = datetime.datetime.utcnow()
alarm_definition_id = uuidutils.generate_uuid()
conn.execute(self.create_alarm_definition_insert_ad_query,
b_id=alarm_definition_id,
b_tenant_id=tenant_id,
b_name=name.encode('utf8'),
b_description=description.encode('utf8'),
b_expression=expression.encode('utf8'),
b_severity=severity.upper().encode('utf8'),
b_match_by=",".join(match_by).encode('utf8'),
b_actions_enabled=True,
b_created_at=now,
b_updated_at=now)
for sub_expr in sub_expr_list:
sub_alarm_definition_id = uuidutils.generate_uuid()
sub_expr.id = sub_alarm_definition_id
metric_name = sub_expr.normalized_metric_name.encode("utf8")
operator = sub_expr.normalized_operator.encode('utf8')
conn.execute(self.create_alarm_definition_insert_sad_query,
b_id=sub_alarm_definition_id,
b_alarm_definition_id=alarm_definition_id,
b_function=sub_expr.normalized_func.encode('utf8'),
b_metric_name=metric_name,
b_operator=operator,
b_threshold=sub_expr.threshold.encode('utf8'),
b_period=sub_expr.period.encode('utf8'),
b_periods=sub_expr.periods.encode('utf8'),
b_created_at=now,
b_updated_at=now)
for dimension in sub_expr.dimensions_as_list:
parsed_dimension = dimension.split('=')
query = self.create_alarm_definition_insert_sadd_query
sadi = sub_alarm_definition_id
dimension_name = parsed_dimension[0].encode('utf8')
conn.execute(query,
b_sub_alarm_definition_id=sadi,
b_dimension_name=dimension_name,
b_value=parsed_dimension[1].encode('utf8'))
self._insert_into_alarm_action(conn, alarm_definition_id,
alarm_actions, u"ALARM")
self._insert_into_alarm_action(conn, alarm_definition_id,
undetermined_actions,
u"UNDETERMINED")
self._insert_into_alarm_action(conn, alarm_definition_id,
ok_actions, u"OK")
return alarm_definition_id
@sql_repository.sql_try_catch_block
def update_or_patch_alarm_definition(self, tenant_id, alarm_definition_id,
name, expression,
sub_expr_list, actions_enabled,
description, alarm_actions,
ok_actions, undetermined_actions,
match_by, severity, patch=False):
with self._db_engine.begin() as conn:
original_row = self._get_alarm_definition(conn,
tenant_id,
alarm_definition_id)
rows = self._get_sub_alarm_definitions(conn, alarm_definition_id)
old_sub_alarm_defs_by_id = {}
for row in rows:
sad = sub_alarm_definition.SubAlarmDefinition(row=row)
old_sub_alarm_defs_by_id[sad.id] = sad
if expression:
(
changed_sub_alarm_defs_by_id,
new_sub_alarm_defs_by_id,
old_sub_alarm_defs_by_id,
unchanged_sub_alarm_defs_by_id
) = self._determine_sub_expr_changes(
alarm_definition_id, old_sub_alarm_defs_by_id,
sub_expr_list)
if old_sub_alarm_defs_by_id or new_sub_alarm_defs_by_id:
new_count = (len(new_sub_alarm_defs_by_id) +
len(changed_sub_alarm_defs_by_id) +
len(unchanged_sub_alarm_defs_by_id))
old_count = len(old_sub_alarm_defs_by_id)
if new_count != old_count:
msg = 'number of subexpressions must not change'
else:
msg = 'metrics in subexpression must not change'
raise exceptions.InvalidUpdateException(msg.encode('utf8'))
else:
unchanged_sub_alarm_defs_by_id = old_sub_alarm_defs_by_id
changed_sub_alarm_defs_by_id = {}
new_sub_alarm_defs_by_id = {}
old_sub_alarm_defs_by_id = {}
# Get a common update time
now = datetime.datetime.utcnow()
if name is None:
new_name = original_row['name']
else:
new_name = name.encode('utf8')
if description is None:
if patch:
new_description = original_row['description']
else:
new_description = ''
else:
new_description = description.encode('utf8')
if expression is None:
new_expression = original_row['expression']
else:
new_expression = expression.encode('utf8')
if severity is None:
if patch:
new_severity = original_row['severity']
else:
new_severity = 'LOW'
else:
new_severity = severity.encode('utf8')
if match_by is None:
if patch:
new_match_by = original_row['match_by']
else:
new_match_by = None
else:
new_match_by = ",".join(match_by).encode('utf8')
if new_match_by != original_row['match_by']:
msg = "match_by must not change".encode('utf8')
raise exceptions.InvalidUpdateException(msg)
if actions_enabled is None:
new_actions_enabled = original_row['actions_enabled']
else:
new_actions_enabled = actions_enabled
conn.execute(self.update_or_patch_alarm_definition_update_ad_query
.values(
name=bindparam('b_name'),
description=bindparam('b_description'),
expression=bindparam('b_expression'),
match_by=bindparam('b_match_by'),
severity=bindparam('b_severity'),
actions_enabled=bindparam('b_actions_enabled'),
updated_at=bindparam('b_updated_at')
),
b_name=new_name,
b_description=new_description,
b_expression=new_expression,
b_match_by=new_match_by,
b_severity=new_severity,
b_actions_enabled=bool(new_actions_enabled),
b_updated_at=now,
b_tenant_id=tenant_id,
b_id=alarm_definition_id)
parms = []
for sub_alarm_def_id in old_sub_alarm_defs_by_id.values():
parms.append({'b_id': sub_alarm_def_id.id})
if len(parms) > 0:
query = self.update_or_patch_alarm_definition_delete_sad_query
conn.execute(query, parms)
parms = []
for sub_alarm_definition_id, sub_alarm_def in (
changed_sub_alarm_defs_by_id.iteritems()):
parms.append({'b_operator': sub_alarm_def.operator,
'b_threshold': sub_alarm_def.threshold,
'b_updated_at': now,
'b_id': sub_alarm_definition_id})
if len(parms) > 0:
query = self.update_or_patch_alarm_definition_update_sad_query
conn.execute(query, parms)
parms = []
parms_sadd = []
for sub_alarm_def in new_sub_alarm_defs_by_id.values():
adi = sub_alarm_def.alarm_definition_id
function = sub_alarm_def.function.encode('utf8')
metric_name = sub_alarm_def.metric_name.encode('utf8')
operator = sub_alarm_def.operator.encode('utf8')
threshold = str(sub_alarm_def.threshold).encode('utf8')
period = str(sub_alarm_def.period).encode('utf8')
periods = str(sub_alarm_def.periods).encode('utf8')
parms.append({'b_id': sub_alarm_def.id,
'b_alarm_definition_id': adi,
'b_function': function,
'b_metric_name': metric_name,
'b_operator': operator,
'b_threshold': threshold,
'b_period': period,
'b_periods': periods,
'b_created_at': now,
'b_updated_at': now})
for name, value in sub_alarm_def.dimensions.items():
sadi = sub_alarm_def.id
parms_sadd.append({'b_sub_alarm_definition_id': sadi,
'b_dimension_name': name.encode('utf8'),
'b_value': value.encode('utf8')})
if len(parms) > 0:
query = self.update_or_patch_alarm_definition_insert_sad_query
conn.execute(query, parms)
if len(parms_sadd) > 0:
query = self.update_or_patch_alarm_definition_insert_sadd_query
conn.execute(query, parms_sadd)
# Delete old alarm actions
if patch:
if alarm_actions is not None:
self._delete_alarm_actions(conn, alarm_definition_id,
'ALARM')
if ok_actions is not None:
self._delete_alarm_actions(conn, alarm_definition_id,
'OK')
if undetermined_actions is not None:
self._delete_alarm_actions(conn, alarm_definition_id,
'UNDETERMINED')
else:
conn.execute(self.delete_aa_query,
b_alarm_definition_id=alarm_definition_id)
# Insert new alarm actions
self._insert_into_alarm_action(conn, alarm_definition_id,
alarm_actions,
u"ALARM")
self._insert_into_alarm_action(conn, alarm_definition_id,
undetermined_actions,
u"UNDETERMINED")
self._insert_into_alarm_action(conn, alarm_definition_id,
ok_actions,
u"OK")
ad = self.ad_s
query = (self.base_query
.select_from(self.base_query_from)
.where(ad.c.tenant_id == bindparam('b_tenant_id'))
.where(ad.c.id == bindparam('b_id'))
.where(ad.c.deleted_at == null()))
updated_row = conn.execute(query,
b_id=alarm_definition_id,
b_tenant_id=tenant_id).fetchone()
if updated_row is None:
raise Exception("Failed to find current alarm definition")
sub_alarm_defs_dict = {'old': old_sub_alarm_defs_by_id,
'changed': changed_sub_alarm_defs_by_id,
'new': new_sub_alarm_defs_by_id,
'unchanged': unchanged_sub_alarm_defs_by_id}
# Return the alarm def and the sub alarm defs
return updated_row, sub_alarm_defs_dict
def _determine_sub_expr_changes(self, alarm_definition_id,
old_sub_alarm_defs_by_id,
sub_expr_list):
old_sub_alarm_defs_set = set(
old_sub_alarm_defs_by_id.values())
new_sub_alarm_defs_set = set()
for sub_expr in sub_expr_list:
sad = sub_alarm_definition.SubAlarmDefinition(
sub_expr=sub_expr)
# Inject the alarm definition id.
sad.alarm_definition_id = alarm_definition_id.decode('utf8')
new_sub_alarm_defs_set.add(sad)
# Identify old or changed expressions
old_or_changed_sub_alarm_defs_set = (
old_sub_alarm_defs_set - new_sub_alarm_defs_set)
# Identify new or changed expressions
new_or_changed_sub_alarm_defs_set = (
new_sub_alarm_defs_set - old_sub_alarm_defs_set)
# Find changed expressions. O(n^2) == bad!
# This algo may not work if sub expressions are duplicated.
changed_sub_alarm_defs_by_id = {}
old_or_changed_sub_alarm_defs_set_to_remove = set()
new_or_changed_sub_alarm_defs_set_to_remove = set()
for old_or_changed in old_or_changed_sub_alarm_defs_set:
for new_or_changed in new_or_changed_sub_alarm_defs_set:
if old_or_changed.same_key_fields(new_or_changed):
old_or_changed_sub_alarm_defs_set_to_remove.add(
old_or_changed
)
new_or_changed_sub_alarm_defs_set_to_remove.add(
new_or_changed
)
changed_sub_alarm_defs_by_id[
old_or_changed.id] = (
new_or_changed)
old_or_changed_sub_alarm_defs_set = (
old_or_changed_sub_alarm_defs_set -
old_or_changed_sub_alarm_defs_set_to_remove
)
new_or_changed_sub_alarm_defs_set = (
new_or_changed_sub_alarm_defs_set -
new_or_changed_sub_alarm_defs_set_to_remove
)
# Create the list of unchanged expressions
unchanged_sub_alarm_defs_by_id = (
old_sub_alarm_defs_by_id.copy())
for old_sub_alarm_def in old_or_changed_sub_alarm_defs_set:
del unchanged_sub_alarm_defs_by_id[old_sub_alarm_def.id]
for sub_alarm_definition_id in (
changed_sub_alarm_defs_by_id.keys()):
del unchanged_sub_alarm_defs_by_id[
sub_alarm_definition_id]
# Remove old sub expressions
temp = {}
for old_sub_alarm_def in old_or_changed_sub_alarm_defs_set:
temp[old_sub_alarm_def.id] = old_sub_alarm_def
old_sub_alarm_defs_by_id = temp
# Create IDs for new expressions
new_sub_alarm_defs_by_id = {}
for new_sub_alarm_def in new_or_changed_sub_alarm_defs_set:
sub_alarm_definition_id = uuidutils.generate_uuid()
new_sub_alarm_def.id = sub_alarm_definition_id
new_sub_alarm_defs_by_id[sub_alarm_definition_id] = (
new_sub_alarm_def)
return (changed_sub_alarm_defs_by_id,
new_sub_alarm_defs_by_id,
old_sub_alarm_defs_by_id,
unchanged_sub_alarm_defs_by_id)
def _delete_alarm_actions(self, conn, _id, alarm_action_name):
conn.execute(self.delete_aa_state_query,
b_alarm_definition_id=_id,
b_alarm_state=alarm_action_name)
def _insert_into_alarm_action(self, conn, alarm_definition_id, actions,
alarm_state):
if actions is None:
return
for action in actions:
row = conn.execute(self.select_nm_query,
b_id=action.encode('utf8')).fetchone()
if row is None:
raise exceptions.RepositoryException(
"Non-existent notification id {} submitted for {} "
"notification action".format(action.encode('utf8'),
alarm_state.encode('utf8')))
conn.execute(self.insert_aa_query,
b_alarm_definition_id=alarm_definition_id,
b_alarm_state=alarm_state.encode('utf8'),
b_action_id=action.encode('utf8')
)
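Everything above follows one pattern: statements are built once in `__init__` with `bindparam()` placeholders and executed later with concrete values. A self-contained sketch of that pattern against an in-memory SQLite table (SQLAlchemy 1.x style):

    from sqlalchemy import (MetaData, Table, Column, String, bindparam,
                            create_engine, insert, select)

    metadata = MetaData()
    nm = Table('notification_method', metadata,
               Column('id', String(36)),
               Column('name', String(250)))

    engine = create_engine('sqlite://')
    metadata.create_all(engine)

    # Build the statements once...
    insert_q = insert(nm).values(id=bindparam('b_id'), name=bindparam('b_name'))
    select_q = select([nm]).where(nm.c.id == bindparam('b_id'))

    # ...then bind values per call, as the repository methods above do.
    with engine.connect() as conn:
        conn.execute(insert_q, b_id='abc', b_name='email-me')
        print(dict(conn.execute(select_q, b_id='abc').fetchone()))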

View File

@@ -0,0 +1,571 @@
# -*- coding: utf-8 -*-
# Copyright 2014 Hewlett-Packard
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from datetime import datetime
from time import time
from oslo_log import log
from monasca_api.common.repositories import alarms_repository
from monasca_api.common.repositories import exceptions
from monasca_api.common.repositories.sqla import models
from monasca_api.common.repositories.sqla import sql_repository
from sqlalchemy import MetaData, update, delete, select, text, bindparam, func, literal_column, asc, desc
from sqlalchemy import or_
LOG = log.getLogger(__name__)
class AlarmsRepository(sql_repository.SQLRepository,
alarms_repository.AlarmsRepository):
def __init__(self):
super(AlarmsRepository, self).__init__()
metadata = MetaData()
self.a_du = models.create_a_model(metadata)
self.aa = models.create_aa_model(metadata).alias('aa')
self.sa = models.create_sa_model(metadata).alias('sa')
self.ad = models.create_ad_model(metadata).alias('ad')
self.am = models.create_am_model(metadata).alias('am')
self.md = models.create_md_model(metadata).alias('md')
self.mdd = models.create_mdd_model(metadata).alias('mdd')
self.mde = models.create_mde_model(metadata).alias('mde')
self.sad = models.create_sad_model(metadata).alias('sad')
self.sadd = models.create_sadd_model(metadata).alias('sadd')
a = self.a_du
self.a = a.alias('a')
a_s = self.a
sa = self.sa
ad = self.ad
am = self.am
md = self.md
mdd = self.mdd
mde = self.mde
gc_columns = [md.c.name + text("'='") + md.c.value]
mdg = (select([md.c.dimension_set_id,
models.group_concat(gc_columns).label('dimensions')])
.select_from(md)
.group_by(md.c.dimension_set_id).alias('mdg'))
self.base_query_from = (a_s.join(ad, ad.c.id == a_s.c.alarm_definition_id)
.join(am, am.c.alarm_id == a_s.c.id)
.join(mdd, mdd.c.id == am.c.metric_definition_dimensions_id)
.join(mde, mde.c.id == mdd.c.metric_definition_id)
.outerjoin(mdg, mdg.c.dimension_set_id == mdd.c.metric_dimension_set_id))
self.base_query = select([a_s.c.id.label('alarm_id'),
a_s.c.state,
a_s.c.state_updated_at.
label('state_updated_timestamp'),
a_s.c.updated_at.label('updated_timestamp'),
a_s.c.created_at.label('created_timestamp'),
a_s.c.lifecycle_state,
a_s.c.link,
ad.c.id.label('alarm_definition_id'),
ad.c.name.label('alarm_definition_name'),
ad.c.severity,
mde.c.name.label('metric_name'),
mdg.c.dimensions.label('metric_dimensions')])
self.base_subquery_list = (select([a_s.c.id])
.select_from(a_s.join(ad, a_s.c.alarm_definition_id == ad.c.id)))
self.get_ad_query = (select([ad])
.where(ad.c.tenant_id == bindparam('b_tenant_id'))
.where(ad.c.id == bindparam('b_id')))
self.get_am_query = (select([a_s.c.id.label('alarm_id'),
mde.c.name,
mdg.c.dimensions])
.select_from(a_s.join(am, am.c.alarm_id == a_s.c.id)
.join(mdd,
mdd.c.id ==
am.c.metric_definition_dimensions_id)
.join(mde, mde.c.id == mdd.c.metric_definition_id)
.outerjoin(mdg,
mdg.c.dimension_set_id ==
mdd.c.metric_dimension_set_id))
.where(a_s.c.id == bindparam('b_id'))
.order_by(a_s.c.id)
.distinct())
self.get_sa_query = (select([sa.c.id.label('sub_alarm_id'),
sa.c.alarm_id,
sa.c.expression,
ad.c.id.label('alarm_definition_id')])
.select_from(sa.join(a_s,
a_s.c.id == sa.c.alarm_id)
.join(ad,
ad.c.id == a_s.c.alarm_definition_id))
.where(ad.c.tenant_id == bindparam('b_tenant_id'))
.where(a_s.c.id == bindparam('b_id'))
.distinct())
self.get_a_query = (select([a_s.c.state, a_s.c.link, a_s.c.lifecycle_state])
.select_from(a_s.join(ad,
ad.c.id == a_s.c.alarm_definition_id))
.where(ad.c.tenant_id == bindparam('b_tenant_id'))
.where(a_s.c.id == bindparam('b_id')))
self.get_a_ad_query = (select([a_s.c.id])
.select_from(a_s.join(ad,
ad.c.id ==
a_s.c.alarm_definition_id))
.where(ad.c.tenant_id == bindparam('b_tenant_id'))
.where(a_s.c.id == bindparam('b_id'))
.alias('a_ad'))
select_tmp = (select([literal_column('id')])
.select_from(self.get_a_ad_query)
.distinct()
.alias('temporarytable'))
self.delete_alarm_query = (delete(a)
.where(a.c.id.in_(select_tmp)))
md_ = (select([mde.c.id])
.where(mde.c.name == bindparam('b_md_name')).alias('md_'))
self.get_a_am_query = (select([a_s.c.id])
.select_from(a_s.join(am,
am.c.alarm_id ==
a_s.c.id)
.join(mdd,
mdd.c.id ==
am.c.metric_definition_dimensions_id)
.join(md_,
md_.c.id ==
mdd.c.metric_definition_id)))
@sql_repository.sql_try_catch_block
def get_alarm_definition(self, tenant_id, alarm_id):
with self._db_engine.connect() as conn:
row = conn.execute(self.get_ad_query,
b_tenant_id=tenant_id,
b_id=alarm_id).fetchone()
if row is not None:
return dict(row)
else:
raise exceptions.DoesNotExistException
@sql_repository.sql_try_catch_block
def get_alarm_metrics(self, alarm_id):
with self._db_engine.connect() as conn:
rows = conn.execute(self.get_am_query, b_id=alarm_id).fetchall()
return [dict(row) for row in rows]
@sql_repository.sql_try_catch_block
def get_sub_alarms(self, tenant_id, alarm_id):
with self._db_engine.connect() as conn:
rows = conn.execute(self.get_sa_query,
b_tenant_id=tenant_id,
b_id=alarm_id).fetchall()
return [dict(row) for row in rows]
@sql_repository.sql_try_catch_block
def update_alarm(self, tenant_id, _id, state, lifecycle_state, link):
time_ms = int(round(time() * 1000.0))
with self._db_engine.connect() as conn:
self.get_a_query.bind = self._db_engine
prev_alarm = conn.execute(self.get_a_query,
b_tenant_id=tenant_id,
b_id=_id).fetchone()
if prev_alarm is None:
raise exceptions.DoesNotExistException
parms = {'b_lifecycle_state': lifecycle_state,
'b_link': link}
set_values = {'lifecycle_state':
bindparam('b_lifecycle_state'),
'link': bindparam('b_link'),
'updated_at': func.now()}
if state != prev_alarm['state']:
parms['b_state'] = state
set_values['state'] = bindparam('b_state')
set_values['state_updated_at'] = func.now()
parms['b_tenant_id'] = tenant_id
parms['b_id'] = _id
select_tmp = (select([literal_column('id')])
.select_from(self.get_a_ad_query)
.distinct()
.alias('temporarytable'))
a = self.a_du
update_query = (update(a)
.values(set_values)
.where(a.c.id.in_(select_tmp)))
conn.execute(update_query, parms)
return prev_alarm, time_ms
@sql_repository.sql_try_catch_block
def delete_alarm(self, tenant_id, _id):
with self._db_engine.connect() as conn:
cursor = conn.execute(self.delete_alarm_query,
b_tenant_id=tenant_id,
b_id=_id)
if cursor.rowcount < 1:
raise exceptions.DoesNotExistException
@sql_repository.sql_try_catch_block
def get_alarm(self, tenant_id, _id):
with self._db_engine.connect() as conn:
ad = self.ad
a = self.a
query = (self.base_query
.select_from(self.base_query_from)
.where(ad.c.tenant_id == bindparam('b_tenant_id'))
.where(a.c.id == bindparam('b_id'))
.distinct())
rows = conn.execute(query,
b_tenant_id=tenant_id,
b_id=_id).fetchall()
if rows is None or len(rows) == 0:
raise exceptions.DoesNotExistException
return [dict(row) for row in rows]
@sql_repository.sql_try_catch_block
def get_alarms(self, tenant_id, query_parms=None, offset=None, limit=None):
if not query_parms:
query_parms = {}
with self._db_engine.connect() as conn:
parms = {}
ad = self.ad
am = self.am
mdd = self.mdd
md = self.md
a = self.a
query = (self.base_subquery_list
.where(ad.c.tenant_id == bindparam('b_tenant_id')))
parms['b_tenant_id'] = tenant_id
if 'alarm_definition_id' in query_parms:
query = query.where(ad.c.id ==
bindparam('b_alarm_definition_id'))
parms['b_alarm_definition_id'] = query_parms['alarm_definition_id']
if 'metric_name' in query_parms:
query = query.where(a.c.id.in_(self.get_a_am_query))
parms['b_md_name'] = query_parms['metric_name'].encode('utf8')
if 'severity' in query_parms:
query = query.where(ad.c.severity == bindparam('b_severity'))
parms['b_severity'] = query_parms['severity'].encode('utf8')
if 'state' in query_parms:
query = query.where(a.c.state == bindparam('b_state'))
parms['b_state'] = query_parms['state'].encode('utf8')
if 'lifecycle_state' in query_parms:
query = (query
.where(a.c.lifecycle_state ==
bindparam('b_lifecycle_state')))
parms['b_lifecycle_state'] = query_parms['lifecycle_state'].encode('utf8')
if 'link' in query_parms:
query = query.where(a.c.link == bindparam('b_link'))
parms['b_link'] = query_parms['link'].encode('utf8')
if 'state_updated_start_time' in query_parms:
query = (query
.where(a.c.state_updated_at >=
bindparam('b_state_updated_at')))
date_str = query_parms['state_updated_start_time'].encode('utf8')
date_param = datetime.strptime(date_str,
'%Y-%m-%dT%H:%M:%S.%fZ')
parms['b_state_updated_at'] = date_param
if 'metric_dimensions' in query_parms:
sub_query = select([a.c.id])
sub_query_from = (a.join(am, am.c.alarm_id == a.c.id)
.join(mdd,
mdd.c.id ==
am.c.metric_definition_dimensions_id))
sub_query_md_base = select([md.c.dimension_set_id]).select_from(md)
for i, metric_dimension in enumerate(query_parms['metric_dimensions']):
md_name = "b_md_name_{}".format(i)
values_cond = None
values_cond_flag = False
parsed_dimension = metric_dimension.split(':')
if parsed_dimension and len(parsed_dimension) > 1:
if '|' in parsed_dimension[1]:
values = parsed_dimension[1].encode('utf8').split('|')
sub_values_cond = []
for j, value in enumerate(values):
sub_md_value = "b_md_value_{}_{}".format(i, j)
sub_values_cond.append(md.c.value == bindparam(sub_md_value))
parms[sub_md_value] = value
values_cond = or_(*sub_values_cond)
values_cond_flag = True
else:
md_value = "b_md_value_{}".format(i)
values_cond = (md.c.value == bindparam(md_value))
values_cond_flag = True
parms[md_value] = parsed_dimension[1]
sub_query_md = (sub_query_md_base
.where(md.c.name == bindparam(md_name)))
if values_cond_flag:
sub_query_md = (sub_query_md
.where(values_cond))
sub_query_md = (sub_query_md
.distinct()
.alias('md_{}'.format(i)))
sub_query_from = (sub_query_from
.join(sub_query_md,
sub_query_md.c.dimension_set_id ==
mdd.c.metric_dimension_set_id))
parms[md_name] = parsed_dimension[0].encode('utf8')
sub_query = (sub_query
.select_from(sub_query_from)
.distinct())
query = query.where(a.c.id.in_(sub_query))
order_columns = []
if 'sort_by' in query_parms:
columns_mapper = {'alarm_id': a.c.id,
'alarm_definition_id': ad.c.id,
'alarm_definition_name': ad.c.name,
'state_updated_timestamp': a.c.state_updated_at,
'updated_timestamp': a.c.updated_at,
'created_timestamp': a.c.created_at,
'severity': models.field_sort(ad.c.severity, map(text, ["'LOW'",
"'MEDIUM'",
"'HIGH'",
"'CRITICAL'"])),
'state': models.field_sort(a.c.state, map(text, ["'OK'",
"'UNDETERMINED'",
"'ALARM'"]))}
order_columns, received_cols = self._remap_columns(query_parms['sort_by'], columns_mapper)
if not received_cols.get('alarm_id', False):
order_columns.append(a.c.id)
else:
order_columns = [a.c.id]
if limit:
query = query.limit(bindparam('b_limit'))
parms['b_limit'] = limit + 1
if offset:
query = query.offset(bindparam('b_offset'))
parms['b_offset'] = offset
query = (query
.order_by(*order_columns)
.alias('alarm_id_list'))
main_query = (self.base_query
.select_from(self.base_query_from
.join(query, query.c.id == a.c.id))
.distinct())
main_query = main_query.order_by(*order_columns)
return [dict(row) for row in conn.execute(main_query, parms).fetchall()]
def _remap_columns(self, columns, columns_mapper):
received_cols = {}
order_columns = []
for col in columns:
col_values = col.split()
col_name = col_values[0]
order_column = columns_mapper.get(col_name, literal_column(col_name))
if len(col_values) > 1:
mode = col_values[1]
if mode:
if mode == 'asc':
order_column = asc(order_column)
elif mode == 'desc':
order_column = desc(order_column)
order_columns.append(order_column)
received_cols[col_name] = True
return order_columns, received_cols
@sql_repository.sql_try_catch_block
def get_alarms_count(self, tenant_id, query_parms=None, offset=None, limit=None):
if not query_parms:
query_parms = {}
with self._db_engine.connect() as conn:
parms = {}
ad = self.ad
am = self.am
mdd = self.mdd
mde = self.mde
md = self.md
a = self.a
query_from = a.join(ad, ad.c.id == a.c.alarm_definition_id)
parms['b_tenant_id'] = tenant_id
group_by_columns = []
if 'group_by' in query_parms:
group_by_columns = query_parms['group_by']
sub_group_by_columns = []
metric_group_by = {'metric_name',
'dimension_name',
'dimension_value'}.intersection(set(query_parms['group_by']))
if metric_group_by:
sub_query_columns = [am.c.alarm_id]
if 'metric_name' in metric_group_by:
sub_group_by_columns.append(mde.c.name.label('metric_name'))
if 'dimension_name' in metric_group_by:
sub_group_by_columns.append(md.c.name.label('dimension_name'))
if 'dimension_value' in metric_group_by:
sub_group_by_columns.append(md.c.value.label('dimension_value'))
sub_query_columns.extend(sub_group_by_columns)
sub_query_from = (mde.join(mdd, mde.c.id == mdd.c.metric_definition_id)
.join(md, mdd.c.metric_dimension_set_id == md.c.dimension_set_id)
.join(am, am.c.metric_definition_dimensions_id == mdd.c.id))
sub_query = (select(sub_query_columns)
.select_from(sub_query_from)
.distinct()
.alias('metrics'))
query_from = query_from.join(sub_query, sub_query.c.alarm_id == a.c.id)
query_columns = [func.count().label('count')]
query_columns.extend(group_by_columns)
query = (select(query_columns)
.select_from(query_from)
.where(ad.c.tenant_id == bindparam('b_tenant_id')))
parms['b_tenant_id'] = tenant_id
if 'alarm_definition_id' in query_parms:
parms['b_alarm_definition_id'] = query_parms['alarm_definition_id']
query = query.where(ad.c.id == bindparam('b_alarm_definition_id'))
if 'state' in query_parms:
parms['b_state'] = query_parms['state'].encode('utf8')
query = query.where(a.c.state == bindparam('b_state'))
if 'severity' in query_parms:
query = query.where(ad.c.severity == bindparam('b_severity'))
parms['b_severity'] = query_parms['severity'].encode('utf8')
if 'lifecycle_state' in query_parms:
parms['b_lifecycle_state'] = query_parms['lifecycle_state'].encode('utf8')
query = query.where(a.c.lifecycle_state == bindparam('b_lifecycle_state'))
if 'link' in query_parms:
parms['b_link'] = query_parms['link'].encode('utf8')
query = query.where(a.c.link == bindparam('b_link'))
if 'state_updated_start_time' in query_parms:
parms['b_state_updated_at'] = query_parms['state_updated_start_time'].encode("utf8")
query = query.where(a.c.state_updated_at >= bindparam('b_state_updated_at'))
if 'metric_name' in query_parms:
subquery_md = (select([md])
.where(md.c.name == bindparam('b_metric_name'))
.distinct()
.alias('md_'))
subquery = (select([a.c.id])
.select_from(am.join(a, a.c.id == am.c.alarm_id)
.join(mdd, mdd.c.id == am.c.metric_definition_dimensions_id)
.join(subquery_md, subquery_md.c.id == mdd.c.metric_definition_id))
.distinct())
query = query.where(a.c.id.in_(subquery))
parms['b_metric_name'] = query_parms['metric_name'].encode('utf8')
if 'metric_dimensions' in query_parms:
sub_query = select([a.c.id])
sub_query_from = (a.join(am, am.c.alarm_id == a.c.id)
.join(mdd,
mdd.c.id ==
am.c.metric_definition_dimensions_id))
sub_query_md_base = select([md.c.dimension_set_id]).select_from(md)
for i, metric_dimension in enumerate(query_parms['metric_dimensions']):
md_name = "b_md_name_{}".format(i)
md_value = "b_md_value_{}".format(i)
sub_query_md = (sub_query_md_base
.where(md.c.name == bindparam(md_name))
.where(md.c.value == bindparam(md_value))
.distinct()
.alias('md_{}'.format(i)))
parsed_dimension = metric_dimension.split(':')
sub_query_from = (sub_query_from
.join(sub_query_md,
sub_query_md.c.dimension_set_id ==
mdd.c.metric_dimension_set_id))
parms[md_name] = parsed_dimension[0].encode('utf8')
parms[md_value] = parsed_dimension[1].encode('utf8')
sub_query = (sub_query
.select_from(sub_query_from)
.distinct())
query = query.where(a.c.id.in_(sub_query))
if group_by_columns:
query = (query
.order_by(*group_by_columns)
.group_by(*group_by_columns))
if limit:
query = query.limit(bindparam('b_limit'))
parms['b_limit'] = limit + 1
if offset:
query = query.offset(bindparam('b_offset'))
parms['b_offset'] = offset
query = query.distinct()
return [dict(row) for row in conn.execute(query, parms).fetchall()]
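`get_alarms()` and `get_alarms_count()` grow their WHERE clause one filter at a time, pairing each `bindparam()` with an entry in `parms`. The shape of that pattern, reduced to a runnable sketch:

    from sqlalchemy import (MetaData, Table, Column, String, bindparam,
                            create_engine, select)

    metadata = MetaData()
    a = Table('alarm', metadata,
              Column('id', String(36)),
              Column('state', String(20)))

    query = select([a.c.id])
    parms = {}

    query_parms = {'state': 'ALARM'}  # filters requested by a caller
    if 'state' in query_parms:
        query = query.where(a.c.state == bindparam('b_state'))
        parms['b_state'] = query_parms['state']

    engine = create_engine('sqlite://')
    metadata.create_all(engine)
    with engine.connect() as conn:
        rows = conn.execute(query, parms).fetchall()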

View File

@@ -0,0 +1,231 @@
# -*- coding: utf-8 -*-
# Copyright 2015 Robin Hood
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from sqlalchemy import Column
from sqlalchemy.ext import compiler
from sqlalchemy.sql import expression
from sqlalchemy import String, DateTime, Boolean, Integer, Binary, Float
from sqlalchemy import Table
def create_a_model(metadata=None):
return Table('alarm', metadata,
Column('id', String(36)),
Column('alarm_definition_id', String(36)),
Column('state', String(20)),
Column('lifecycle_state', String(50)),
Column('link', String(512)),
Column('created_at', DateTime),
Column('state_updated_at', DateTime),
Column('updated_at', DateTime))
def create_sadd_model(metadata=None):
return Table('sub_alarm_definition_dimension', metadata,
Column('sub_alarm_definition_id', String(36)),
Column('dimension_name', String(255)),
Column('value', String(255)))
def create_aa_model(metadata=None):
return Table('alarm_action', metadata,
Column('alarm_definition_id', String(36)),
Column('alarm_state', String(20)),
Column('action_id', String(36)))
def create_md_model(metadata=None):
return Table('metric_dimension', metadata,
Column('dimension_set_id', Binary),
Column('name', String(255)),
Column('value', String(255)))
def create_mde_model(metadata=None):
return Table('metric_definition', metadata,
Column('id', Binary),
Column('name', String(255)),
Column('tenant_id', String(255)),
Column('region', String(255)))
def create_nm_model(metadata=None):
return Table('notification_method', metadata,
Column('id', String(36)),
Column('tenant_id', String(36)),
Column('name', String(250)),
Column('type', String(20)),
Column('address', String(512)),
Column('created_at', DateTime),
Column('updated_at', DateTime))
def create_mdd_model(metadata=None):
return Table('metric_definition_dimensions', metadata,
Column('id', Binary),
Column('metric_definition_id', Binary),
Column('metric_dimension_set_id', Binary))
def create_am_model(metadata=None):
return Table('alarm_metric', metadata,
Column('alarm_id', String(36)),
Column('metric_definition_dimensions_id', Binary))
def create_ad_model(metadata=None):
return Table('alarm_definition', metadata,
Column('id', String(36)),
Column('tenant_id', String(36)),
Column('name', String(255)),
Column('description', String(255)),
Column('expression', String),
Column('severity', String(20)),
Column('match_by', String(255)),
Column('actions_enabled', Boolean),
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('deleted_at', DateTime))
def create_sa_model(metadata=None):
return Table('sub_alarm', metadata,
Column('id', String(36)),
Column('alarm_id', String(36)),
Column('sub_expression_id', String(36)),
Column('expression', String),
Column('created_at', DateTime),
Column('updated_at', DateTime))
def create_sad_model(metadata=None):
return Table('sub_alarm_definition', metadata,
Column('id', String(36)),
Column('alarm_definition_id', String(36)),
Column('function', String(10)),
Column('metric_name', String(100)),
Column('operator', String(5)),
Column('threshold', Float),
Column('period', Integer),
Column('periods', Integer),
Column('created_at', DateTime),
Column('updated_at', DateTime))
class group_concat(expression.ColumnElement):
name = "group_concat"
order_by = None
separator = ','
columns = []
def __init__(self, columns, separator=',', order_by=None):
self.order_by = order_by
self.separator = separator
self.columns = columns
@compiler.compiles(group_concat, 'oracle')
def _group_concat_oracle(element, compiler_, **kw):
str_order_by = ''
if element.order_by is not None and len(element.order_by) > 0:
str_order_by = "ORDER BY {0}".format(", ".join([compiler_.process(x) for x in element.order_by]))
else:
str_order_by = "ORDER BY {0}".format(", ".join([compiler_.process(x) for x in element.columns]))
return "LISTAGG({0}, '{2}') WITHIN GROUP ({1})".format(
", ".join([compiler_.process(x) for x in element.columns]),
str_order_by,
element.separator,
)
@compiler.compiles(group_concat, 'postgresql')
def _group_concat_postgresql(element, compiler_, **kw):
str_order_by = ''
if element.order_by is not None and len(element.order_by) > 0:
str_order_by = "ORDER BY {0}".format(", ".join([compiler_.process(x) for x in element.order_by]))
return "STRING_AGG({0}, '{2}' {1})".format(
", ".join([compiler_.process(x) for x in element.columns]),
str_order_by,
element.separator,
)
@compiler.compiles(group_concat, 'sybase')
def _group_concat_sybase(element, compiler_, **kw):
return "LIST({0}, '{1}')".format(
", ".join([compiler_.process(x) for x in element.columns]),
element.separator,
)
@compiler.compiles(group_concat, 'mysql')
def _group_concat_mysql(element, compiler_, **kw):
str_order_by = ''
if element.order_by is not None and len(element.order_by) > 0:
str_order_by = "ORDER BY {0}".format(",".join([compiler_.process(x) for x in element.order_by]))
return "GROUP_CONCAT({0} {1} SEPARATOR '{2}')".format(
", ".join([compiler_.process(x) for x in element.columns]),
str_order_by,
element.separator,
)
@compiler.compiles(group_concat)
def _group_concat_default(element, compiler_, **kw):
return "GROUP_CONCAT({0}, '{1}')".format(
", ".join([compiler_.process(x) for x in element.columns]),
element.separator,
)
class field_sort(expression.ColumnElement):
name = "field_sort"
column = None
fields = []
def __init__(self, column, fields):
self.column = column
self.fields = fields
@compiler.compiles(field_sort, "mysql")
def _field_sort_mysql(element, compiler_, **kw):
if element.fields:
return "FIELD({0}, {1})".format(compiler_.process(element.column),
", ".join(map(compiler_.process,
element.fields)))
else:
return str(compiler_.process(element.column))
@compiler.compiles(field_sort)
def _field_sort_general(element, compiler_, **kw):
fields_list = []
if element.fields:
fields_list.append("CASE")
for idx, field in enumerate(element.fields):
fields_list.append("WHEN {0}={1} THEN {2}".format(compiler_.process(element.column),
compiler_.process(field),
idx))
fields_list.append("ELSE {0}".format(len(element.fields)))
fields_list.append("END")
return " ".join(fields_list)

View File

@@ -0,0 +1,195 @@
# Copyright 2014 Hewlett-Packard
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from oslo_log import log
from oslo_utils import uuidutils
from monasca_api.common.repositories import exceptions
from monasca_api.common.repositories import notifications_repository as nr
from monasca_api.common.repositories.sqla import models
from monasca_api.common.repositories.sqla import sql_repository
from sqlalchemy import MetaData, update, insert, delete
from sqlalchemy import select, bindparam, func, and_
LOG = log.getLogger(__name__)
class NotificationsRepository(sql_repository.SQLRepository,
nr.NotificationsRepository):
def __init__(self):
super(NotificationsRepository, self).__init__()
metadata = MetaData()
self.nm = models.create_nm_model(metadata)
nm = self.nm
self._select_nm_count_name_query = (select([func.count()])
.select_from(nm)
.where(
and_(nm.c.tenant_id == bindparam('b_tenant_id'),
nm.c.name == bindparam('b_name'))))
self._select_nm_count_id_query = (select([func.count()])
.select_from(nm)
.where(
and_(nm.c.tenant_id == bindparam('b_tenant_id'),
nm.c.id == bindparam('b_id'))))
self._insert_nm_query = (insert(nm)
.values(
id=bindparam('b_id'),
tenant_id=bindparam('b_tenant_id'),
name=bindparam('b_name'),
type=bindparam('b_type'),
address=bindparam('b_address'),
created_at=bindparam('b_created_at'),
updated_at=bindparam('b_updated_at')))
self._delete_nm_query = (delete(nm)
.where(nm.c.tenant_id == bindparam('b_tenant_id'))
.where(nm.c.id == bindparam('b_id')))
self._update_nm_query = (update(nm)
.where(nm.c.tenant_id == bindparam('b_tenant_id'))
.where(nm.c.id == bindparam('b_id'))
.values(
name=bindparam('b_name'),
type=bindparam('b_type'),
address=bindparam('b_address'),
created_at=bindparam('b_created_at'),
updated_at=bindparam('b_updated_at')))
self._select_nm_id_query = (select([nm])
.where(
and_(nm.c.tenant_id == bindparam('b_tenant_id'),
nm.c.id == bindparam('b_id'))))
self._select_nm_name_query = (select([nm])
.where(
and_(nm.c.tenant_id == bindparam('b_tenant_id'),
nm.c.name == bindparam('b_name'))))
def create_notification(self, tenant_id, name,
notification_type, address):
with self._db_engine.connect() as conn:
row = conn.execute(self._select_nm_count_name_query,
b_tenant_id=tenant_id,
b_name=name.encode('utf8')).fetchone()
if int(row[0]) > 0:
raise exceptions.AlreadyExistsException('Notification already '
'exists')
now = datetime.datetime.utcnow()
notification_id = uuidutils.generate_uuid()
conn.execute(self._insert_nm_query,
b_id=notification_id,
b_tenant_id=tenant_id,
b_name=name.encode('utf8'),
b_type=notification_type.encode('utf8'),
b_address=address.encode('utf8'),
b_created_at=now,
b_updated_at=now)
return notification_id
@sql_repository.sql_try_catch_block
def list_notifications(self, tenant_id, offset, limit):
rows = []
with self._db_engine.connect() as conn:
nm = self.nm
select_nm_query = (select([nm])
.where(nm.c.tenant_id == bindparam('b_tenant_id')))
parms = {'b_tenant_id': tenant_id}
if offset:
select_nm_query = (select_nm_query
.where(nm.c.id > bindparam('b_offset')))
parms['b_offset'] = offset.encode('utf8')
select_nm_query = (select_nm_query
.order_by(nm.c.id)
.limit(bindparam('b_limit')))
parms['b_limit'] = limit + 1
rows = conn.execute(select_nm_query, parms).fetchall()
return [dict(row) for row in rows]
@sql_repository.sql_try_catch_block
def delete_notification(self, tenant_id, _id):
with self._db_engine.connect() as conn:
row = conn.execute(self._select_nm_count_id_query,
b_tenant_id=tenant_id,
b_id=_id).fetchone()
if int(row[0]) < 1:
raise exceptions.DoesNotExistException
conn.execute(self._delete_nm_query,
b_tenant_id=tenant_id,
b_id=_id)
@sql_repository.sql_try_catch_block
def list_notification(self, tenant_id, notification_id):
with self._db_engine.connect() as conn:
row = conn.execute(self._select_nm_id_query,
b_tenant_id=tenant_id,
b_id=notification_id).fetchone()
if row is not None:
return dict(row)
else:
raise exceptions.DoesNotExistException
@sql_repository.sql_try_catch_block
def find_notification_by_name(self, tenant_id, name):
with self._db_engine.connect() as conn:
return conn.execute(self._select_nm_name_query,
b_tenant_id=tenant_id,
b_name=name.encode('utf8')).fetchone()
@sql_repository.sql_try_catch_block
def update_notification(self, notification_id, tenant_id, name, notification_type, address):
with self._db_engine.connect() as conn:
now = datetime.datetime.utcnow()
cursor = conn.execute(self._update_nm_query,
b_id=notification_id,
b_tenant_id=tenant_id,
b_name=name.encode('utf8'),
b_type=notification_type.encode('utf8'),
b_address=address.encode('utf8'),
b_created_at=now,
b_updated_at=now)
if cursor.rowcount < 1:
raise exceptions.DoesNotExistException('Not Found')
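A short sketch of driving this repository directly, assuming oslo.config has already been populated with a reachable [database] url (the tenant and notification values here are illustrative):

from monasca_api.common.repositories.sqla import notifications_repository

repo = notifications_repository.NotificationsRepository()

# Returns the generated UUID; a second create with the same tenant/name
# raises AlreadyExistsException.
notification_id = repo.create_notification(
    'some-tenant', 'MyEmail', 'EMAIL', 'a@b')

# Pagination is keyed on id: pass the last id seen as offset. The query
# fetches limit + 1 rows so the caller can tell whether a next page exists.
rows = repo.list_notifications('some-tenant', offset=None, limit=50)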

View File

@ -0,0 +1,80 @@
# Copyright 2014 Hewlett-Packard
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log
from sqlalchemy.engine.url import URL, make_url
from sqlalchemy import MetaData
from monasca_api.common.repositories import exceptions
LOG = log.getLogger(__name__)
class SQLRepository(object):
def __init__(self):
try:
super(SQLRepository, self).__init__()
self.conf = cfg.CONF
url = None
if self.conf.mysql.database_name is not None:
settings_db = (self.conf.mysql.username,
self.conf.mysql.password,
self.conf.mysql.hostname,
self.conf.mysql.database_name)
url = make_url("mysql+pymysql://{}:{}@{}/{}" % settings_db)
else:
if self.conf.database.url is not None:
url = make_url(self.conf.database.url)
else:
database_conf = dict(self.conf.database)
if 'url' in database_conf:
del database_conf['url']
url = URL(**database_conf)
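# deferred import so unit tests can monkeypatch sqlalchemy.create_engine first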
from sqlalchemy import create_engine
self._db_engine = create_engine(url)
self.metadata = MetaData()
except Exception as ex:
LOG.exception(ex)
raise exceptions.RepositoryException(ex)
def sql_try_catch_block(fun):
def try_it(*args, **kwargs):
try:
return fun(*args, **kwargs)
except exceptions.DoesNotExistException:
raise
except exceptions.InvalidUpdateException:
raise
except exceptions.AlreadyExistsException:
raise
except Exception as ex:
LOG.exception(ex)
raise exceptions.RepositoryException(ex)
return try_it
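The decorator's contract in short: the domain exceptions pass through untouched, anything else is logged and wrapped as RepositoryException, so callers only handle the domain types. A hedged sketch of a repository method using it (the query attribute is hypothetical, for illustration only):

from monasca_api.common.repositories import exceptions
from monasca_api.common.repositories.sqla import sql_repository


class ExampleRepository(sql_repository.SQLRepository):

    @sql_repository.sql_try_catch_block
    def get_thing(self, thing_id):
        with self._db_engine.connect() as conn:
            # _select_thing_query is a hypothetical prepared select
            row = conn.execute(self._select_thing_query,
                               b_id=thing_id).fetchone()
            if row is None:
                raise exceptions.DoesNotExistException  # passes through
            return dict(row)  # unexpected errors -> RepositoryException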

View File

@ -0,0 +1,124 @@
PRAGMA synchronous = OFF;
PRAGMA journal_mode = MEMORY;
BEGIN TRANSACTION;
CREATE TABLE `alarm_state` (
`name` varchar(20) NOT NULL,
PRIMARY KEY (`name`)
);
CREATE TABLE `alarm_definition_severity` (
`name` varchar(20) NOT NULL,
PRIMARY KEY (`name`)
);
CREATE TABLE `notification_method_type` (
`name` varchar(20) NOT NULL,
PRIMARY KEY (`name`)
);
CREATE TABLE `notification_method` (
`id` varchar(36) NOT NULL,
`tenant_id` varchar(36) NOT NULL,
`name` varchar(250) DEFAULT NULL,
`type` varchar(20) NOT NULL,
`address` varchar(512) DEFAULT NULL,
`created_at` datetime NOT NULL,
`updated_at` datetime NOT NULL,
PRIMARY KEY (`id`)
);
CREATE TABLE `alarm_definition` (
`id` varchar(36) NOT NULL,
`tenant_id` varchar(36) NOT NULL,
`name` varchar(255) NOT NULL DEFAULT '',
`description` varchar(255) DEFAULT NULL,
`expression` longtext NOT NULL,
`severity` varchar(20) NOT NULL,
`match_by` varchar(255) DEFAULT '',
`actions_enabled` tinyint(1) NOT NULL DEFAULT '1',
`created_at` datetime NOT NULL,
`updated_at` datetime NOT NULL,
`deleted_at` datetime DEFAULT NULL,
PRIMARY KEY (`id`)
);
CREATE TABLE `alarm` (
`id` varchar(36) NOT NULL,
`alarm_definition_id` varchar(36) NOT NULL DEFAULT '',
`state` varchar(20) NOT NULL,
`lifecycle_state` varchar(50) DEFAULT NULL,
`link` varchar(512) DEFAULT NULL,
`created_at` datetime NOT NULL,
`state_updated_at` datetime,
`updated_at` datetime NOT NULL,
PRIMARY KEY (`id`)
);
CREATE TABLE `alarm_action` (
`alarm_definition_id` varchar(36) NOT NULL,
`alarm_state` varchar(20) NOT NULL,
`action_id` varchar(36) NOT NULL,
PRIMARY KEY (`alarm_definition_id`,`alarm_state`,`action_id`)
);
CREATE TABLE `alarm_metric` (
`alarm_id` varchar(36) NOT NULL,
`metric_definition_dimensions_id` binary(20) NOT NULL DEFAULT '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0',
PRIMARY KEY (`alarm_id`,`metric_definition_dimensions_id`)
);
CREATE TABLE `metric_definition` (
`id` binary(20) NOT NULL DEFAULT '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0',
`name` varchar(255) NOT NULL,
`tenant_id` varchar(36) NOT NULL,
`region` varchar(255) NOT NULL DEFAULT '',
PRIMARY KEY (`id`)
);
CREATE TABLE `metric_definition_dimensions` (
`id` binary(20) NOT NULL DEFAULT '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0',
`metric_definition_id` binary(20) NOT NULL DEFAULT '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0',
`metric_dimension_set_id` binary(20) NOT NULL DEFAULT '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0',
PRIMARY KEY (`id`)
);
CREATE TABLE `metric_dimension` (
`dimension_set_id` binary(20) NOT NULL DEFAULT '\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0',
`name` varchar(255) NOT NULL DEFAULT '',
`value` varchar(255) NOT NULL DEFAULT ''
);
CREATE TABLE `sub_alarm_definition` (
`id` varchar(36) NOT NULL,
`alarm_definition_id` varchar(36) NOT NULL DEFAULT '',
`function` varchar(10) NOT NULL,
`metric_name` varchar(100) DEFAULT NULL,
`operator` varchar(5) NOT NULL,
`threshold` double NOT NULL,
`period` int(11) NOT NULL,
`periods` int(11) NOT NULL,
`created_at` datetime NOT NULL,
`updated_at` datetime NOT NULL,
PRIMARY KEY (`id`)
);
CREATE TABLE `sub_alarm_definition_dimension` (
`sub_alarm_definition_id` varchar(36) NOT NULL DEFAULT '',
`dimension_name` varchar(255) NOT NULL DEFAULT '',
`value` varchar(255) DEFAULT NULL
);
CREATE TABLE `sub_alarm` (
`id` varchar(36) NOT NULL,
`alarm_id` varchar(36) NOT NULL DEFAULT '',
`sub_expression_id` varchar(36) NOT NULL DEFAULT '',
`expression` longtext NOT NULL,
`created_at` datetime NOT NULL,
`updated_at` datetime NOT NULL,
PRIMARY KEY (`id`)
);
CREATE TABLE `schema_migrations` (
`version` varchar(255) NOT NULL
);
insert into `alarm_state` values ('UNDETERMINED');
insert into `alarm_state` values ('OK');
insert into `alarm_state` values ('ALARM');
insert into `alarm_definition_severity` values ('LOW');
insert into `alarm_definition_severity` values ('MEDIUM');
insert into `alarm_definition_severity` values ('HIGH');
insert into `alarm_definition_severity` values ('CRITICAL');
insert into `notification_method_type` values ('EMAIL');
insert into `notification_method_type` values ('WEBHOOK');
insert into `notification_method_type` values ('PAGERDUTY');
END TRANSACTION;
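The unit tests below feed this file to SQLite through executescript(); the same load works standalone, e.g.:

import sqlite3

with open('monasca_api/tests/sqlite_alarm.sql') as schema_file:
    schema = schema_file.read()

conn = sqlite3.connect(':memory:')
conn.executescript(schema)  # PRAGMAs, DDL and seed rows in one shot
print(conn.execute('SELECT name FROM alarm_state').fetchall())
# [('UNDETERMINED',), ('OK',), ('ALARM',)]
conn.close()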

View File

@ -0,0 +1,875 @@
# Copyright 2015 Cray
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import time
import fixtures
from oslo_config import cfg
from oslo_config import fixture as fixture_config
import testtools
from sqlalchemy import delete, MetaData, insert, bindparam
from monasca_api.common.repositories.sqla import models
CONF = cfg.CONF
class TestAlarmRepoDB(testtools.TestCase, fixtures.TestWithFixtures):
@classmethod
def setUpClass(cls):
from sqlalchemy import engine_from_config
engine = engine_from_config({'url': 'sqlite://'}, prefix='')
with open('monasca_api/tests/sqlite_alarm.sql', 'r') as schema_file:
    qry = schema_file.read()
sconn = engine.raw_connection()
c = sconn.cursor()
c.executescript(qry)
sconn.commit()
c.close()
cls.engine = engine
def _fake_engine_from_config(*args, **kw):
return cls.engine
cls.fixture = fixtures.MonkeyPatch(
'sqlalchemy.create_engine', _fake_engine_from_config)
cls.fixture.setUp()
metadata = MetaData()
cls.aa = models.create_aa_model(metadata)
cls._delete_aa_query = delete(cls.aa)
cls._insert_aa_query = (insert(cls.aa)
.values(
alarm_definition_id=bindparam('alarm_definition_id'),
alarm_state=bindparam('alarm_state'),
action_id=bindparam('action_id')))
cls.ad = models.create_ad_model(metadata)
cls._delete_ad_query = delete(cls.ad)
cls._insert_ad_query = (insert(cls.ad)
.values(
id=bindparam('id'),
tenant_id=bindparam('tenant_id'),
name=bindparam('name'),
severity=bindparam('severity'),
expression=bindparam('expression'),
match_by=bindparam('match_by'),
actions_enabled=bindparam('actions_enabled'),
created_at=bindparam('created_at'),
updated_at=bindparam('updated_at'),
deleted_at=bindparam('deleted_at')))
cls.sad = models.create_sad_model(metadata)
cls._delete_sad_query = delete(cls.sad)
cls._insert_sad_query = (insert(cls.sad)
.values(
id=bindparam('id'),
alarm_definition_id=bindparam('alarm_definition_id'),
function=bindparam('function'),
metric_name=bindparam('metric_name'),
operator=bindparam('operator'),
threshold=bindparam('threshold'),
period=bindparam('period'),
periods=bindparam('periods'),
created_at=bindparam('created_at'),
updated_at=bindparam('updated_at')))
cls.sadd = models.create_sadd_model(metadata)
cls._delete_sadd_query = delete(cls.sadd)
cls._insert_sadd_query = (insert(cls.sadd)
.values(
sub_alarm_definition_id=bindparam('sub_alarm_definition_id'),
dimension_name=bindparam('dimension_name'),
value=bindparam('value')))
cls.nm = models.create_nm_model(metadata)
cls._delete_nm_query = delete(cls.nm)
cls._insert_nm_query = (insert(cls.nm)
.values(
id=bindparam('id'),
tenant_id=bindparam('tenant_id'),
name=bindparam('name'),
type=bindparam('type'),
address=bindparam('address'),
created_at=bindparam('created_at'),
updated_at=bindparam('updated_at')))
cls.a = models.create_a_model(metadata)
cls._delete_a_query = delete(cls.a)
cls._insert_a_query = (insert(cls.a)
.values(
id=bindparam('id'),
alarm_definition_id=bindparam('alarm_definition_id'),
state=bindparam('state'),
lifecycle_state=bindparam('lifecycle_state'),
link=bindparam('link'),
created_at=bindparam('created_at'),
updated_at=bindparam('updated_at'),
state_updated_at=bindparam('state_updated_at')))
cls.sa = models.create_sa_model(metadata)
cls._delete_sa_query = delete(cls.sa)
cls._insert_sa_query = (insert(cls.sa)
.values(
id=bindparam('id'),
sub_expression_id=bindparam('sub_expression_id'),
alarm_id=bindparam('alarm_id'),
expression=bindparam('expression'),
created_at=bindparam('created_at'),
updated_at=bindparam('updated_at')))
cls.am = models.create_am_model(metadata)
cls._delete_am_query = delete(cls.am)
cls._insert_am_query = (insert(cls.am)
.values(
alarm_id=bindparam('alarm_id'),
metric_definition_dimensions_id=bindparam(
'metric_definition_dimensions_id')))
cls.md = models.create_md_model(metadata)
cls._delete_md_query = delete(cls.md)
cls._insert_md_query = (insert(cls.md)
.values(
dimension_set_id=bindparam('dimension_set_id'),
name=bindparam('name'),
value=bindparam('value')))
cls.mdd = models.create_mdd_model(metadata)
cls._delete_mdd_query = delete(cls.mdd)
cls._insert_mdd_query = (insert(cls.mdd)
.values(
id=bindparam('id'),
metric_definition_id=bindparam('metric_definition_id'),
metric_dimension_set_id=bindparam('metric_dimension_set_id')))
cls.mde = models.create_mde_model(metadata)
cls._delete_mde_query = delete(cls.mde)
cls._insert_mde_query = (insert(cls.mde)
.values(
id=bindparam('id'),
name=bindparam('name'),
tenant_id=bindparam('tenant_id'),
region=bindparam('region')))
@classmethod
def tearDownClass(cls):
cls.fixture.cleanUp()
def setUp(self):
super(TestAlarmRepoDB, self).setUp()
self._fixture_config = self.useFixture(
fixture_config.Config(cfg.CONF))
self._fixture_config.config(url='sqlite://',
group='database')
from monasca_api.common.repositories.sqla import alarms_repository as ar
self.repo = ar.AlarmsRepository()
timestamp1 = datetime.datetime(2015, 3, 14, 9, 26, 53)
timestamp2 = datetime.datetime(2015, 3, 14, 9, 26, 54)
timestamp3 = datetime.datetime(2015, 3, 14, 9, 26, 55)
timestamp4 = datetime.datetime(2015, 3, 15, 9, 26, 53)
self.default_as = [{'id': '1',
'alarm_definition_id': '1',
'state': 'OK',
'lifecycle_state': 'OPEN',
'link': 'http://somesite.com/this-alarm-info',
'created_at': timestamp1,
'updated_at': timestamp1,
'state_updated_at': timestamp1},
{'id': '2',
'alarm_definition_id': '1',
'state': 'UNDETERMINED',
'lifecycle_state': 'OPEN',
'link': 'http://somesite.com/this-alarm-info',
'created_at': timestamp2,
'updated_at': timestamp2,
'state_updated_at': timestamp2},
{'id': '3',
'alarm_definition_id': '1',
'state': 'ALARM',
'lifecycle_state': None,
'link': 'http://somesite.com/this-alarm-info',
'created_at': timestamp3,
'updated_at': timestamp3,
'state_updated_at': timestamp3},
{'id': '234111',
'alarm_definition_id': '234',
'state': 'UNDETERMINED',
'lifecycle_state': None,
'link': None,
'created_at': timestamp4,
'updated_at': timestamp4,
'state_updated_at': timestamp4}]
self.default_ads = [{'id': '1',
'tenant_id': 'bob',
'name': '90% CPU',
'severity': 'LOW',
'expression': 'AVG(cpu.idle_perc{flavor_id=777,'
' image_id=888, device=1}) > 10',
'match_by': 'flavor_id,image_id',
'actions_enabled': False,
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now(),
'deleted_at': None},
{'id': '234',
'tenant_id': 'bob',
'name': '50% CPU',
'severity': 'LOW',
'expression': 'AVG(cpu.sys_mem'
'{service=monitoring})'
' > 20 and AVG(cpu.idle_perc'
'{service=monitoring}) < 10',
'match_by': 'hostname,region',
'actions_enabled': False,
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now(),
'deleted_at': None}]
self.default_sadds = [{'sub_alarm_definition_id': '111',
'dimension_name': 'flavor_id',
'value': '777'},
{'sub_alarm_definition_id': '111',
'dimension_name': 'image_id',
'value': '888'},
{'sub_alarm_definition_id': '111',
'dimension_name': 'metric_name',
'value': 'cpu'},
{'sub_alarm_definition_id': '111',
'dimension_name': 'device',
'value': '1'},
{'sub_alarm_definition_id': '222',
'dimension_name': 'flavor_id',
'value': '777'},
{'sub_alarm_definition_id': '222',
'dimension_name': 'image_id',
'value': '888'},
{'sub_alarm_definition_id': '222',
'dimension_name': 'metric_name',
'value': 'mem'}]
self.default_nms = [{'id': '29387234',
'tenant_id': 'alarm-test',
'name': 'MyEmail',
'type': 'EMAIL',
'address': 'a@b',
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()},
{'id': '77778687',
'tenant_id': 'alarm-test',
'name': 'OtherEmail',
'type': 'EMAIL',
'address': 'a@b',
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()}]
self.default_aas = [{'alarm_definition_id': '123',
'alarm_state': 'ALARM',
'action_id': '29387234'},
{'alarm_definition_id': '123',
'alarm_state': 'ALARM',
'action_id': '77778687'},
{'alarm_definition_id': '234',
'alarm_state': 'ALARM',
'action_id': '29387234'},
{'alarm_definition_id': '234',
'alarm_state': 'ALARM',
'action_id': '77778687'}]
self.default_sads = [{'id': '43',
'alarm_definition_id': '234',
'function': 'f_43',
'metric_name': 'm_43',
'operator': 'GT',
'threshold': 0,
'period': 1,
'periods': 2,
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()},
{'id': '45',
'alarm_definition_id': '234',
'function': 'f_45',
'metric_name': 'm_45',
'operator': 'GT',
'threshold': 0,
'period': 1,
'periods': 2,
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()},
{'id': '47',
'alarm_definition_id': '234',
'function': 'f_47',
'metric_name': 'm_47',
'operator': 'GT',
'threshold': 0,
'period': 1,
'periods': 2,
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()},
{'id': '8484',
'alarm_definition_id': '234',
'function': 'f_49',
'metric_name': 'm_49',
'operator': 'GT',
'threshold': 0,
'period': 1,
'periods': 2,
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()},
{'id': '8686',
'alarm_definition_id': '234',
'function': 'f_51',
'metric_name': 'm_51',
'operator': 'GT',
'threshold': 0,
'period': 1,
'periods': 2,
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()}]
self.default_sas = [{'sub_expression_id': '43',
'id': '42',
'alarm_id': '1',
'expression': 'avg(cpu.idle_perc{flavor_id=777,'
' image_id=888, device=1}) > 10',
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()},
{'sub_expression_id': '45',
'id': '43',
'alarm_id': '2',
'expression': 'avg(cpu.idle_perc{flavor_id=777,'
' image_id=888, device=1}) > 10',
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()},
{'sub_expression_id': '47',
'id': '44',
'alarm_id': '3',
'expression': 'avg(cpu.idle_perc{flavor_id=777,'
' image_id=888, device=1}) > 10',
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()}]
self.default_ams = [{'alarm_id': '1',
'metric_definition_dimensions_id': '11'},
{'alarm_id': '1',
'metric_definition_dimensions_id': '22'},
{'alarm_id': '2',
'metric_definition_dimensions_id': '11'},
{'alarm_id': '3',
'metric_definition_dimensions_id': '22'},
{'alarm_id': '234111',
'metric_definition_dimensions_id': '31'},
{'alarm_id': '234111',
'metric_definition_dimensions_id': '32'}]
self.default_mdes = [{'id': '1',
'name': 'cpu.idle_perc',
'tenant_id': 'bob',
'region': 'west'},
{'id': '111',
'name': 'cpu.sys_mem',
'tenant_id': 'bob',
'region': 'west'},
{'id': '112',
'name': 'cpu.idle_perc',
'tenant_id': 'bob',
'region': 'west'}]
self.default_mdds = [{'id': '11',
'metric_definition_id': '1',
'metric_dimension_set_id': '1'},
{'id': '22',
'metric_definition_id': '1',
'metric_dimension_set_id': '2'},
{'id': '31',
'metric_definition_id': '111',
'metric_dimension_set_id': '21'},
{'id': '32',
'metric_definition_id': '112',
'metric_dimension_set_id': '22'}]
self.default_mds = [{'dimension_set_id': '1',
'name': 'instance_id',
'value': '123'},
{'dimension_set_id': '1',
'name': 'service',
'value': 'monitoring'},
{'dimension_set_id': '2',
'name': 'flavor_id',
'value': '222'},
{'dimension_set_id': '21',
'name': 'service',
'value': 'monitoring'},
{'dimension_set_id': '22',
'name': 'service',
'value': 'monitoring'},
{'dimension_set_id': '21',
'name': 'hostname',
'value': 'roland'},
{'dimension_set_id': '22',
'name': 'hostname',
'value': 'roland'},
{'dimension_set_id': '21',
'name': 'region',
'value': 'colorado'},
{'dimension_set_id': '22',
'name': 'region',
'value': 'colorado'},
{'dimension_set_id': '22',
'name': 'extra',
'value': 'vivi'}]
self.alarm1 = {'alarm_definition': {'id': '1',
'name': '90% CPU',
'severity': 'LOW'},
'created_timestamp': '2015-03-14T09:26:53Z',
'id': '1',
'lifecycle_state': 'OPEN',
'link': 'http://somesite.com/this-alarm-info',
'metrics': [{'dimensions': {'instance_id': '123',
'service': 'monitoring'},
'name': 'cpu.idle_perc'},
{'dimensions': {'flavor_id': '222'},
'name': 'cpu.idle_perc'}],
'state': 'OK',
'state_updated_timestamp': '2015-03-14T09:26:53Z',
'updated_timestamp': '2015-03-14T09:26:53Z'}
self.alarm2 = {'alarm_definition': {'id': '1',
'name': '90% CPU',
'severity': 'LOW'},
'created_timestamp': '2015-03-14T09:26:54Z',
'id': '2',
'lifecycle_state': 'OPEN',
'link': 'http://somesite.com/this-alarm-info',
'metrics': [{'dimensions': {'instance_id': '123',
'service': 'monitoring'},
'name': 'cpu.idle_perc'}],
'state': 'UNDETERMINED',
'state_updated_timestamp': '2015-03-14T09:26:54Z',
'updated_timestamp': '2015-03-14T09:26:54Z'}
self.alarm_compound = {'alarm_definition': {'id': '234',
'name': '50% CPU',
'severity': 'LOW'},
'created_timestamp': '2015-03-15T09:26:53Z',
'id': '234111',
'lifecycle_state': None,
'link': None,
'metrics': [
{'dimensions': {'hostname': 'roland',
'region': 'colorado',
'service': 'monitoring'},
'name': 'cpu.sys_mem'},
{'dimensions': {'extra': 'vivi',
'hostname': 'roland',
'region': 'colorado',
'service': 'monitoring'},
'name': 'cpu.idle_perc'}],
'state': 'UNDETERMINED',
'state_updated_timestamp':
'2015-03-15T09:26:53Z',
'updated_timestamp': '2015-03-15T09:26:53Z'}
self.alarm3 = {'alarm_definition': {'id': '1',
'name': '90% CPU',
'severity': 'LOW'},
'created_timestamp': '2015-03-14T09:26:55Z',
'id': '3',
'lifecycle_state': None,
'link': 'http://somesite.com/this-alarm-info',
'metrics': [{'dimensions': {'flavor_id': '222'},
'name': 'cpu.idle_perc'}],
'state': 'ALARM',
'state_updated_timestamp': '2015-03-14T09:26:55Z',
'updated_timestamp': '2015-03-14T09:26:55Z'}
with self.engine.begin() as conn:
conn.execute(self._delete_am_query)
conn.execute(self._insert_am_query, self.default_ams)
conn.execute(self._delete_md_query)
conn.execute(self._insert_md_query, self.default_mds)
conn.execute(self._delete_mdd_query)
conn.execute(self._insert_mdd_query, self.default_mdds)
conn.execute(self._delete_a_query)
conn.execute(self._insert_a_query, self.default_as)
conn.execute(self._delete_sa_query)
conn.execute(self._insert_sa_query, self.default_sas)
conn.execute(self._delete_mde_query)
conn.execute(self._insert_mde_query, self.default_mdes)
conn.execute(self._delete_ad_query)
conn.execute(self._insert_ad_query, self.default_ads)
conn.execute(self._delete_sad_query)
conn.execute(self._insert_sad_query, self.default_sads)
conn.execute(self._delete_sadd_query)
conn.execute(self._insert_sadd_query, self.default_sadds)
conn.execute(self._delete_nm_query)
conn.execute(self._insert_nm_query, self.default_nms)
conn.execute(self._delete_aa_query)
conn.execute(self._insert_aa_query, self.default_aas)
def helper_builder_result(self, alarm_rows):
result = []
if not alarm_rows:
return result
# Forward declaration
alarm = {}
prev_alarm_id = None
for alarm_row in alarm_rows:
if prev_alarm_id != alarm_row['alarm_id']:
if prev_alarm_id is not None:
result.append(alarm)
ad = {u'id': alarm_row['alarm_definition_id'],
u'name': alarm_row['alarm_definition_name'],
u'severity': alarm_row['severity'], }
metrics = []
alarm = {u'id': alarm_row['alarm_id'], u'metrics': metrics,
u'state': alarm_row['state'],
u'lifecycle_state': alarm_row['lifecycle_state'],
u'link': alarm_row['link'],
u'state_updated_timestamp':
alarm_row['state_updated_timestamp'].isoformat() +
'Z',
u'updated_timestamp':
alarm_row['updated_timestamp'].isoformat() + 'Z',
u'created_timestamp':
alarm_row['created_timestamp'].isoformat() + 'Z',
u'alarm_definition': ad}
prev_alarm_id = alarm_row['alarm_id']
dimensions = {}
metric = {u'name': alarm_row['metric_name'],
u'dimensions': dimensions}
if alarm_row['metric_dimensions']:
for dimension in alarm_row['metric_dimensions'].split(','):
parsed_dimension = dimension.split('=')
dimensions[parsed_dimension[0]] = parsed_dimension[1]
metrics.append(metric)
result.append(alarm)
return result
def test_should_delete(self):
tenant_id = 'bob'
alarm_id = '1'
alarm1 = self.repo.get_alarm(tenant_id, alarm_id)
alarm1 = self.helper_builder_result(alarm1)
self.assertEqual(alarm1[0], self.alarm1)
self.repo.delete_alarm(tenant_id, alarm_id)
from monasca_api.common.repositories import exceptions
self.assertRaises(exceptions.DoesNotExistException,
self.repo.get_alarm, tenant_id, alarm_id)
def test_should_throw_exception_on_delete(self):
tenant_id = 'bob'
from monasca_api.common.repositories import exceptions
self.assertRaises(exceptions.DoesNotExistException,
self.repo.delete_alarm, tenant_id, 'Not an alarm ID')
def test_should_find_alarm_def(self):
tenant_id = 'bob'
alarm_id = '1'
expected = {'actions_enabled': False,
'deleted_at': None,
'description': None,
'expression': 'AVG(cpu.idle_perc{flavor_id=777,'
' image_id=888, device=1}) > 10',
'id': '1',
'match_by': 'flavor_id,image_id',
'name': '90% CPU',
'severity': 'LOW',
'tenant_id': 'bob'}
alarm_def = self.repo.get_alarm_definition(tenant_id, alarm_id)
expected['created_at'] = alarm_def['created_at']
expected['updated_at'] = alarm_def['updated_at']
self.assertEqual(alarm_def, expected)
from monasca_api.common.repositories import exceptions
self.assertRaises(exceptions.DoesNotExistException,
self.repo.get_alarm_definition,
tenant_id, 'Not an alarm ID')
def test_should_find(self):
res = self.repo.get_alarms(tenant_id='Not a tenant id', limit=1)
self.assertEqual(res, [])
tenant_id = 'bob'
res = self.repo.get_alarms(tenant_id=tenant_id, limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm_compound,
self.alarm3]
self.assertEqual(res, expected)
alarm_def_id = self.alarm_compound['alarm_definition']['id']
query_parms = {'alarm_definition_id': alarm_def_id}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
query_parms = {'metric_name': 'cpu.sys_mem'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
query_parms = {'metric_name': 'cpu.idle_perc'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm_compound,
self.alarm3]
self.assertEqual(res, expected)
query_parms = {'metric_name': 'cpu.idle_perc',
'metric_dimensions': ['flavor_id:222']}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm3]
self.assertEqual(res, expected)
query_parms = {'metric_name': 'cpu.idle_perc',
'metric_dimensions': ['service:monitoring', 'hostname:roland']}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
query_parms = {'state': 'UNDETERMINED'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm2,
self.alarm_compound]
self.assertEqual(res, expected)
alarm_def_id = self.alarm1['alarm_definition']['id']
query_parms = {'metric_name': 'cpu.idle_perc',
'metric_dimensions': ['service:monitoring'],
'alarm_definition_id': alarm_def_id}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2]
self.assertEqual(res, expected)
alarm_def_id = self.alarm1['alarm_definition']['id']
query_parms = {'metric_name': 'cpu.idle_perc',
'alarm_definition_id': alarm_def_id}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm3]
self.assertEqual(res, expected)
alarm_def_id = self.alarm_compound['alarm_definition']['id']
query_parms = {'alarm_definition_id': alarm_def_id,
'state': 'UNDETERMINED'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
query_parms = {'metric_name': 'cpu.sys_mem',
'state': 'UNDETERMINED'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
query_parms = {'metric_name': 'cpu.idle_perc',
'metric_dimensions': ['service:monitoring'],
'state': 'UNDETERMINED'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm2,
self.alarm_compound]
self.assertEqual(res, expected)
time_now = datetime.datetime.now().isoformat() + 'Z'
query_parms = {'metric_name': 'cpu.idle_perc',
'metric_dimensions': ['service:monitoring'],
'state': 'UNDETERMINED',
'state_updated_start_time': time_now}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = []
self.assertEqual(res, expected)
time_now = '2015-03-15T00:00:00.0Z'
query_parms = {'state_updated_start_time': time_now}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm_compound]
self.assertEqual(res, expected)
time_now = '2015-03-14T00:00:00.0Z'
query_parms = {'state_updated_start_time': time_now}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=1000)
res = self.helper_builder_result(res)
expected = [self.alarm1,
self.alarm2,
self.alarm_compound,
self.alarm3]
self.assertEqual(res, expected)
query_parms = {'state_updated_start_time': time_now,
'link': 'http://google.com',
'lifecycle_state': 'OPEN'}
res = self.repo.get_alarms(tenant_id=tenant_id,
query_parms=query_parms,
limit=None,
offset='10')
res = self.helper_builder_result(res)
expected = []
self.assertEqual(res, expected)
def test_should_update(self):
tenant_id = 'bob'
alarm_id = '2'
alarm = self.repo.get_alarm(tenant_id, alarm_id)
alarm = self.helper_builder_result(alarm)[0]
original_state_updated_date = alarm['state_updated_timestamp']
original_updated_timestamp = alarm['updated_timestamp']
self.assertEqual(alarm['state'], 'UNDETERMINED')
prev_state, _ = self.repo.update_alarm(tenant_id, alarm_id, 'OK', None, None)
alarm_new = self.repo.get_alarm(tenant_id, alarm_id)
alarm_new = self.helper_builder_result(alarm_new)[0]
new_state_updated_date = alarm_new['state_updated_timestamp']
new_updated_timestamp = alarm_new['updated_timestamp']
self.assertNotEqual(original_updated_timestamp,
new_updated_timestamp,
'updated_at did not change')
self.assertNotEqual(original_state_updated_date,
new_state_updated_date,
'state_updated_at did not change')
alarm_tmp = tuple(alarm[k] for k in ('state', 'link', 'lifecycle_state'))
self.assertEqual(alarm_tmp, prev_state)
alarm['state_updated_timestamp'] = alarm_new['state_updated_timestamp']
alarm['updated_timestamp'] = alarm_new['updated_timestamp']
alarm['state'] = alarm_new['state']
alarm['link'] = alarm_new['link']
alarm['lifecycle_state'] = alarm_new['lifecycle_state']
self.assertEqual(alarm, alarm_new)
time.sleep(1)
prev_state, _ = self.repo.update_alarm(tenant_id, alarm_id, 'OK', None, None)
alarm_unchanged = self.repo.get_alarm(tenant_id, alarm_id)
alarm_unchanged = self.helper_builder_result(alarm_unchanged)[0]
unchanged_state_updated_date = alarm_unchanged['state_updated_timestamp']
unchanged_updated_timestamp = alarm_unchanged['updated_timestamp']
self.assertNotEqual(unchanged_updated_timestamp,
new_updated_timestamp,
'updated_at did not change')
self.assertEqual(unchanged_state_updated_date,
new_state_updated_date,
'state_updated_at did change')
alarm_new_tmp = tuple(alarm_new[k] for k in ('state', 'link', 'lifecycle_state'))
self.assertEqual(alarm_new_tmp, prev_state)
def test_should_throw_exception_on_update(self):
tenant_id = 'bob'
alarm_id = 'Not real alarm id'
from monasca_api.common.repositories import exceptions
self.assertRaises(exceptions.DoesNotExistException,
self.repo.update_alarm,
tenant_id,
alarm_id,
'UNDETERMINED',
None,
None)
def test_get_alarm_metrics(self):
alarm_id = '2'
alarm_metrics = self.repo.get_alarm_metrics(alarm_id)
expected = [{'alarm_id': '2',
'dimensions': 'instance_id=123,service=monitoring',
'name': 'cpu.idle_perc'}]
self.assertEqual(alarm_metrics, expected)
def test_get_subalarms(self):
tenant_id = 'bob'
alarm_id = '2'
sub_alarms = self.repo.get_sub_alarms(tenant_id, alarm_id)
expected = [{'alarm_definition_id': '1',
'alarm_id': '2',
'expression': 'avg(cpu.idle_perc{flavor_id=777, image_id=888, device=1}) > 10',
'sub_alarm_id': '43'}]
self.assertEqual(sub_alarms, expected)

View File

@ -0,0 +1,685 @@
# Copyright 2015 Cray
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import fixtures
from oslo_config import cfg
from oslo_config import fixture as fixture_config
from sqlalchemy import delete, MetaData, insert, bindparam, select, func
import testtools
from monasca_api.common.repositories.sqla import models
CONF = cfg.CONF
class TestAlarmDefinitionRepoDB(testtools.TestCase, fixtures.TestWithFixtures):
@classmethod
def setUpClass(cls):
from sqlalchemy import engine_from_config
engine = engine_from_config({'url': 'sqlite://'}, prefix='')
with open('monasca_api/tests/sqlite_alarm.sql', 'r') as schema_file:
    qry = schema_file.read()
sconn = engine.raw_connection()
c = sconn.cursor()
c.executescript(qry)
sconn.commit()
c.close()
cls.engine = engine
def _fake_engine_from_config(*args, **kw):
return cls.engine
cls.fixture = fixtures.MonkeyPatch(
'sqlalchemy.create_engine', _fake_engine_from_config)
cls.fixture.setUp()
metadata = MetaData()
cls.aa = models.create_aa_model(metadata)
cls._delete_aa_query = delete(cls.aa)
cls._insert_aa_query = (insert(cls.aa)
.values(
alarm_definition_id=bindparam('alarm_definition_id'),
alarm_state=bindparam('alarm_state'),
action_id=bindparam('action_id')))
cls.ad = models.create_ad_model(metadata)
cls._delete_ad_query = delete(cls.ad)
cls._insert_ad_query = (insert(cls.ad)
.values(
id=bindparam('id'),
tenant_id=bindparam('tenant_id'),
name=bindparam('name'),
severity=bindparam('severity'),
expression=bindparam('expression'),
match_by=bindparam('match_by'),
actions_enabled=bindparam('actions_enabled'),
created_at=bindparam('created_at'),
updated_at=bindparam('updated_at'),
deleted_at=bindparam('deleted_at')))
cls.sad = models.create_sad_model(metadata)
cls._delete_sad_query = delete(cls.sad)
cls._insert_sad_query = (insert(cls.sad)
.values(
id=bindparam('id'),
alarm_definition_id=bindparam('alarm_definition_id'),
function=bindparam('function'),
metric_name=bindparam('metric_name'),
operator=bindparam('operator'),
threshold=bindparam('threshold'),
period=bindparam('period'),
periods=bindparam('periods'),
created_at=bindparam('created_at'),
updated_at=bindparam('updated_at')))
cls.sadd = models.create_sadd_model(metadata)
cls._delete_sadd_query = delete(cls.sadd)
cls._insert_sadd_query = (insert(cls.sadd)
.values(
sub_alarm_definition_id=bindparam('sub_alarm_definition_id'),
dimension_name=bindparam('dimension_name'),
value=bindparam('value')))
cls.nm = models.create_nm_model(metadata)
cls._delete_nm_query = delete(cls.nm)
cls._insert_nm_query = (insert(cls.nm)
.values(
id=bindparam('id'),
tenant_id=bindparam('tenant_id'),
name=bindparam('name'),
type=bindparam('type'),
address=bindparam('address'),
created_at=bindparam('created_at'),
updated_at=bindparam('updated_at')))
@classmethod
def tearDownClass(cls):
cls.fixture.cleanUp()
def setUp(self):
super(TestAlarmDefinitionRepoDB, self).setUp()
self._fixture_config = self.useFixture(
fixture_config.Config(cfg.CONF))
self._fixture_config.config(url='sqlite://',
group='database')
from monasca_api.common.repositories.sqla import alarm_definitions_repository as adr
self.repo = adr.AlarmDefinitionsRepository()
self.default_ads = [{'id': '123',
'tenant_id': 'bob',
'name': '90% CPU',
'severity': 'LOW',
'expression': 'AVG(hpcs.compute{flavor_id=777,'
' image_id=888, metric_name=cpu, device=1}) > 10',
'match_by': 'flavor_id,image_id',
'actions_enabled': False,
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now(),
'deleted_at': None},
{'id': '234',
'tenant_id': 'bob',
'name': '50% CPU',
'severity': 'LOW',
'expression': 'AVG(hpcs.compute{flavor_id=777, '
'image_id=888, metric_name=mem}) > 20'
' and AVG(hpcs.compute) < 100',
'match_by': 'flavor_id,image_id',
'actions_enabled': False,
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now(),
'deleted_at': None}]
self.default_sads = [{'id': '111',
'alarm_definition_id': '123',
'function': 'AVG',
'metric_name': 'hpcs.compute',
'operator': 'GT',
'threshold': 10,
'period': 60,
'periods': 1,
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()},
{'id': '222',
'alarm_definition_id': '234',
'function': 'AVG',
'metric_name': 'hpcs.compute',
'operator': 'GT',
'threshold': 20,
'period': 60,
'periods': 1,
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()},
{'id': '223',
'alarm_definition_id': '234',
'function': 'AVG',
'metric_name': 'hpcs.compute',
'operator': 'LT',
'threshold': 100,
'period': 60,
'periods': 1,
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()},
]
self.default_sadds = [{'sub_alarm_definition_id': '111',
'dimension_name': 'flavor_id',
'value': '777'},
{'sub_alarm_definition_id': '111',
'dimension_name': 'image_id',
'value': '888'},
{'sub_alarm_definition_id': '111',
'dimension_name': 'metric_name',
'value': 'cpu'},
{'sub_alarm_definition_id': '111',
'dimension_name': 'device',
'value': '1'},
{'sub_alarm_definition_id': '222',
'dimension_name': 'flavor_id',
'value': '777'},
{'sub_alarm_definition_id': '222',
'dimension_name': 'image_id',
'value': '888'},
{'sub_alarm_definition_id': '222',
'dimension_name': 'metric_name',
'value': 'mem'}]
self.default_nms = [{'id': '29387234',
'tenant_id': 'alarm-test',
'name': 'MyEmail',
'type': 'EMAIL',
'address': 'a@b',
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()},
{'id': '77778687',
'tenant_id': 'alarm-test',
'name': 'OtherEmail',
'type': 'EMAIL',
'address': 'a@b',
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()}]
self.default_aas = [{'alarm_definition_id': '123',
'alarm_state': 'ALARM',
'action_id': '29387234'},
{'alarm_definition_id': '123',
'alarm_state': 'ALARM',
'action_id': '77778687'},
{'alarm_definition_id': '234',
'alarm_state': 'ALARM',
'action_id': '29387234'},
{'alarm_definition_id': '234',
'alarm_state': 'ALARM',
'action_id': '77778687'}]
with self.engine.begin() as conn:
conn.execute(self._delete_ad_query)
conn.execute(self._insert_ad_query, self.default_ads)
conn.execute(self._delete_sad_query)
conn.execute(self._insert_sad_query, self.default_sads)
conn.execute(self._delete_sadd_query)
conn.execute(self._insert_sadd_query, self.default_sadds)
conn.execute(self._delete_nm_query)
conn.execute(self._insert_nm_query, self.default_nms)
conn.execute(self._delete_aa_query)
conn.execute(self._insert_aa_query, self.default_aas)
def test_should_create(self):
expression = ('AVG(hpcs.compute{flavor_id=777, image_id=888,'
' metric_name=cpu}) > 10')
description = ''
match_by = ['flavor_id', 'image_id']
from monasca_api.expression_parser.alarm_expr_parser import AlarmExprParser
sub_expr_list = (AlarmExprParser(expression).sub_expr_list)
alarm_actions = ['29387234', '77778687']
alarmA_id = self.repo.create_alarm_definition('555',
'90% CPU',
expression,
sub_expr_list,
description,
'LOW',
match_by,
alarm_actions,
None,
None)
alarmB = self.repo.get_alarm_definition('555', alarmA_id)
self.assertEqual(alarmA_id, alarmB['id'])
query_sad = (select([self.sad.c.id])
.select_from(self.sad)
.where(self.sad.c.alarm_definition_id == alarmA_id))
query_sadd = (select([func.count()])
.select_from(self.sadd)
.where(self.sadd.c.sub_alarm_definition_id == bindparam('id')))
with self.engine.connect() as conn:
count_sad = conn.execute(query_sad).fetchall()
self.assertEqual(len(count_sad), 1)
count_sadd = conn.execute(query_sadd, id=count_sad[0][0]).fetchone()
self.assertEqual(count_sadd[0], 3)
def test_should_update(self):
expression = ''.join(['AVG(hpcs.compute{flavor_id=777, image_id=888,',
' metric_name=mem}) > 20 and',
' AVG(hpcs.compute) < 100'])
description = ''
match_by = ['flavor_id', 'image_id']
from monasca_api.expression_parser.alarm_expr_parser import AlarmExprParser
sub_expr_list = (AlarmExprParser(expression).sub_expr_list)
alarm_actions = ['29387234', '77778687']
self.repo.update_or_patch_alarm_definition('bob', '234',
'90% CPU', expression,
sub_expr_list, False,
description, alarm_actions,
None, None,
match_by, 'LOW')
alarm = self.repo.get_alarm_definition('bob', '234')
expected = {'actions_enabled': False,
'alarm_actions': '29387234,77778687',
'description': '',
'expression': 'AVG(hpcs.compute{flavor_id=777, '
'image_id=888, metric_name=mem}) > 20 and'
' AVG(hpcs.compute) < 100',
'id': '234',
'match_by': 'flavor_id,image_id',
'name': '90% CPU',
'ok_actions': None,
'severity': 'LOW',
'undetermined_actions': None}
self.assertEqual(alarm, expected)
sub_alarms = self.repo.get_sub_alarm_definitions('234')
expected = [{'alarm_definition_id': '234',
'dimensions': 'flavor_id=777,image_id=888,'
'metric_name=mem',
'function': 'AVG',
'id': '222',
'metric_name': 'hpcs.compute',
'operator': 'GT',
'period': 60,
'periods': 1,
'threshold': 20.0},
{'alarm_definition_id': '234',
'dimensions': None,
'function': 'AVG',
'id': '223',
'metric_name': 'hpcs.compute',
'operator': 'LT',
'period': 60,
'periods': 1,
'threshold': 100.0}]
self.assertEqual(len(sub_alarms), len(expected))
for s, e in zip(sub_alarms, expected):
e['created_at'] = s['created_at']
e['updated_at'] = s['updated_at']
self.assertEqual(sub_alarms, expected)
am = self.repo.get_alarm_metrics('bob', '234')
self.assertEqual(am, [])
sub_alarms = self.repo.get_sub_alarms('bob', '234')
self.assertEqual(sub_alarms, [])
ads = self.repo.get_alarm_definitions('bob', '90% CPU', {'image_id': '888'}, None, None, 0, 100)
expected = [{'actions_enabled': False,
'alarm_actions': '29387234,77778687',
'description': None,
'expression': 'AVG(hpcs.compute{flavor_id=777, '
'image_id=888, metric_name=cpu, device=1}) > 10',
'id': '123',
'match_by': 'flavor_id,image_id',
'name': '90% CPU',
'ok_actions': None,
'severity': 'LOW',
'undetermined_actions': None},
{'actions_enabled': False,
'alarm_actions': '29387234,77778687',
'description': '',
'expression': 'AVG(hpcs.compute{flavor_id=777, '
'image_id=888, metric_name=mem}) > 20 and'
' AVG(hpcs.compute) < 100',
'id': '234',
'match_by': 'flavor_id,image_id',
'name': '90% CPU',
'ok_actions': None,
'severity': 'LOW',
'undetermined_actions': None}]
self.assertEqual(ads, expected)
ads = self.repo.get_alarm_definitions('bob', '90% CPU', {'image_id': '888'}, 'LOW', None, 0, 100)
expected = [{'actions_enabled': False,
'alarm_actions': '29387234,77778687',
'description': None,
'expression': 'AVG(hpcs.compute{flavor_id=777, '
'image_id=888, metric_name=cpu, device=1}) > 10',
'id': '123',
'match_by': 'flavor_id,image_id',
'name': '90% CPU',
'ok_actions': None,
'severity': 'LOW',
'undetermined_actions': None},
{'actions_enabled': False,
'alarm_actions': '29387234,77778687',
'description': '',
'expression': 'AVG(hpcs.compute{flavor_id=777, '
'image_id=888, metric_name=mem}) > 20 and'
' AVG(hpcs.compute) < 100',
'id': '234',
'match_by': 'flavor_id,image_id',
'name': '90% CPU',
'ok_actions': None,
'severity': 'LOW',
'undetermined_actions': None}]
self.assertEqual(ads, expected)
ads = self.repo.get_alarm_definitions('bob', '90% CPU', {'image_id': '888'}, 'CRITICAL', None, 0, 100)
expected = []
self.assertEqual(ads, expected)
self.repo.update_or_patch_alarm_definition('bob', '234',
'90% CPU', None,
sub_expr_list, False,
description, alarm_actions,
None, None,
match_by, 'LOW')
self.repo.update_or_patch_alarm_definition('bob', '234',
None, None,
None, True,
None, None,
None, None,
None, None,
True)
self.repo.update_or_patch_alarm_definition('bob', '234',
None, None,
None, None,
None, None,
None, None,
None, None,
True)
self.repo.update_or_patch_alarm_definition('bob', '234',
None, None,
None, None,
None, [],
[], [],
None, None,
True)
from monasca_api.common.repositories import exceptions
self.assertRaises(exceptions.InvalidUpdateException,
self.repo.update_or_patch_alarm_definition,
'bob', '234',
None, None,
None, False,
None, None,
None, None,
None, None,
False)
self.repo.delete_alarm_definition('bob', '234')
self.assertRaises(exceptions.DoesNotExistException,
self.repo.get_alarm_definition, 'bob', '234')
def test_should_find_by_id(self):
alarmDef1 = self.repo.get_alarm_definition('bob', '123')
expected = {'actions_enabled': False,
'alarm_actions': '29387234,77778687',
'description': None,
'expression': 'AVG(hpcs.compute{flavor_id=777, '
'image_id=888, metric_name=cpu, device=1}) > 10',
'id': '123',
'match_by': 'flavor_id,image_id',
'name': '90% CPU',
'ok_actions': None,
'severity': 'LOW',
'undetermined_actions': None}
self.assertEqual(alarmDef1, expected)
with self.engine.begin() as conn:
conn.execute(self._delete_aa_query)
alarmDef2 = self.repo.get_alarm_definition('bob', '123')
expected['alarm_actions'] = None
self.assertEqual(alarmDef2, expected)
def test_should_find_sub_alarm_metric_definitions(self):
sub_alarms = self.repo.get_sub_alarm_definitions('123')
expected = [{'alarm_definition_id': '123',
'dimensions': 'flavor_id=777,image_id=888,'
'metric_name=cpu,device=1',
'function': 'AVG',
'id': '111',
'metric_name': 'hpcs.compute',
'operator': 'GT',
'period': 60,
'periods': 1,
'threshold': 10.0}]
self.assertEqual(len(sub_alarms), len(expected))
for s, e in zip(sub_alarms, expected):
e['created_at'] = s['created_at']
e['updated_at'] = s['updated_at']
self.assertEqual(sub_alarms, expected)
sub_alarms = self.repo.get_sub_alarm_definitions('234')
expected = [{'alarm_definition_id': '234',
'dimensions': 'flavor_id=777,image_id=888,metric_name=mem',
'function': 'AVG',
'id': '222',
'metric_name': 'hpcs.compute',
'operator': 'GT',
'period': 60,
'periods': 1,
'threshold': 20.0},
{'alarm_definition_id': '234',
'dimensions': None,
'function': 'AVG',
'id': '223',
'metric_name': 'hpcs.compute',
'operator': 'LT',
'period': 60,
'periods': 1,
'threshold': 100.0}]
self.assertEqual(len(sub_alarms), len(expected))
for s, e in zip(sub_alarms, expected):
e['created_at'] = s['created_at']
e['updated_at'] = s['updated_at']
self.assertEqual(sub_alarms, expected)
sub_alarms = self.repo.get_sub_alarm_definitions('asdfasdf')
self.assertEqual(sub_alarms, [])
def test_exists(self):
alarmDef1 = self.repo.get_alarm_definitions(tenant_id='bob',
name='90% CPU')
expected = {'actions_enabled': False,
'alarm_actions': '29387234,77778687',
'description': None,
'expression': 'AVG(hpcs.compute{flavor_id=777, '
'image_id=888, metric_name=cpu, device=1}) > 10',
'id': '123',
'match_by': 'flavor_id,image_id',
'name': '90% CPU',
'ok_actions': None,
'severity': 'LOW',
'undetermined_actions': None}
self.assertEqual(alarmDef1, [expected])
alarmDef2 = self.repo.get_alarm_definitions(tenant_id='bob',
name='999% CPU')
self.assertEqual(alarmDef2, [])
def test_should_find(self):
alarmDef1 = self.repo.get_alarm_definitions(tenant_id='bob',
limit=1)
expected = [{'actions_enabled': False,
'alarm_actions': '29387234,77778687',
'description': None,
'expression': 'AVG(hpcs.compute{flavor_id=777, '
'image_id=888, metric_name=cpu, device=1}) > 10',
'id': '123',
'match_by': 'flavor_id,image_id',
'name': '90% CPU',
'ok_actions': None,
'severity': 'LOW',
'undetermined_actions': None},
{'actions_enabled': False,
'alarm_actions': '29387234,77778687',
'description': None,
'expression': 'AVG(hpcs.compute{flavor_id=777, '
'image_id=888, metric_name=mem}) > 20 and '
'AVG(hpcs.compute) < 100',
'id': '234',
'match_by': 'flavor_id,image_id',
'name': '50% CPU',
'ok_actions': None,
'severity': 'LOW',
'undetermined_actions': None}]
self.assertEqual(alarmDef1, expected)
with self.engine.begin() as conn:
conn.execute(self._delete_aa_query)
alarmDef2 = self.repo.get_alarm_definitions(tenant_id='bob',
limit=1)
expected[0]['alarm_actions'] = None
expected[1]['alarm_actions'] = None
self.assertEqual(alarmDef2, expected)
alarmDef3 = self.repo.get_alarm_definitions(tenant_id='bill',
limit=1)
self.assertEqual(alarmDef3, [])
alarmDef3 = self.repo.get_alarm_definitions(tenant_id='bill',
offset='10',
limit=1)
self.assertEqual(alarmDef3, [])
def test_should_find_by_dimension(self):
expected = [{'actions_enabled': False,
'alarm_actions': '29387234,77778687',
'description': None,
'expression': 'AVG(hpcs.compute{flavor_id=777,'
' image_id=888, metric_name=mem}) > 20 '
'and AVG(hpcs.compute) < 100',
'id': '234',
'match_by': 'flavor_id,image_id',
'name': '50% CPU',
'ok_actions': None,
'severity': 'LOW',
'undetermined_actions': None}]
dimensions = {'image_id': '888', 'metric_name': 'mem'}
alarmDef1 = self.repo.get_alarm_definitions(tenant_id='bob',
dimensions=dimensions,
limit=1)
self.assertEqual(alarmDef1, expected)
expected = [{'actions_enabled': False,
'alarm_actions': '29387234,77778687',
'description': None,
'expression': 'AVG(hpcs.compute{flavor_id=777, '
'image_id=888, metric_name=cpu, device=1}) > 10',
'id': '123',
'match_by': 'flavor_id,image_id',
'name': '90% CPU',
'ok_actions': None,
'severity': 'LOW',
'undetermined_actions': None},
{'actions_enabled': False,
'alarm_actions': '29387234,77778687',
'description': None,
'expression': 'AVG(hpcs.compute{flavor_id=777, '
'image_id=888, metric_name=mem}) > 20 and '
'AVG(hpcs.compute) < 100',
'id': '234',
'match_by': 'flavor_id,image_id',
'name': '50% CPU',
'ok_actions': None,
'severity': 'LOW',
'undetermined_actions': None}]
dimensions = {'image_id': '888'}
alarmDef1 = self.repo.get_alarm_definitions(tenant_id='bob',
dimensions=dimensions,
limit=1)
self.assertEqual(alarmDef1, expected)
expected = [{'actions_enabled': False,
'alarm_actions': '29387234,77778687',
'description': None,
'expression': 'AVG(hpcs.compute{flavor_id=777, '
'image_id=888, metric_name=cpu, device=1}) > 10',
'id': '123',
'match_by': 'flavor_id,image_id',
'name': '90% CPU',
'ok_actions': None,
'severity': 'LOW',
'undetermined_actions': None}]
dimensions = {'device': '1'}
alarmDef1 = self.repo.get_alarm_definitions(tenant_id='bob',
dimensions=dimensions,
limit=1)
self.assertEqual(alarmDef1, expected)
dimensions = {'Not real': 'AA'}
alarmDef1 = self.repo.get_alarm_definitions(tenant_id='bob',
dimensions=dimensions,
limit=1)
self.assertEqual(alarmDef1, [])
def test_should_delete_by_id(self):
self.repo.delete_alarm_definition('bob', '123')
from monasca_api.common.repositories import exceptions
self.assertRaises(exceptions.DoesNotExistException,
self.repo.get_alarm_definition,
'bob',
'123')
expected = [{'actions_enabled': False,
'alarm_actions': '29387234,77778687',
'description': None,
'expression': 'AVG(hpcs.compute{flavor_id=777, '
'image_id=888, metric_name=mem}) > 20 '
'and AVG(hpcs.compute) < 100',
'id': '234',
'match_by': 'flavor_id,image_id',
'name': '50% CPU',
'ok_actions': None,
'severity': 'LOW',
'undetermined_actions': None}]
alarmDef1 = self.repo.get_alarm_definitions(tenant_id='bob',
limit=1)
self.assertEqual(alarmDef1, expected)

View File

@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
# Copyright 2015 Cray Inc.
# (C) Copyright 2015 Hewlett Packard Enterprise Development Company LP
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@ -91,7 +92,7 @@ class MonascaApiConfigFixture(oslo_config.fixture.Config):
# [repositories]
self.conf.set_override(
'alarms_driver',
'monasca_api.common.repositories.mysql.alarms_repository:AlarmsRepository',
'monasca_api.common.repositories.sqla.alarms_repository:AlarmsRepository',
group='repositories', enforce_type=True)
self.conf.set_override(
'alarm_definitions_driver',
@ -175,7 +176,7 @@ class TestAlarmsStateHistory(AlarmTestBase):
self.useFixture(InfluxClientAlarmHistoryResponseFixture(
'monasca_api.common.repositories.influxdb.metrics_repository.client.InfluxDBClient'))
self.useFixture(fixtures.MockPatch(
'monasca_api.common.repositories.mysql.alarms_repository.AlarmsRepository'))
'monasca_api.common.repositories.sqla.alarms_repository.AlarmsRepository'))
self.alarms_resource = alarms.AlarmsStateHistory()
self.api.add_route(
@ -214,9 +215,9 @@ class TestAlarmDefinition(AlarmTestBase):
self.alarm_definition_resource)
def test_alarm_definition_create(self):
self.alarm_def_repo_mock.return_value.get_alarm_definitions.return_value = []
self.alarm_def_repo_mock.return_value.create_alarm_definition.return_value = \
u"00000001-0001-0001-0001-000000000001"
return_value = self.alarm_def_repo_mock.return_value
return_value.get_alarm_definitions.return_value = []
return_value.create_alarm_definition.return_value = u"00000001-0001-0001-0001-000000000001"
alarm_def = {
"name": "Test Definition",
@ -245,9 +246,9 @@ class TestAlarmDefinition(AlarmTestBase):
self.assertThat(response, RESTResponseEquals(expected_data))
def test_alarm_definition_create_with_valid_expressions(self):
self.alarm_def_repo_mock.return_value.get_alarm_definitions.return_value = []
self.alarm_def_repo_mock.return_value.create_alarm_definition.return_value = \
u"00000001-0001-0001-0001-000000000001"
return_value = self.alarm_def_repo_mock.return_value
return_value.get_alarm_definitions.return_value = []
return_value.create_alarm_definition.return_value = u"00000001-0001-0001-0001-000000000001"
valid_expressions = [
"max(-_.千幸福的笑脸{घोड़ा=馬, "

View File

@ -0,0 +1,153 @@
# Copyright 2015 Cray
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
import testtools
from sqlalchemy import select, MetaData, text, asc
from monasca_api.common.repositories.sqla import models
class TestModelsDB(testtools.TestCase, fixtures.TestWithFixtures):
@classmethod
def setUpClass(cls):
metadata = MetaData()
md = models.create_md_model(metadata)
gc_columns = [md.c.name + text("'='") + md.c.value]
cls.group_concat_md = (select([md.c.dimension_set_id,
models.group_concat(gc_columns).label('dimensions')])
.select_from(md)
.group_by(md.c.dimension_set_id))
cls.group_concat_md_order = (select([md.c.dimension_set_id,
models.group_concat(gc_columns,
order_by=[md.c.name.asc()]).label('dimensions')])
.select_from(md)
.group_by(md.c.dimension_set_id))
cls.order_by_field = (select([md.c.dimension_set_id])
.select_from(md)
.order_by(asc(models.field_sort(md.c.dimension_set_id, map(text,
["'A'",
"'B'",
"'C'"])))))
def test_oracle(self):
from sqlalchemy.dialects import oracle
dialect = oracle.dialect()
query = str(self.group_concat_md.compile(dialect=dialect))
expected = ('''SELECT metric_dimension.dimension_set_id, LISTAGG(metric_dimension.name '''
'''|| '=' || metric_dimension.value, ',') WITHIN GROUP (ORDER BY '''
'''metric_dimension.name || '=' || metric_dimension.value) AS dimensions '''
'''
FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
self.assertEqual(expected, query)
query = str(self.group_concat_md_order.compile(dialect=dialect))
expected = ('''SELECT metric_dimension.dimension_set_id, LISTAGG(metric_dimension.name '''
'''|| '=' || metric_dimension.value, ',') WITHIN GROUP (ORDER BY '''
'''metric_dimension.name ASC) AS dimensions '''
'''
FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
self.assertEqual(expected, query)
expected = ("""SELECT metric_dimension.dimension_set_id \n"""
"""FROM metric_dimension ORDER BY CASE WHEN metric_dimension.dimension_set_id='A'"""
""" THEN 0 WHEN metric_dimension.dimension_set_id='B' THEN 1 WHEN"""
""" metric_dimension.dimension_set_id='C' THEN 2 ELSE 3 END ASC""")
query = str(self.order_by_field.compile(dialect=dialect))
self.assertEqual(expected, query)
def test_postgres(self):
from sqlalchemy.dialects import postgresql as diale_
dialect = diale_.dialect()
query = str(self.group_concat_md.compile(dialect=dialect))
expected = ('''SELECT metric_dimension.dimension_set_id, STRING_AGG(metric_dimension.name '''
'''|| '=' || metric_dimension.value, ',' ) AS dimensions '''
'''
FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
self.assertEqual(expected, query)
query = str(self.group_concat_md_order.compile(dialect=dialect))
expected = ('''SELECT metric_dimension.dimension_set_id, STRING_AGG(metric_dimension.name '''
'''|| '=' || metric_dimension.value, ',' ORDER BY metric_dimension.name ASC) '''
'''AS dimensions '''
'''
FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
self.assertEqual(expected, query)
expected = ("""SELECT metric_dimension.dimension_set_id \n"""
"""FROM metric_dimension ORDER BY CASE WHEN metric_dimension.dimension_set_id='A'"""
""" THEN 0 WHEN metric_dimension.dimension_set_id='B' THEN 1 WHEN"""
""" metric_dimension.dimension_set_id='C' THEN 2 ELSE 3 END ASC""")
query = str(self.order_by_field.compile(dialect=dialect))
self.assertEqual(expected, query)
def test_sybase(self):
from sqlalchemy.dialects import sybase as diale_
dialect = diale_.dialect()
query = str(self.group_concat_md.compile(dialect=dialect))
expected = ('''SELECT metric_dimension.dimension_set_id, LIST(metric_dimension.name || '=' '''
'''|| metric_dimension.value, ',') AS dimensions '''
'''
FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
self.assertEqual(expected, query)
query = str(self.group_concat_md_order.compile(dialect=dialect))
expected = ('''SELECT metric_dimension.dimension_set_id, LIST(metric_dimension.name || '=' '''
'''|| metric_dimension.value, ',') AS dimensions '''
'''
FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
self.assertEqual(expected, query)
expected = ("""SELECT metric_dimension.dimension_set_id \n"""
"""FROM metric_dimension ORDER BY CASE WHEN metric_dimension.dimension_set_id='A'"""
""" THEN 0 WHEN metric_dimension.dimension_set_id='B' THEN 1 WHEN"""
""" metric_dimension.dimension_set_id='C' THEN 2 ELSE 3 END ASC""")
query = str(self.order_by_field.compile(dialect=dialect))
self.assertEqual(expected, query)
def test_mysql(self):
from sqlalchemy.dialects import mysql as diale_
dialect = diale_.dialect()
query = str(self.group_concat_md.compile(dialect=dialect))
expected = ('''SELECT metric_dimension.dimension_set_id, GROUP_CONCAT(concat(concat(metric_dimension.name, '''
''''='), metric_dimension.value) SEPARATOR ',') AS dimensions '''
'''
FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
self.assertEqual(expected, query)
query = str(self.group_concat_md_order.compile(dialect=dialect))
expected = ('''SELECT metric_dimension.dimension_set_id, GROUP_CONCAT(concat(concat(metric_dimension.name, '''
''''='), metric_dimension.value) ORDER BY metric_dimension.name ASC '''
'''SEPARATOR ',') AS dimensions '''
'''
FROM metric_dimension GROUP BY metric_dimension.dimension_set_id''')
self.assertEqual(expected, query)
expected = ('''SELECT metric_dimension.dimension_set_id \n'''
'''FROM metric_dimension ORDER BY FIELD(metric_dimension.dimension_set_id, 'A', 'B', 'C') ASC''')
query = str(self.order_by_field.compile(dialect=dialect))
self.assertEqual(expected, query)
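Read together, these assertions pin a single SQLAlchemy expression to four vendor-specific renderings. For reference, a minimal sketch of driving the same helpers outside the test suite, assuming only the models API exercised above:

from sqlalchemy import MetaData, select, text
from sqlalchemy.dialects import mysql, oracle

from monasca_api.common.repositories.sqla import models

md = models.create_md_model(MetaData())
dims = [md.c.name + text("'='") + md.c.value]
query = (select([md.c.dimension_set_id,
                 models.group_concat(dims).label('dimensions')])
         .select_from(md)
         .group_by(md.c.dimension_set_id))

# One expression, two renderings: GROUP_CONCAT(... SEPARATOR ',') on MySQL,
# LISTAGG(...) WITHIN GROUP (ORDER BY ...) on Oracle.
print(query.compile(dialect=mysql.dialect()))
print(query.compile(dialect=oracle.dialect()))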

View File

@ -0,0 +1,199 @@
# Copyright 2015 Cray
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import fixtures
from oslo_config import cfg
from oslo_config import fixture as fixture_config
from sqlalchemy import delete, MetaData, insert, bindparam
import testtools
from monasca_api.common.repositories.sqla import models
CONF = cfg.CONF
class TestNotificationMethodRepoDB(testtools.TestCase, fixtures.TestWithFixtures):
@classmethod
def setUpClass(cls):
from sqlalchemy import engine_from_config
engine = engine_from_config({'url': 'sqlite://'}, prefix='')
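# Load the reference schema into the in-memory SQLite engine; create_engine
# is then monkey-patched below so code under test reuses this engine.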
with open('monasca_api/tests/sqlite_alarm.sql', 'r') as f:
    qry = f.read()
sconn = engine.raw_connection()
c = sconn.cursor()
c.executescript(qry)
sconn.commit()
c.close()
cls.engine = engine
def _fake_create_engine(*args, **kw):
    return cls.engine
cls.fixture = fixtures.MonkeyPatch(
    'sqlalchemy.create_engine', _fake_create_engine)
cls.fixture.setUp()
metadata = MetaData()
cls.nm = models.create_nm_model(metadata)
cls._delete_nm_query = delete(cls.nm)
cls._insert_nm_query = (insert(cls.nm)
.values(
id=bindparam('id'),
tenant_id=bindparam('tenant_id'),
name=bindparam('name'),
type=bindparam('type'),
address=bindparam('address'),
created_at=bindparam('created_at'),
updated_at=bindparam('updated_at')))
@classmethod
def tearDownClass(cls):
cls.fixture.cleanUp()
def setUp(self):
super(TestNotificationMethodRepoDB, self).setUp()
self._fixture_config = self.useFixture(
fixture_config.Config(cfg.CONF))
self._fixture_config.config(url='sqlite://',
group='database')
from monasca_api.common.repositories.sqla import notifications_repository as nr
self.repo = nr.NotificationsRepository()
self.default_nms = [{'id': '123',
'tenant_id': '444',
'name': 'MyEmail',
'type': 'EMAIL',
'address': 'a@b',
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()},
{'id': '124',
'tenant_id': '444',
'name': 'OtherEmail',
'type': 'EMAIL',
'address': 'a@b',
'created_at': datetime.datetime.now(),
'updated_at': datetime.datetime.now()}]
with self.engine.connect() as conn:
conn.execute(self._delete_nm_query)
conn.execute(self._insert_nm_query, self.default_nms)
def test_fixture_and_setup(self):
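# create_engine was monkey-patched in setUpClass, so a component that
# builds its own engine still receives the shared in-memory one.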
class A(object):
def __init__(self):
from sqlalchemy import create_engine
self.engine = create_engine(None)
a = A()
expected_list_tables = ['alarm',
'alarm_action',
'alarm_definition',
'alarm_definition_severity',
'alarm_metric',
'alarm_state',
'metric_definition',
'metric_definition_dimensions',
'metric_dimension',
'notification_method',
'notification_method_type',
'schema_migrations',
'sub_alarm',
'sub_alarm_definition',
'sub_alarm_definition_dimension']
self.assertEqual(self.engine, a.engine)
self.assertEqual(self.engine.table_names(), expected_list_tables)
def test_should_create(self):
from monasca_api.common.repositories import exceptions
nmA = self.repo.create_notification('555',
'MyEmail',
'EMAIL',
'a@b')
nmB = self.repo.list_notification('555', nmA)
self.assertEqual(nmA, nmB['id'])
self.assertRaises(exceptions.AlreadyExistsException,
self.repo.create_notification,
'555',
'MyEmail',
'EMAIL',
'a@b')
def test_should_exists(self):
from monasca_api.common.repositories import exceptions
self.assertTrue(self.repo.list_notification("444", "123"))
self.assertRaises(exceptions.DoesNotExistException,
self.repo.list_notification, "444", "1234")
self.assertRaises(exceptions.DoesNotExistException,
self.repo.list_notification, "333", "123")
def test_should_find_by_id(self):
nm = self.repo.list_notification('444', '123')
self.assertEqual(nm['id'], '123')
self.assertEqual(nm['type'], 'EMAIL')
self.assertEqual(nm['address'], 'a@b')
def test_should_find(self):
nms = self.repo.list_notifications('444', None, 1)
self.assertEqual(nms, self.default_nms)
nms = self.repo.list_notifications('444', 'z', 1)
self.assertEqual(nms, [])
def test_update(self):
import copy
self.repo.update_notification('123', '444', 'Foo', 'EMAIL', 'abc')
nm = self.repo.list_notification('444', '123')
new_nm = copy.deepcopy(self.default_nms[0])
new_nm['name'] = 'Foo'
new_nm['type'] = 'EMAIL'
new_nm['address'] = 'abc'
new_nm['created_at'] = nm['created_at']
new_nm['updated_at'] = nm['updated_at']
self.assertEqual(nm, new_nm)
from monasca_api.common.repositories import exceptions
self.assertRaises(exceptions.DoesNotExistException,
self.repo.update_notification,
'no really id',
'no really tenant',
'',
'',
'')
def test_should_delete(self):
from monasca_api.common.repositories import exceptions
self.repo.delete_notification('444', '123')
self.assertRaises(exceptions.DoesNotExistException,
self.repo.list_notification, '444', '123')
self.assertRaises(exceptions.DoesNotExistException,
self.repo.delete_notification, 'no really tenant', '123')
def test_should_update_duplicate_with_same_values(self):
import copy
self.repo.update_notification('123', '444', 'Foo', 'EMAIL', 'abc')
self.repo.update_notification('123', '444', 'Foo', 'EMAIL', 'abc')
nm = self.repo.list_notification('444', '123')
new_nm = copy.deepcopy(self.default_nms[0])
new_nm['name'] = 'Foo'
new_nm['type'] = 'EMAIL'
new_nm['address'] = 'abc'
new_nm['created_at'] = nm['created_at']
new_nm['updated_at'] = nm['updated_at']
self.assertEqual(nm, new_nm)
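The tests above double as usage notes for the new repository. A condensed sketch, assuming a database that already carries the monasca schema, [database] options registered on import (as the tests rely on), and hypothetical credentials:

from oslo_config import cfg
from oslo_config import fixture as fixture_config

from monasca_api.common.repositories.sqla import notifications_repository as nr

# Any SQLAlchemy URL works, provided the schema is already loaded; the
# tests above use an in-memory SQLite copy instead. Credentials here are
# placeholders.
conf = fixture_config.Config(cfg.CONF)
conf.setUp()
conf.config(url='mysql+pymysql://user:secret@dbhost/mon', group='database')

repo = nr.NotificationsRepository()
nm_id = repo.create_notification('444', 'OpsEmail', 'EMAIL', 'ops@example.com')
assert repo.list_notification('444', nm_id)['name'] == 'OpsEmail'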

View File

@ -13,12 +13,12 @@
# License for the specific language governing permissions and limitations
# under the License.
import monasca_api.v2.reference.helpers as helpers
import unittest
from mock import Mock
from monasca_api.v2.common.exceptions import HTTPUnprocessableEntityError
import unittest
from monasca_api.v2.common.exceptions import HTTPUnprocessableEntityError
import monasca_api.v2.reference.helpers as helpers
class TestGetQueryDimension(unittest.TestCase):

View File

@ -15,6 +15,7 @@
import unittest
from mock import patch
import monasca_api.common.repositories.influxdb.metrics_repository as influxdb_repo
from oslo_config import cfg

View File

@ -13,9 +13,10 @@
# under the License.
import datetime
import json
import falcon
import falcon.testing as testing
import json
from monasca_api.v2.reference import versions

View File

@ -12,8 +12,9 @@
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log
import six.moves.urllib.parse as urlparse
from oslo_log import log
from validate_email import validate_email
import voluptuous

View File

@ -1,4 +1,5 @@
# Copyright 2014 IBM Corp.
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@ -131,5 +132,15 @@ mysql_opts = [cfg.StrOpt('database_name'), cfg.StrOpt('hostname'),
cfg.StrOpt('username'), cfg.StrOpt('password')]
mysql_group = cfg.OptGroup(name='mysql', title='mysql')
cfg.CONF.register_group(mysql_group)
cfg.CONF.register_opts(mysql_opts, mysql_group)
sql_opts = [cfg.StrOpt('url', default=None), cfg.StrOpt('host', default=None),
cfg.StrOpt('username', default=None), cfg.StrOpt('password', default=None),
cfg.StrOpt('drivername', default=None), cfg.PortOpt('port', default=None),
cfg.StrOpt('database', default=None), cfg.StrOpt('query', default=None)]
sql_group = cfg.OptGroup(name='database', title='sql')
cfg.CONF.register_group(sql_group)
cfg.CONF.register_opts(sql_opts, sql_group)
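The new option names mirror the fields of sqlalchemy.engine.url.URL, so a consumer can prefer a full url and fall back to the individual parts. A sketch of that fallback (an assumption about the consuming code, not a copy of it):

from oslo_config import cfg
from sqlalchemy.engine import url as sqla_url

def database_url(conf=cfg.CONF):
    """Return the configured URL, or assemble one from its parts."""
    db = conf.database
    if db.url:
        return db.url
    return str(sqla_url.URL(drivername=db.drivername,
                            username=db.username,
                            password=db.password,
                            host=db.host,
                            port=db.port,
                            database=db.database))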

View File

@ -16,12 +16,13 @@
import datetime
import json
import six.moves.urllib.parse as urlparse
import falcon
from oslo_log import log
from oslo_utils import timeutils
import simplejson
import six.moves.urllib.parse as urlparse
from monasca_api.common.repositories import constants
from monasca_api.v2.common.exceptions import HTTPUnprocessableEntityError

View File

@ -35,7 +35,7 @@ class TestAlarms(base.BaseMonascaTest):
@test.attr(type="gate")
def test_list_alarms(self):
alarm_definition_ids, expected_metric \
= self._create_alarms_for_test_alarms(num=1)
= self._create_alarms_for_test_alarms(num=1)
resp, response_body = self.monasca_client.list_alarms()
self.assertEqual(200, resp.status)
for element in response_body['elements']:

View File

@ -18,8 +18,9 @@ pyparsing
voluptuous
influxdb
eventlet
kafka-python
kafka-python>=0.9.5,<1.0.0
simplejson
simport
validate_email>=1.3
monasca-common>=0.0.2
sqlalchemy

View File

@ -21,4 +21,5 @@ sphinxcontrib-httpdomain
sphinxcontrib-pecanwsme>=0.8
testrepository>=0.0.18
testscenarios>=0.4
testtools>=0.9.34
testtools>=0.9.34
tempest-lib

View File

@ -1,7 +1,7 @@
[tox]
minversion = 1.6
skipsdist = True
envlist = py33,py27,pep8
envlist = py27,pep8
[testenv]
setenv = VIRTUAL_ENV={envdir}