Cleanup

Fixed imports. Removed repositories not specific to events. Removed some unused sample objects.

This commit is contained in:
parent 9e688a1169
commit 9ebf70a81d
@@ -1,21 +1,17 @@
[DEFAULT]
# logging, make sure that the user under whom the server runs has permission
# to write to the directory.
log_file = monasca.log
log_dir = .
log_file = events_api.log
log_dir = /var/log/monasca/events_api
log_level = DEBUG

# Identifies the region that the Monasca API is running in.
region = useast

# Dispatchers to be loaded to serve restful APIs
dispatcher = v2_ref_metrics
dispatcher = v2_ref_stream_definitions
dispatcher = v2_ref_alarms
dispatcher = v2_ref_alarm_definitions
dispatcher = v2_ref_events
dispatcher = v2_ref_transforms
dispatcher = v2_ref_notifications

[security]
# The roles that are allowed full access to the API.
@@ -32,16 +28,10 @@ delegate_authorized_roles = admin
# The message queue driver to use
driver = kafka

# The type of metrics message format to publish to the message queue.
metrics_message_format = reference

# The type of events message format to publish to the message queue.
events_message_format = reference

[repositories]
# The driver to use for the metrics repository
metrics_driver = influxdb_metrics_repo

# The driver to use for the stream definitions repository
streams_driver = mysql_streams_repo
@@ -51,15 +41,6 @@ events_driver = mysql_events_repo
# The driver to use for the transforms repository
transforms_driver = mysql_transforms_repo

# The driver to use for the alarm definitions repository
alarm_definitions_driver = mysql_alarm_definitions_repo

# The driver to use for the alarms repository
alarms_driver = mysql_alarms_repo

# The driver to use for the notifications repository
notifications_driver = mysql_notifications_repo

[dispatcher]
driver = v2_reference
@@ -67,9 +48,6 @@ driver = v2_reference
# The endpoint to the kafka server
uri = 192.168.10.4:9092

# The topic that metrics will be published to
metrics_topic = metrics

# The topic that events will be published to
events_topic = transformed-events
@@ -97,22 +75,6 @@ compact = False
# default to listen on partition 0.
partitions = 0

[influxdb]
# The IP address of the InfluxDB service.
ip_address = 192.168.10.4

# The port number that the InfluxDB service is listening on.
port = 8086

# The username to authenticate with.
user = mon_api

# The password to authenticate with.
password = password

# The name of the InfluxDB database to use.
database_name = mon

[mysql]
database_name = mon
hostname = 192.168.10.4
@@ -1,31 +1,31 @@
[DEFAULT]
name = monasca
name = monasca_events_api

[pipeline:main]
# Add validator in the pipeline so the metrics messages can be validated.
pipeline = auth keystonecontext api

[app:api]
paste.app_factory = monasca.api.server:api_app
paste.app_factory = monasca_events_api.api.server:api_app

[filter:login]
use = egg: monasca_api#login
use = egg: monasca_events_api#login

[filter:inspector]
use = egg: monasca_api#inspector
use = egg: monasca_events_api#inspector

[filter:validator]
use = egg: monasca_api#metric_validator
use = egg: monasca_events_api#metric_validator

[filter:auth]
paste.filter_factory = keystonemiddleware.auth_token:filter_factory

[filter:keystonecontext]
paste.filter_factory = monasca.middleware.keystone_context_filter:filter_factory
paste.filter_factory = monasca_events_api.middleware.keystone_context_filter:filter_factory

[server:main]
use = egg:gunicorn#main
host = 127.0.0.1
port = 8080
workers = 1
proc_name = monasca
proc_name = monasca_events_api
@@ -12,8 +12,8 @@
# License for the specific language governing permissions and limitations
# under the License.

from monasca.common import resource_api
from monasca.openstack.common import log
from monasca_events_api.common import resource_api
from monasca_events_api.openstack.common import log


LOG = log.getLogger(__name__)
@@ -12,8 +12,8 @@
# License for the specific language governing permissions and limitations
# under the License.

from monasca.common import resource_api
from monasca.openstack.common import log
from monasca_events_api.common import resource_api
from monasca_events_api.openstack.common import log


LOG = log.getLogger(__name__)
@@ -22,10 +22,10 @@ import paste.deploy
from stevedore import named


from monasca.common import resource_api
from monasca.openstack.common import log
from monasca_events_api.common import resource_api
from monasca_events_api.openstack.common import log

DISPATCHER_NAMESPACE = 'monasca.dispatcher'
DISPATCHER_NAMESPACE = 'monasca_events_api.dispatcher'

OPTS = [
    cfg.MultiStrOpt('dispatcher',
@@ -38,10 +38,12 @@ LOG = log.getLogger(__name__)


def api_app(conf):
    cfg.CONF(args=[], project='monasca')
    cfg.CONF(args=[],
             project='monasca_events_api',
             default_config_files=["/etc/monasca/events_api.conf"])
    log_levels = (cfg.CONF.default_log_levels)
    cfg.set_defaults(log.log_opts, default_log_levels=log_levels)
    log.setup('monasca')
    log.setup('monasca_events_api')

    dispatcher_manager = named.NamedExtensionManager(
        namespace=DISPATCHER_NAMESPACE,
@@ -68,7 +70,7 @@ def api_app(conf):

if __name__ == '__main__':
    wsgi_app = (
        paste.deploy.loadapp('config:etc/monasca.ini',
        paste.deploy.loadapp('config:etc/events_api.ini',
                             relative_to=os.getcwd()))
    httpd = simple_server.make_server('127.0.0.1', 8080, wsgi_app)
    httpd.serve_forever()
@@ -11,8 +11,8 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from monasca.common import resource_api
from monasca.openstack.common import log
from monasca_events_api.common import resource_api
from monasca_events_api.openstack.common import log

LOG = log.getLogger(__name__)
@@ -24,7 +24,7 @@ try:
except ImportError:
    import json

from monasca.openstack.common import log
from monasca_events_api.openstack.common import log

LOG = log.getLogger(__name__)
@@ -174,4 +174,4 @@ class KafkaConnection(object):
            code = 500
            LOG.exception('Unknown error.')

        return code
        return code
@@ -12,7 +12,7 @@
# License for the specific language governing permissions and limitations
# under the License.

from monasca.common.messaging import publisher
from monasca_events_api.common.messaging import publisher


class FakePublisher(publisher.Publisher):
@@ -21,4 +21,4 @@ class FakePublisher(publisher.Publisher):
        pass

    def send_message(self, message):
        pass
        pass
@@ -18,9 +18,9 @@ from kafka import common
from kafka import producer
from oslo.config import cfg

from monasca.common.messaging import exceptions
from monasca.common.messaging import publisher
from monasca.openstack.common import log
from monasca_events_api.common.messaging import exceptions
from monasca_events_api.common.messaging import publisher
from monasca_events_api.openstack.common import log


LOG = log.getLogger(__name__)
@@ -107,4 +107,4 @@ class KafkaPublisher(publisher.Publisher):
                raise exceptions.MessageQueueException()
            except Exception:
                LOG.exception('Unknown error.')
                raise exceptions.MessageQueueException()
                raise exceptions.MessageQueueException()
@@ -14,9 +14,9 @@

from oslo.config import cfg

import monasca.common.messaging.message_formats.cadf.events as cadf_events
import monasca.common.messaging.message_formats.identity.events as ident_events
import monasca.common.messaging.message_formats.reference.events as ref_events
import monasca_events_api.common.messaging.message_formats.cadf.events as cadf_events
import monasca_events_api.common.messaging.message_formats.identity.events as ident_events
import monasca_events_api.common.messaging.message_formats.reference.events as ref_events


def create_events_transform():
@@ -26,4 +26,4 @@ def create_events_transform():
    elif message_format == 'cadf':
        return cadf_events.transform
    else:
        return ident_events.transform
        return ident_events.transform
@@ -14,9 +14,9 @@

from oslo.config import cfg

import monasca.common.messaging.message_formats.cadf.metrics as cadf_metrics
import monasca.common.messaging.message_formats.identity.metrics as id_metrics
import monasca.common.messaging.message_formats.reference.metrics as r_metrics
import monasca_events_api.common.messaging.message_formats.cadf.metrics as cadf_metrics
import monasca_events_api.common.messaging.message_formats.identity.metrics as id_metrics
import monasca_events_api.common.messaging.message_formats.reference.metrics as r_metrics


def create_metrics_transform():
@@ -26,4 +26,4 @@ def create_metrics_transform():
    elif metrics_message_format == 'cadf':
        return cadf_metrics.transform
    else:
        return id_metrics.transform
        return id_metrics.transform
@@ -12,7 +12,7 @@
# License for the specific language governing permissions and limitations
# under the License.

from monasca.common.messaging import publisher
from monasca_events_api.common.messaging import publisher


class RabbitmqPublisher(publisher.Publisher):
@@ -21,4 +21,4 @@ class RabbitmqPublisher(publisher.Publisher):
        pass

    def send_message(self, message):
        raise NotImplemented()
        raise NotImplemented()
@@ -1,66 +0,0 @@
# Copyright 2014 Hewlett-Packard
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc

import six


@six.add_metaclass(abc.ABCMeta)
class AlarmDefinitionsRepository(object):
    def __init__(self):
        super(AlarmDefinitionsRepository, self).__init__()

    @abc.abstractmethod
    def create_alarm_definition(self, tenant_id, name, expression,
                                sub_expr_list, description, severity, match_by,
                                alarm_actions, undetermined_actions,
                                ok_action):
        pass

    @abc.abstractmethod
    def get_sub_alarms(self, tenant_id, alarm_definition_id):
        pass

    @abc.abstractmethod
    def get_alarm_metrics(self, tenant_id, alarm_definition_id):
        pass

    @abc.abstractmethod
    def delete_alarm_definition(self, tenant_id, alarm_definition_id):
        pass

    @abc.abstractmethod
    def get_sub_alarm_definitions(self, alarm_definition_id):
        pass

    @abc.abstractmethod
    def get_alarm_definition(self, tenant_id, id):
        pass

    @abc.abstractmethod
    def get_alarm_definitions(self, tenant_id, name, dimensions, offset):
        pass

    @abc.abstractmethod
    def update_or_patch_alarm_definition(self, tenant_id, id,
                                         name,
                                         expression,
                                         sub_expr_list,
                                         actions_enabled,
                                         description,
                                         alarm_actions,
                                         ok_actions,
                                         undetermined_actions,
                                         match_by, severity, patch):
        pass
@@ -1,44 +0,0 @@
# Copyright 2014 Hewlett-Packard
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc

import six


@six.add_metaclass(abc.ABCMeta)
class AlarmsRepository(object):

    def __init__(self):

        super(AlarmsRepository, self).__init__()

    @abc.abstractmethod
    def get_alarm_metrics(self, alarm_id):
        pass

    @abc.abstractmethod
    def get_sub_alarms(self, tenant_id, alarm_id):
        pass

    @abc.abstractmethod
    def delete_alarm(self, tenant_id, id):
        pass

    @abc.abstractmethod
    def get_alarm(self, tenant_id, id):
        pass

    @abc.abstractmethod
    def get_alarms(self, tenant_id, query_parms, offset):
        pass
@@ -1,27 +0,0 @@
# Copyright 2014 Hewlett-Packard
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from monasca.common.repositories import events_repository
from monasca.openstack.common import log

LOG = log.getLogger(__name__)


class EventsRepository(events_repository.EventsRepository):

    def __init__(self):
        return

    def list_events(self, tenant_id, name, dimensions):
        return {}
@@ -1,27 +0,0 @@
# Copyright 2014 Hewlett-Packard
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from monasca.common.repositories import metrics_repository
from monasca.openstack.common import log

LOG = log.getLogger(__name__)


class MetricsRepository(metrics_repository.MetricsRepository):

    def __init__(self):
        return

    def list_metrics(self, tenant_id, name, dimensions):
        return {}
@@ -1,559 +0,0 @@
# -*- coding: utf8 -*-
# Copyright 2014 Hewlett-Packard
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import re
import time
import urllib

from influxdb import client
from oslo.config import cfg

from monasca.common.repositories import constants
from monasca.common.repositories import exceptions
from monasca.common.repositories import metrics_repository
from monasca.openstack.common import log


LOG = log.getLogger(__name__)


class MetricsRepository(metrics_repository.MetricsRepository):
    def __init__(self):

        try:
            self.conf = cfg.CONF
            self.influxdb_client = client.InfluxDBClient(
                self.conf.influxdb.ip_address, self.conf.influxdb.port,
                self.conf.influxdb.user, self.conf.influxdb.password,
                self.conf.influxdb.database_name)

            # compile regex only once for efficiency
            self._serie_name_reqex = re.compile(
                '([^?&=]+)\?([^?&=]+)&([^?&=]+)(&[^?&=]+=[^?&=]+)*')
            self._serie_tenant_id_region_name_regex = re.compile(
                '[^?&=]+\?[^?&=]+&[^?&=]+')
            self._serie_name_dimension_regex = re.compile('&[^?&=]+=[^?&=]+')
            self._serie_name_dimension_parts_regex = re.compile(
                '&([^?&=]+)=([^?&=]+)')

        except Exception as ex:
            LOG.exception()
            raise exceptions.RepositoryException(ex)

    def _build_list_series_query(self, dimensions, name, tenant_id, region):

        regex_clause = self._build_regex_clause(dimensions, name, tenant_id,
                                                region)

        query = 'list series ' + regex_clause

        return query

    def _build_select_query(self, dimensions, name, tenant_id,
                            region, start_timestamp, end_timestamp, offset):

        from_clause = self._build_from_clause(dimensions, name, tenant_id,
                                              region, start_timestamp,
                                              end_timestamp)

        offset_clause = self._build_offset_clause(offset)

        query = 'select * ' + from_clause + offset_clause

        return query

    def _build_statistics_query(self, dimensions, name, tenant_id,
                                region, start_timestamp, end_timestamp,
                                statistics, period):

        from_clause = self._build_from_clause(dimensions, name, tenant_id,
                                              region, start_timestamp,
                                              end_timestamp)

        statistics = [statistic.replace('avg', 'mean') for statistic in
                      statistics]
        statistics = [statistic + '(value)' for statistic in statistics]

        statistic_string = ",".join(statistics)

        query = 'select ' + statistic_string + ' ' + from_clause

        if period is None:
            period = str(300)

        query += " group by time(" + period + "s)"

        return query

    def _build_regex_clause(self, dimensions, name, tenant_id, region,
                            start_timestamp=None, end_timestamp=None):

        regex_clause = '/^'

        # tenant id
        regex_clause += urllib.quote(tenant_id.encode('utf8'), safe='')

        # region
        regex_clause += '\?' + urllib.quote(region.encode('utf8'), safe='')

        # name - optional
        if name:
            regex_clause += '&' + urllib.quote(name.encode('utf8'), safe='')
            regex_clause += '(&|$)'

        # dimensions - optional
        if dimensions:
            for dimension_name, dimension_value in iter(
                    sorted(dimensions.iteritems())):
                regex_clause += '(.*&)*'
                regex_clause += urllib.quote(dimension_name.encode('utf8'),
                                             safe='')
                regex_clause += '='
                regex_clause += urllib.quote(dimension_value.encode('utf8'),
                                             safe='')
                regex_clause += '(&|$)'

        regex_clause += '/'

        if start_timestamp is not None:
            # subtract 1 from timestamp to get >= semantics
            regex_clause += " where time > " + str(start_timestamp - 1) + "s"
            if end_timestamp is not None:
                # add 1 to timestamp to get <= semantics
                regex_clause += " and time < " + str(end_timestamp + 1) + "s"

        return regex_clause

    def _build_from_clause(self, dimensions, name, tenant_id, region,
                           start_timestamp=None, end_timestamp=None):
        from_clause = 'from '
        from_clause += self._build_regex_clause(dimensions, name, tenant_id,
                                                region, start_timestamp,
                                                end_timestamp)
        return from_clause

    def list_metrics(self, tenant_id, region, name, dimensions, offset):

        try:

            query = self._build_list_series_query(dimensions, name, tenant_id,
                                                  region)

            result = self.influxdb_client.query(query, 's')

            json_metric_list = self._decode_influxdb_serie_name_list(result,
                                                                     offset)

            return json_metric_list

        except Exception as ex:
            LOG.exception(ex)
            raise exceptions.RepositoryException(ex)

    def _decode_influxdb_serie_name_list(self, series_names, offset):

        """Example series_names from InfluxDB.

        [
            {
                "points": [
                    [
                        0,
                        "tenant?useast&%E5%8D%83&dim1=%E5%8D%83&dim2=%E5%8D%83"
                    ]
                ],
                "name": "list_series_result",
                "columns": [
                    "time",
                    "name"
                ]
            }
        ]


        :param series_names:
        :return:
        """

        json_metric_list = []
        serie_names_list_list = series_names[0]['points']
        for serie_name_list in serie_names_list_list:
            serie_name = serie_name_list[1]
            if offset is not None:
                if serie_name < urllib.unquote(offset):
                    continue
            metric = self._decode_influxdb_serie_name(serie_name)
            if metric is None:
                continue
            json_metric_list.append(metric)

            if offset is not None:
                if len(json_metric_list) >= constants.PAGE_LIMIT:
                    break

        return json_metric_list

    def _decode_influxdb_serie_name(self, serie_name):

        """Decodes a serie name from InfluxDB.

        The raw serie name is
        formed by url encoding the tenant id, region, name, and dimensions,
        and concatenating them into a quasi URL query string.

        urlencode(tenant)?urlencode(region)&urlencode(name)[&urlencode(
        dim_name)=urlencode(dim_value)]...


        :param serie_name:
        :return:
        """

        match = self._serie_name_reqex.match(serie_name)
        if match:

            # throw tenant_id (match.group(1) and region (match.group(2) away

            metric_name = (
                urllib.unquote_plus(match.group(3).encode(
                    'utf8')).decode('utf8'))

            metric = {u'name': metric_name,
                      u'id': urllib.quote(serie_name)}

            # only returns the last match. we need all dimensions.
            dimensions = match.group(4)
            if dimensions:
                # remove the name, tenant_id, and region; just
                # dimensions remain
                dimensions_part = self._serie_tenant_id_region_name_regex.sub(
                    '', serie_name)
                dimensions = {}
                dimension_list = self._serie_name_dimension_regex.findall(
                    dimensions_part)
                for dimension in dimension_list:
                    match = self._serie_name_dimension_parts_regex.match(
                        dimension)
                    dimension_name = urllib.unquote(
                        match.group(1).encode('utf8')).decode('utf8')
                    dimension_value = urllib.unquote(
                        match.group(2).encode('utf8')).decode('utf8')
                    dimensions[dimension_name] = dimension_value

                metric["dimensions"] = dimensions
        else:
            metric = None

        return metric

    def measurement_list(self, tenant_id, region, name, dimensions,
                         start_timestamp,
                         end_timestamp, offset):
        """Example result from InfluxDB.

        [
            {
                "points": [
                    [
                        1413230362,
                        5369370001,
                        99.99
                    ]
                ],
                "name": "tenant?useast&%E5%8D%83&dim1=%E5%8D%83&dim2
                =%E5%8D%83",
                "columns": [
                    "time",
                    "sequence_number",
                    "value"
                ]
            }
        ]

        After url decoding the result would look like this. In this example
        the name, dim1 value, and dim2 value were non-ascii chars.

        [
            {
                "points": [
                    [
                        1413230362,
                        5369370001,
                        99.99
                    ]
                ],
                "name": "tenant?useast&千&dim1=千&dim2=千",
                "columns": [
                    "time",
                    "sequence_number",
                    "value"
                ]
            }
        ]

        :param tenant_id:
        :param name:
        :param dimensions:
        :return:
        """

        json_measurement_list = []

        try:
            query = self._build_select_query(dimensions, name, tenant_id,
                                             region, start_timestamp,
                                             end_timestamp, offset)

            try:
                result = self.influxdb_client.query(query, 'ms')
            except client.InfluxDBClientError as ex:
                # check for non-existent serie name.
                msg = "Couldn't look up columns"
                if ex.code == 400 and ex.content == (msg):
                    return json_measurement_list
                else:
                    raise ex

            for serie in result:

                metric = self._decode_influxdb_serie_name(serie['name'])

                if metric is None:
                    continue

                # Replace 'sequence_number' -> 'id' for column name
                columns = [column.replace('sequence_number', 'id') for column
                           in serie['columns']]
                # Replace 'time' -> 'timestamp' for column name
                columns = [column.replace('time', 'timestamp') for column in
                           columns]

                # format the utc date in the points
                fmtd_pts = [['%s.%03dZ' % (time.strftime('%Y-%m-%dT%H:%M:%S',
                                                         time.gmtime(
                                                             point[0] / 1000)),
                                           point[0] % 1000), point[1],
                             point[2]] for point in serie['points']]

                # Set the last point's time as the id. Used for next link.
                measurement = {u"name": metric['name'],
                               u"id": serie['points'][-1][0],
                               u"dimensions": metric['dimensions'],
                               u"columns": columns,
                               u"measurements": fmtd_pts}

                json_measurement_list.append(measurement)

            return json_measurement_list

        except Exception as ex:
            LOG.exception(ex)
            raise exceptions.RepositoryException(ex)

    def metrics_statistics(self, tenant_id, region, name, dimensions,
                           start_timestamp,
                           end_timestamp, statistics, period):

        json_statistics_list = []

        try:
            query = self._build_statistics_query(dimensions, name, tenant_id,
                                                 region,
                                                 start_timestamp,
                                                 end_timestamp, statistics,
                                                 period)

            try:
                result = self.influxdb_client.query(query, 's')
            except client.InfluxDBClientError as ex:
                # check for non-existent serie name.
                msg = "Couldn't look up columns"
                if ex.code == 400 and ex.content == (msg):
                    return json_statistics_list
                else:
                    raise ex

            for serie in result:

                metric = self._decode_influxdb_serie_name(serie['name'])

                if metric is None:
                    continue

                # Replace 'avg' -> 'mean' for column name
                columns = [column.replace('mean', 'avg') for column in
                           serie['columns']]
                # Replace 'time' -> 'timestamp' for column name
                columns = [column.replace('time', 'timestamp') for column in
                           columns]

                fmtd_pts_list_list = [[time.strftime("%Y-%m-%dT%H:%M:%SZ",
                                                     time.gmtime(pts_list[
                                                         0]))] + pts_list[1:]
                                      for pts_list in serie['points']]

                measurement = {"name": metric['name'],
                               "dimensions": metric['dimensions'],
                               "columns": columns,
                               "measurements": fmtd_pts_list_list}

                json_statistics_list.append(measurement)

            return json_statistics_list

        except Exception as ex:
            LOG.exception(ex)
            raise exceptions.RepositoryException(ex)

    def _build_offset_clause(self, offset):

        if offset is not None:

            # If offset is not empty.
            if offset:

                offset_clause = (
                    ' and time < {}s limit {}'.
                    format(offset, str(constants.PAGE_LIMIT)))
            else:

                offset_clause = ' limit {}'.format(str(
                    constants.PAGE_LIMIT))

        else:

            offset_clause = ''

        return offset_clause

    def alarm_history(self, tenant_id, alarm_id_list,
                      offset, start_timestamp=None,
                      end_timestamp=None):
        """Example result from Influxdb.

        [
            {
                "points": [
                    [
                        1415894490,
                        272140001,
                        "6ac10841-d02f-4f7d-a191-ae0a3d9a25f2",
                        "[{\"name\": \"cpu.system_perc\", \"dimensions\": {
                        \"hostname\": \"mini-mon\", \"component\":
                        \"monasca-agent\", \"service\": \"monitoring\"}},
                        {\"name\": \"load.avg_1_min\", \"dimensions\": {
                        \"hostname\": \"mini-mon\", \"component\":
                        \"monasca-agent\", \"service\": \"monitoring\"}}]",
                        "ALARM",
                        "OK",
                        "Thresholds were exceeded for the sub-alarms: [max(
                        load.avg_1_min{hostname=mini-mon}) > 0.0,
                        max(cpu.system_perc) > 0.0]",
                        "{}"
                    ],
                ],
                "name": "alarm_state_history",
                "columns": [
                    "time",
                    "sequence_number",
                    "alarm_id",
                    "metrics",
                    "new_state",
                    "old_state",
                    "reason",
                    "reason_data"
                ]
            }
        ]

        :param tenant_id:
        :param alarm_id:
        :return:
        """

        try:

            json_alarm_history_list = []

            if not alarm_id_list:
                return json_alarm_history_list

            for alarm_id in alarm_id_list:
                if '\'' in alarm_id or ';' in alarm_id:
                    raise Exception(
                        "Input from user contains single quote ['] or "
                        "semi-colon [;] characters[ {} ]".format(alarm_id))

            query = """
              select alarm_id, metrics, old_state, new_state,
                     reason, reason_data
              from alarm_state_history
              """

            where_clause = (
                " where tenant_id = '{}' ".format(tenant_id.encode('utf8')))

            alarm_id_where_clause_list = (
                [" alarm_id = '{}' ".format(id.encode('utf8'))
                 for id in alarm_id_list])

            alarm_id_where_clause = " or ".join(alarm_id_where_clause_list)

            where_clause += ' and (' + alarm_id_where_clause + ')'

            time_clause = ''
            if start_timestamp:
                # subtract 1 from timestamp to get >= semantics
                time_clause += " and time > " + str(start_timestamp - 1) + "s"
            if end_timestamp:
                # add 1 to timestamp to get <= semantics
                time_clause += " and time < " + str(end_timestamp + 1) + "s"

            offset_clause = self._build_offset_clause(offset)

            query += where_clause + time_clause + offset_clause

            try:
                result = self.influxdb_client.query(query, 's')
            except client.InfluxDBClientError as ex:
                # check for non-existent serie name. only happens
                # if alarm_state_history serie does not exist.
                msg = "Couldn't look up columns"
                if ex.code == 400 and ex.content == (msg):
                    return json_alarm_history_list
                else:
                    raise ex

            if not result:
                return json_alarm_history_list

            # There's only one serie, alarm_state_history.
            for point in result[0]['points']:
                alarm_point = {u'alarm_id': point[2],
                               u'metrics': json.loads(point[3]),
                               u'old_state': point[4], u'new_state': point[5],
                               u'reason': point[6], u'reason_data': point[7],
                               u'timestamp': time.strftime(
                                   "%Y-%m-%dT%H:%M:%SZ",
                                   time.gmtime(point[0])),
                               u'id': point[0]}

                json_alarm_history_list.append(alarm_point)

            return json_alarm_history_list

        except Exception as ex:
            LOG.exception(ex)
            raise exceptions.RepositoryException(ex)
@@ -1,39 +0,0 @@
# Copyright 2014 Hewlett-Packard
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import abc

import six


@six.add_metaclass(abc.ABCMeta)
class MetricsRepository(object):
    @abc.abstractmethod
    def list_metrics(self, tenant_id, region, name, dimensions, offset):
        pass

    @abc.abstractmethod
    def measurement_list(self, tenant_id, region, name, dimensions,
                         start_timestamp, end_timestamp, offset):
        pass

    @abc.abstractmethod
    def metrics_statistics(self, tenant_id, region, name, dimensions,
                           start_timestamp, end_timestamp, statistics, period):
        pass

    @abc.abstractmethod
    def alarm_history(self, tenant_id, alarm_id_list,
                      offset, start_timestamp, end_timestamp):
        pass
@@ -1,143 +0,0 @@
# Copyright 2014 Hewlett-Packard
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


class SubAlarmDefinition(object):
    """Holds sub alarm definition

    Used for comparing sub alarm definitions for equality.
    """

    def __init__(self, row=None, sub_expr=None):

        """Initialize

        :param row: Database row
        :param sub_expr: Result from expression parser
        :return:
        """

        super(SubAlarmDefinition, self).__init__()

        if row and sub_expr:
            raise Exception('Only one of row or sub_expr can be specified, '
                            'not both')

        if row:
            # id is not used for compare or hash.
            self.id = row['id']
            self.alarm_definition_id = row['alarm_definition_id']
            self.metric_name = row['metric_name']
            self.dimensions_str = row['dimensions']
            self.dimensions = self._init_dimensions(row['dimensions'])
            self.function = row['function']
            self.operator = row['operator']
            # Make sure that the following are converted to unicode
            self.period = str(row['period']).decode('utf8')
            self.periods = str(row['periods']).decode('utf8')
            # threshold comes from the DB as a float in 0.0 form.
            self.threshold = str(row['threshold']).decode('utf8')

        if sub_expr:
            # id is not used for compare or hash.
            self.id = ''
            # Must be injected.
            self.alarm_definition_id = ''
            self.metric_name = sub_expr.metric_name
            self.dimensions_str = sub_expr.dimensions_str
            self.dimensions = self._init_dimensions(sub_expr.dimensions_str)
            self.function = sub_expr.normalized_func.decode('utf8')
            self.operator = sub_expr.normalized_operator
            self.period = sub_expr.period
            self.periods = sub_expr.periods
            self.threshold = sub_expr.threshold

    def _init_dimensions(self, dimensions_str):

        dimensions = {}

        if dimensions_str:
            for dimension in dimensions_str.split(','):
                name, value = dimension.split('=')
                dimensions[name] = value

        return dimensions

    @property
    def expression(self):

        """Build the entire expressions as a string with spaces."""

        result = "{}({}".format(self.function.lower().encode('utf8'),
                                self.metric_name.encode('utf8'))

        if self.dimensions_str:
            result += "{{{}}}".format(self.dimensions_str.encode('utf8'))

        if self.period:
            result += ", {}".format(self.period.encode('utf8'))

        result += ")"

        result += " {} {}".format(self.operator.encode('utf8'),
                                  self.threshold.encode('utf8'))

        if self.periods:
            result += " times {}".format(self.periods.encode('utf8'))

        return result.decode('utf8')

    def __hash__(self):

        dimensions_str = "".join(sorted([name + value for name, value in
                                         self.dimensions.iteritems()]))

        # don't use id to hash.
        return (hash(self.alarm_definition_id) ^
                hash(dimensions_str) ^
                hash(self.function) ^
                hash(self.metric_name) ^
                hash(self.operator) ^
                hash(self.period) ^
                hash(self.periods) ^
                # Convert to float to handle cases like 0.0 == 0
                hash(float(self.threshold)))

    def __eq__(self, other):

        if id(self) == id(other):
            return True

        if not isinstance(other, SubAlarmDefinition):
            return False

        # don't use id to compare.
        return (self.alarm_definition_id == other.alarm_definition_id and
                self.dimensions == other.dimensions and
                self.function == other.function and
                self.metric_name == other.metric_name and
                self.operator == other.operator and
                self.period == other.period and
                self.periods == other.periods and
                # Convert to float to handle cases like 0.0 == 0
                float(self.threshold) == float(other.threshold))

    def same_key_fields(self, other):

        # compare everything but operator and threshold
        return (self.metric_name == other.metric_name and
                self.dimensions == other.dimensions and
                self.function == other.function and
                self.period == other.period and
                self.periods == other.periods)
@@ -1,673 +0,0 @@
# Copyright 2014 Hewlett-Packard
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime

from monasca.common.repositories import alarm_definitions_repository as adr
from monasca.common.repositories import constants
from monasca.common.repositories import exceptions
from monasca.common.repositories.model import sub_alarm_definition
from monasca.common.repositories.mysql import mysql_repository
from monasca.openstack.common import log
from monasca.openstack.common import uuidutils


LOG = log.getLogger(__name__)


class AlarmDefinitionsRepository(mysql_repository.MySQLRepository,
                                 adr.AlarmDefinitionsRepository):

    base_query = """
        select ad.id, ad.name, ad.description, ad.expression,
               ad.match_by, ad.severity, ad.actions_enabled,
               aaa.alarm_actions, aao.ok_actions, aau.undetermined_actions
        from alarm_definition as ad
        left join (select alarm_definition_id,
                          group_concat(action_id) as alarm_actions
                   from alarm_action
                   where alarm_state = 'ALARM'
                   group by alarm_definition_id) as aaa
            on aaa.alarm_definition_id = ad.id
        left join (select alarm_definition_id,
                          group_concat(action_id) as ok_actions
                   from alarm_action
                   where alarm_state = 'OK'
                   group by alarm_definition_id) as aao
            on aao.alarm_definition_id = ad.id
        left join (select alarm_definition_id,
                          group_concat(action_id) as undetermined_actions
                   from alarm_action
                   where alarm_state = 'UNDETERMINED'
                   group by alarm_definition_id) as aau
            on aau.alarm_definition_id = ad.id
        """

    def __init__(self):

        super(AlarmDefinitionsRepository, self).__init__()

    @mysql_repository.mysql_try_catch_block
    def get_alarm_definition(self, tenant_id, id):

        parms = [tenant_id, id]

        where_clause = """ where ad.tenant_id = %s
                           and ad.id = %s
                           and deleted_at is NULL """

        query = AlarmDefinitionsRepository.base_query + where_clause

        rows = self._execute_query(query, parms)

        if rows:
            return rows[0]
        else:
            raise exceptions.DoesNotExistException

    @mysql_repository.mysql_try_catch_block
    def get_alarm_definitions(self, tenant_id, name, dimensions, offset):

        parms = [tenant_id]

        select_clause = AlarmDefinitionsRepository.base_query

        where_clause = " where ad.tenant_id = %s and deleted_at is NULL "

        if name:
            where_clause += " and ad.name = %s "
            parms.append(name.encode('utf8'))

        if offset is not None:
            order_by_clause = " order by ad.id, ad.created_at "
            where_clause += " and ad.id > %s "
            parms.append(offset.encode('utf8'))
            limit_clause = " limit %s "
            parms.append(constants.PAGE_LIMIT)
        else:
            order_by_clause = " order by ad.created_at "
            limit_clause = ""

        if dimensions:
            inner_join = """ inner join sub_alarm_definition as sad
                             on sad.alarm_definition_id = ad.id """

            i = 0
            inner_join_parms = []
            for n, v in dimensions.iteritems():
                inner_join += """
                    inner join
                        (select distinct sub_alarm_definition_id
                         from sub_alarm_definition_dimension
                         where dimension_name = %s and value = %s) as
                         sadd{}
                    on sadd{}.sub_alarm_definition_id = sad.id
                    """.format(i, i)
                inner_join_parms += [n.encode('utf8'), v.encode('utf8')]
                i += 1

            select_clause += inner_join
            parms = inner_join_parms + parms

        query = select_clause + where_clause + order_by_clause + limit_clause

        return self._execute_query(query, parms)

    @mysql_repository.mysql_try_catch_block
    def get_sub_alarms(self, tenant_id, alarm_definition_id):

        parms = [tenant_id, alarm_definition_id]

        query = """select distinct sa.id as sub_alarm_id, sa.alarm_id,
                          sa.expression
                   from sub_alarm as sa
                   inner join alarm as a
                       on a.id = sa.alarm_id
                   inner join alarm_definition as ad
                       on ad.id = a.alarm_definition_id
                   where ad.tenant_id = %s and ad.id = %s
                   """

        return self._execute_query(query, parms)

    @mysql_repository.mysql_try_catch_block
    def get_alarm_metrics(self, tenant_id, alarm_definition_id):

        parms = [tenant_id, alarm_definition_id]

        query = """select distinct a.id as alarm_id, md.name,
                          mdg.dimensions
                   from alarm as a
                   inner join alarm_definition as ad
                       on ad.id = a.alarm_definition_id
                   inner join alarm_metric as am on am.alarm_id = a.id
                   inner join metric_definition_dimensions as mdd
                       on mdd.id = am.metric_definition_dimensions_id
                   inner join metric_definition as md
                       on md.id = mdd.metric_definition_id
                   left join (select dimension_set_id,
                              group_concat(name, '=', value) as dimensions
                              from metric_dimension group by dimension_set_id) as mdg
                       on mdg.dimension_set_id = mdd.metric_dimension_set_id
                   where ad.tenant_id = %s and ad.id = %s
                   order by a.id
                   """

        return self._execute_query(query, parms)

    @mysql_repository.mysql_try_catch_block
    def delete_alarm_definition(self, tenant_id, alarm_definition_id):
        """Soft delete the alarm definition.

        Soft delete the alarm definition and hard delete any associated
        alarms.

        :param tenant_id:
        :param alarm_definition_id:
        :returns True: -- if alarm definition exists and was deleted.
        :returns False: -- if the alarm definition does not exists.
        :raises RepositoryException:
        """

        cnxn, cursor = self._get_cnxn_cursor_tuple()

        with cnxn:

            cursor.execute("""update alarm_definition
                              set deleted_at = NOW()
                              where tenant_id = %s and id = %s and deleted_at is
                              NULL""",
                           [tenant_id, alarm_definition_id])

            if cursor.rowcount < 1:
                return False

            cursor.execute(
                """delete from alarm where alarm_definition_id = %s""",
                [alarm_definition_id])

            return True

    @mysql_repository.mysql_try_catch_block
    def get_sub_alarm_definitions(self, alarm_definition_id):

        parms = [alarm_definition_id]

        query = """select sad.*, sadd.dimensions
                   from sub_alarm_definition as sad
                   left join (select sub_alarm_definition_id,
                              group_concat(dimension_name, '=', value)
                                  as dimensions
                              from sub_alarm_definition_dimension
                              group by sub_alarm_definition_id)
                       as sadd
                       on sadd.sub_alarm_definition_id = sad.id
                   where sad.alarm_definition_id = %s
                   """

        return self._execute_query(query, parms)

    @mysql_repository.mysql_try_catch_block
    def create_alarm_definition(self, tenant_id, name, expression,
                                sub_expr_list, description, severity, match_by,
                                alarm_actions, undetermined_actions,
                                ok_actions):
        cnxn, cursor = self._get_cnxn_cursor_tuple()

        with cnxn:

            now = datetime.datetime.utcnow()
            alarm_definition_id = uuidutils.generate_uuid()
            cursor.execute("""insert into alarm_definition(
                                  id,
                                  tenant_id,
                                  name,
                                  description,
                                  expression,
                                  severity,
                                  match_by,
                                  actions_enabled,
                                  created_at,
                                  updated_at)
                              values (%s, %s, %s, %s, %s, %s, %s, %s, %s,
                                      %s)""", (
                alarm_definition_id, tenant_id, name.encode('utf8'),
                description.encode('utf8'), expression.encode('utf8'),
                severity.upper().encode('utf8'),
                ",".join(match_by).encode('utf8'), 1, now, now))

            for sub_expr in sub_expr_list:
                sub_alarm_definition_id = uuidutils.generate_uuid()
                sub_expr.id = sub_alarm_definition_id
                cursor.execute("""insert into sub_alarm_definition(
                                      id,
                                      alarm_definition_id,
                                      function,
                                      metric_name,
                                      operator,
                                      threshold,
                                      period,
                                      periods,
                                      created_at,
                                      updated_at)
                                  values(%s,%s,%s,%s,%s,%s,%s,%s,%s,
                                         %s)""",
                               (
                                   sub_alarm_definition_id,
                                   alarm_definition_id,
                                   sub_expr.normalized_func.encode('utf8'),
                                   sub_expr.normalized_metric_name.encode(
                                       "utf8"),
                                   sub_expr.normalized_operator.encode('utf8'),
                                   sub_expr.threshold.encode('utf8'),
                                   sub_expr.period.encode('utf8'),
                                   sub_expr.periods.encode('utf8'), now, now))

                for dimension in sub_expr.dimensions_as_list:
                    parsed_dimension = dimension.split('=')
                    cursor.execute("""insert into
                                      sub_alarm_definition_dimension(
                                          sub_alarm_definition_id,
                                          dimension_name,
                                          value)
                                      values(%s,%s,%s)""", (
                        sub_alarm_definition_id,
                        parsed_dimension[0].encode('utf8'),
                        parsed_dimension[1].encode('utf8')))

            self._insert_into_alarm_action(cursor, alarm_definition_id,
                                           alarm_actions, u"ALARM")
            self._insert_into_alarm_action(cursor, alarm_definition_id,
                                           undetermined_actions,
                                           u"UNDETERMINED")
            self._insert_into_alarm_action(cursor, alarm_definition_id,
                                           ok_actions, u"OK")

            return alarm_definition_id

    @mysql_repository.mysql_try_catch_block
    def update_or_patch_alarm_definition(self, tenant_id, alarm_definition_id,
                                         name, expression,
                                         sub_expr_list, actions_enabled,
                                         description, alarm_actions,
                                         ok_actions, undetermined_actions,
                                         match_by, severity, patch=False):

        cnxn, cursor = self._get_cnxn_cursor_tuple()

        with cnxn:

            # Get the original alarm definition from the DB
            parms = [tenant_id, alarm_definition_id]

            where_clause = """ where ad.tenant_id = %s
                               and ad.id = %s
                               and deleted_at is NULL """

            query = AlarmDefinitionsRepository.base_query + where_clause

            cursor.execute(query, parms)

            if cursor.rowcount < 1:
                raise exceptions.DoesNotExistException

            original_row = cursor.fetchall()[0]

            query = """
                select sad.*, sadd.dimensions
                from sub_alarm_definition as sad
                left join (select sub_alarm_definition_id,
                           group_concat(dimension_name, '=',
                                        value) as dimensions
                           from sub_alarm_definition_dimension
                           group by sub_alarm_definition_id) as sadd
                    on sadd.sub_alarm_definition_id = sad.id
                where sad.alarm_definition_id = %s"""

            cursor.execute(query, [alarm_definition_id])

            rows = cursor.fetchall()

            old_sub_alarm_defs_by_id = {}

            for row in rows:
                sad = sub_alarm_definition.SubAlarmDefinition(row=row)
                old_sub_alarm_defs_by_id[sad.id] = sad

            if expression:
                (
                    changed_sub_alarm_defs_by_id,
                    new_sub_alarm_defs_by_id,
                    old_sub_alarm_defs_by_id,
                    unchanged_sub_alarm_defs_by_id
                ) = self._determine_sub_expr_changes(
                    alarm_definition_id, old_sub_alarm_defs_by_id,
                    sub_expr_list)
                if old_sub_alarm_defs_by_id or new_sub_alarm_defs_by_id:
                    new_count = (len(new_sub_alarm_defs_by_id) +
                                 len(changed_sub_alarm_defs_by_id) +
                                 len(unchanged_sub_alarm_defs_by_id))
                    old_count = len(old_sub_alarm_defs_by_id)
                    if new_count != old_count:
                        msg = 'number of subexpressions must not change'
                    else:
                        msg = 'metrics in subexpression must not change'
                    raise exceptions.InvalidUpdateException(
                        msg.encode('utf8'))
            else:
                unchanged_sub_alarm_defs_by_id = old_sub_alarm_defs_by_id
                changed_sub_alarm_defs_by_id = {}
                new_sub_alarm_defs_by_id = {}
                old_sub_alarm_defs_by_id = {}

            # Get a common update time
            now = datetime.datetime.utcnow()

            # Update the alarm definition
            query = """
                update alarm_definition
                set name = %s,
                    description = %s,
                    expression = %s,
                    match_by = %s,
                    severity = %s,
                    actions_enabled = %s,
                    updated_at = %s
                where tenant_id = %s and id = %s"""

            if name is None:
                new_name = original_row['name']
            else:
                new_name = name.encode('utf8')

            if description is None:
                if patch:
                    new_description = original_row['description']
                else:
                    new_description = ''
            else:
                new_description = description.encode('utf8')

            if expression is None:
                new_expression = original_row['expression']
            else:
                new_expression = expression.encode('utf8')

            if severity is None:
                if patch:
                    new_severity = original_row['severity']
                else:
                    new_severity = 'LOW'
            else:
                new_severity = severity.encode('utf8')

            if match_by is None:
                if patch:
                    new_match_by = original_row['match_by']
                else:
                    new_match_by = None
            else:
                new_match_by = ",".join(match_by).encode('utf8')

            if new_match_by != original_row['match_by']:
                msg = "match_by must not change".encode('utf8')
                raise exceptions.InvalidUpdateException(msg)

            if actions_enabled is None:
                new_actions_enabled = original_row['actions_enabled']
            else:
                new_actions_enabled = actions_enabled

            parms = [new_name,
                     new_description,
                     new_expression,
                     new_match_by,
                     new_severity,
                     1 if new_actions_enabled else 0,
                     now,
                     tenant_id,
                     alarm_definition_id]

            cursor.execute(query, parms)

            # Delete the old sub alarm definitions
            query = """
                delete from sub_alarm_definition where id = %s"""

            for sub_alarm_def_id in old_sub_alarm_defs_by_id.values():
                parms = [sub_alarm_def_id]
                cursor.execute(query, parms)

            # Update changed sub alarm definitions
            query = """
                update sub_alarm_definition
                set operator = %s,
                    threshold = %s,
                    updated_at = %s
                where id = %s"""

            for sub_alarm_definition_id, sub_alarm_def in (
                    changed_sub_alarm_defs_by_id.iteritems()):
                parms = [sub_alarm_def.operator, sub_alarm_def.threshold,
                         now, sub_alarm_definition_id]
                cursor.execute(query, parms)

            # Insert new sub alarm definitions
            query = """
                insert into sub_alarm_definition(
                    id,
                    alarm_definition_id,
                    function,
                    metric_name,
                    operator,
                    threshold,
                    period,
                    periods,
                    created_at,
                    updated_at)
                values(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"""

            sub_query = """
                insert into sub_alarm_definition_dimension(
                    sub_alarm_definition_id,
                    dimension_name,
                    value)
                values(%s, %s,%s)"""

            for sub_alarm_def in new_sub_alarm_defs_by_id.values():
                parms = [sub_alarm_def.id,
                         sub_alarm_def.alarm_definition_id,
                         sub_alarm_def.function.encode('utf8'),
                         sub_alarm_def.metric_name.encode('utf8'),
                         sub_alarm_def.operator.encode('utf8'),
                         str(sub_alarm_def.threshold).encode('utf8'),
                         str(sub_alarm_def.period).encode('utf8'),
                         str(sub_alarm_def.periods).encode('utf8'),
                         now,
                         now]

                cursor.execute(query, parms)

                for name, value in sub_alarm_def.dimensions.items():
                    parms = [sub_alarm_def.id, name.encode('utf8'),
                             value.encode('utf8')]

                    cursor.execute(sub_query, parms)

            # Delete old alarm actions
            if patch:
                if alarm_actions is not None:
                    self._delete_alarm_actions(cursor, alarm_definition_id,
                                               'ALARM')
                if ok_actions is not None:
                    self._delete_alarm_actions(cursor, alarm_definition_id,
                                               'OK')
                if undetermined_actions is not None:
                    self._delete_alarm_actions(cursor, alarm_definition_id,
                                               'UNDETERMINED')
            else:
                query = """
                    delete from alarm_action
                    where alarm_definition_id = %s"""

                parms = [alarm_definition_id]

                cursor.execute(query, parms)

            # Insert new alarm actions
            self._insert_into_alarm_action(cursor, alarm_definition_id,
                                           alarm_actions,
                                           u"ALARM")

            self._insert_into_alarm_action(cursor, alarm_definition_id,
                                           undetermined_actions,
                                           u"UNDETERMINED")

            self._insert_into_alarm_action(cursor, alarm_definition_id,
                                           ok_actions,
                                           u"OK")

            # Get the updated alarm definition from the DB
            parms = [tenant_id, alarm_definition_id]

            where_clause = """ where ad.tenant_id = %s
                               and ad.id = %s
                               and deleted_at is NULL """

            query = AlarmDefinitionsRepository.base_query + where_clause

            cursor.execute(query, parms)

            if cursor.rowcount < 1:
                raise Exception("Failed to find current alarm definition")

            updated_row = cursor.fetchall()[0]

            sub_alarm_defs_dict = {'old': old_sub_alarm_defs_by_id,
                                   'changed':
                                       changed_sub_alarm_defs_by_id,
                                   'new':
                                       new_sub_alarm_defs_by_id,
                                   'unchanged':
                                       unchanged_sub_alarm_defs_by_id}

            # Return the alarm def and the sub alarm defs
            return updated_row, sub_alarm_defs_dict

    def _determine_sub_expr_changes(self, alarm_definition_id,
                                    old_sub_alarm_defs_by_id,
                                    sub_expr_list):

        old_sub_alarm_defs_set = set(
            old_sub_alarm_defs_by_id.values())

        new_sub_alarm_defs_set = set()
        for sub_expr in sub_expr_list:
            sad = sub_alarm_definition.SubAlarmDefinition(
                sub_expr=sub_expr)
            # Inject the alarm definition id.
            sad.alarm_definition_id = alarm_definition_id.decode('utf8')
            new_sub_alarm_defs_set.add(sad)

        # Identify old or changed expressions
        old_or_changed_sub_alarm_defs_set = (
            old_sub_alarm_defs_set - new_sub_alarm_defs_set)
        # Identify new or changed expressions
        new_or_changed_sub_alarm_defs_set = (
            new_sub_alarm_defs_set - old_sub_alarm_defs_set)
        # Find changed expressions. O(n^2) == bad!
        # This algo may not work if sub expressions are duplicated.
        changed_sub_alarm_defs_by_id = {}
        old_or_changed_sub_alarm_defs_set_to_remove = set()
        new_or_changed_sub_alarm_defs_set_to_remove = set()
        for old_or_changed in old_or_changed_sub_alarm_defs_set:
            for new_or_changed in new_or_changed_sub_alarm_defs_set:
                if old_or_changed.same_key_fields(new_or_changed):
                    old_or_changed_sub_alarm_defs_set_to_remove.add(
                        old_or_changed
                    )
                    new_or_changed_sub_alarm_defs_set_to_remove.add(
                        new_or_changed
                    )
                    changed_sub_alarm_defs_by_id[
                        old_or_changed.id] = (
                        new_or_changed)
        old_or_changed_sub_alarm_defs_set = (
            old_or_changed_sub_alarm_defs_set -
            old_or_changed_sub_alarm_defs_set_to_remove
        )
        new_or_changed_sub_alarm_defs_set = (
            new_or_changed_sub_alarm_defs_set -
            new_or_changed_sub_alarm_defs_set_to_remove
        )
        # Create the list of unchanged expressions
        unchanged_sub_alarm_defs_by_id = (
            old_sub_alarm_defs_by_id.copy())
        for old_sub_alarm_def in old_or_changed_sub_alarm_defs_set:
            del unchanged_sub_alarm_defs_by_id[old_sub_alarm_def.id]
        for sub_alarm_definition_id in (
||||
changed_sub_alarm_defs_by_id.keys()):
|
||||
del unchanged_sub_alarm_defs_by_id[
|
||||
sub_alarm_definition_id]
|
||||
|
||||
# Remove old sub expressions
|
||||
temp = {}
|
||||
for old_sub_alarm_def in old_or_changed_sub_alarm_defs_set:
|
||||
temp[old_sub_alarm_def.id] = old_sub_alarm_def
|
||||
old_sub_alarm_defs_by_id = temp
|
||||
# Create IDs for new expressions
|
||||
new_sub_alarm_defs_by_id = {}
|
||||
for new_sub_alarm_def in new_or_changed_sub_alarm_defs_set:
|
||||
sub_alarm_definition_id = uuidutils.generate_uuid()
|
||||
new_sub_alarm_def.id = sub_alarm_definition_id
|
||||
new_sub_alarm_defs_by_id[sub_alarm_definition_id] = (
|
||||
new_sub_alarm_def)
|
||||
|
||||
return (changed_sub_alarm_defs_by_id,
|
||||
new_sub_alarm_defs_by_id,
|
||||
old_sub_alarm_defs_by_id,
|
||||
unchanged_sub_alarm_defs_by_id)
|
||||
|
||||
def _delete_alarm_actions(self, cursor, id, alarm_action_name):
|
||||
|
||||
query = """
|
||||
delete
|
||||
from alarm_action
|
||||
where alarm_definition_id = %s and alarm_state = %s
|
||||
"""
|
||||
parms = [id, alarm_action_name]
|
||||
|
||||
cursor.execute(query, parms)
|
||||
|
||||
def _insert_into_alarm_action(self, cursor, alarm_definition_id, actions,
|
||||
alarm_state):
|
||||
|
||||
if actions is None:
|
||||
return
|
||||
|
||||
for action in actions:
|
||||
cursor.execute("select id from notification_method where id = %s",
|
||||
(action.encode('utf8'),))
|
||||
row = cursor.fetchone()
|
||||
if not row:
|
||||
raise exceptions.RepositoryException(
|
||||
"Non-existent notification id {} submitted for {} "
|
||||
"notification action".format(action.encode('utf8'),
|
||||
alarm_state.encode('utf8')))
|
||||
cursor.execute("""insert into alarm_action(
|
||||
alarm_definition_id,
|
||||
alarm_state,
|
||||
action_id)
|
||||
values(%s,%s,%s)""", (
|
||||
alarm_definition_id, alarm_state.encode('utf8'),
|
||||
action.encode('utf8')))
|
|
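Note on the deleted block above: _determine_sub_expr_changes matches old against new sub-expressions with a nested loop, which the inline comment itself flags as O(n^2). A minimal sketch of the usual linear fix, assuming each definition can expose the fields it is compared on as a hashable tuple (the key_fields() helper below is hypothetical, standing in for whatever same_key_fields() compares):

    def match_changed(old_defs, new_defs):
        # Index the new definitions by their key fields once: O(n).
        # key_fields() is an assumed helper returning a hashable tuple.
        new_by_key = {d.key_fields(): d for d in new_defs}
        changed = {}
        for old in old_defs:
            # A single dict lookup replaces the inner loop.
            new = new_by_key.get(old.key_fields())
            if new is not None:
                changed[old.id] = new
        return changed

Like the original, this sketch breaks down if several sub-expressions share the same key fields.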
@@ -1,271 +0,0 @@
-# Copyright 2014 Hewlett-Packard
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from monasca.common.repositories import alarms_repository
-from monasca.common.repositories import constants
-from monasca.common.repositories import exceptions
-from monasca.common.repositories.mysql import mysql_repository
-from monasca.openstack.common import log
-
-
-LOG = log.getLogger(__name__)
-
-
-class AlarmsRepository(mysql_repository.MySQLRepository,
-                       alarms_repository.AlarmsRepository):
-
-    base_query = """
-        select distinct a.id as alarm_id, a.state,
-        ad.id as alarm_definition_id, ad.name as alarm_definition_name,
-        ad.severity,
-        md.name as metric_name, mdg.dimensions as metric_dimensions
-        from alarm as a
-        inner join alarm_definition as ad
-           on ad.id = a.alarm_definition_id
-        inner join alarm_metric as am on am.alarm_id = a.id
-        inner join metric_definition_dimensions as mdd
-           on mdd.id = am.metric_definition_dimensions_id
-        inner join metric_definition as md
-           on md.id = mdd.metric_definition_id
-        left join (select dimension_set_id, name, value,
-                   group_concat(name, '=', value) as dimensions
-                   from metric_dimension group by dimension_set_id) as mdg
-           on mdg.dimension_set_id = mdd.metric_dimension_set_id
-        """
-
-    def __init__(self):
-
-        super(AlarmsRepository, self).__init__()
-
-    @mysql_repository.mysql_try_catch_block
-    def get_alarm_definition(self, tenant_id, alarm_id):
-
-        query = """
-            select *
-            from alarm_definition as ad
-            inner join alarm as a on a.alarm_definition_id = ad.id
-            where ad.tenant_id = %s and a.id = %s"""
-
-        alarm_definition_rows = self._execute_query(query,
-                                                    (tenant_id, alarm_id))
-
-        if not alarm_definition_rows:
-            raise exceptions.DoesNotExistException
-
-        # There should only be 1 row.
-        return alarm_definition_rows[0]
-
-    @mysql_repository.mysql_try_catch_block
-    def get_alarm_metrics(self, alarm_id):
-
-        parms = [alarm_id]
-
-        query = """select distinct a.id as alarm_id, md.name,
-                   mdg.dimensions
-                   from alarm as a
-                   inner join alarm_metric as am on am.alarm_id = a.id
-                   inner join metric_definition_dimensions as mdd
-                      on mdd.id = am.metric_definition_dimensions_id
-                   inner join metric_definition as md
-                      on md.id = mdd.metric_definition_id
-                   left join (select dimension_set_id,
-                              group_concat(name, '=', value) as dimensions
-                              from metric_dimension group by dimension_set_id) as mdg
-                      on mdg.dimension_set_id = mdd.metric_dimension_set_id
-                   where a.id = %s
-                   order by a.id
-                   """
-
-        return self._execute_query(query, parms)
-
-    @mysql_repository.mysql_try_catch_block
-    def get_sub_alarms(self, tenant_id, alarm_id):
-
-        parms = [tenant_id, alarm_id]
-
-        query = """select distinct sa.id as sub_alarm_id, sa.alarm_id,
-                   sa.expression, ad.id as alarm_definition_id
-                   from sub_alarm as sa
-                   inner join alarm as a
-                      on a.id = sa.alarm_id
-                   inner join alarm_definition as ad
-                      on ad.id = a.alarm_definition_id
-                   where ad.tenant_id = %s and a.id = %s
-                   """
-
-        return self._execute_query(query, parms)
-
-    @mysql_repository.mysql_try_catch_block
-    def update_alarm(self, tenant_id, id, state):
-
-        cnxn, cursor = self._get_cnxn_cursor_tuple()
-
-        with cnxn:
-
-            select_query = """
-                select a.state
-                from alarm as a
-                inner join alarm_definition as ad
-                   on ad.id = a.alarm_definition_id
-                where ad.tenant_id = %s and a.id = %s"""
-
-            cursor.execute(select_query, (tenant_id, id))
-
-            if cursor.rowcount < 1:
-                raise exceptions.DoesNotExistException
-
-            prev_state = cursor.fetchone()['state']
-
-            if state != prev_state:
-
-                parms = [state, tenant_id, id]
-
-                update_query = """
-                    update alarm
-                    set state = %s
-                    where alarm.id in
-                       (select distinct id
-                        from
-                          (select distinct alarm.id
-                           from alarm
-                           inner join alarm_definition
-                              on alarm_definition.id = alarm.alarm_definition_id
-                           where alarm_definition.tenant_id = %s and alarm.id = %s)
-                        as tmptable
-                       )"""
-
-                cursor.execute(update_query, parms)
-
-        return prev_state
-
-    @mysql_repository.mysql_try_catch_block
-    def delete_alarm(self, tenant_id, id):
-
-        parms = [tenant_id, id]
-
-        query = """
-            delete alarm.*
-            from alarm
-            join
-              (select distinct a.id
-               from alarm as a
-               inner join alarm_definition as ad
-                  on ad.id = a.alarm_definition_id
-               where ad.tenant_id = %s and a.id = %s) as b
-              on b.id = alarm.id
-            """
-
-        cnxn, cursor = self._get_cnxn_cursor_tuple()
-
-        with cnxn:
-
-            cursor.execute(query, parms)
-
-            if cursor.rowcount < 1:
-                raise exceptions.DoesNotExistException
-
-    @mysql_repository.mysql_try_catch_block
-    def get_alarm(self, tenant_id, id):
-
-        parms = [tenant_id, id]
-
-        select_clause = AlarmsRepository.base_query
-
-        where_clause = """ where ad.tenant_id = %s
-                           and a.id = %s """
-
-        query = select_clause + where_clause
-
-        rows = self._execute_query(query, parms)
-
-        if not rows:
-            raise exceptions.DoesNotExistException
-        else:
-            return rows
-
-    @mysql_repository.mysql_try_catch_block
-    def get_alarms(self, tenant_id, query_parms, offset):
-
-        parms = [tenant_id]
-
-        select_clause = AlarmsRepository.base_query
-
-        order_by_clause = " order by a.id "
-
-        where_clause = " where ad.tenant_id = %s "
-
-        if offset is not None:
-            where_clause += " and ad.id > %s"
-            parms.append(offset.encode('utf8'))
-            limit_clause = " limit %s "
-            parms.append(constants.PAGE_LIMIT)
-        else:
-            limit_clause = ""
-
-        if 'alarm_definition_id' in query_parms:
-            parms.append(query_parms['alarm_definition_id'])
-            where_clause += " and ad.id = %s "
-
-        if 'metric_name' in query_parms:
-            sub_select_clause = """
-                and a.id in (select distinct a.id from alarm as a
-                             inner join alarm_metric as am on am.alarm_id
-                             = a.id
-                             inner join metric_definition_dimensions as mdd
-                                on mdd.id =
-                                am.metric_definition_dimensions_id
-                             inner join (select distinct id from
-                                         metric_definition
-                                         where name = %s) as md
-                                on md.id = mdd.metric_definition_id)
-                """
-
-            parms.append(query_parms['metric_name'].encode('utf8'))
-            where_clause += sub_select_clause
-
-        if 'state' in query_parms:
-            parms.append(query_parms['state'].encode('utf8'))
-            where_clause += " and a.state = %s "
-
-        if 'metric_dimensions' in query_parms:
-            sub_select_clause = """
-                and a.id in (select distinct a.id from alarm as a
-                             inner join alarm_metric as am on am.alarm_id
-                             = a.id
-                             inner join metric_definition_dimensions as mdd
-                                on mdd.id =
-                                am.metric_definition_dimensions_id
-                """
-            sub_select_parms = []
-            i = 0
-            for metric_dimension in query_parms['metric_dimensions'].split(
-                    ','):
-                parsed_dimension = metric_dimension.split(':')
-                sub_select_clause += """
-                    inner join (select distinct dimension_set_id
-                                from metric_dimension
-                                where name = %s and value = %s) as md{}
-                       on md{}.dimension_set_id = mdd.metric_dimension_set_id
-                    """.format(i, i)
-                i += 1
-                sub_select_parms += [parsed_dimension[0].encode('utf8'),
-                                     parsed_dimension[1].encode('utf8')]
-
-            sub_select_clause += ")"
-            parms += sub_select_parms
-            where_clause += sub_select_clause
-
-        query = select_clause + where_clause + order_by_clause + limit_clause
-
-        return self._execute_query(query, parms)
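The base_query being deleted above folds each metric's dimension set into a single dimensions column with group_concat(name, '=', value) grouped by dimension_set_id. In plain Python the same aggregation looks roughly like this (a sketch; rows stands for (dimension_set_id, name, value) tuples as stored in the metric_dimension table):

    from collections import defaultdict

    def flatten_dimensions(rows):
        grouped = defaultdict(list)
        for set_id, name, value in rows:
            grouped[set_id].append('%s=%s' % (name, value))
        # One comma-joined "name=value" string per dimension set, which is
        # what the mdg.dimensions column carries in the query above.
        return dict((set_id, ','.join(pairs))
                    for set_id, pairs in grouped.items())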
@@ -12,10 +12,10 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-from monasca.common.repositories import constants
-from monasca.common.repositories import events_repository as er
-from monasca.common.repositories.mysql import mysql_repository
-from monasca.openstack.common import log
+from monasca_events_api.common.repositories import constants
+from monasca_events_api.common.repositories import events_repository as er
+from monasca_events_api.common.repositories.mysql import mysql_repository
+from monasca_events_api.openstack.common import log
 
 
 LOG = log.getLogger(__name__)
@@ -15,7 +15,7 @@
 from oslo.config import cfg
 import peewee
 
-from monasca.openstack.common import log
+from monasca_events_api.openstack.common import log
 
 
 LOG = log.getLogger(__name__)
@@ -28,4 +28,4 @@ db = peewee.MySQLDatabase(cfg.CONF.mysql.database_name,
 
 class Model(peewee.Model):
     class Meta:
-        database = db
+        database = db
@@ -15,8 +15,8 @@
 import MySQLdb as mdb
 from oslo.config import cfg
 
-from monasca.common.repositories import exceptions
-from monasca.openstack.common import log
+from monasca_events_api.common.repositories import exceptions
+from monasca_events_api.openstack.common import log
 
 
 LOG = log.getLogger(__name__)
@@ -1,167 +0,0 @@
-# Copyright 2014 Hewlett-Packard
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import datetime
-
-from monasca.common.repositories import constants
-from monasca.common.repositories import exceptions
-from monasca.common.repositories.mysql import mysql_repository
-from monasca.common.repositories import notifications_repository as nr
-from monasca.openstack.common import log
-from monasca.openstack.common import uuidutils
-
-
-LOG = log.getLogger(__name__)
-
-
-class NotificationsRepository(mysql_repository.MySQLRepository,
-                              nr.NotificationsRepository):
-
-    def __init__(self):
-
-        super(NotificationsRepository, self).__init__()
-
-    def create_notification(self, tenant_id, name,
-                            notification_type, address):
-
-        cnxn, cursor = self._get_cnxn_cursor_tuple()
-
-        with cnxn:
-
-            query = """
-                select *
-                from notification_method
-                where tenant_id = %s and name = %s"""
-
-            parms = [tenant_id, name.encode('utf8')]
-
-            cursor.execute(query, parms)
-
-            if cursor.rowcount > 0:
-                raise exceptions.AlreadyExistsException('Notification already '
-                                                        'exists')
-
-            now = datetime.datetime.utcnow()
-            notification_id = uuidutils.generate_uuid()
-            query = """
-                insert into notification_method(
-                  id,
-                  tenant_id,
-                  name,
-                  type,
-                  address,
-                  created_at,
-                  updated_at
-                ) values (%s, %s, %s, % s, %s, %s, %s)"""
-
-            parms = [notification_id,
-                     tenant_id,
-                     name.encode('utf8'),
-                     notification_type.encode('utf8'),
-                     address.encode('utf8'),
-                     now,
-                     now]
-
-            cursor.execute(query, parms)
-
-        return notification_id
-
-    @mysql_repository.mysql_try_catch_block
-    def list_notifications(self, tenant_id, offset):
-
-        query = """
-            select *
-            from notification_method
-            where tenant_id = %s"""
-
-        parms = [tenant_id]
-
-        if offset is not None:
-            query += " and id > %s order by id limit %s"
-            parms.append(offset.encode('utf8'))
-            parms.append(constants.PAGE_LIMIT)
-
-        rows = self._execute_query(query, parms)
-
-        return rows
-
-    @mysql_repository.mysql_try_catch_block
-    def delete_notification(self, tenant_id, id):
-
-        cnxn, cursor = self._get_cnxn_cursor_tuple()
-
-        with cnxn:
-
-            query = """
-                select *
-                from notification_method
-                where tenant_id = %s and id = %s"""
-
-            parms = [tenant_id, id]
-
-            cursor.execute(query, parms)
-
-            if cursor.rowcount < 1:
-                raise exceptions.DoesNotExistException
-
-            query = """
-                delete
-                from notification_method
-                where tenant_id = %s and id = %s"""
-
-            cursor.execute(query, parms)
-
-    @mysql_repository.mysql_try_catch_block
-    def list_notification(self, tenant_id, notification_id):
-
-        parms = [tenant_id, notification_id]
-
-        query = """
-            select *
-            from notification_method
-            where tenant_id = %s and id = %s"""
-
-        rows = self._execute_query(query, parms)
-
-        if rows:
-            return rows[0]
-        else:
-            raise exceptions.DoesNotExistException
-
-    @mysql_repository.mysql_try_catch_block
-    def update_notification(
-            self, id, tenant_id, name, type, address):
-
-        cnxn, cursor = self._get_cnxn_cursor_tuple()
-
-        with cnxn:
-
-            now = datetime.datetime.utcnow()
-
-            query = """
-                update notification_method
-                set name = %s,
-                    type = %s,
-                    address = %s,
-                    created_at = %s,
-                    updated_at = %s
-                where tenant_id = %s and id = %s"""
-
-            parms = [name.encode('utf8'), type.encode('utf8'), address.encode(
-                'utf8'), now, now, tenant_id, id]
-
-            cursor.execute(query, parms)
-
-            if cursor.rowcount < 1:
-                raise exceptions.DoesNotExistException('Not Found')
@@ -16,11 +16,11 @@ import uuid
 import MySQLdb
 from oslo_utils import timeutils
 
-from monasca.common.repositories import constants
-from monasca.common.repositories import exceptions
-from monasca.common.repositories.mysql import mysql_repository
-from monasca.common.repositories import streams_repository as sdr
-from monasca.openstack.common import log
+from monasca_events_api.common.repositories import constants
+from monasca_events_api.common.repositories import exceptions
+from monasca_events_api.common.repositories.mysql import mysql_repository
+from monasca_events_api.common.repositories import streams_repository as sdr
+from monasca_events_api.openstack.common import log
 
 
 LOG = log.getLogger(__name__)
@@ -15,9 +15,9 @@
 import model
 import peewee
 
-from monasca.common.repositories import exceptions
-from monasca.common.repositories import transforms_repository
-from monasca.openstack.common import log
+from monasca_events_api.common.repositories import exceptions
+from monasca_events_api.common.repositories import transforms_repository
+from monasca_events_api.openstack.common import log
 
 
 LOG = log.getLogger(__name__)
@@ -1,43 +0,0 @@
-# Copyright 2014 Hewlett-Packard
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import abc
-
-import six
-
-
-@six.add_metaclass(abc.ABCMeta)
-class NotificationsRepository(object):
-
-    @abc.abstractmethod
-    def create_notification(self, id, tenant_id, name, notification_type,
-                            address):
-        return
-
-    @abc.abstractmethod
-    def list_notifications(self, tenant_id, offset):
-        return
-
-    @abc.abstractmethod
-    def delete_notification(self, tenant_id, notification_id):
-        return
-
-    @abc.abstractmethod
-    def list_notification(self, tenant_id, notification_id):
-        return
-
-    @abc.abstractmethod
-    def update_notification(self, id, tenant_id, name, notification_type,
-                            address):
-        return
@@ -18,7 +18,7 @@ import falcon
 from falcon import api_helpers
 from stevedore import driver
 
-from monasca.openstack.common import log
+from monasca_events_api.openstack.common import log
 
 RESOURCE_METHOD_FLAG = 'fab05a04-b861-4651-bd0c-9cb3eb9a6088'
 
@@ -29,7 +29,7 @@ def init_driver(namespace, driver_name, drv_invoke_args=()):
     """Initialize the resource driver and returns it.
 
     :param namespace: the resource namespace (in setup.cfg).
-    :param driver_name: the driver name (in monasca.conf)
+    :param driver_name: the driver name (in monasca_events_api.conf)
     :param invoke_args: args to pass to the driver (a tuple)
     """
     mgr = driver.DriverManager(namespace=namespace, name=driver_name,
@@ -135,7 +135,7 @@ class ResourceAPI(falcon.API):
 
         :param resource_name: the name of the resource.
        :param namespace: the resource namespace (in setup.cfg).
-        :param driver_name: the driver name (in monasca.conf)
+        :param driver_name: the driver name (in monasca_events_api.conf)
         :param invoke_args: args to pass to the driver (a tuple)
         :param uri: the uri to associate with the resource
         """
@@ -144,4 +144,4 @@ class ResourceAPI(falcon.API):
                 (resource_name, driver_name))
         self.add_route(uri, resource_driver)
         LOG.debug('%s dispatcher driver has been added to the routes!' %
-                  (resource_name))
+                  (resource_name))
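For context on the hunks above: init_driver wraps stevedore's DriverManager, which resolves a driver name against an entry-point namespace declared in setup.cfg. A minimal sketch of the pattern (a standalone illustration, not the module's exact body):

    from stevedore import driver

    def load_driver(namespace, name, invoke_args=()):
        # stevedore looks `name` up in the [entry_points] group called
        # `namespace` and instantiates the class it finds there.
        mgr = driver.DriverManager(namespace=namespace,
                                   name=name,
                                   invoke_on_load=True,
                                   invoke_args=invoke_args)
        return mgr.driver

    # e.g. the events repository named in the config file:
    # repo = load_driver('monasca_events_api.repositories', 'mysql_events_repo')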
@@ -1,30 +0,0 @@
-# Copyright 2013 IBM Corp
-#
-# Author: Tong Li <litong01@us.ibm.com>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import abc
-
-import six
-
-
-@six.add_metaclass(abc.ABCMeta)
-class Base(object):
-
-    def __init__(self, conf):
-        self.conf = conf
-
-    @abc.abstractmethod
-    def define_routes(self, app):
-        """Post metric data interface."""
@@ -1,41 +0,0 @@
-# Copyright 2013 IBM Corp
-#
-# Author: Tong Li <litong01@us.ibm.com>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import falcon
-
-from monasca.api import monasca_api_v2
-from monasca.common import kafka_conn
-from monasca.common import resource_api
-from monasca.openstack.common import log
-
-
-LOG = log.getLogger(__name__)
-
-
-class KafkaDispatcher(monasca_api_v2.V2API):
-    def __init__(self, global_conf):
-        LOG.debug('initializing KafkaDispatcher!')
-        super(KafkaDispatcher, self).__init__(global_conf)
-
-        self._kafka_conn = kafka_conn.KafkaConnection()
-
-    @resource_api.Restify('/v2.0/metrics/', method='post')
-    def do_post_metrics(self, req, res):
-        LOG.debug('Getting the call.')
-        msg = req.stream.read()
-
-        code = self._kafka_conn.send_messages(msg)
-        res.status = getattr(falcon, 'HTTP_' + str(code))
@@ -1,81 +0,0 @@
-# Copyright 2013 IBM Corp
-#
-# Author: Tong Li <litong01@us.ibm.com>
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import falcon
-from oslo.config import cfg
-
-from monasca.api import monasca_api_v2
-from monasca.common import resource_api
-from monasca.openstack.common import log
-
-OPTS = [
-    cfg.MultiStrOpt('id',
-                    default=['sample'],
-                    help='Multiple String configuration.'),
-    cfg.StrOpt('prefix',
-               default='monasca_',
-               help='String configuration sample.'),
-]
-cfg.CONF.register_opts(OPTS, group='sample_dispatcher')
-
-
-LOG = log.getLogger(__name__)
-
-
-class SampleDispatcher(monasca_api_v2.V2API):
-    """Monasca dispatcher sample class
-
-    This class shows how to develop a dispatcher and how the configuration
-    parameters should be defined and how these configuration parameters
-    should be set in monasca.conf file.
-
-    This class uses configuration parameters appear in sample_dispatcher
-    section such as the following:
-
-    [sample_dispatcher]
-    id = 101
-    id = 105
-    id = 180
-    prefix = sample__
-
-    If the above section appears in file monasca.conf, these values will be
-    loaded to cfg.CONF after the dispatcher gets loaded. The cfg.CONF should
-    have the following values:
-
-    cfg.CONF.sample_dispatcher.id = [101, 105, 180]
-    cfg.CONF.sample_dispatcher.prefix = "sample__"
-    """
-    def __init__(self, global_conf):
-        LOG.debug('initializing SampleDispatcher!')
-        super(SampleDispatcher, self).__init__(global_conf)
-
-        LOG.debug('SampleDispatcher conf entries: prefix')
-        LOG.debug(global_conf.sample_dispatcher.prefix)
-        LOG.debug('SampleDispatcher conf entries: id')
-        LOG.debug(global_conf.sample_dispatcher.id)
-
-    @resource_api.Restify('/v2.0/datapoints/', method='post')
-    def do_post_metrics(self, req, res):
-        LOG.debug('Getting the call at endpoint datapoints.')
-        msg = req.stream.read()
-        LOG.debug('The msg:', msg)
-        res.status = getattr(falcon, 'HTTP_201')
-
-    @resource_api.Restify('/v2.0/demopoints/', method='get')
-    def do_get_metrics(self, req, res):
-        LOG.debug('Getting the call at endpoint demopoints.')
-        res.body = 'demo response'
-        res.status = getattr(falcon, 'HTTP_200')
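The deleted sample dispatcher's docstring describes oslo.config's MultiStrOpt behaviour: repeated `id =` lines in a section accumulate into a list, while a StrOpt keeps a single value. A standalone sketch of that behaviour (config path hypothetical; note that MultiStrOpt yields strings, so the values arrive as '101' rather than 101):

    from oslo.config import cfg

    OPTS = [
        cfg.MultiStrOpt('id', default=['sample'],
                        help='Repeatable; each occurrence appends.'),
        cfg.StrOpt('prefix', default='monasca_',
                   help='Single-valued.'),
    ]
    cfg.CONF.register_opts(OPTS, group='sample_dispatcher')

    # After parsing a file with three `id = ...` lines (path illustrative):
    # cfg.CONF(['--config-file', '/etc/monasca/events_api.conf'])
    # cfg.CONF.sample_dispatcher.id      -> ['101', '105', '180']
    # cfg.CONF.sample_dispatcher.prefix  -> 'sample__'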
@@ -12,13 +12,13 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-"""RequestContext: context for requests that persist through monasca."""
+"""RequestContext: context for requests that persist through monasca_events_api."""
 
 import uuid
 
 from oslo.utils import timeutils
 
-from monasca.openstack.common import log
+from monasca_events_api.openstack.common import log
 
 LOG = log.getLogger(__name__)
 
@@ -25,7 +25,7 @@ class Inspector(object):
     pipeline = inspector api
 
     [filter:inspector]
-    use = egg: monasca_api_server#inspector
+    use = egg: monasca_events_api_api_server#inspector
     """
     def __init__(self, app, conf):
        """Inspect each request
@@ -14,8 +14,8 @@
 
 import falcon
 
-from monasca.middleware import context
-from monasca.openstack.common import log
+from monasca_events_api.middleware import context
+from monasca_events_api.openstack.common import log
 
 from oslo.middleware import request_id
 from oslo.serialization import jsonutils
@@ -91,7 +91,7 @@ class KeystoneContextFilter(object):
             request_id=req_id,
             user_auth_plugin=user_auth_plugin)
 
-        env['monasca.context'] = ctx
+        env['monasca_events_api.context'] = ctx
 
         LOG.debug("Keystone Context succesfully created.")
@@ -29,8 +29,8 @@ import eventlet.backdoor
 import greenlet
 from oslo.config import cfg
 
-from monasca.openstack.common.gettextutils import _LI
-from monasca.openstack.common import log as logging
+from monasca_events_api.openstack.common.gettextutils import _LI
+from monasca_events_api.openstack.common import log as logging
 
 help_for_backdoor_port = (
     "Acceptable values are 0, <port>, and <start>:<end>, where 0 results "
@@ -24,7 +24,7 @@ import traceback
 
 import six
 
-from monasca.openstack.common.gettextutils import _LE
+from monasca_events_api.openstack.common.gettextutils import _LE
 
 
 class save_and_reraise_exception(object):
@@ -18,8 +18,8 @@ import errno
 import os
 import tempfile
 
-from monasca.openstack.common import excutils
-from monasca.openstack.common import log as logging
+from monasca_events_api.openstack.common import excutils
+from monasca_events_api.openstack.common import log as logging
 
 LOG = logging.getLogger(__name__)
 
@@ -15,7 +15,7 @@
 
 import fixtures
 
-from monasca.openstack.common import lockutils
+from monasca_events_api.openstack.common import lockutils
 
 
 class LockFixture(fixtures.Fixture):
@@ -19,7 +19,7 @@ gettext for openstack-common modules.
 
 Usual usage in an openstack.common module:
 
-    from monasca.openstack.common.gettextutils import _
+    from monasca_events_api.openstack.common.gettextutils import _
 """
 
 import copy
@@ -121,7 +121,7 @@ class TranslatorFactory(object):
     # module within each application.
 
 # Create the global translation functions.
-_translators = TranslatorFactory('monasca')
+_translators = TranslatorFactory('monasca_events_api')
 
 # The primary translation function using the well-known name "_"
 _ = _translators.primary
@@ -182,7 +182,7 @@ class Message(six.text_type):
     """
 
     def __new__(cls, msgid, msgtext=None, params=None,
-                domain='monasca', *args):
+                domain='monasca_events_api', *args):
         """Create a new Message object.
 
         In order for translation to work gettext requires a message ID, this
@@ -59,7 +59,7 @@ def import_module(import_str):
 
 
 def import_versioned_module(version, submodule=None):
-    module = 'monasca.v%s' % version
+    module = 'monasca_events_api.v%s' % version
    if submodule:
         module = '.'.join((module, submodule))
     return import_module(module)
@@ -59,10 +59,10 @@ else:
 import six
 import six.moves.xmlrpc_client as xmlrpclib
 
-from monasca.openstack.common import gettextutils
-from monasca.openstack.common import importutils
-from monasca.openstack.common import strutils
-from monasca.openstack.common import timeutils
+from monasca_events_api.openstack.common import gettextutils
+from monasca_events_api.openstack.common import importutils
+from monasca_events_api.openstack.common import strutils
+from monasca_events_api.openstack.common import timeutils
 
 netaddr = importutils.try_import("netaddr")
 
@@ -28,8 +28,8 @@ import weakref
 
 from oslo.config import cfg
 
-from monasca.openstack.common import fileutils
-from monasca.openstack.common.gettextutils import _, _LE, _LI
+from monasca_events_api.openstack.common import fileutils
+from monasca_events_api.openstack.common.gettextutils import _, _LE, _LI
 
 
 LOG = logging.getLogger(__name__)
@@ -43,13 +43,13 @@ from six import moves
 
 _PY26 = sys.version_info[0:2] == (2, 6)
 
-from monasca.openstack.common.gettextutils import _
-from monasca.openstack.common import importutils
-from monasca.openstack.common import jsonutils
-from monasca.openstack.common import local
+from monasca_events_api.openstack.common.gettextutils import _
+from monasca_events_api.openstack.common import importutils
+from monasca_events_api.openstack.common import jsonutils
+from monasca_events_api.openstack.common import local
 # NOTE(flaper87): Pls, remove when graduating this module
 # from the incubator.
-from monasca.openstack.common.strutils import mask_password  # noqa
+from monasca_events_api.openstack.common.strutils import mask_password  # noqa
 
 
 _DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
@@ -497,7 +497,7 @@ def _setup_logging_from_conf(project, version):
     if CONF.publish_errors:
         try:
             handler = importutils.import_object(
-                "monasca.openstack.common.log_handler.PublishErrorsHandler",
+                "monasca_events_api.openstack.common.log_handler.PublishErrorsHandler",
                 logging.ERROR)
         except ImportError:
             handler = importutils.import_object(
@@ -21,8 +21,8 @@ import time
 from eventlet import event
 from eventlet import greenthread
 
-from monasca.openstack.common.gettextutils import _LE, _LW
-from monasca.openstack.common import log as logging
+from monasca_events_api.openstack.common.gettextutils import _LE, _LW
+from monasca_events_api.openstack.common import log as logging
 
 LOG = logging.getLogger(__name__)
 
@@ -37,15 +37,15 @@ import eventlet
 from eventlet import event
 from oslo.config import cfg
 
-from monasca.openstack.common import eventlet_backdoor
-from monasca.openstack.common.gettextutils import _LE, _LI, _LW
-from monasca.openstack.common import importutils
-from monasca.openstack.common import log as logging
-from monasca.openstack.common import systemd
-from monasca.openstack.common import threadgroup
+from monasca_events_api.openstack.common import eventlet_backdoor
+from monasca_events_api.openstack.common.gettextutils import _LE, _LI, _LW
+from monasca_events_api.openstack.common import importutils
+from monasca_events_api.openstack.common import log as logging
+from monasca_events_api.openstack.common import systemd
+from monasca_events_api.openstack.common import threadgroup
 
 
-rpc = importutils.try_import('monasca.openstack.common.rpc')
+rpc = importutils.try_import('monasca_events_api.openstack.common.rpc')
 CONF = cfg.CONF
 LOG = logging.getLogger(__name__)
 
@@ -24,7 +24,7 @@ import unicodedata
 
 import six
 
-from monasca.openstack.common.gettextutils import _
+from monasca_events_api.openstack.common.gettextutils import _
 
 
 UNIT_PREFIX_EXPONENT = {
@@ -20,7 +20,7 @@ import os
 import socket
 import sys
 
-from monasca.openstack.common import log as logging
+from monasca_events_api.openstack.common import log as logging
 
 
 LOG = logging.getLogger(__name__)
@@ -16,8 +16,8 @@ import threading
 import eventlet
 from eventlet import greenpool
 
-from monasca.openstack.common import log as logging
-from monasca.openstack.common import loopingcall
+from monasca_events_api.openstack.common import log as logging
+from monasca_events_api.openstack.common import loopingcall
 
 
 LOG = logging.getLogger(__name__)
@@ -14,8 +14,8 @@
 
 import voluptuous
 
-from monasca.openstack.common import log
-from monasca.v2.common.schemas import exceptions
+from monasca_events_api.openstack.common import log
+from monasca_events_api.v2.common.schemas import exceptions
 
 LOG = log.getLogger(__name__)
 
@@ -14,8 +14,8 @@
 
 import voluptuous
 
-from monasca.openstack.common import log
-from monasca.v2.common.schemas import exceptions
+from monasca_events_api.openstack.common import log
+from monasca_events_api.v2.common.schemas import exceptions
 
 LOG = log.getLogger(__name__)
 
@@ -14,8 +14,8 @@
 
 import voluptuous
 
-from monasca.openstack.common import log
-from monasca.v2.common.schemas import exceptions
+from monasca_events_api.openstack.common import log
+from monasca_events_api.v2.common.schemas import exceptions
 
 LOG = log.getLogger(__name__)
 
@@ -48,9 +48,6 @@ cfg.CONF.register_opts(security_opts, security_group)
 
 messaging_opts = [cfg.StrOpt('driver', default='kafka',
                              help='The message queue driver to use'),
-                  cfg.StrOpt('metrics_message_format', default='reference',
-                             help='The type of metrics message format to '
-                                  'publish to the message queue'),
                   cfg.StrOpt('events_message_format', default='reference',
                              help='The type of events message format to '
                                   'publish to the message queue')]
@@ -60,21 +57,12 @@ cfg.CONF.register_group(messaging_group)
 cfg.CONF.register_opts(messaging_opts, messaging_group)
 
 repositories_opts = [
-    cfg.StrOpt('metrics_driver', default='influxdb_metrics_repo',
-               help='The repository driver to use for metrics'),
-    cfg.StrOpt('alarm_definitions_driver',
-               default='mysql_alarm_definitions_repo',
-               help='The repository driver to use for alarm definitions'),
-    cfg.StrOpt('alarms_driver', default='mysql_alarms_repo',
-               help='The repository driver to use for alarms'),
     cfg.StrOpt('streams_driver', default='mysql_streams_repo',
                help='The repository driver to use for streams'),
     cfg.StrOpt('events_driver', default='mysql_events_repo',
                help='The repository driver to use for events'),
     cfg.StrOpt('transforms_driver', default='mysql_transforms_repo',
-               help='The repository driver to use for transforms'),
-    cfg.StrOpt('notifications_driver', default='mysql_notifications_repo',
-               help='The repository driver to use for notifications')]
+               help='The repository driver to use for transforms')]
 
 repositories_group = cfg.OptGroup(name='repositories', title='repositories')
 cfg.CONF.register_group(repositories_group)
@@ -83,8 +71,6 @@ cfg.CONF.register_opts(repositories_opts, repositories_group)
 
 kafka_opts = [cfg.StrOpt('uri', help='Address to kafka server. For example: '
                          'uri=192.168.1.191:9092'),
-              cfg.StrOpt('metrics_topic', default='metrics',
-                         help='The topic that metrics will be published too.'),
               cfg.StrOpt('events_topic', default='raw-events',
                          help='The topic that events will be published too.'),
               cfg.StrOpt('group', default='api',
@@ -117,14 +103,6 @@ kafka_group = cfg.OptGroup(name='kafka', title='title')
 cfg.CONF.register_group(kafka_group)
 cfg.CONF.register_opts(kafka_opts, kafka_group)
 
-influxdb_opts = [cfg.StrOpt('database_name'), cfg.StrOpt('ip_address'),
-                 cfg.StrOpt('port'), cfg.StrOpt('user'),
-                 cfg.StrOpt('password')]
-
-influxdb_group = cfg.OptGroup(name='influxdb', title='influxdb')
-cfg.CONF.register_group(influxdb_group)
-cfg.CONF.register_opts(influxdb_opts, influxdb_group)
-
 mysql_opts = [cfg.StrOpt('database_name'), cfg.StrOpt('hostname'),
               cfg.StrOpt('username'), cfg.StrOpt('password')]
 
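After these trims only the event-related option groups remain registered here. For reference, once a group is registered its values are read back as attributes of cfg.CONF; a sketch of the mysql group's round trip (the group registration lines are assumed to mirror the kafka group above, since they fall outside this hunk):

    from oslo.config import cfg

    mysql_opts = [cfg.StrOpt('database_name'), cfg.StrOpt('hostname'),
                  cfg.StrOpt('username'), cfg.StrOpt('password')]
    mysql_group = cfg.OptGroup(name='mysql', title='mysql')  # assumed
    cfg.CONF.register_group(mysql_group)
    cfg.CONF.register_opts(mysql_opts, mysql_group)

    # After cfg.CONF(...) parses the config file, the [mysql] section is
    # available as cfg.CONF.mysql.hostname, cfg.CONF.mysql.database_name, etc.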
@@ -18,17 +18,17 @@ import falcon
 
 import collections
 
-from monasca.api import monasca_events_api_v2
-from monasca.common.messaging import exceptions as message_queue_exceptions
-from monasca.common.messaging.message_formats import events_transform_factory
-from monasca.common import resource_api
-from monasca.openstack.common import log
-from monasca.v2.common.schemas import (
+from monasca_events_api.api import monasca_events_api_v2
+from monasca_events_api.common.messaging import exceptions as message_queue_exceptions
+from monasca_events_api.common.messaging.message_formats import events_transform_factory
+from monasca_events_api.common import resource_api
+from monasca_events_api.openstack.common import log
+from monasca_events_api.v2.common.schemas import (
     events_request_body_schema as schemas_event)
-from monasca.v2.common.schemas import exceptions as schemas_exceptions
-from monasca.v2.common import utils
-from monasca.v2.reference import helpers
-from monasca.v2.reference import resource
+from monasca_events_api.v2.common.schemas import exceptions as schemas_exceptions
+from monasca_events_api.v2.common import utils
+from monasca_events_api.v2.reference import helpers
+from monasca_events_api.v2.reference import resource
 
 from oslo.config import cfg
 
@@ -52,11 +52,11 @@ class Events(monasca_events_api_v2.EventsV2API):
         self._event_transform = (
             events_transform_factory.create_events_transform())
         self._message_queue = (
-            resource_api.init_driver('monasca.messaging',
+            resource_api.init_driver('monasca_events_api.messaging',
                                      cfg.CONF.messaging.driver,
                                      ['raw-events']))
         self._events_repo = resource_api.init_driver(
-            'monasca.repositories', cfg.CONF.repositories.events_driver)
+            'monasca_events_api.repositories', cfg.CONF.repositories.events_driver)
 
     def _validate_event(self, event):
         """Validates the event
@@ -19,11 +19,9 @@ import urlparse
 import falcon
 import simplejson
 
-from monasca.common.repositories import constants
-from monasca.openstack.common import log
-from monasca.v2.common.schemas import dimensions_schema
-from monasca.v2.common.schemas import exceptions as schemas_exceptions
-from monasca.v2.common.schemas import metric_name_schema
+from monasca_events_api.common.repositories import constants
+from monasca_events_api.openstack.common import log
+from monasca_events_api.v2.common.schemas import exceptions as schemas_exceptions
 
 LOG = log.getLogger(__name__)
 
@@ -247,32 +245,6 @@ def get_query_period(req):
         raise falcon.HTTPBadRequest('Bad request', ex.message)
 
 
-def validate_query_name(name):
-    """Validates the query param name.
-
-    :param name: Query param name.
-    :raises falcon.HTTPBadRequest: If name is not valid.
-    """
-    try:
-        metric_name_schema.validate(name)
-    except schemas_exceptions.ValidationException as ex:
-        LOG.debug(ex)
-        raise falcon.HTTPBadRequest('Bad request', ex.message)
-
-
-def validate_query_dimensions(dimensions):
-    """Validates the query param dimensions.
-
-    :param dimensions: Query param dimensions.
-    :raises falcon.HTTPBadRequest: If dimensions are not valid.
-    """
-    try:
-        dimensions_schema.validate(dimensions)
-    except schemas_exceptions.ValidationException as ex:
-        LOG.debug(ex)
-        raise falcon.HTTPBadRequest('Bad request', ex.message)
-
-
 def paginate(resource, uri, offset):
 
     if offset is not None:
@@ -13,8 +13,8 @@
 # under the License.
 import falcon
 
-from monasca.common.repositories import exceptions
-from monasca.openstack.common import log
+from monasca_events_api.common.repositories import exceptions
+from monasca_events_api.openstack.common import log
 
 
 LOG = log.getLogger(__name__)
@@ -18,16 +18,16 @@ import re
 import falcon
 from oslo.config import cfg
 
-from monasca.api import stream_definitions_api_v2
-from monasca.common.messaging import exceptions as message_queue_exceptions
-from monasca.common.repositories import exceptions
-from monasca.common import resource_api
-from monasca.openstack.common import log
-from monasca.v2.common.schemas import (stream_definition_request_body_schema
-                                       as schema_streams)
-from monasca.v2.common.schemas import exceptions as schemas_exceptions
-from monasca.v2.reference import helpers
-from monasca.v2.reference import resource
+from monasca_events_api.api import stream_definitions_api_v2
+from monasca_events_api.common.messaging import exceptions as message_queue_exceptions
+from monasca_events_api.common.repositories import exceptions
+from monasca_events_api.common import resource_api
+from monasca_events_api.openstack.common import log
+from monasca_events_api.v2.common.schemas import (stream_definition_request_body_schema
+                                                  as schema_streams)
+from monasca_events_api.v2.common.schemas import exceptions as schemas_exceptions
+from monasca_events_api.v2.reference import helpers
+from monasca_events_api.v2.reference import resource
 
 
 LOG = log.getLogger(__name__)
@@ -51,9 +51,9 @@ class StreamDefinitions(stream_definitions_api_v2.StreamDefinitionsV2API):
                 cfg.CONF.security.default_authorized_roles +
                 cfg.CONF.security.agent_authorized_roles)
             self._stream_definitions_repo = resource_api.init_driver(
-                'monasca.repositories', cfg.CONF.repositories.streams_driver)
+                'monasca_events_api.repositories', cfg.CONF.repositories.streams_driver)
             self.stream_definition_event_message_queue = (
-                resource_api.init_driver('monasca.messaging',
+                resource_api.init_driver('monasca_events_api.messaging',
                                          cfg.CONF.messaging.driver,
                                          (['stream-definitions'])))
         except Exception as ex:
@@ -278,7 +278,7 @@ class StreamDefinitions(stream_definitions_api_v2.StreamDefinitionsV2API):
                 u'group_by': group_by,
                 u'fire_criteria': fire_criteria,
                 u'expiration': expiration}
-            }
+        }
 
         self.send_event(self.stream_definition_event_message_queue,
                         stream_definition_created_event_msg)
@@ -17,15 +17,15 @@ import json
 import falcon
 from oslo.config import cfg
 
-from monasca.api import monasca_transforms_api_v2
-from monasca.common.repositories import exceptions as repository_exceptions
-from monasca.common import resource_api
-from monasca.openstack.common import log
-from monasca.openstack.common import uuidutils
-from monasca.v2.common.schemas import (exceptions as schemas_exceptions)
-from monasca.v2.common.schemas import (
+from monasca_events_api.api import monasca_transforms_api_v2
+from monasca_events_api.common.repositories import exceptions as repository_exceptions
+from monasca_events_api.common import resource_api
+from monasca_events_api.openstack.common import log
+from monasca_events_api.openstack.common import uuidutils
+from monasca_events_api.v2.common.schemas import (exceptions as schemas_exceptions)
+from monasca_events_api.v2.common.schemas import (
     transforms_request_body_schema as schemas_transforms)
-from monasca.v2.reference import helpers
+from monasca_events_api.v2.reference import helpers
 
 
 LOG = log.getLogger(__name__)
@@ -39,7 +39,7 @@ class Transforms(monasca_transforms_api_v2.TransformsV2API):
         self._default_authorized_roles = (
             cfg.CONF.security.default_authorized_roles)
         self._transforms_repo = resource_api.init_driver(
-            'monasca.repositories', cfg.CONF.repositories.transforms_driver)
+            'monasca_events_api.repositories', cfg.CONF.repositories.transforms_driver)
 
     def _validate_transform(self, transform):
         """Validates the transform
setup.cfg (19 lines changed)
@@ -26,8 +26,8 @@ packages =
 
 data_files =
     /etc/monasca =
-        etc/monasca_events_api.conf
-        etc/monasca_events_api.ini
+        etc/events_api.conf
+        etc/events_api.ini
 
 [entry_points]
 console_scripts =
@@ -35,15 +35,8 @@ console_scripts =
 
 
 monasca_events_api.dispatcher =
-    sample = monasca_events_api.dispatcher.sample_dispatcher:SampleDispatcher
-    kafka = monasca_events_api.dispatcher.kafka_dispatcher:KafkaDispatcher
-    v2_ref_metrics = monasca_events_api.v2.reference.metrics:Metrics
-    v2_ref_alarm_definitions = monasca_events_api.v2.reference.alarm_definitions:AlarmDefinitions
-    v2_ref_alarms = monasca_events_api.v2.reference.alarms:Alarms
     v2_ref_events = monasca_events_api.v2.reference.events:Events
     v2_ref_transforms = monasca_events_api.v2.reference.transforms:Transforms
-    v2_ref_notifications = monasca_events_api.v2.reference.notifications:Notifications
-    demo = monasca_events_api.v2.reference.demo_dispatcher:DemoDispatcher
     v2_ref_stream_definitions = monasca_events_api.v2.reference.stream_definitions:StreamDefinitions
 
 paste.filter_factory =
@@ -53,18 +46,10 @@ paste.filter_factory =
     mock_auth_filter = monasca_events_api.middleware.mock_auth_filter:filter_factory
 
 monasca_events_api.messaging =
     fake = monasca_events_api.common.messaging.fake_publisher:FakePublisher
     kafka = monasca_events_api.common.messaging.kafka_publisher:KafkaPublisher
     rabbitmq = monasca_events_api.common.messaging.rabbitmq_publisher:RabbitmqPublisher
 
 monasca_events_api.repositories =
-    fake_metrics_repo = monasca_events_api.common.repositories.fake.metrics_repository:MetricsRepository
-    influxdb_metrics_repo = monasca_events_api.common.repositories.influxdb.metrics_repository:MetricsRepository
     fake_events_repo = monasca_events_api.common.repositories.fake.events_repository:EventsRepository
     mysql_transforms_repo = monasca_events_api.common.repositories.mysql.transforms_repository:TransformsRepository
-    mysql_alarm_definitions_repo = monasca_events_api.common.repositories.mysql.alarm_definitions_repository:AlarmDefinitionsRepository
-    mysql_alarms_repo = monasca_events_api.common.repositories.mysql.alarms_repository:AlarmsRepository
-    mysql_notifications_repo = monasca_events_api.common.repositories.mysql.notifications_repository:NotificationsRepository
    mysql_events_repo = monasca_events_api.common.repositories.mysql.events_repository:EventsRepository
     mysql_streams_repo = monasca_events_api.common.repositories.mysql.streams_repository:StreamsRepository
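Each `name = module:Class` line above is a setuptools entry point; stevedore (via resource_api.init_driver) resolves the configured driver name inside the named group. The equivalent manual lookup, as a sketch (the distribution name passed to pkg_resources is assumed):

    import pkg_resources

    # Resolve the class behind the mysql_events_repo entry point by hand;
    # stevedore's DriverManager does this (plus instantiation) internally.
    entry_map = pkg_resources.get_entry_map('monasca-events-api')  # assumed dist name
    ep = entry_map['monasca_events_api.repositories']['mysql_events_repo']
    EventsRepository = ep.load()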