pipeline: simplify classes

Change-Id: I40a54a3cf94f8171db17f2b9ae2def4d0e9d1ab1
This commit is contained in:
Julien Danjou 2018-07-09 17:27:09 +02:00
parent f7b1218b8e
commit 152fbec8f1
4 changed files with 10 additions and 14 deletions

View File

@@ -335,14 +335,10 @@ class NotificationEndpoint(object):
# for the generic notification exchange we have to consume all its
# queues
class MainNotificationEndpoint(NotificationEndpoint):
"""Listens to queues on all priority levels and clears by default."""
audit = NotificationEndpoint._consume_and_drop
critical = NotificationEndpoint._consume_and_drop
debug = NotificationEndpoint._consume_and_drop
error = NotificationEndpoint._consume_and_drop
info = NotificationEndpoint._consume_and_drop
sample = NotificationEndpoint._consume_and_drop
warn = NotificationEndpoint._consume_and_drop
audit = _consume_and_drop
critical = _consume_and_drop
debug = _consume_and_drop
error = _consume_and_drop
info = _consume_and_drop
sample = _consume_and_drop
warn = _consume_and_drop

View File

@@ -22,7 +22,7 @@ from ceilometer.pipeline import base
LOG = log.getLogger(__name__)
class EventEndpoint(base.MainNotificationEndpoint):
class EventEndpoint(base.NotificationEndpoint):
event_types = []

View File

@@ -19,7 +19,7 @@ from ceilometer.pipeline import base
LOG = log.getLogger(__name__)
class SampleEndpoint(base.MainNotificationEndpoint):
class SampleEndpoint(base.NotificationEndpoint):
def info(self, notifications):
"""Convert message at info level to Ceilometer sample.

View File

@@ -92,7 +92,7 @@ pipelines based on a given configuration file. Pipelines are required to define
`Source` and `Sink` permutations to describe how to process notification.
Additionally, it must set ``get_main_endpoints`` which provides endpoints to be
added to the main queue listener in the notification agent. This main queue
endpoint inherits :class:`ceilometer.pipeline.base.MainNotificationEndpoint`
endpoint inherits :class:`ceilometer.pipeline.base.NotificationEndpoint`
and defines which notification priorities to listen, normalises the data,
and redirects the data for pipeline processing.