Clean imports in code

This patch set modifies lines that import objects instead of modules.
Per the OpenStack import guidelines, a file should import modules,
not objects (a short sketch of the convention follows the change
summary below).

http://docs.openstack.org/developer/hacking/#imports

Change-Id: I8af9278ffefcc20cf7447ecff7470217cd0e4632
Cao Xuan Hoang 2016-09-23 16:41:12 +07:00
parent 2867dedc6d
commit 6733c064d9
8 changed files with 40 additions and 35 deletions
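Before the per-file diffs, here is a minimal sketch of the convention being applied. It uses a standard-library module purely for illustration (it is not code from this repository); the pattern mirrors the changes below.

# Discouraged under the guideline: importing an object binds the name
# directly into the local namespace and hides its origin at call sites.
# from collections import OrderedDict
# cache = OrderedDict()

# Preferred: import the module and reference the object through it, so
# every use shows which module the name comes from.
import collections

cache = collections.OrderedDict()
print(cache)  # prints: OrderedDict()

With module-level imports, a reader can tell at each call site which module a class or function belongs to, and tests can patch the attribute on the module without chasing rebound names.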

View File

@@ -31,7 +31,7 @@ from monasca_common.simport import simport
 from oslo_config import cfg
 from oslo_log import log
-from repositories.persister import Persister
+from monasca_persister.repositories import persister
 LOG = log.getLogger(__name__)
@@ -123,8 +123,9 @@ def clean_exit(signum, frame=None):
 def start_process(respository, kafka_config):
     LOG.info("start process: {}".format(respository))
-    persister = Persister(kafka_config, cfg.CONF.zookeeper, respository)
-    persister.run()
+    m_persister = persister.Persister(kafka_config, cfg.CONF.zookeeper,
+                                      respository)
+    m_persister.run()
 def main():

View File

@@ -13,25 +13,25 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import abc
-from cassandra.cluster import Cluster
-from cassandra.query import BatchStatement
+from cassandra import cluster
+from cassandra import query
 from oslo_config import cfg
 import six
-from repositories.abstract_repository import AbstractRepository
+from monasca_persister.repositories import abstract_repository
 @six.add_metaclass(abc.ABCMeta)
-class AbstractCassandraRepository(AbstractRepository):
+class AbstractCassandraRepository(abstract_repository.AbstractRepository):
     def __init__(self):
         super(AbstractCassandraRepository, self).__init__()
         self.conf = cfg.CONF
-        self._cassandra_cluster = Cluster(
+        self._cassandra_cluster = cluster.Cluster(
             self.conf.cassandra.cluster_ip_addresses.split(','))
         self.cassandra_session = self._cassandra_cluster.connect(
             self.conf.cassandra.keyspace)
-        self._batch_stmt = BatchStatement()
+        self._batch_stmt = query.BatchStatement()

View File

@@ -14,16 +14,17 @@
 # limitations under the License.
 import json
-from cassandra.query import BatchStatement
+from cassandra import query
 from oslo_log import log
-from repositories.cassandra.abstract_repository import AbstractCassandraRepository
-from repositories.utils import parse_alarm_state_hist_message
+from monasca_persister.repositories.cassandra import abstract_repository
+from monasca_persister.repositories.utils import parse_alarm_state_hist_message
 LOG = log.getLogger(__name__)
-class AlarmStateHistCassandraRepository(AbstractCassandraRepository):
+class AlarmStateHistCassandraRepository(
+        abstract_repository.AbstractCassandraRepository):
     def __init__(self):
@@ -68,4 +69,4 @@ class AlarmStateHistCassandraRepository(AbstractCassandraRepository):
         self.cassandra_session.execute(self._batch_stmt)
-        self._batch_stmt = BatchStatement()
+        self._batch_stmt = query.BatchStatement()

View File

@@ -15,17 +15,18 @@
 import hashlib
 import json
-from cassandra.query import BatchStatement
+from cassandra import query
 from oslo_log import log
 import urllib
-from repositories.cassandra.abstract_repository import AbstractCassandraRepository
-from repositories.utils import parse_measurement_message
+from monasca_persister.repositories.cassandra import abstract_repository
+from monasca_persister.repositories.utils import parse_measurement_message
 LOG = log.getLogger(__name__)
-class MetricCassandraRepository(AbstractCassandraRepository):
+class MetricCassandraRepository(
+        abstract_repository.AbstractCassandraRepository):
     def __init__(self):
@@ -84,7 +85,7 @@ class MetricCassandraRepository(AbstractCassandraRepository):
         self.cassandra_session.execute(self._batch_stmt)
-        self._batch_stmt = BatchStatement()
+        self._batch_stmt = query.BatchStatement()
 class MetricMeasurementInfo(object):

View File

@@ -13,24 +13,25 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import abc
-from influxdb import InfluxDBClient
+import influxdb
 from oslo_config import cfg
 import six
-from repositories.abstract_repository import AbstractRepository
+from monasca_persister.repositories import abstract_repository
 @six.add_metaclass(abc.ABCMeta)
-class AbstractInfluxdbRepository(AbstractRepository):
+class AbstractInfluxdbRepository(abstract_repository.AbstractRepository):
     def __init__(self):
         super(AbstractInfluxdbRepository, self).__init__()
         self.conf = cfg.CONF
-        self._influxdb_client = InfluxDBClient(self.conf.influxdb.ip_address,
-                                               self.conf.influxdb.port,
-                                               self.conf.influxdb.user,
-                                               self.conf.influxdb.password,
-                                               self.conf.influxdb.database_name)
+        self._influxdb_client = influxdb.InfluxDBClient(
+            self.conf.influxdb.ip_address,
+            self.conf.influxdb.port,
+            self.conf.influxdb.user,
+            self.conf.influxdb.password,
+            self.conf.influxdb.database_name)
     def write_batch(self, data_points):
         self._influxdb_client.write_points(data_points, 'ms')

View File

@@ -18,13 +18,14 @@ import json
 from oslo_log import log
 import pytz
-from repositories.influxdb.abstract_repository import AbstractInfluxdbRepository
-from repositories.utils import parse_alarm_state_hist_message
+from monasca_persister.repositories.influxdb import abstract_repository
+from monasca_persister.repositories.utils import parse_alarm_state_hist_message
 LOG = log.getLogger(__name__)
-class AlarmStateHistInfluxdbRepository(AbstractInfluxdbRepository):
+class AlarmStateHistInfluxdbRepository(
+        abstract_repository.AbstractInfluxdbRepository):
     def __init__(self):

View File

@@ -18,13 +18,13 @@ import json
 from oslo_log import log
 import pytz
-from repositories.influxdb.abstract_repository import AbstractInfluxdbRepository
-from repositories.utils import parse_measurement_message
+from monasca_persister.repositories.influxdb import abstract_repository
+from monasca_persister.repositories.utils import parse_measurement_message
 LOG = log.getLogger(__name__)
-class MetricInfluxdbRepository(AbstractInfluxdbRepository):
+class MetricInfluxdbRepository(abstract_repository.AbstractInfluxdbRepository):
     def __init__(self):

View File

@@ -16,7 +16,7 @@ import os
 from oslo_log import log
-from monasca_common.kafka.consumer import KafkaConsumer
+from monasca_common.kafka import consumer
 LOG = log.getLogger(__name__)
@@ -31,7 +31,7 @@ class Persister(object):
         self._database_batch_size = kafka_conf.database_batch_size
-        self._consumer = KafkaConsumer(
+        self._consumer = consumer.KafkaConsumer(
             kafka_conf.uri,
             zookeeper_conf.uri,
             kafka_conf.zookeeper_path,