Make entropy suitable for pypi distribution, part 2
Correct logging: reset loggers in engine.py and set up your own. Added functions to set up logging in engine.py and the Audit class, which can be used by audit scripts. Added a reset_logger function in utils. Exposed __main__.py as a CLI on packaging, so after installing the entropy package you can call commands like `entropy register-audit` and `entropy start-engine` from anywhere on your machine. Made changes to setup.cfg to make the main function an entry point. Moved the engine_cfg file to /tmp/engines.cfg; that way, even though it is hardcoded, it is at least fairly uniform across machines. Change-Id: I704bf5e4635ffc539d7a73c5f84ef4bf8b2e801e
This commit is contained in:
parent
92b73b563b
commit
24983c6fc6
|
@ -19,6 +19,7 @@ import argparse
|
|||
import logging
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
import yaml
|
||||
|
||||
|
@ -31,8 +32,7 @@ from entropy import utils
|
|||
LOG = logging.getLogger(__name__)
|
||||
|
||||
# TODO(praneshp): Only hardcoded stuff in the project. Find a way to move.
# Kept under the platform temp dir so the path is at least uniform across
# machines (e.g. /tmp/engines.cfg on Linux).
engine_cfg = os.path.join(tempfile.gettempdir(), 'engines.cfg')
|
||||
|
||||
|
||||
def get_cfg_file(engine, script_type):
|
||||
|
@ -100,7 +100,7 @@ def start_engine(args):
|
|||
return
|
||||
|
||||
cfg_data = dict(utils.load_yaml(args.engine_cfg).next())[args.name]
|
||||
cfg = {args.name: args.engine_cfg}
|
||||
cfg = {args.name: os.path.join(os.getcwd(), args.engine_cfg)}
|
||||
with open(engine_cfg, "w") as cfg_file:
|
||||
cfg_file.write(yaml.dump(cfg, canonical=False,
|
||||
default_flow_style=False,
|
||||
|
@ -150,12 +150,12 @@ def parse():
|
|||
args.func(args)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
FORMAT = '%(lineno)s %(message)s'
|
||||
console = logging.StreamHandler()
|
||||
console.setLevel(logging.DEBUG)
|
||||
console.setFormatter(FORMAT)
|
||||
LOG.addHandler(console)
|
||||
print LOG.handlers
|
||||
# logging.basicConfig(level=logging.DEBUG)
|
||||
def main():
    """Console-script entry point.

    Configures INFO-level console logging (with file name and line
    number prefixes) and hands control to the argument parser.
    """
    logging.basicConfig(format='%(filename)s %(lineno)s %(message)s',
                        level=logging.INFO)
    parse()


if __name__ == '__main__':
    main()
|
||||
|
|
|
@ -16,6 +16,7 @@ import logging
|
|||
|
||||
from kombu import Queue
|
||||
|
||||
from entropy import utils
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
@ -24,6 +25,7 @@ class AuditBase(object):
|
|||
__metaclass__ = abc.ABCMeta
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
utils.reset_logger(logging.getLogger())
|
||||
self.name = kwargs['name']
|
||||
self.exchange = kwargs['exchange']
|
||||
self.routing_key = kwargs['routing_key']
|
||||
|
@ -31,6 +33,16 @@ class AuditBase(object):
|
|||
self.exchange,
|
||||
self.routing_key)
|
||||
|
||||
@staticmethod
def set_logger(logger, **kwargs):
    """Route *logger* exclusively to this audit's log file.

    Expects 'log_file' and 'log_format' keys in kwargs. Any existing
    handlers are dropped and propagation to the root logger is disabled,
    so audit records end up only in the configured file.
    """
    # Start from a clean slate: discard inherited handlers.
    logger.handlers = []
    file_handler = logging.FileHandler(kwargs['log_file'])
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(logging.Formatter(kwargs['log_format']))
    logger.addHandler(file_handler)
    # Don't let records bubble up to ancestor loggers.
    logger.propagate = False
|
||||
|
||||
@abc.abstractmethod
|
||||
def send_message(self, **kwargs):
|
||||
pass
|
||||
|
|
|
@ -31,6 +31,7 @@ LOG = logging.getLogger(__name__)
|
|||
|
||||
class Engine(object):
|
||||
def __init__(self, name, **cfg_data):
|
||||
utils.reset_logger(logging.getLogger())
|
||||
Engine.set_logger(**cfg_data)
|
||||
# constants
|
||||
# TODO(praneshp): Hardcode for now, could/should be cmdline input
|
||||
|
@ -51,15 +52,17 @@ class Engine(object):
|
|||
self.futures = []
|
||||
LOG.info('Created engine obj %s', self.name)
|
||||
|
||||
# TODO(praneshp): Move to utils?
@staticmethod
def set_logger(**cfg_data):
    """Point the module-level LOG at this engine's log file.

    Expects 'log_file' and 'log_format' keys in cfg_data. Existing
    handlers are replaced and propagation is disabled so engine records
    go only to the configured file.
    """
    # Replace whatever handlers are attached with a single file handler.
    LOG.handlers = []
    file_handler = logging.FileHandler(cfg_data['log_file'])
    file_handler.setLevel(logging.INFO)
    file_handler.setFormatter(logging.Formatter(cfg_data['log_format']))
    LOG.addHandler(file_handler)
    LOG.propagate = False
|
||||
|
||||
def run(self):
|
||||
LOG.info('Starting Scheduler for %s', self.name)
|
||||
|
|
|
@ -120,12 +120,7 @@ class Audit(AuditBase):
|
|||
'boot': Audit.remote_call(boot_command, **kwargs)}
|
||||
|
||||
def send_message(self, **kwargs):
|
||||
LOG.handlers = []
|
||||
log_to_file = logging.FileHandler(kwargs['log_file'])
|
||||
log_to_file.setLevel(logging.DEBUG)
|
||||
log_format = logging.Formatter(kwargs['log_format'])
|
||||
log_to_file.setFormatter(log_format)
|
||||
LOG.addHandler(log_to_file)
|
||||
Audit.set_logger(LOG, **kwargs)
|
||||
connection = BrokerConnection('amqp://%(mq_user)s:%(mq_password)s@'
|
||||
'%(mq_host)s:%(mq_port)s//'
|
||||
% kwargs['mq_args'])
|
||||
|
|
|
@ -83,13 +83,18 @@ def parse_conf(conf):
|
|||
return kwargs
|
||||
|
||||
|
||||
def main(**kwargs):
|
||||
LOG.handlers = []
|
||||
def set_logger(logger, **kwargs):
    """Send *logger* output to the file named by kwargs['log_file'].

    Replaces any existing handlers with a single DEBUG-level file
    handler (formatted per kwargs['log_format']) and stops propagation
    so the react script logs only to its own file.
    """
    logger.handlers = []
    handler = logging.FileHandler(kwargs['log_file'])
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter(kwargs['log_format']))
    logger.addHandler(handler)
    logger.propagate = False
|
||||
|
||||
|
||||
def main(**kwargs):
    """Entry point for a react script.

    Sets up file logging, announces startup, parses the script's
    configuration, and starts consuming messages.
    """
    set_logger(LOG, **kwargs)
    LOG.info('starting react script %s' % kwargs['name'])
    conf_args = parse_conf(kwargs['conf'])
    recv_message(**conf_args)
|
||||
|
|
|
@ -117,3 +117,15 @@ def check_duplicate(name, cfg_file):
|
|||
if name in names:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def reset_logger(log):
    """Flush, close, and detach every handler on *log*.

    Afterwards the logger's level is cleared to NOTSET and a NullHandler
    is attached so subsequent records are silently discarded instead of
    triggering the "no handlers could be found" warning. A falsy *log*
    is ignored.
    """
    if not log:
        return
    # Detach handlers one at a time; each is flushed and closed so no
    # buffered records or file descriptors are leaked.
    while log.handlers:
        handler = log.handlers[0]
        handler.flush()
        handler.close()
        log.removeHandler(handler)
    log.setLevel(logging.NOTSET)
    log.addHandler(logging.NullHandler())
|
||||
|
|
Loading…
Reference in New Issue