feat: move to oslo.log

- Remove openstack.common.log and rely on the oslo.log library instead.
- LOG with oslo context, which gives us the ability to track a request
  through the pipeline (server -> worker)

Change-Id: I2ca26de4daa5d013dfe00a0667d3419bf9b55c65
This commit is contained in:
Sriram Madapusi Vasudevan 2015-10-16 16:49:01 -04:00
parent b2eb2367ac
commit 4843ac4fb4
58 changed files with 207 additions and 819 deletions

View File

@ -2,7 +2,7 @@
keys=root,server,combined
[formatters]
keys=normal,normal_with_name,debug
keys=normal,normal_with_name,debug,context
[handlers]
keys=production,file,devel
@ -24,19 +24,19 @@ qualname=poppy-combined
[handler_production]
class=handlers.SysLogHandler
level=ERROR
formatter=normal_with_name
formatter=context
args=(('localhost', handlers.SYSLOG_UDP_PORT), handlers.SysLogHandler.LOG_USER)
[handler_file]
class=FileHandler
level=DEBUG
formatter=normal_with_name
formatter=context
args=('poppy.log', 'w')
[handler_devel]
class=StreamHandler
level=NOTSET
formatter=debug
formatter=context
args=(sys.stdout,)
[formatter_normal]
@ -47,3 +47,6 @@ format=(%(name)s): %(asctime)s %(levelname)s %(message)s
[formatter_debug]
format=(%(name)s): %(asctime)s %(levelname)s %(module)s %(funcName)s %(message)s
[formatter_context]
class = oslo_log.formatters.ContextFormatter

View File

@ -14,11 +14,11 @@
# limitations under the License.
from oslo_config import cfg
from oslo_log import log
from stevedore import driver
from stevedore import named
from poppy.common import decorators
from poppy.openstack.common import log
LOG = log.getLogger(__name__)
@ -74,10 +74,14 @@ class Bootstrap(object):
self.conf = conf
self.conf.register_opts(_DEFAULT_OPTIONS)
self.conf.register_opts(_DRIVER_OPTIONS, group=_DRIVER_GROUP)
try:
getattr(self.conf, 'log_config_append')
except cfg.NoSuchOptError:
log_config_append_opt = cfg.StrOpt('log_config_append')
self.conf.register_opt(log_config_append_opt)
self.driver_conf = self.conf[_DRIVER_GROUP]
log.setup('poppy')
log.setup(self.conf, "poppy")
LOG.debug("init bootstrap")
@decorators.lazy_property(write=False)

View File

@ -14,9 +14,9 @@
# limitations under the License.
from oslo_config import cfg
from oslo_log import log
from poppy.common import cli
from poppy.openstack.common import log
from poppy.provider.akamai.background_jobs.check_cert_status_and_update import \
check_cert_status_and_update_flow

View File

@ -16,9 +16,9 @@
import json
from oslo_config import cfg
from oslo_log import log
from poppy.common import cli
from poppy.openstack.common import log
from poppy.provider.akamai.background_jobs.update_property import \
update_property_flow

View File

@ -15,6 +15,7 @@
import os
from oslo_config import cfg
from oslo_log import log
from poppy import bootstrap
from poppy.common import cli
@ -26,6 +27,7 @@ def run():
# to pick up the common options registered by oslo_log's register_options(),
# since that module uses the global CONF instance exclusively.
conf = cfg.CONF
log.register_options(conf)
conf(project='poppy', prog='poppy')
server = bootstrap.Bootstrap(conf)

View File

@ -17,9 +17,9 @@ import os
import socket
from oslo_config import cfg
from oslo_log import log
from poppy import bootstrap
from poppy.openstack.common import log
LOG = log.getLogger(__name__)
@ -27,7 +27,8 @@ LOG = log.getLogger(__name__)
def run():
conf = cfg.CONF
conf(project='poppy', prog='poppy', args=[])
conductor_name = '{0}-{1}'.format(socket.gethostname(), os.getpid())
log.register_options(conf)
conf(project='poppy', prog='poppy')
b = bootstrap.Bootstrap(conf)
conductor_name = '{0}-{1}'.format(socket.gethostname(), os.getpid())
b.distributed_task.services_controller.run_task_worker(name=conductor_name)

View File

@ -20,11 +20,13 @@ import os
import sys
import termios
from oslo_config import cfg
from oslo_log import log
from poppy.openstack.common.gettextutils import _
from poppy.openstack.common import log as logging
LOG = logging.getLogger(__name__)
LOG = log.getLogger(__name__)
def _fail(returncode, ex):
@ -72,7 +74,9 @@ def runnable(func):
_enable_echo(True)
try:
logging.setup('poppy')
conf = cfg.CONF
log.register_options(conf)
log.setup(conf, 'poppy')
func()
except KeyboardInterrupt:
LOG.info(_(u'Terminating'))

View File

@ -16,7 +16,7 @@
import cgi
import pprint
from poppy.openstack.common import log
from oslo_log import log
LOG = log.getLogger(__name__)

View File

@ -13,9 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from oslo_config import cfg
from oslo_log import log
from taskflow.jobs import backends as job_backends
from taskflow.persistence import backends as persistence_backends
@ -23,7 +22,7 @@ from poppy.distributed_task import base
from poppy.distributed_task.taskflow import controllers
LOG = logging.getLogger(__name__)
LOG = log.getLogger(__name__)
TASKFLOW_OPTIONS = [
cfg.StrOpt('jobboard_backend_type', default='zookeeper',

View File

@ -14,13 +14,12 @@
# limitations under the License.
from oslo_config import cfg
from taskflow.patterns import graph_flow
from oslo_log import log
from taskflow.patterns import linear_flow
from taskflow import retry
from poppy.distributed_task.taskflow.task import common
from poppy.distributed_task.taskflow.task import create_service_tasks
from poppy.openstack.common import log
LOG = log.getLogger(__name__)
@ -31,8 +30,11 @@ conf(project='poppy', prog='poppy', args=[])
def create_service():
flow = graph_flow.Flow('Creating poppy-service').add(
create_service_tasks.CreateProviderServicesTask(),
flow = linear_flow.Flow('Creating poppy-service').add(
linear_flow.Flow('Update Oslo Context').add(
common.ContextUpdateTask()),
linear_flow.Flow('Create Provider Services').add(
create_service_tasks.CreateProviderServicesTask()),
linear_flow.Flow('Create Service DNS Mapping flow',
retry=retry.ParameterizedForEach(
rebind=['time_seconds'],

View File

@ -14,12 +14,12 @@
# limitations under the License.
from oslo_config import cfg
from taskflow.patterns import graph_flow
from oslo_log import log
from taskflow.patterns import linear_flow
from taskflow import retry
from poppy.distributed_task.taskflow.task import common
from poppy.distributed_task.taskflow.task import create_ssl_certificate_tasks
from poppy.openstack.common import log
LOG = log.getLogger(__name__)
@ -30,7 +30,9 @@ conf(project='poppy', prog='poppy', args=[])
def create_ssl_certificate():
flow = graph_flow.Flow('Creating poppy ssl certificate').add(
flow = linear_flow.Flow('Creating poppy ssl certificate').add(
linear_flow.Flow('Update Oslo Context').add(
common.ContextUpdateTask()),
linear_flow.Flow("Provision poppy ssl certificate",
retry=retry.Times(5)).add(
create_ssl_certificate_tasks.CreateProviderSSLCertificateTask()

View File

@ -14,13 +14,12 @@
# limitations under the License.
from oslo_config import cfg
from taskflow.patterns import graph_flow
from oslo_log import log
from taskflow.patterns import linear_flow
from taskflow import retry
from poppy.distributed_task.taskflow.task import common
from poppy.distributed_task.taskflow.task import delete_service_tasks
from poppy.openstack.common import log
LOG = log.getLogger(__name__)
@ -31,8 +30,11 @@ conf(project='poppy', prog='poppy', args=[])
def delete_service():
flow = graph_flow.Flow('Deleting poppy-service').add(
delete_service_tasks.DeleteProviderServicesTask(),
flow = linear_flow.Flow('Deleting poppy-service').add(
linear_flow.Flow('Update Oslo Context').add(
common.ContextUpdateTask()),
linear_flow.Flow('Delete Provider Services').add(
delete_service_tasks.DeleteProviderServicesTask()),
linear_flow.Flow('Delete Service DNS Mapping flow',
retry=retry.ParameterizedForEach(
rebind=['time_seconds'],

View File

@ -14,12 +14,14 @@
# limitations under the License.
from oslo_config import cfg
from oslo_log import log
from taskflow.patterns import graph_flow
from taskflow.patterns import linear_flow
from taskflow import retry
from poppy.distributed_task.taskflow.task import common
from poppy.distributed_task.taskflow.task import delete_ssl_certificate_tasks
from poppy.openstack.common import log
LOG = log.getLogger(__name__)
@ -30,6 +32,8 @@ conf(project='poppy', prog='poppy', args=[])
def delete_ssl_certificate():
flow = graph_flow.Flow('Deleting poppy ssl certificate').add(
linear_flow.Flow('Update Oslo Context').add(
common.ContextUpdateTask()),
linear_flow.Flow("Deleting poppy ssl certificate",
retry=retry.Times(5)).add(
delete_ssl_certificate_tasks.DeleteProviderSSLCertificateTask()

View File

@ -14,13 +14,12 @@
# limitations under the License.
from oslo_config import cfg
from taskflow.patterns import graph_flow
from oslo_log import log
from taskflow.patterns import linear_flow
from poppy.distributed_task.taskflow.task import common
from poppy.distributed_task.taskflow.task import purge_service_tasks
from poppy.openstack.common import log
LOG = log.getLogger(__name__)
@ -31,8 +30,11 @@ conf(project='poppy', prog='poppy', args=[])
def purge_service():
flow = graph_flow.Flow('Purging poppy-service').add(
purge_service_tasks.PurgeProviderServicesTask(),
flow = linear_flow.Flow('Purging poppy-service').add(
linear_flow.Flow('Update Oslo Context').add(
common.ContextUpdateTask()),
linear_flow.Flow('Purging Provider Services').add(
purge_service_tasks.PurgeProviderServicesTask()),
linear_flow.Flow('Purge provider details').add(
common.UpdateProviderDetailErrorTask(
rebind=['responders'])),

View File

@ -14,14 +14,12 @@
# limitations under the License.
from oslo_config import cfg
from taskflow.patterns import graph_flow
from oslo_log import log
from taskflow.patterns import linear_flow
from taskflow import retry
from poppy.distributed_task.taskflow.task import common
from poppy.distributed_task.taskflow.task import update_service_tasks
from poppy.openstack.common import log
LOG = log.getLogger(__name__)
@ -31,8 +29,11 @@ conf(project='poppy', prog='poppy', args=[])
def update_service():
flow = graph_flow.Flow('Updating poppy-service').add(
update_service_tasks.UpdateProviderServicesTask(),
flow = linear_flow.Flow('Updating poppy-service').add(
linear_flow.Flow('Update Oslo Context').add(
common.ContextUpdateTask()),
linear_flow.Flow('Update Provider Services').add(
update_service_tasks.UpdateProviderServicesTask()),
linear_flow.Flow('Update Service DNS Mapping flow',
retry=retry.ParameterizedForEach(
rebind=['time_seconds'],

View File

@ -14,12 +14,12 @@
# limitations under the License.
from oslo_config import cfg
from oslo_log import log
from taskflow.patterns import linear_flow
from taskflow import retry
from poppy.distributed_task.taskflow.task import common
from poppy.distributed_task.taskflow.task import update_service_state_tasks
from poppy.openstack.common import log
LOG = log.getLogger(__name__)
@ -31,7 +31,10 @@ conf(project='poppy', prog='poppy', args=[])
def disable_service():
flow = linear_flow.Flow('Disable service').add(
update_service_state_tasks.UpdateServiceStateTask(),
linear_flow.Flow('Update Oslo Context').add(
common.ContextUpdateTask()),
linear_flow.Flow('Update Service State').add(
update_service_state_tasks.UpdateServiceStateTask()),
linear_flow.Flow('Break DNS Chain',
retry=retry.ParameterizedForEach(
rebind=['time_seconds'],
@ -44,7 +47,10 @@ def disable_service():
def enable_service():
flow = linear_flow.Flow('Enable service').add(
update_service_state_tasks.UpdateServiceStateTask(),
linear_flow.Flow('Update Oslo Context').add(
common.ContextUpdateTask()),
linear_flow.Flow('Update Service State').add(
update_service_state_tasks.UpdateServiceStateTask()),
linear_flow.Flow('Break DNS Chain',
retry=retry.ParameterizedForEach(
rebind=['time_seconds'],

View File

@ -14,6 +14,7 @@
# limitations under the License.
from oslo_log import log
from oslo_utils import uuidutils
from taskflow.conductors import single_threaded
from taskflow import engines
@ -22,7 +23,6 @@ from taskflow.persistence import logbook
from taskflow.types.notifier import Notifier
from poppy.distributed_task import base
from poppy.openstack.common import log
LOG = log.getLogger(__name__)

View File

@ -17,11 +17,12 @@ import json
import requests
from oslo_config import cfg
from oslo_context import context as context_utils
from oslo_log import log
from taskflow import task
from poppy.distributed_task.utils import memoized_controllers
from poppy.model.helpers import provider_details
from poppy.openstack.common import log
from poppy.transport.pecan.models.request import (
provider_details as req_provider_details
)
@ -144,6 +145,13 @@ def create_log_delivery_container(project_id, auth_token):
return []
class ContextUpdateTask(task.Task):
def execute(self, context_dict):
context = context_utils.RequestContext.from_dict(ctx=context_dict)
context.update_store()
class UpdateProviderDetailTask(task.Task):
def execute(self, provider_details_dict, project_id, service_id):

View File

@ -19,13 +19,13 @@ import json
import time
from oslo_config import cfg
from oslo_log import log
from taskflow import task
from poppy.distributed_task.taskflow.task import common
from poppy.distributed_task.utils import exc_loader
from poppy.distributed_task.utils import memoized_controllers
from poppy.model.helpers import provider_details
from poppy.openstack.common import log
LOG = log.getLogger(__name__)

View File

@ -16,10 +16,10 @@
import json
from oslo_config import cfg
from oslo_log import log
from taskflow import task
from poppy.distributed_task.utils import memoized_controllers
from poppy.openstack.common import log
from poppy.transport.pecan.models.request import ssl_certificate
LOG = log.getLogger(__name__)

View File

@ -19,13 +19,13 @@ import json
import time
from oslo_config import cfg
from oslo_log import log
from taskflow import task
from poppy.distributed_task.taskflow.task import common
from poppy.distributed_task.utils import exc_loader
from poppy.distributed_task.utils import memoized_controllers
from poppy.model.helpers import provider_details as pd
from poppy.openstack.common import log
from poppy.transport.pecan.models.request import (
provider_details as req_provider_details
)

View File

@ -14,10 +14,10 @@
# limitations under the License.
from oslo_config import cfg
from oslo_log import log
from taskflow import task
from poppy.distributed_task.utils import memoized_controllers
from poppy.openstack.common import log
LOG = log.getLogger(__name__)

View File

@ -16,10 +16,10 @@
import json
from oslo_config import cfg
from oslo_log import log
from taskflow import task
from poppy.distributed_task.utils import memoized_controllers
from poppy.openstack.common import log
from poppy.transport.pecan.models.request import (
provider_details as req_provider_details)
from poppy.transport.pecan.models.request import service

View File

@ -17,11 +17,11 @@ import json
import time
from oslo_config import cfg
from oslo_log import log
from taskflow import task
from poppy.distributed_task.utils import exc_loader
from poppy.distributed_task.utils import memoized_controllers
from poppy.openstack.common import log
from poppy.transport.pecan.models.request import service

View File

@ -19,13 +19,13 @@ import json
import time
from oslo_config import cfg
from oslo_log import log
from taskflow import task
from poppy.distributed_task.taskflow.task import common
from poppy.distributed_task.utils import exc_loader
from poppy.distributed_task.utils import memoized_controllers
from poppy.model.helpers import provider_details
from poppy.openstack.common import log
from poppy.transport.pecan.models.request import service

View File

@ -15,13 +15,21 @@
from oslo_config import cfg
from oslo_log import log
from poppy import bootstrap
from poppy.openstack.common import log
LOG = log.getLogger(__name__)
conf = cfg.CONF
try:
getattr(conf, 'log_config_append')
except cfg.NoSuchOptError:
# NOTE(TheSriram): Only register options, if they
# havent already been registered.
log.register_options(conf)
conf(project='poppy', prog='poppy', args=[])

View File

@ -15,12 +15,14 @@
"""DNS Provider implementation."""
from oslo_log import log
from poppy.dns import base
from poppy.dns.default import controllers
from poppy.dns.default.helpers import retry_exceptions
from poppy.openstack.common import log as logging
LOG = logging.getLogger(__name__)
LOG = log.getLogger(__name__)
class DNSProvider(base.Driver):

View File

@ -15,12 +15,14 @@
"""DNS Provider implementation."""
from oslo_log import log
from poppy.dns import base
from poppy.dns.designate import controllers
from poppy.dns.designate.helpers import retry_exceptions
from poppy.openstack.common import log as logging
LOG = logging.getLogger(__name__)
LOG = log.getLogger(__name__)
class DNSProvider(base.Driver):

View File

@ -16,12 +16,12 @@
"""DNS Provider implementation."""
from oslo_config import cfg
from oslo_log import log
import pyrax
from poppy.dns import base
from poppy.dns.rackspace import controllers
from poppy.dns.rackspace.helpers import retry_exceptions
from poppy.openstack.common import log as logging
RACKSPACE_OPTIONS = [
@ -56,7 +56,7 @@ RACKSPACE_OPTIONS = [
RACKSPACE_GROUP = 'drivers:dns:rackspace'
LOG = logging.getLogger(__name__)
LOG = log.getLogger(__name__)
class DNSProvider(base.Driver):

View File

@ -19,10 +19,10 @@ try:
except NameError: # noqa pragma: no cover
from sets import Set as set # noqa pragma: no cover
from oslo_log import log
import pyrax.exceptions as exc
from poppy.dns import base
from poppy.openstack.common import log
LOG = log.getLogger(__name__)

View File

@ -16,10 +16,10 @@
import json
from oslo_config import cfg
from oslo_log import log
from poppy.manager import base
from poppy.notification.mailgun import driver as n_driver
from poppy.openstack.common import log
from poppy.provider.akamai.background_jobs.check_cert_status_and_update import \
check_cert_status_and_update_flow
from poppy.provider.akamai.background_jobs.update_property import \

View File

@ -19,6 +19,8 @@ import random
import uuid
import jsonpatch
from oslo_context import context as context_utils
from oslo_log import log
from poppy.common import errors
from poppy.distributed_task.taskflow.flow import create_service
@ -31,7 +33,6 @@ from poppy.model.helpers import cachingrule
from poppy.model.helpers import rule
from poppy.model import service
from poppy.model import ssl_certificate
from poppy.openstack.common import log
from poppy.transport.validators import helpers as validators
from poppy.transport.validators.schemas import service as service_schema
@ -205,7 +206,8 @@ class DefaultServicesController(base.ServicesController):
'project_id': project_id,
'auth_token': auth_token,
'service_id': service_id,
'time_seconds': self.determine_sleep_times()
'time_seconds': self.determine_sleep_times(),
'context_dict': context_utils.get_current().to_dict()
}
self.distributed_task_controller.submit_task(
@ -397,7 +399,8 @@ class DefaultServicesController(base.ServicesController):
'auth_token': auth_token,
'service_old': json.dumps(service_old.to_dict()),
'service_obj': json.dumps(service_new.to_dict()),
'time_seconds': self.determine_sleep_times()
'time_seconds': self.determine_sleep_times(),
'context_dict': context_utils.get_current().to_dict()
}
self.distributed_task_controller.submit_task(
@ -511,7 +514,8 @@ class DefaultServicesController(base.ServicesController):
dict([(k, v.to_dict()) for k, v in provider_details.items()])),
"project_id": project_id,
"service_id": service_id,
'time_seconds': self.determine_sleep_times()
'time_seconds': self.determine_sleep_times(),
'context_dict': context_utils.get_current().to_dict()
}
self.distributed_task_controller.submit_task(
@ -551,7 +555,8 @@ class DefaultServicesController(base.ServicesController):
'project_id': project_id,
'hard': json.dumps(hard),
'service_id': service_id,
'purge_url': str(purge_url)
'purge_url': str(purge_url),
'context_dict': context_utils.get_current().to_dict()
}
self.distributed_task_controller.submit_task(

View File

@ -15,6 +15,8 @@
import json
from oslo_context import context as context_utils
from poppy.distributed_task.taskflow.flow import create_ssl_certificate
from poppy.distributed_task.taskflow.flow import delete_ssl_certificate
from poppy.manager import base
@ -60,7 +62,8 @@ class DefaultSSLCertificateController(base.SSLCertificateController):
kwargs = {
'providers_list_json': json.dumps(providers),
'project_id': project_id,
'cert_obj_json': json.dumps(cert_obj.to_dict())
'cert_obj_json': json.dumps(cert_obj.to_dict()),
'context_dict': context_utils.get_current().to_dict()
}
self.distributed_task_controller.submit_task(
create_ssl_certificate.create_ssl_certificate,
@ -72,7 +75,8 @@ class DefaultSSLCertificateController(base.SSLCertificateController):
kwargs = {
'project_id': project_id,
'domain_name': domain_name,
'cert_type': cert_type
'cert_type': cert_type,
'context_dict': context_utils.get_current().to_dict()
}
self.distributed_task_controller.submit_task(
delete_ssl_certificate.delete_ssl_certificate,

View File

@ -13,10 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log
import requests
from poppy.notification import base
from poppy.openstack.common import log
LOG = log.getLogger(__name__)

View File

@ -1,703 +0,0 @@
# Copyright 2011 OpenStack Foundation.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""OpenStack logging handler.
This module adds to logging functionality by adding the option to specify
a context object when calling the various log methods. If the context object
is not specified, default formatting is used. Additionally, an instance uuid
may be passed as part of the log message, which is intended to make it easier
for admins to find messages related to a specific instance.
It also allows setting of formatting information through conf.
"""
import inspect
import itertools
import logging
import logging.config
import logging.handlers
import os
import sys
import traceback
from oslo_config import cfg
import six
from six import moves
_PY26 = sys.version_info[0:2] == (2, 6)
from poppy.openstack.common.gettextutils import _
from poppy.openstack.common import importutils
from poppy.openstack.common import jsonutils
from poppy.openstack.common import local
# NOTE(flaper87): Pls, remove when graduating this module
# from the incubator.
from poppy.openstack.common.strutils import mask_password # noqa
# Default timestamp format for log records; overridable via --log-date-format.
_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"

# CLI flags shared by every service that uses this logging module.
common_cli_opts = [
    cfg.BoolOpt('debug',
                short='d',
                default=False,
                help='Print debugging output (set logging level to '
                     'DEBUG instead of default WARNING level).'),
    cfg.BoolOpt('verbose',
                short='v',
                default=False,
                help='Print more verbose output (set logging level to '
                     'INFO instead of default WARNING level).'),
]

# CLI options controlling where log output goes and how it is formatted.
logging_cli_opts = [
    cfg.StrOpt('log-config-append',
               metavar='PATH',
               deprecated_name='log-config',
               help='The name of a logging configuration file. This file '
                    'is appended to any existing logging configuration '
                    'files. For details about logging configuration files, '
                    'see the Python logging module documentation.'),
    cfg.StrOpt('log-format',
               metavar='FORMAT',
               help='DEPRECATED. '
                    'A logging.Formatter log message format string which may '
                    'use any of the available logging.LogRecord attributes. '
                    'This option is deprecated. Please use '
                    'logging_context_format_string and '
                    'logging_default_format_string instead.'),
    cfg.StrOpt('log-date-format',
               default=_DEFAULT_LOG_DATE_FORMAT,
               metavar='DATE_FORMAT',
               help='Format string for %%(asctime)s in log records. '
                    'Default: %(default)s .'),
    cfg.StrOpt('log-file',
               metavar='PATH',
               deprecated_name='logfile',
               help='(Optional) Name of log file to output to. '
                    'If no default is set, logging will go to stdout.'),
    cfg.StrOpt('log-dir',
               deprecated_name='logdir',
               help='(Optional) The base directory used for relative '
                    '--log-file paths.'),
    cfg.BoolOpt('use-syslog',
                default=False,
                help='Use syslog for logging. '
                     'Existing syslog format is DEPRECATED during I, '
                     'and will change in J to honor RFC5424.'),
    cfg.BoolOpt('use-syslog-rfc-format',
                # TODO(bogdando) remove or use True after existing
                # syslog format deprecation in J
                default=False,
                help='(Optional) Enables or disables syslog rfc5424 format '
                     'for logging. If enabled, prefixes the MSG part of the '
                     'syslog message with APP-NAME (RFC5424). The '
                     'format without the APP-NAME is deprecated in I, '
                     'and will be removed in J.'),
    cfg.StrOpt('syslog-log-facility',
               default='LOG_USER',
               help='Syslog facility to receive log lines.')
]

generic_log_opts = [
    cfg.BoolOpt('use_stderr',
                default=True,
                help='Log output to standard error.')
]

# Quiet down chatty third-party loggers unless the operator overrides this.
DEFAULT_LOG_LEVELS = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN',
                      'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO',
                      'oslo_messaging=INFO', 'iso8601=WARN',
                      'requests.packages.urllib3.connectionpool=WARN',
                      'urllib3.connectionpool=WARN', 'websocket=WARN']

# Non-CLI options controlling record format strings and deprecation policy.
log_opts = [
    cfg.StrOpt('logging_context_format_string',
               default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
                       '%(name)s [%(request_id)s %(user_identity)s] '
                       '%(instance)s%(message)s',
               help='Format string to use for log messages with context.'),
    cfg.StrOpt('logging_default_format_string',
               default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
                       '%(name)s [-] %(instance)s%(message)s',
               help='Format string to use for log messages without context.'),
    cfg.StrOpt('logging_debug_format_suffix',
               default='%(funcName)s %(pathname)s:%(lineno)d',
               help='Data to append to log format when level is DEBUG.'),
    cfg.StrOpt('logging_exception_prefix',
               default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
                       '%(instance)s',
               help='Prefix each line of exception output with this format.'),
    cfg.ListOpt('default_log_levels',
                default=DEFAULT_LOG_LEVELS,
                help='List of logger=LEVEL pairs.'),
    cfg.BoolOpt('publish_errors',
                default=False,
                help='Enables or disables publication of error events.'),
    cfg.BoolOpt('fatal_deprecations',
                default=False,
                help='Enables or disables fatal status of deprecations.'),

    # NOTE(mikal): there are two options here because sometimes we are handed
    # a full instance (and could include more information), and other times we
    # are just handed a UUID for the instance.
    cfg.StrOpt('instance_format',
               default='[instance: %(uuid)s] ',
               help='The format for an instance that is passed with the log '
                    'message.'),
    cfg.StrOpt('instance_uuid_format',
               default='[instance: %(uuid)s] ',
               help='The format for an instance UUID that is passed with the '
                    'log message.'),
]

# Register everything on the global config object at import time.
CONF = cfg.CONF
CONF.register_cli_opts(common_cli_opts)
CONF.register_cli_opts(logging_cli_opts)
CONF.register_opts(generic_log_opts)
CONF.register_opts(log_opts)

# our new audit level
# NOTE(jkoelker) Since we synthesized an audit level, make the logging
# module aware of it so it acts like other levels.
logging.AUDIT = logging.INFO + 1
logging.addLevelName(logging.AUDIT, 'AUDIT')
# logging.NullHandler exists from Python 2.7 on; provide a no-op fallback
# so this module still imports on Python 2.6.
try:
    NullHandler = logging.NullHandler
except AttributeError:  # NOTE(jkoelker) NullHandler added in Python 2.7
    class NullHandler(logging.Handler):
        """Handler that silently discards every record (2.6 backport)."""

        def handle(self, record):
            pass

        def emit(self, record):
            pass

        def createLock(self):
            # No I/O ever happens here, so no lock is required.
            self.lock = None
def _dictify_context(context):
if context is None:
return None
if not isinstance(context, dict) and getattr(context, 'to_dict', None):
context = context.to_dict()
return context
def _get_binary_name():
return os.path.basename(inspect.stack()[-1][1])
def _get_log_file_path(binary=None):
    """Resolve the log destination from CONF.log_file / CONF.log_dir.

    :param binary: program name used to build ``<binary>.log`` when only
                   a log directory is configured; defaults to the running
                   binary's name.
    :returns: a path string, or ``None`` when neither option is set.
    """
    logfile = CONF.log_file
    logdir = CONF.log_dir

    if logfile:
        # A bare filename is used as-is; otherwise anchor it in logdir.
        return os.path.join(logdir, logfile) if logdir else logfile

    if logdir:
        name = binary or _get_binary_name()
        return '%s.log' % (os.path.join(logdir, name),)

    return None
class BaseLoggerAdapter(logging.LoggerAdapter):
    """Common adapter base adding the project's synthesized AUDIT level."""

    def audit(self, msg, *args, **kwargs):
        # AUDIT (INFO + 1) is registered with the logging module at import
        # time, so it behaves like any other level here.
        self.log(logging.AUDIT, msg, *args, **kwargs)

    def isEnabledFor(self, level):
        if _PY26:
            # This method was added in python 2.7 (and it does the exact
            # same logic, so we need to do the exact same logic so that
            # python 2.6 has this capability as well).
            return self.logger.isEnabledFor(level)
        else:
            return super(BaseLoggerAdapter, self).isEnabledFor(level)
class LazyAdapter(BaseLoggerAdapter):
    """Adapter that defers creating the real logger until first use."""

    def __init__(self, name='unknown', version='unknown'):
        # Real logger is built lazily by the `logger` property below.
        self._logger = None
        self.extra = {}
        self.name = name
        self.version = version

    @property
    def logger(self):
        if not self._logger:
            self._logger = getLogger(self.name, self.version)
            if six.PY3:
                # In Python 3, the code fails because the 'manager' attribute
                # cannot be found when using a LoggerAdapter as the
                # underlying logger. Work around this issue.
                self._logger.manager = self._logger.logger.manager
        return self._logger
class ContextAdapter(BaseLoggerAdapter):
    """Adapter that injects request-context fields into every log record."""

    # Alias so callers may use the old `warn` spelling interchangeably.
    warn = logging.LoggerAdapter.warning

    def __init__(self, logger, project_name, version_string):
        self.logger = logger
        self.project = project_name
        self.version = version_string
        # msg -> list of args tuples already emitted, to dedupe warnings.
        self._deprecated_messages_sent = dict()

    @property
    def handlers(self):
        return self.logger.handlers

    def deprecated(self, msg, *args, **kwargs):
        """Call this method when a deprecated feature is used.

        If the system is configured for fatal deprecations then the message
        is logged at the 'critical' level and :class:`DeprecatedConfig` will
        be raised.

        Otherwise, the message will be logged (once) at the 'warn' level.

        :raises: :class:`DeprecatedConfig` if the system is configured for
                 fatal deprecations.
        """
        stdmsg = _("Deprecated: %s") % msg
        if CONF.fatal_deprecations:
            self.critical(stdmsg, *args, **kwargs)
            raise DeprecatedConfig(msg=stdmsg)

        # Using a list because a tuple with dict can't be stored in a set.
        sent_args = self._deprecated_messages_sent.setdefault(msg, list())

        if args in sent_args:
            # Already logged this message, so don't log it again.
            return

        sent_args.append(args)
        self.warn(stdmsg, *args, **kwargs)

    def process(self, msg, kwargs):
        """Merge context and instance information into ``kwargs['extra']``."""
        # NOTE(mrodden): catch any Message/other object and
        #                coerce to unicode before they can get
        #                to the python logging and possibly
        #                cause string encoding trouble
        if not isinstance(msg, six.string_types):
            msg = six.text_type(msg)

        if 'extra' not in kwargs:
            kwargs['extra'] = {}
        extra = kwargs['extra']

        # An explicit context=... keyword wins; otherwise fall back to any
        # context previously stashed on local.store.
        context = kwargs.pop('context', None)
        if not context:
            context = getattr(local.store, 'context', None)
        if context:
            extra.update(_dictify_context(context))

        # Instance info may arrive as a full object or just a UUID string.
        instance = kwargs.pop('instance', None)
        instance_uuid = (extra.get('instance_uuid') or
                         kwargs.pop('instance_uuid', None))
        instance_extra = ''
        if instance:
            instance_extra = CONF.instance_format % instance
        elif instance_uuid:
            instance_extra = (CONF.instance_uuid_format
                              % {'uuid': instance_uuid})
        extra['instance'] = instance_extra

        extra.setdefault('user_identity', kwargs.pop('user_identity', None))

        extra['project'] = self.project
        extra['version'] = self.version
        # Self-copy makes record.extra available (read by JSONFormatter).
        extra['extra'] = extra.copy()
        return msg, kwargs
class JSONFormatter(logging.Formatter):
    """Formatter that serializes each LogRecord as a JSON document."""

    def __init__(self, fmt=None, datefmt=None):
        # NOTE(jkoelker) we ignore the fmt argument, but its still there
        #                since logging.config.fileConfig passes it.
        self.datefmt = datefmt

    def formatException(self, ei, strip_newlines=True):
        """Return the traceback for *ei* as a list of strings.

        With ``strip_newlines`` (the default), each traceback line is
        split on newlines and empty fragments are dropped, so the result
        contains no embedded newline characters.
        """
        lines = traceback.format_exception(*ei)
        if strip_newlines:
            lines = [moves.filter(
                lambda x: x,
                line.rstrip().splitlines()) for line in lines]
            lines = list(itertools.chain(*lines))
        return lines

    def format(self, record):
        # Flatten the record's standard attributes into a plain dict.
        message = {'message': record.getMessage(),
                   'asctime': self.formatTime(record, self.datefmt),
                   'name': record.name,
                   'msg': record.msg,
                   'args': record.args,
                   'levelname': record.levelname,
                   'levelno': record.levelno,
                   'pathname': record.pathname,
                   'filename': record.filename,
                   'module': record.module,
                   'lineno': record.lineno,
                   'funcname': record.funcName,
                   'created': record.created,
                   'msecs': record.msecs,
                   'relative_created': record.relativeCreated,
                   'thread': record.thread,
                   'thread_name': record.threadName,
                   'process_name': record.processName,
                   'process': record.process,
                   'traceback': None}

        # record.extra is set by ContextAdapter.process when present.
        if hasattr(record, 'extra'):
            message['extra'] = record.extra

        if record.exc_info:
            message['traceback'] = self.formatException(record.exc_info)

        return jsonutils.dumps(message)
def _create_logging_excepthook(product_name):
    """Build a ``sys.excepthook`` that logs uncaught exceptions.

    The hook logs the one-line exception summary at CRITICAL level on the
    *product_name* logger, attaching the full exc_info for the formatter.
    """
    def logging_excepthook(exc_type, value, tb):
        summary = "".join(traceback.format_exception_only(exc_type, value))
        getLogger(product_name).critical(summary,
                                         exc_info=(exc_type, value, tb))
    return logging_excepthook
class LogConfigError(Exception):
    """Raised when a logging configuration file cannot be loaded."""

    message = _('Error loading logging config %(log_config)s: %(err_msg)s')

    def __init__(self, log_config, err_msg):
        self.log_config = log_config
        self.err_msg = err_msg

    def __str__(self):
        return self.message % {'log_config': self.log_config,
                               'err_msg': self.err_msg}
def _load_log_config(log_config_append):
    """Apply the logging config file at *log_config_append*.

    Existing loggers are kept alive (``disable_existing_loggers=False``).

    :raises: LogConfigError if the file cannot be parsed.
    """
    try:
        logging.config.fileConfig(log_config_append,
                                  disable_existing_loggers=False)
    except (moves.configparser.Error, KeyError) as exc:
        raise LogConfigError(log_config_append, six.text_type(exc))
def setup(product_name, version='unknown'):
    """Set up logging, preferring an explicit log config file if given.

    When ``CONF.log_config_append`` is set that file drives the whole
    configuration; otherwise handlers/formatters are built from CONF.
    Either way, uncaught exceptions are routed to the product logger.
    """
    config_file = CONF.log_config_append
    if config_file:
        _load_log_config(config_file)
    else:
        _setup_logging_from_conf(product_name, version)
    sys.excepthook = _create_logging_excepthook(product_name)
def set_defaults(logging_context_format_string=None,
                 default_log_levels=None):
    """Override the compiled-in defaults of the logging config options.

    Both arguments are optional: ``default_log_levels`` was introduced
    after ``logging_context_format_string`` in a backwards-incompatible
    change, so callers may pass either one alone.
    """
    if default_log_levels is not None:
        cfg.set_defaults(log_opts,
                         default_log_levels=default_log_levels)
    if logging_context_format_string is not None:
        cfg.set_defaults(
            log_opts,
            logging_context_format_string=logging_context_format_string)
def _find_facility_from_conf():
    """Resolve ``CONF.syslog_log_facility`` to a SysLogHandler facility code.

    The option may name either a ``SysLogHandler`` constant (e.g.
    ``LOG_USER``) or a key of ``SysLogHandler.facility_names``.

    :returns: the numeric syslog facility.
    :raises: TypeError if the configured name matches neither.
    """
    facility_names = logging.handlers.SysLogHandler.facility_names
    facility = getattr(logging.handlers.SysLogHandler,
                       CONF.syslog_log_facility,
                       None)
    if facility is None and CONF.syslog_log_facility in facility_names:
        facility = facility_names.get(CONF.syslog_log_facility)
    if facility is None:
        # Build the candidate list for the error message.  dict.keys()
        # returns a view on Python 3 with no extend(), so materialize it
        # first (the original `facility_names.keys().extend(...)` raised
        # AttributeError on Python 3).
        valid_facilities = list(facility_names)
        consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
                  'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
                  'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
                  'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
                  'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
        valid_facilities.extend(consts)
        raise TypeError(_('syslog facility must be one of: %s') %
                        ', '.join("'%s'" % fac
                                  for fac in valid_facilities))
    return facility
class RFCSysLogHandler(logging.handlers.SysLogHandler):
    """SysLogHandler that prefixes each message with the binary name.

    The prefix corresponds to the APP-NAME field of RFC 5424 syslog
    messages.
    """

    def __init__(self, *args, **kwargs):
        self.binary_name = _get_binary_name()
        # Old-style base-class call: on Python 2.6 SysLogHandler is a
        # classic class ('classobj'), so super() cannot be used.
        logging.handlers.SysLogHandler.__init__(self, *args, **kwargs)

    def format(self, record):
        # Old-style base-class call for Python 2.6 compatibility
        # (see __init__).
        formatted = logging.handlers.SysLogHandler.format(self, record)
        return '%s %s' % (self.binary_name, formatted)
def _setup_logging_from_conf(project, version):
    """Wire up the root logger's handlers, formatters and levels from CONF.

    :param project: project name passed through to ContextFormatter
    :param version: project version passed through to ContextFormatter
    """
    log_root = getLogger(None).logger
    # Start from a clean slate so repeated setup() calls do not stack
    # duplicate handlers.
    for handler in log_root.handlers:
        log_root.removeHandler(handler)
    if CONF.use_syslog:
        facility = _find_facility_from_conf()
        # TODO(bogdando) use the format provided by RFCSysLogHandler
        # after existing syslog format deprecation in J
        if CONF.use_syslog_rfc_format:
            syslog = RFCSysLogHandler(address='/dev/log',
                                      facility=facility)
        else:
            syslog = logging.handlers.SysLogHandler(address='/dev/log',
                                                    facility=facility)
        log_root.addHandler(syslog)
    logpath = _get_log_file_path()
    if logpath:
        filelog = logging.handlers.WatchedFileHandler(logpath)
        log_root.addHandler(filelog)
    if CONF.use_stderr:
        streamlog = ColorHandler()
        log_root.addHandler(streamlog)
    elif not logpath:
        # No file and no stderr requested: fall back to stdout.
        # pass sys.stdout as a positional argument
        # python2.6 calls the argument strm, in 2.7 it's stream
        streamlog = logging.StreamHandler(sys.stdout)
        log_root.addHandler(streamlog)
    if CONF.publish_errors:
        try:
            handler = importutils.import_object(
                "poppy.openstack.common.log_handler.PublishErrorsHandler",
                logging.ERROR)
        except ImportError:
            # Fall back to the oslo.messaging implementation when the
            # in-tree handler is unavailable.
            handler = importutils.import_object(
                "oslo.messaging.notify.log_handler.PublishErrorsHandler",
                logging.ERROR)
        log_root.addHandler(handler)
    datefmt = CONF.log_date_format
    for handler in log_root.handlers:
        # NOTE(alaski): CONF.log_format overrides everything currently. This
        # should be deprecated in favor of context aware formatting.
        if CONF.log_format:
            handler.setFormatter(logging.Formatter(fmt=CONF.log_format,
                                                   datefmt=datefmt))
            log_root.info('Deprecated: log_format is now deprecated and will '
                          'be removed in the next release')
        else:
            handler.setFormatter(ContextFormatter(project=project,
                                                  version=version,
                                                  datefmt=datefmt))
    # Root level: debug wins over verbose, which wins over the default.
    if CONF.debug:
        log_root.setLevel(logging.DEBUG)
    elif CONF.verbose:
        log_root.setLevel(logging.INFO)
    else:
        log_root.setLevel(logging.WARNING)
    # Apply per-module overrides of the form "module=LEVEL".
    for pair in CONF.default_log_levels:
        mod, _sep, level_name = pair.partition('=')
        logger = logging.getLogger(mod)
        # NOTE(AAzza) in python2.6 Logger.setLevel doesn't convert string name
        # to integer code.
        if sys.version_info < (2, 7):
            level = logging.getLevelName(level_name)
            logger.setLevel(level)
        else:
            logger.setLevel(level_name)
# Cache of ContextAdapter instances keyed by logger name, so repeated
# getLogger() calls for the same name return the same adapter.
_loggers = {}
def getLogger(name='unknown', version='unknown'):
    """Return the cached ContextAdapter for *name*, creating it on first use."""
    adapter = _loggers.get(name)
    if adapter is None:
        adapter = ContextAdapter(logging.getLogger(name), name, version)
        _loggers[name] = adapter
    return adapter
def getLazyLogger(name='unknown', version='unknown'):
    """Return a lazy, pass-through logger.

    The returned adapter delays creating the real logger until it is
    actually needed, then delegates every call to it.
    """
    return LazyAdapter(name, version)
class WritableLogger(object):
    """File-like object that forwards write() calls to a logger."""

    def __init__(self, logger, level=logging.INFO):
        self.logger = logger
        self.level = level

    def write(self, msg):
        # Strip the trailing newline that file-style writers append.
        stripped = msg.rstrip()
        self.logger.log(self.level, stripped)
class ContextFormatter(logging.Formatter):
    """A context.RequestContext aware formatter configured through flags.

    The flags used to set format strings are: logging_context_format_string
    and logging_default_format_string.  You can also specify
    logging_debug_format_suffix to append extra formatting if the log level
    is debug.

    For information about what variables are available for the formatter see:
    http://docs.python.org/library/logging.html#formatter

    If available, uses the context value stored in TLS - local.store.context
    """

    def __init__(self, *args, **kwargs):
        """Initialize ContextFormatter instance

        Takes additional keyword arguments which can be used in the message
        format string.

        :keyword project: project name
        :type project: string
        :keyword version: project version
        :type version: string
        """
        self.project = kwargs.pop('project', 'unknown')
        self.version = kwargs.pop('version', 'unknown')
        logging.Formatter.__init__(self, *args, **kwargs)

    def format(self, record):
        """Uses contextstring if request_id is set, otherwise default."""
        # store project info
        record.project = self.project
        record.version = self.version
        # store request info from the thread-local context, if any
        context = getattr(local.store, 'context', None)
        if context:
            d = _dictify_context(context)
            for k, v in d.items():
                setattr(record, k, v)
        # NOTE(sdague): default the fancier formatting params
        # to an empty string so we don't throw an exception if
        # they get used
        for key in ('instance', 'color', 'user_identity'):
            if key not in record.__dict__:
                record.__dict__[key] = ''
        # A request_id on the record selects the context-aware format.
        if record.__dict__.get('request_id'):
            fmt = CONF.logging_context_format_string
        else:
            fmt = CONF.logging_default_format_string
        if (record.levelno == logging.DEBUG and
                CONF.logging_debug_format_suffix):
            fmt += " " + CONF.logging_debug_format_suffix
        # On Python >= 3.2 Formatter keeps the format string in a style
        # object, so both _style and _fmt must be swapped per record.
        if sys.version_info < (3, 2):
            self._fmt = fmt
        else:
            self._style = logging.PercentStyle(fmt)
            self._fmt = self._style._fmt
        # Cache this on the record, Logger will respect our formatted copy
        if record.exc_info:
            record.exc_text = self.formatException(record.exc_info, record)
        return logging.Formatter.format(self, record)

    def formatException(self, exc_info, record=None):
        """Format exception output with CONF.logging_exception_prefix."""
        if not record:
            return logging.Formatter.formatException(self, exc_info)
        stringbuffer = moves.StringIO()
        traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
                                  None, stringbuffer)
        lines = stringbuffer.getvalue().split('\n')
        stringbuffer.close()
        if CONF.logging_exception_prefix.find('%(asctime)') != -1:
            record.asctime = self.formatTime(record, self.datefmt)
        # Prefix every traceback line so multi-line exceptions remain
        # attributable in aggregated logs.
        formatted_lines = []
        for line in lines:
            pl = CONF.logging_exception_prefix % record.__dict__
            fl = '%s%s' % (pl, line)
            formatted_lines.append(fl)
        return '\n'.join(formatted_lines)
class ColorHandler(logging.StreamHandler):
    """StreamHandler that tags each record with an ANSI color escape code.

    The code is stored on the record as ``color`` so format strings may
    reference ``%(color)s``.
    """

    # NOTE(review): logging.AUDIT is not a stdlib level; it is presumably
    # added elsewhere in this module -- confirm before reusing this class
    # standalone.
    LEVEL_COLORS = {
        logging.DEBUG: '\033[00;32m',  # GREEN
        logging.INFO: '\033[00;36m',  # CYAN
        logging.AUDIT: '\033[01;36m',  # BOLD CYAN
        logging.WARN: '\033[01;33m',  # BOLD YELLOW
        logging.ERROR: '\033[01;31m',  # BOLD RED
        logging.CRITICAL: '\033[01;31m',  # BOLD RED
    }

    def format(self, record):
        # Raises KeyError for levels outside LEVEL_COLORS.
        record.color = self.LEVEL_COLORS[record.levelno]
        return logging.StreamHandler.format(self, record)
class DeprecatedConfig(Exception):
    """Raised when a deprecated config option is used fatally."""

    message = _("Fatal call to deprecated config: %(msg)s")

    def __init__(self, msg):
        super(Exception, self).__init__(self.message % {'msg': msg})

View File

@ -17,7 +17,7 @@ from oslo_config import cfg
from taskflow import engines
from taskflow.patterns import linear_flow
from poppy.openstack.common import log
from oslo_log import log
from poppy.provider.akamai.background_jobs.check_cert_status_and_update import \
check_cert_status_and_update_tasks

View File

@ -16,10 +16,10 @@
import json
from oslo_config import cfg
from oslo_log import log
from taskflow import task
from poppy.distributed_task.utils import memoized_controllers
from poppy.openstack.common import log
from poppy.transport.pecan.models.request import ssl_certificate

View File

@ -17,7 +17,7 @@ from oslo_config import cfg
from taskflow import engines
from taskflow.patterns import linear_flow
from poppy.openstack.common import log
from oslo_log import log
from poppy.provider.akamai.background_jobs.update_property import (
update_property_tasks)

View File

@ -16,10 +16,11 @@
import json
from oslo_config import cfg
from oslo_log import log
from taskflow import task
from poppy.distributed_task.utils import memoized_controllers
from poppy.openstack.common import log
LOG = log.getLogger(__name__)

View File

@ -19,11 +19,11 @@ import json
from akamai import edgegrid
from oslo_config import cfg
from oslo_log import log
import requests
from stevedore import driver
from poppy.common import decorators
from poppy.openstack.common import log
from poppy.provider.akamai import controllers
from poppy.provider.akamai.mod_san_queue import zookeeper_queue
from poppy.provider import base

View File

@ -13,8 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log
from poppy.model.helpers import geo_zones
from poppy.openstack.common import log
# to use log inside worker, we need to directly use logging
LOG = log.getLogger(__name__)

View File

@ -25,9 +25,9 @@ from cassandra import policies
from cassandra import query
from cdeploy import migrator
from oslo_config import cfg
from oslo_log import log
from poppy.common import decorators
from poppy.openstack.common import log as logging
from poppy.provider.akamai.san_info_storage import base
@ -84,7 +84,7 @@ CASSANDRA_OPTIONS = [
AKAMAI_CASSANDRA_STORAGE_GROUP = 'drivers:provider:akamai:storage'
LOG = logging.getLogger(__name__)
LOG = log.getLogger(__name__)
GET_PROVIDER_INFO = '''

View File

@ -17,9 +17,10 @@ import datetime
import json
import traceback
from oslo_log import log
from poppy.common import decorators
from poppy.common import util
from poppy.openstack.common import log
from poppy.provider.akamai import geo_zone_code_mapping
from poppy.provider import base

View File

@ -15,7 +15,7 @@
import traceback
from poppy.openstack.common import log
from oslo_log import log
LOG = log.getLogger(__name__)

View File

@ -17,14 +17,14 @@
import boto
from oslo_config import cfg
from oslo_log import log
import requests
from poppy.openstack.common import log as logging
from poppy.provider import base
from poppy.provider.cloudfront import controllers
LOG = logging.getLogger(__name__)
LOG = log.getLogger(__name__)
CLOUDFRONT_OPTIONS = [
cfg.StrOpt('aws_access_key_id', help='CloudFront Access Key ID'),

View File

@ -14,9 +14,9 @@
# limitations under the License.
from boto import cloudfront
from oslo_log import log
from poppy.common import decorators
from poppy.openstack.common import log
from poppy.provider import base
LOG = log.getLogger(__name__)

View File

@ -17,13 +17,13 @@
import fastly
from oslo_config import cfg
from oslo_log import log
import requests
from poppy.openstack.common import log as logging
from poppy.provider import base
from poppy.provider.fastly import controllers
LOG = logging.getLogger(__name__)
LOG = log.getLogger(__name__)
FASTLY_OPTIONS = [
cfg.StrOpt('apikey', help='Fastly API Key'),

View File

@ -17,14 +17,14 @@
import maxcdn
from oslo_config import cfg
from oslo_log import log
import requests
from poppy.openstack.common import log as logging
from poppy.provider import base
from poppy.provider.maxcdn import controllers
LOG = logging.getLogger(__name__)
LOG = log.getLogger(__name__)
MAXCDN_OPTIONS = [
cfg.StrOpt('alias', help='MAXCDN API account alias'),

View File

@ -15,11 +15,12 @@
"""CDN Provider implementation."""
from poppy.openstack.common import log as logging
from oslo_log import log
from poppy.provider import base
from poppy.provider.mock import controllers
LOG = logging.getLogger(__name__)
LOG = log.getLogger(__name__)
class CDNProvider(base.Driver):

View File

@ -15,8 +15,9 @@
import uuid
from oslo_log import log
from poppy.common import decorators
from poppy.openstack.common import log
from poppy.provider import base
LOG = log.getLogger(__name__)

View File

@ -27,13 +27,13 @@ from cassandra import policies
from cassandra import query
from cdeploy import migrator
from oslo_config import cfg
from oslo_log import log
from poppy.openstack.common import log as logging
from poppy.storage import base
from poppy.storage.cassandra import controllers
LOG = logging.getLogger(__name__)
LOG = log.getLogger(__name__)
CASSANDRA_OPTIONS = [
cfg.ListOpt('cluster', default=['127.0.0.1'],

View File

@ -22,8 +22,12 @@ except ImportError: # pragma: no cover
import collections # pragma: no cover
from cassandra import query
import six
from oslo_log import log
if six.PY2:
from itertools import ifilterfalse as filterfalse
else:
@ -37,10 +41,9 @@ from poppy.model.helpers import rule
from poppy.model import log_delivery as ld
from poppy.model import service
from poppy.model import ssl_certificate
from poppy.openstack.common import log as logging
from poppy.storage import base
LOG = logging.getLogger(__name__)
LOG = log.getLogger(__name__)
CQL_LIST_SERVICES = '''

View File

@ -15,13 +15,14 @@
"""Storage driver implementation."""
from poppy.openstack.common import log as logging
from oslo_config import cfg
from oslo_log import log
from poppy.storage import base
from poppy.storage.mockdb import controllers
from oslo_config import cfg
LOG = logging.getLogger(__name__)
LOG = log.getLogger(__name__)
MOCKDB_OPTIONS = [
cfg.StrOpt('database', default='poppy',

View File

@ -16,9 +16,9 @@
from wsgiref import simple_server
from oslo_config import cfg
from oslo_log import log
import pecan
from poppy.openstack.common import log
from poppy import transport
from poppy.transport.pecan import controllers
from poppy.transport.pecan.controllers import v1

View File

@ -14,10 +14,10 @@
# limitations under the License.
from oslo_config import cfg
from oslo_context import context
import pecan
from pecan import hooks
from poppy.openstack.common import context
from poppy.openstack.common import local

View File

@ -16,12 +16,12 @@
import json
import logging
from oslo_log import log
from pecan import hooks
import webob
from poppy.openstack.common import log as oslo_log
LOG = oslo_log.getLogger(__name__)
LOG = log.getLogger(__name__)
class ErrorHook(hooks.PecanHook):

View File

@ -15,5 +15,6 @@ stevedore>=0.10
six>=1.4.1
netifaces>=0.10.4
oslo.config>=2.0.0
oslo.log>=1.12.1
oslo.serialization>=1.7.0
oslo.utils>=2.0.0

View File

@ -16,6 +16,7 @@ import json
import uuid
import mock
from oslo_context import context as context_utils
from taskflow import engines
from poppy.distributed_task.taskflow.flow import create_service
@ -171,7 +172,8 @@ class TestFlowRuns(base.TestCase):
'auth_token': json.dumps(str(uuid.uuid4())),
'service_id': json.dumps(str(uuid.uuid4())),
'time_seconds': [i * self.time_factor
for i in range(self.total_retries)]
for i in range(self.total_retries)],
'context_dict': context_utils.RequestContext().to_dict()
}
service_controller, storage_controller, dns_controller = \
@ -212,7 +214,8 @@ class TestFlowRuns(base.TestCase):
'time_seconds': [i * self.time_factor
for i in range(self.total_retries)],
'service_old': json.dumps(service_old.to_dict()),
'service_obj': json.dumps(service_new.to_dict())
'service_obj': json.dumps(service_new.to_dict()),
'context_dict': context_utils.RequestContext().to_dict()
}
service_controller, storage_controller, dns_controller = \
@ -246,7 +249,8 @@ class TestFlowRuns(base.TestCase):
i in range(self.total_retries)],
'provider_details': json.dumps(
dict([(k, v.to_dict())
for k, v in service_obj.provider_details.items()]))
for k, v in service_obj.provider_details.items()])),
'context_dict': context_utils.RequestContext().to_dict()
}
service_controller, storage_controller, dns_controller = \
@ -281,7 +285,8 @@ class TestFlowRuns(base.TestCase):
for k, v in service_obj.provider_details.items()])),
'purge_url': 'cdn.poppy.org',
'hard': json.dumps(True),
'service_obj': json.dumps(service_obj.to_dict())
'service_obj': json.dumps(service_obj.to_dict()),
'context_dict': context_utils.RequestContext().to_dict()
}
service_controller, storage_controller, dns_controller = \
@ -315,6 +320,7 @@ class TestFlowRuns(base.TestCase):
'service_obj': json.dumps(service_obj.to_dict()),
'time_seconds': [i * self.time_factor for
i in range(self.total_retries)],
'context_dict': context_utils.RequestContext().to_dict()
}
disable_kwargs = enable_kwargs.copy()
@ -347,7 +353,8 @@ class TestFlowRuns(base.TestCase):
'auth_token': json.dumps(str(uuid.uuid4())),
'service_id': json.dumps(str(uuid.uuid4())),
'time_seconds': [i * self.time_factor for
i in range(self.total_retries)]
i in range(self.total_retries)],
'context_dict': context_utils.RequestContext().to_dict()
}
service_controller, storage_controller, dns_controller = \
@ -396,7 +403,8 @@ class TestFlowRuns(base.TestCase):
'time_seconds': [i * self.time_factor for
i in range(self.total_retries)],
'service_old': json.dumps(service_old.to_dict()),
'service_obj': json.dumps(service_new.to_dict())
'service_obj': json.dumps(service_new.to_dict()),
'context_dict': context_utils.RequestContext().to_dict()
}
service_controller, storage_controller, dns_controller = \
@ -440,7 +448,8 @@ class TestFlowRuns(base.TestCase):
i in range(self.total_retries)],
'provider_details': json.dumps(
dict([(k, v.to_dict())
for k, v in service_obj.provider_details.items()]))
for k, v in service_obj.provider_details.items()])),
'context_dict': context_utils.RequestContext().to_dict()
}
service_controller, storage_controller, dns_controller = \
@ -481,6 +490,7 @@ class TestFlowRuns(base.TestCase):
'service_obj': json.dumps(service_obj.to_dict()),
'time_seconds': [i * self.time_factor for
i in range(self.total_retries)],
'context_dict': context_utils.RequestContext().to_dict()
}
disable_kwargs = enable_kwargs.copy()
@ -543,7 +553,8 @@ class TestFlowRuns(base.TestCase):
'time_seconds': [i * self.time_factor for
i in range(self.total_retries)],
'service_old': json.dumps(service_old.to_dict()),
'service_obj': json.dumps(service_new.to_dict())
'service_obj': json.dumps(service_new.to_dict()),
'context_dict': context_utils.RequestContext().to_dict()
}
service_controller, storage_controller, dns_controller = \
@ -589,7 +600,8 @@ class TestFlowRuns(base.TestCase):
'time_seconds': [i * self.time_factor for
i in range(self.total_retries)],
'service_old': json.dumps(service_old.to_dict()),
'service_obj': json.dumps(service_new.to_dict())
'service_obj': json.dumps(service_new.to_dict()),
'context_dict': context_utils.RequestContext().to_dict()
}
service_controller, storage_controller, dns_controller = \
@ -621,7 +633,8 @@ class TestFlowRuns(base.TestCase):
'auth_token': json.dumps(str(uuid.uuid4())),
'service_id': json.dumps(str(uuid.uuid4())),
'time_seconds': [i * self.time_factor for
i in range(self.total_retries)]
i in range(self.total_retries)],
'context_dict': context_utils.RequestContext().to_dict()
}
service_controller, storage_controller, dns_controller = \
@ -653,7 +666,8 @@ class TestFlowRuns(base.TestCase):
'auth_token': json.dumps(str(uuid.uuid4())),
'service_id': json.dumps(str(uuid.uuid4())),
'time_seconds': [i * self.time_factor for
i in range(self.total_retries)]
i in range(self.total_retries)],
'context_dict': context_utils.RequestContext().to_dict()
}
service_controller, storage_controller, dns_controller = \
@ -695,7 +709,8 @@ class TestFlowRuns(base.TestCase):
i in range(self.total_retries)],
'provider_details': json.dumps(
dict([(k, v.to_dict())
for k, v in service_obj.provider_details.items()]))
for k, v in service_obj.provider_details.items()])),
'context_dict': context_utils.RequestContext().to_dict()
}
service_controller, storage_controller, dns_controller = \
@ -734,7 +749,8 @@ class TestFlowRuns(base.TestCase):
i in range(self.total_retries)],
'provider_details': json.dumps(
dict([(k, v.to_dict())
for k, v in service_obj.provider_details.items()]))
for k, v in service_obj.provider_details.items()])),
'context_dict': context_utils.RequestContext().to_dict()
}
service_controller, storage_controller, dns_controller = \
@ -773,6 +789,7 @@ class TestFlowRuns(base.TestCase):
'service_obj': json.dumps(service_obj.to_dict()),
'time_seconds': [i * self.time_factor for
i in range(self.total_retries)],
'context_dict': context_utils.RequestContext().to_dict()
}
disable_kwargs = enable_kwargs.copy()
@ -820,6 +837,7 @@ class TestFlowRuns(base.TestCase):
'service_obj': json.dumps(service_obj.to_dict()),
'time_seconds': [i * self.time_factor for
i in range(self.total_retries)],
'context_dict': context_utils.RequestContext().to_dict()
}
disable_kwargs = enable_kwargs.copy()
@ -861,6 +879,7 @@ class TestFlowRuns(base.TestCase):
'providers_list_json': json.dumps(providers),
'project_id': json.dumps(str(uuid.uuid4())),
'cert_obj_json': json.dumps(cert_obj_json.to_dict()),
'context_dict': context_utils.RequestContext().to_dict()
}
service_controller, storage_controller, dns_controller = \
@ -886,6 +905,7 @@ class TestFlowRuns(base.TestCase):
'cert_type': "san",
'project_id': json.dumps(str(uuid.uuid4())),
'domain_name': "san.san.com",
'context_dict': context_utils.RequestContext().to_dict()
}
service_controller, storage_controller, dns_controller = \