Remove log translations
Log messages are no longer being translated. This removes all use of the _LE, _LI, and _LW translation markers to simplify logging and to avoid confusion with new contributions. See: http://lists.openstack.org/pipermail/openstack-i18n/2016-November/002574.html and http://lists.openstack.org/pipermail/openstack-dev/2017-March/113365.html. Change-Id: I877d8897d3e314903e704073868aaa877eac8167
This commit is contained in:
parent
aa089a0bf4
commit
3084860863
|
@ -31,9 +31,9 @@ import wsmeext.pecan as wsme_pecan
|
|||
from panko.api.controllers.v2 import base
|
||||
from panko.api.controllers.v2 import utils as v2_utils
|
||||
from panko.api import rbac
|
||||
from panko.i18n import _
|
||||
from panko import storage
|
||||
from panko.storage import models as event_models
|
||||
from panko.i18n import _, _LE
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
|
@ -316,8 +316,8 @@ class EventsController(rest.RestController):
|
|||
raise base.EntityNotFound(_("Event"), message_id)
|
||||
|
||||
if len(events) > 1:
|
||||
LOG.error(_LE("More than one event with "
|
||||
"id %s returned from storage driver") % message_id)
|
||||
LOG.error(("More than one event with "
|
||||
"id %s returned from storage driver"), message_id)
|
||||
|
||||
event = events[0]
|
||||
|
||||
|
|
|
@ -27,7 +27,6 @@ import six
|
|||
import webob
|
||||
|
||||
from panko import i18n
|
||||
from panko.i18n import _LE
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
|
@ -99,7 +98,7 @@ class ParsableErrorMiddleware(object):
|
|||
error_message,
|
||||
b'</error_message>'))
|
||||
except etree.XMLSyntaxError as err:
|
||||
LOG.error(_LE('Error parsing HTTP response: %s'), err)
|
||||
LOG.error('Error parsing HTTP response: %s', err)
|
||||
error_message = state['status_code']
|
||||
body = '<error_message>%s</error_message>' % error_message
|
||||
if six.PY3:
|
||||
|
|
|
@ -16,7 +16,6 @@
|
|||
|
||||
from oslo_log import log
|
||||
|
||||
from panko.i18n import _LI
|
||||
from panko import service
|
||||
from panko import storage
|
||||
|
||||
|
@ -37,5 +36,5 @@ def expirer():
|
|||
conn = storage.get_connection_from_config(conf)
|
||||
conn.clear_expired_data(conf.database.event_time_to_live)
|
||||
else:
|
||||
LOG.info(_LI("Nothing to clean, database event time to live "
|
||||
"is disabled"))
|
||||
LOG.info("Nothing to clean, database event time to live "
|
||||
"is disabled")
|
||||
|
|
|
@ -17,7 +17,6 @@ import debtcollector
|
|||
from oslo_log import log
|
||||
from oslo_utils import timeutils
|
||||
|
||||
from panko.i18n import _LE
|
||||
from panko import service
|
||||
from panko import storage
|
||||
from panko.storage import models
|
||||
|
@ -67,6 +66,6 @@ class DatabaseDispatcher(object):
|
|||
raw=ev.get('raw', {}))
|
||||
)
|
||||
except Exception:
|
||||
LOG.exception(_LE("Error processing event and it will be "
|
||||
"dropped: %s"), ev)
|
||||
LOG.exception("Error processing event and it will be "
|
||||
"dropped: %s", ev)
|
||||
self.conn.record_events(event_list)
|
||||
|
|
|
@ -27,16 +27,6 @@ _translators = oslo_i18n.TranslatorFactory(domain=DOMAIN)
|
|||
# The primary translation function using the well-known name "_"
|
||||
_ = _translators.primary
|
||||
|
||||
# Translators for log levels.
|
||||
#
|
||||
# The abbreviated names are meant to reflect the usual use of a short
|
||||
# name like '_'. The "L" is for "log" and the other letter comes from
|
||||
# the level.
|
||||
_LI = _translators.log_info
|
||||
_LW = _translators.log_warning
|
||||
_LE = _translators.log_error
|
||||
_LC = _translators.log_critical
|
||||
|
||||
|
||||
def translate(value, user_locale):
|
||||
return oslo_i18n.translate(value, user_locale)
|
||||
|
|
|
@ -21,10 +21,9 @@ from oslo_utils import netutils
|
|||
from oslo_utils import timeutils
|
||||
import six
|
||||
|
||||
from panko import storage
|
||||
from panko.storage import base
|
||||
from panko.storage import models
|
||||
from panko.i18n import _LE, _LI, _LW
|
||||
from panko import storage
|
||||
from panko import utils
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
|
@ -106,10 +105,10 @@ class Connection(base.Connection):
|
|||
if not ok:
|
||||
__, result = result.popitem()
|
||||
if result['status'] == 409:
|
||||
LOG.info(_LI('Duplicate event detected, skipping it: %s')
|
||||
% result)
|
||||
LOG.info('Duplicate event detected, skipping it: %s',
|
||||
result)
|
||||
else:
|
||||
LOG.exception(_LE('Failed to record event: %s') % result)
|
||||
LOG.exception('Failed to record event: %s', result)
|
||||
error = storage.StorageUnknownWriteError(result)
|
||||
|
||||
if self._refresh_on_write:
|
||||
|
@ -187,7 +186,7 @@ class Connection(base.Connection):
|
|||
limit = None
|
||||
if pagination:
|
||||
if pagination.get('sort'):
|
||||
LOG.warning(_LW('Driver does not support sort functionality'))
|
||||
LOG.warning('Driver does not support sort functionality')
|
||||
limit = pagination.get('limit')
|
||||
if limit == 0:
|
||||
return
|
||||
|
|
|
@ -16,10 +16,9 @@ import operator
|
|||
from oslo_log import log
|
||||
|
||||
from panko.storage import base
|
||||
from panko.storage import models
|
||||
from panko.i18n import _LE, _LW
|
||||
from panko.storage.hbase import base as hbase_base
|
||||
from panko.storage.hbase import utils as hbase_utils
|
||||
from panko.storage import models
|
||||
from panko import utils
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
|
@ -115,7 +114,7 @@ class Connection(hbase_base.Connection, base.Connection):
|
|||
try:
|
||||
events_table.put(row, record)
|
||||
except Exception as ex:
|
||||
LOG.exception(_LE("Failed to record event: %s") % ex)
|
||||
LOG.exception("Failed to record event: %s", ex)
|
||||
error = ex
|
||||
if error:
|
||||
raise error
|
||||
|
@ -130,7 +129,7 @@ class Connection(hbase_base.Connection, base.Connection):
|
|||
limit = None
|
||||
if pagination:
|
||||
if pagination.get('sort'):
|
||||
LOG.warning(_LW('Driver does not support sort functionality'))
|
||||
LOG.warning('Driver does not support sort functionality')
|
||||
limit = pagination.get('limit')
|
||||
if limit == 0:
|
||||
return
|
||||
|
|
|
@ -13,7 +13,6 @@
|
|||
|
||||
from oslo_log import log
|
||||
|
||||
from panko.i18n import _LI
|
||||
from panko.storage import base
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
|
@ -30,4 +29,4 @@ class Connection(base.Connection):
|
|||
|
||||
:param ttl: Number of seconds to keep records for.
|
||||
"""
|
||||
LOG.info(_LI("Dropping event data with TTL %d"), ttl)
|
||||
LOG.info("Dropping event data with TTL %d", ttl)
|
||||
|
|
|
@ -24,10 +24,9 @@ from oslo_log import log
|
|||
from oslo_utils import timeutils
|
||||
import sqlalchemy as sa
|
||||
|
||||
from panko import storage
|
||||
from panko.storage import base
|
||||
from panko.storage import models as api_models
|
||||
from panko.i18n import _LE, _LI
|
||||
from panko import storage
|
||||
from panko.storage.sqlalchemy import models
|
||||
from panko import utils
|
||||
|
||||
|
@ -196,11 +195,11 @@ class Connection(base.Connection):
|
|||
session.execute(model.__table__.insert(),
|
||||
trait_map[dtype])
|
||||
except dbexc.DBDuplicateEntry as e:
|
||||
LOG.info(_LI("Duplicate event detected, skipping it: %s") % e)
|
||||
LOG.info("Duplicate event detected, skipping it: %s", e)
|
||||
except KeyError as e:
|
||||
LOG.exception(_LE('Failed to record event: %s') % e)
|
||||
LOG.exception('Failed to record event: %s', e)
|
||||
except Exception as e:
|
||||
LOG.exception(_LE('Failed to record event: %s') % e)
|
||||
LOG.exception('Failed to record event: %s', e)
|
||||
error = e
|
||||
if error:
|
||||
raise error
|
||||
|
@ -459,4 +458,4 @@ class Connection(base.Connection):
|
|||
(session.query(models.EventType)
|
||||
.filter(~models.EventType.events.any())
|
||||
.delete(synchronize_session="fetch"))
|
||||
LOG.info(_LI("%d events are removed from database"), event_rows)
|
||||
LOG.info("%d events are removed from database", event_rows)
|
||||
|
|
|
@ -24,7 +24,7 @@ import pymongo.errors
|
|||
import six
|
||||
import tenacity
|
||||
|
||||
from panko.i18n import _, _LI
|
||||
from panko.i18n import _
|
||||
|
||||
ERROR_INDEX_WITH_DIFFERENT_SPEC_ALREADY_EXISTS = 86
|
||||
|
||||
|
@ -132,7 +132,7 @@ class ConnectionPool(object):
|
|||
splitted_url = netutils.urlsplit(url)
|
||||
log_data = {'db': splitted_url.scheme,
|
||||
'nodelist': connection_options['nodelist']}
|
||||
LOG.info(_LI('Connecting to %(db)s on %(nodelist)s') % log_data)
|
||||
LOG.info('Connecting to %(db)s on %(nodelist)s' % log_data)
|
||||
try:
|
||||
client = MongoProxy(pymongo.MongoClient(url),
|
||||
max_retries, retry_interval)
|
||||
|
@ -190,7 +190,7 @@ class MongoProxy(object):
|
|||
self.conn.create_index(keys, name=name, *args, **kwargs)
|
||||
except pymongo.errors.OperationFailure as e:
|
||||
if e.code is ERROR_INDEX_WITH_DIFFERENT_SPEC_ALREADY_EXISTS:
|
||||
LOG.info(_LI("Index %s will be recreate.") % name)
|
||||
LOG.info("Index %s will be recreate." % name)
|
||||
self._recreate_index(keys, name, *args, **kwargs)
|
||||
|
||||
def _recreate_index(self, keys, name, *args, **kwargs):
|
||||
|
|
|
@ -17,7 +17,6 @@ import pymongo
|
|||
|
||||
from panko.storage import base
|
||||
from panko.storage import models
|
||||
from panko.i18n import _LE, _LI, _LW
|
||||
from panko.storage.mongo import utils as pymongo_utils
|
||||
from panko import utils
|
||||
|
||||
|
@ -64,9 +63,9 @@ class Connection(base.Connection):
|
|||
'timestamp': event_model.generated,
|
||||
'traits': traits, 'raw': event_model.raw})
|
||||
except pymongo.errors.DuplicateKeyError as ex:
|
||||
LOG.info(_LI("Duplicate event detected, skipping it: %s") % ex)
|
||||
LOG.info("Duplicate event detected, skipping it: %s", ex)
|
||||
except Exception as ex:
|
||||
LOG.exception(_LE("Failed to record event: %s") % ex)
|
||||
LOG.exception("Failed to record event: %s", ex)
|
||||
error = ex
|
||||
if error:
|
||||
raise error
|
||||
|
@ -81,7 +80,7 @@ class Connection(base.Connection):
|
|||
limit = None
|
||||
if pagination:
|
||||
if pagination.get('sort'):
|
||||
LOG.warning(_LW('Driver does not support sort functionality'))
|
||||
LOG.warning('Driver does not support sort functionality')
|
||||
limit = pagination.get('limit')
|
||||
if limit == 0:
|
||||
return
|
||||
|
|
Loading…
Reference in New Issue