Remove translation of log messages

The i18n team has decided not to translate log messages
because doing so is not very useful: operators prefer to
have logs in English so that they can search for those
strings on the internet.

Closes-Bug: #1694193

Change-Id: I5b215eabd833c6e175168e7184944017815b0107
Nam Nguyen Hoai 2017-05-30 11:13:15 +07:00
parent 57535a5dec
commit 91014f05cc
29 changed files with 162 additions and 206 deletions
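
The conversion is mechanical across all 29 files: log calls drop the
oslo.i18n marker functions (_LI, _LE, _LW) that barbican.i18n re-exports,
while user-facing strings wrapped in u._() are left alone so they remain
translatable. A minimal sketch of the before/after pattern, assuming
oslo.log and an illustrative project name:

    from oslo_log import log

    LOG = log.getLogger(__name__)

    # Before this change, every log message carried a translation marker:
    #
    #     from barbican import i18n as u
    #     LOG.info(u._LI('Created a secret for project: %s'), project_id)
    #     LOG.exception(u._LE('Problem saving entity for create'))
    #
    # After, the same calls pass plain strings:
    LOG.info('Created a secret for project: %s', 'example-project')

    # User-facing text (HTTP error bodies, exception messages) still goes
    # through u._(), e.g. pecan.abort(400, u._('Malformed JSON')).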

View File

@@ -51,7 +51,7 @@ def load_body(req, resp=None, validator=None):
body = req.body_file.read(CONF.max_allowed_request_size_in_bytes)
req.body_file.seek(0)
except IOError:
LOG.exception(u._LE("Problem reading request JSON stream."))
LOG.exception("Problem reading request JSON stream.")
pecan.abort(500, u._('Read Error'))
try:
@@ -61,7 +61,7 @@ def load_body(req, resp=None, validator=None):
parsed_body = json.loads(body)
strip_whitespace(parsed_body)
except ValueError:
LOG.exception(u._LE("Problem loading request JSON."))
LOG.exception("Problem loading request JSON.")
pecan.abort(400, u._('Malformed JSON'))
if validator:

View File

@@ -31,7 +31,6 @@ from oslo_log import log
from barbican.api.controllers import versions
from barbican.api import hooks
from barbican.common import config
from barbican import i18n as u
from barbican.model import repositories
from barbican import queue
@@ -87,7 +86,7 @@ def main_app(func):
if newrelic_loaded:
wsgi_app = newrelic.agent.WSGIApplicationWrapper(wsgi_app)
LOG = log.getLogger(__name__)
LOG.info(u._LI('Barbican app created and initialized'))
LOG.info('Barbican app created and initialized')
return wsgi_app
return _wrapper

View File

@@ -102,7 +102,7 @@ def handle_exceptions(operation_name=u._('System')):
try:
return fn(inst, *args, **kwargs)
except exc.HTTPError:
LOG.exception(u._LE('Webob error seen'))
LOG.exception('Webob error seen')
raise # Already converted to Webob exception, just reraise
# In case PolicyNotAuthorized, we do not want to expose payload by
# logging exception, so just LOG.error

View File

@@ -31,8 +31,8 @@ from barbican.tasks import certificate_resources as cert_resources
LOG = utils.getLogger(__name__)
_DEPRECATION_MSG = u._LW('%s has been deprecated in the Newton release. It '
'will be removed in the Pike release.')
_DEPRECATION_MSG = '%s has been deprecated in the Newton release. ' \
'It will be removed in the Pike release.'
def _certificate_authority_not_found():
@@ -254,7 +254,7 @@ class CertificateAuthorityController(controllers.ACLMixin):
@controllers.enforce_rbac('certificate_authority:delete')
def on_delete(self, external_project_id, **kwargs):
cert_resources.delete_subordinate_ca(external_project_id, self.ca)
LOG.info(u._LI('Deleted CA for project: %s'), external_project_id)
LOG.info('Deleted CA for project: %s', external_project_id)
class CertificateAuthoritiesController(controllers.ACLMixin):
@@ -493,7 +493,7 @@ class CertificateAuthoritiesController(controllers.ACLMixin):
pecan.response.status = 201
pecan.response.headers['Location'] = url
LOG.info(u._LI('Created a sub CA for project: %s'),
LOG.info('Created a sub CA for project: %s',
external_project_id)
return {'ca_ref': url}

View File

@@ -68,7 +68,7 @@ class ContainerConsumerController(controllers.ACLMixin):
dict_fields = consumer.to_dict_fields()
LOG.info(u._LI('Retrieved a consumer for project: %s'),
LOG.info('Retrieved a consumer for project: %s',
external_project_id)
return hrefs.convert_to_hrefs(
@@ -132,7 +132,7 @@ class ContainerConsumersController(controllers.ACLMixin):
)
resp_ctrs_overall.update({'total': total})
LOG.info(u._LI('Retrieved a consumer list for project: %s'),
LOG.info('Retrieved a consumer list for project: %s',
external_project_id)
return resp_ctrs_overall
@@ -158,7 +158,7 @@ class ContainerConsumersController(controllers.ACLMixin):
url = hrefs.convert_consumer_to_href(new_consumer.container_id)
pecan.response.headers['Location'] = url
LOG.info(u._LI('Created a consumer for project: %s'),
LOG.info('Created a consumer for project: %s',
external_project_id)
return self._return_container_data(self.container_id)
@@ -196,11 +196,11 @@ class ContainerConsumersController(controllers.ACLMixin):
self.consumer_repo.delete_entity_by_id(consumer.id,
external_project_id)
except exception.NotFound:
LOG.exception(u._LE('Problem deleting consumer'))
LOG.exception('Problem deleting consumer')
_consumer_not_found()
ret_data = self._return_container_data(self.container_id)
LOG.info(u._LI('Deleted a consumer for project: %s'),
LOG.info('Deleted a consumer for project: %s',
external_project_id)
return ret_data

View File

@@ -74,7 +74,7 @@ class ContainerController(controllers.ACLMixin):
for secret_ref in dict_fields['secret_refs']:
hrefs.convert_to_hrefs(secret_ref)
LOG.info(u._LI('Retrieved container for project: %s'),
LOG.info('Retrieved container for project: %s',
external_project_id)
return hrefs.convert_to_hrefs(
hrefs.convert_to_hrefs(dict_fields)
@@ -95,10 +95,10 @@ class ContainerController(controllers.ACLMixin):
external_project_id=external_project_id
)
except exception.NotFound:
LOG.exception(u._LE('Problem deleting container'))
LOG.exception('Problem deleting container')
container_not_found()
LOG.info(u._LI('Deleted container for project: %s'),
LOG.info('Deleted container for project: %s',
external_project_id)
for consumer in container_consumers[0]:
@@ -175,7 +175,7 @@ class ContainersController(controllers.ACLMixin):
)
resp_ctrs_overall.update({'total': total})
LOG.info(u._LI('Retrieved container list for project: %s'), project_id)
LOG.info('Retrieved container list for project: %s', project_id)
return resp_ctrs_overall
@index.when(method='POST', template='json')
@@ -219,7 +219,7 @@ class ContainersController(controllers.ACLMixin):
pecan.response.status = 201
pecan.response.headers['Location'] = url
LOG.info(u._LI('Created a container for project: %s'),
LOG.info('Created a container for project: %s',
external_project_id)
return {'container_ref': url}
@@ -285,7 +285,7 @@ class ContainersSecretsController(controllers.ACLMixin):
pecan.response.status = 201
pecan.response.headers['Location'] = url
LOG.info(u._LI('Created a container secret for project: %s'),
LOG.info('Created a container secret for project: %s',
external_project_id)
return {'container_ref': url}
@@ -325,5 +325,5 @@ class ContainersSecretsController(controllers.ACLMixin):
container_secret.id, external_project_id)
pecan.response.status = 204
LOG.info(u._LI('Deleted container secret for project: %s'),
LOG.info('Deleted container secret for project: %s',
external_project_id)

View File

@@ -27,8 +27,8 @@ from barbican.queue import client as async_client
LOG = utils.getLogger(__name__)
_DEPRECATION_MSG = u._LW('%s has been deprecated in the Newton release. It '
'will be removed in the Pike release.')
_DEPRECATION_MSG = '%s has been deprecated in the Newton release. ' \
'It will be removed in the Pike release.'
def _order_not_found():

View File

@@ -91,7 +91,7 @@ class ProjectQuotasController(controllers.ACLMixin):
validator=self.validator)
self.quota_driver.set_project_quotas(self.passed_project_id,
kwargs['project_quotas'])
LOG.info(u._LI('Put Project Quotas'))
LOG.info('Put Project Quotas')
pecan.response.status = 204
@index.when(method='DELETE', template='json')
@@ -103,10 +103,10 @@ class ProjectQuotasController(controllers.ACLMixin):
try:
self.quota_driver.delete_project_quotas(self.passed_project_id)
except exception.NotFound:
LOG.info(u._LI('Delete Project Quotas - Project not found'))
LOG.info('Delete Project Quotas - Project not found')
_project_quotas_not_found()
else:
LOG.info(u._LI('Delete Project Quotas'))
LOG.info('Delete Project Quotas')
pecan.response.status = 204

View File

@@ -118,12 +118,12 @@ class SecretController(controllers.ACLMixin):
if controllers.is_json_request_accept(pecan.request):
resp = self._on_get_secret_metadata(self.secret, **kwargs)
LOG.info(u._LI('Retrieved secret metadata for project: %s'),
LOG.info('Retrieved secret metadata for project: %s',
external_project_id)
return resp
else:
LOG.warning(u._LW('Decrypted secret %s requested using deprecated '
'API call.'), self.secret.id)
LOG.warning('Decrypted secret %s requested using deprecated '
'API call.', self.secret.id)
return self._on_get_secret_payload(self.secret,
external_project_id,
**kwargs)
@@ -202,7 +202,7 @@ class SecretController(controllers.ACLMixin):
external_project_id,
**kwargs)
LOG.info(u._LI('Retrieved secret payload for project: %s'),
LOG.info('Retrieved secret payload for project: %s',
external_project_id)
return resp
@@ -243,7 +243,7 @@ class SecretController(controllers.ACLMixin):
secret_model=self.secret,
project_model=project_model,
transport_key_id=transport_key_id)
LOG.info(u._LI('Updated secret for project: %s'), external_project_id)
LOG.info('Updated secret for project: %s', external_project_id)
@index.when(method='DELETE')
@utils.allow_all_content_types
@@ -251,7 +251,7 @@ class SecretController(controllers.ACLMixin):
@controllers.enforce_rbac('secret:delete')
def on_delete(self, external_project_id, **kwargs):
plugin.delete_secret(self.secret, external_project_id)
LOG.info(u._LI('Deleted secret for project: %s'), external_project_id)
LOG.info('Deleted secret for project: %s', external_project_id)
class SecretsController(controllers.ACLMixin):
@@ -405,7 +405,7 @@ class SecretsController(controllers.ACLMixin):
)
secrets_resp_overall.update({'total': total})
LOG.info(u._LI('Retrieved secret list for project: %s'),
LOG.info('Retrieved secret list for project: %s',
external_project_id)
return secrets_resp_overall
@@ -446,7 +446,7 @@ class SecretsController(controllers.ACLMixin):
pecan.response.status = 201
pecan.response.headers['Location'] = url
LOG.info(u._LI('Created a secret for project: %s'),
LOG.info('Created a secret for project: %s',
external_project_id)
if transport_key_model is not None:
tkey_url = hrefs.convert_transport_key_to_href(

View File

@@ -74,7 +74,7 @@ class TransportKeyController(controllers.ACLMixin):
# TODO(alee) response should be 204 on success
# pecan.response.status = 204
except exception.NotFound:
LOG.exception(u._LE('Problem deleting transport_key'))
LOG.exception('Problem deleting transport_key')
_transport_key_not_found()

View File

@@ -35,7 +35,7 @@ class BaseContextMiddleware(mw.Middleware):
resp.headers['x-openstack-request-id'] = resp.request.request_id
LOG.info(u._LI('Processed request: %(status)s - %(method)s %(url)s'),
LOG.info('Processed request: %(status)s - %(method)s %(url)s',
{"status": resp.status,
"method": resp.request.method,
"url": resp.request.url})

View File

@@ -22,7 +22,6 @@ import sys
sys.path.insert(0, os.getcwd())
from barbican.common import config
from barbican import i18n as u
from barbican.model import clean
from barbican.model.migration import commands
from oslo_log import log
@@ -176,8 +175,7 @@ def main():
dm.execute()
except Exception as ex:
if not _exception_is_successful_exit(ex):
LOG.exception(u._LE('Problem seen trying to run'
' barbican db manage'))
LOG.exception('Problem seen trying to run barbican db manage')
sys.stderr.write("ERROR: {0}\n".format(ex))
sys.exit(1)

View File

@@ -40,7 +40,6 @@ if os.path.exists(os.path.join(possible_topdir, 'barbican', '__init__.py')):
from barbican.common import config
from barbican import i18n as u
from barbican import queue
from barbican.queue import keystone_listener
from barbican import version
@@ -66,7 +65,7 @@ def main():
log.setup(CONF, 'barbican')
LOG = log.getLogger(__name__)
LOG.info(u._LI("Booting up Barbican Keystone listener node..."))
LOG.info("Booting up Barbican Keystone listener node...")
# Queuing initialization
queue.init(CONF)
@@ -77,8 +76,7 @@ def main():
keystone_listener.MessageServer(CONF)
).wait()
else:
LOG.info(u._LI("Exiting as Barbican Keystone listener"
" is not enabled..."))
LOG.info("Exiting as Barbican Keystone listener is not enabled...")
except RuntimeError as e:
fail(1, e)

View File

@@ -298,10 +298,10 @@ def setup_remote_pydev_debug():
stdoutToServer=True,
stderrToServer=True)
except Exception:
LOG.exception(u._LE('Unable to join debugger, please '
'make sure that the debugger processes is '
'listening on debug-host \'%(debug-host)s\' '
'debug-port \'%(debug-port)s\'.'),
LOG.exception('Unable to join debugger, please '
'make sure that the debugger processes is '
'listening on debug-host \'%(debug-host)s\' '
'debug-port \'%(debug-port)s\'.',
{'debug-host': CONF.pydev_debug_host,
'debug-port': CONF.pydev_debug_port})
raise

View File

@@ -278,7 +278,7 @@ class NewSecretValidator(ValidatorBase):
expiration_tz = timeutils.parse_isotime(expiration_raw.strip())
expiration = timeutils.normalize_time(expiration_tz)
except ValueError:
LOG.exception(u._("Problem parsing expiration date"))
LOG.exception("Problem parsing expiration date")
raise exception.InvalidObject(
schema=schema_name,
reason=u._("Invalid date for 'expiration'"),
@@ -334,7 +334,7 @@ class NewSecretValidator(ValidatorBase):
try:
base64.b64decode(payload)
except Exception:
LOG.exception(u._("Problem parsing payload"))
LOG.exception("Problem parsing payload")
raise exception.InvalidObject(
schema=schema_name,
reason=u._("Invalid payload for payload_content_encoding"),
@@ -687,7 +687,7 @@ class TypeOrderValidator(ValidatorBase, CACommonHelpersMixin):
expiration_tz = timeutils.parse_isotime(expiration_raw)
expiration = timeutils.normalize_time(expiration_tz)
except ValueError:
LOG.exception(u._("Problem parsing expiration date"))
LOG.exception("Problem parsing expiration date")
raise exception.InvalidObject(schema=schema_name,
reason=u._("Invalid date "
"for 'expiration'"),

View File

@@ -14,7 +14,6 @@
# limitations under the License.
from barbican.common import config
from barbican import i18n as u
from barbican.model import models
from barbican.model import repositories as repo
from oslo_log import log
@@ -60,8 +59,8 @@ def cleanup_unassociated_projects():
query = session.query(models.Project)
query = query.filter(models.Project.id.in_(sub_query))
delete_count = query.delete(synchronize_session='fetch')
LOG.info(u._LI("Cleaned up %(delete_count)s entries for "
"%(project_name)s") %
LOG.info("Cleaned up %(delete_count)s entries for "
"%(project_name)s",
{'delete_count': str(delete_count),
'project_name': models.Project.__name__})
return delete_count
@@ -98,8 +97,8 @@ def cleanup_parent_with_no_child(parent_model, child_model,
if threshold_date:
query = query.filter(parent_model.deleted_at <= threshold_date)
delete_count = query.delete(synchronize_session='fetch')
LOG.info(u._LI("Cleaned up %(delete_count)s entries for %(parent_name)s "
"with no children in %(child_name)s") %
LOG.info("Cleaned up %(delete_count)s entries for %(parent_name)s "
"with no children in %(child_name)s",
{'delete_count': delete_count,
'parent_name': parent_model.__name__,
'child_name': child_model.__name__})
@@ -120,7 +119,7 @@ def cleanup_softdeletes(model, threshold_date=None):
if threshold_date:
query = query.filter(model.deleted_at <= threshold_date)
delete_count = query.delete()
LOG.info(u._LI("Cleaned up %(delete_count)s entries for %(model_name)s") %
LOG.info("Cleaned up %(delete_count)s entries for %(model_name)s",
{'delete_count': delete_count,
'model_name': model.__name__})
return delete_count
@@ -172,7 +171,7 @@ def cleanup_all(threshold_date=None):
# TODO(edtubill) Clean up projects that were soft deleted by
# the keystone listener
LOG.info(u._LI("Cleaned up %s soft deleted entries"), total)
LOG.info("Cleaned up %s soft deleted entries", total)
return total
@@ -295,9 +294,9 @@ def soft_delete_expired_secrets(threshold_date):
children_count, acl_total = _soft_delete_expired_secret_children(
threshold_date)
update_count += children_count
LOG.info(u._LI("Soft deleted %(update_count)s entries due to secret "
"expiration and %(acl_total)s secret acl entries "
"were removed from the database") %
LOG.info("Soft deleted %(update_count)s entries due to secret "
"expiration and %(acl_total)s secret acl entries "
"were removed from the database",
{'update_count': update_count,
'acl_total': acl_total})
return update_count + acl_total
@@ -324,7 +323,7 @@ def clean_command(sql_url, min_num_days, do_clean_unassociated_projects,
if log_file:
CONF.set_override('log_file', log_file)
LOG.info(u._LI("Cleaning up soft deletions in the barbican database"))
LOG.info("Cleaning up soft deletions in the barbican database")
log.setup(CONF, 'barbican')
cleanup_total = 0
@@ -353,7 +352,7 @@ def clean_command(sql_url, min_num_days, do_clean_unassociated_projects,
repo.commit()
except Exception as ex:
LOG.exception(u._LE('Failed to clean up soft deletions in database.'))
LOG.exception('Failed to clean up soft deletions in database.')
repo.rollback()
cleanup_total = 0 # rollback happened, no entries affected
raise ex
@@ -372,6 +371,5 @@ def clean_command(sql_url, min_num_days, do_clean_unassociated_projects,
log.setup(CONF, 'barbican') # reset the overrides
LOG.info(u._LI("Cleaning of database affected %s entries"),
cleanup_total)
LOG.info(u._LI('DB clean up finished in %s seconds'), elapsed_time)
LOG.info("Cleaning of database affected %s entries", cleanup_total)
LOG.info('DB clean up finished in %s seconds', elapsed_time)
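
In this file the conversion also removes eager %-interpolation: the old
calls formatted the message with % before handing it to the logger, while
the new calls pass the substitution dict as a logging argument so
formatting is deferred until the record is actually emitted. A small
standalone sketch of the difference, using stdlib logging (oslo.log wraps
the same machinery):

    import logging

    logging.basicConfig(level=logging.INFO)
    LOG = logging.getLogger(__name__)
    stats = {'delete_count': 3, 'model_name': 'Secret'}

    # Old style: the string is interpolated eagerly, even when the log
    # level would discard the record.
    LOG.info("Cleaned up %(delete_count)s entries for %(model_name)s" % stats)

    # New style: interpolation happens lazily, inside the logging call,
    # only if the record is actually emitted.
    LOG.info("Cleaned up %(delete_count)s entries for %(model_name)s", stats)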

View File

@@ -28,7 +28,6 @@ from alembic import config as alembic_config
from barbican.common import config
from barbican.common import utils
from barbican import i18n as u
LOG = utils.getLogger(__name__)
@@ -45,8 +44,8 @@ def init_config(sql_url=None):
"the CLI or the configuration file.")
if sqlalchemy_url and 'sqlite' in sqlalchemy_url:
LOG.warning(u._('!!! Limited support for migration commands using'
' sqlite databases; This operation may not succeed.'))
LOG.warning('!!! Limited support for migration commands using'
' sqlite databases; This operation may not succeed.')
config = alembic_config.Config(
os.path.join(os.path.dirname(__file__), 'alembic.ini')

View File

@@ -95,7 +95,7 @@ def hard_reset():
def setup_database_engine_and_factory():
global sa_logger, _SESSION_FACTORY, _ENGINE
LOG.info(u._LI('Setting up database engine and session factory'))
LOG.info('Setting up database engine and session factory')
if CONF.debug:
sa_logger = logging.getLogger('sqlalchemy.engine')
sa_logger.setLevel(logging.DEBUG)
@@ -198,7 +198,7 @@ def _get_engine(engine):
_auto_generate_tables(engine, tables)
else:
LOG.info(u._LI('Not auto-creating barbican registry DB'))
LOG.info('Not auto-creating barbican registry DB')
return engine
@@ -245,11 +245,11 @@ def _create_engine(connection, **engine_args):
def _auto_generate_tables(engine, tables):
if tables and 'alembic_version' in tables:
# Upgrade the database to the latest version.
LOG.info(u._LI('Updating schema to latest version'))
LOG.info('Updating schema to latest version')
commands.upgrade()
else:
# Create database tables from our models.
LOG.info(u._LI('Auto-creating barbican registry DB'))
LOG.info('Auto-creating barbican registry DB')
models.BASE.metadata.create_all(engine)
# Sync the alembic version 'head' with current models.
@@ -267,7 +267,7 @@ def wrap_db_error(f):
remaining_attempts = CONF.sql_max_retries
while True:
LOG.warning(u._LW('SQL connection failed. %d attempts left.'),
LOG.warning('SQL connection failed. %d attempts left.',
remaining_attempts)
remaining_attempts -= 1
time.sleep(CONF.sql_retry_interval)
@@ -371,7 +371,7 @@ class BaseRepo(object):
entity = query.one()
except sa_orm.exc.NoResultFound:
LOG.exception(u._LE("Not found for %s"), entity_id)
LOG.exception("Not found for %s", entity_id)
entity = None
if not suppress_exception:
_raise_entity_not_found(self._do_entity_name(), entity_id)
@@ -406,7 +406,7 @@ class BaseRepo(object):
LOG.debug("Saving entity...")
entity.save(session=session)
except db_exc.DBDuplicateEntry as e:
LOG.exception(u._LE('Problem saving entity for create'))
LOG.exception('Problem saving entity for create')
error_msg = re.sub('[()]', '', str(e.args))
raise exception.ConstraintCheck(error=error_msg)
@@ -558,8 +558,7 @@ class BaseRepo(object):
# Its a soft delete so its more like entity update
entity.delete(session=session)
except sqlalchemy.exc.SQLAlchemyError:
LOG.exception(u._LE('Problem finding project related entity to '
'delete'))
LOG.exception('Problem finding project related entity to delete')
if not suppress_exception:
raise exception.BarbicanException(u._('Error deleting project '
'entities for '
@@ -595,7 +594,7 @@ class ProjectRepo(BaseRepo):
except sa_orm.exc.NoResultFound:
entity = None
if not suppress_exception:
LOG.exception(u._LE("Problem getting Project %s"),
LOG.exception("Problem getting Project %s",
external_project_id)
raise exception.NotFound(u._(
"No {entity_name} found with keystone-ID {id}").format(
@@ -798,7 +797,7 @@ class SecretRepo(BaseRepo):
except sa_orm.exc.NoResultFound:
entity = None
if not suppress_exception:
LOG.exception(u._LE("Problem getting secret %s"),
LOG.exception("Problem getting secret %s",
entity_id)
raise exception.NotFound(u._(
"No secret found with secret-ID {id}").format(
@@ -1338,8 +1337,7 @@ class ContainerRepo(BaseRepo):
except sa_orm.exc.NoResultFound:
entity = None
if not suppress_exception:
LOG.exception(u._LE("Problem getting container %s"),
entity_id)
LOG.exception("Problem getting container %s", entity_id)
raise exception.NotFound(u._(
"No container found with container-ID {id}").format(
entity_name=self._do_entity_name(),
@@ -2275,7 +2273,7 @@ class ProjectSecretStoreRepo(BaseRepo):
try:
entity = query.one()
except sa_orm.exc.NoResultFound:
LOG.info(u._LE("No preferred secret store found for project = %s"),
LOG.info("No preferred secret store found for project = %s",
project_id)
entity = None
if not suppress_exception:

View File

@@ -152,7 +152,7 @@ class P11CryptoPlugin(plugin.CryptoPluginBase):
try:
return func(*args, **kwargs)
except (exception.PKCS11Exception) as pe:
LOG.warning(u._LW("Reinitializing PKCS#11 library: %s"), pe)
LOG.warning("Reinitializing PKCS#11 library: %s", pe)
self._reinitialize_pkcs11()
return func(*args, **kwargs)

View File

@@ -56,10 +56,10 @@ class SimpleCryptoPlugin(c.CryptoPluginBase):
def __init__(self, conf=CONF):
self.master_kek = conf.simple_crypto_plugin.kek
self.plugin_name = conf.simple_crypto_plugin.plugin_name
LOG.warning(u._LW("This plugin is NOT meant for a production "
"environment. This is meant just for development "
"and testing purposes. Please use another plugin "
"for production."))
LOG.warning("This plugin is NOT meant for a production "
"environment. This is meant just for development "
"and testing purposes. Please use another plugin "
"for production.")
def get_plugin_name(self):
return self.plugin_name

View File

@@ -69,8 +69,8 @@ def _create_nss_db_if_needed(nss_db_path, nss_password):
nss_db_path, nss_password, over_write=True)
return True
else:
LOG.info(u._LI("The nss_db_path provided already exists, so the "
"database is assumed to be already set up."))
LOG.info("The nss_db_path provided already exists, so the "
"database is assumed to be already set up.")
return False
@@ -87,8 +87,8 @@ def _setup_nss_db_services(conf):
nss_db_path, nss_password = (conf.dogtag_plugin.nss_db_path,
conf.dogtag_plugin.nss_password)
if nss_db_path is None:
LOG.warning(u._LW("nss_db_path was not provided so the crypto "
"provider functions were not initialized."))
LOG.warning("nss_db_path was not provided so the crypto "
"provider functions were not initialized.")
return None
if nss_password is None:
raise ValueError(u._("nss_password is required"))
@@ -110,8 +110,8 @@ def _import_kra_transport_cert_to_nss_db(conf, crypto):
transport_cert = systemcert_client.get_transport_cert()
crypto.import_cert(KRA_TRANSPORT_NICK, transport_cert, "u,u,u")
except Exception as e:
LOG.error(u._LE("Error in importing transport cert."
" KRA may not be enabled: %s"), e)
LOG.error("Error in importing transport cert."
" KRA may not be enabled: %s", e)
def create_connection(conf, subsystem_path):
@@ -640,7 +640,7 @@ def _catch_subca_deletion_exceptions(ca_related_function):
try:
return ca_related_function(self, *args, **kwargs)
except pki.ResourceNotFoundException as e:
LOG.warning(u._LI("Sub-CA already deleted"))
LOG.warning("Sub-CA already deleted")
pass
except pki.PKIException as e:
raise exception.SubCADeletionErrors(reason=e.message)
@@ -699,7 +699,7 @@ class DogtagCAPlugin(cm.CertificatePluginBase):
"%Y-%m-%d %H:%M:%S.%f"
)
except (ValueError, TypeError):
LOG.warning(u._LI("Invalid data read from expiration file"))
LOG.warning("Invalid data read from expiration file")
self.expiration = datetime.utcnow()
return self._expiration
@@ -738,15 +738,15 @@ class DogtagCAPlugin(cm.CertificatePluginBase):
feature_client = feature.FeatureClient(connection)
authority_feature = feature_client.get_feature("authority")
if authority_feature.enabled:
LOG.info(u._LI("Sub-CAs are enabled by Dogtag server"))
LOG.info("Sub-CAs are enabled by Dogtag server")
return True
else:
LOG.info(u._LI("Sub-CAs are not enabled by Dogtag server"))
LOG.info("Sub-CAs are not enabled by Dogtag server")
except (request_exceptions.HTTPError,
pki.ResourceNotFoundException):
LOG.info(u._LI("Sub-CAs are not supported by Dogtag server"))
LOG.info("Sub-CAs are not supported by Dogtag server")
else:
LOG.info(u._LI("Sub-CAs are not supported by Dogtag client"))
LOG.info("Sub-CAs are not supported by Dogtag client")
return False
def _get_request_id(self, order_id, plugin_meta, operation):

View File

@@ -656,7 +656,7 @@ class CertificatePluginManager(named.NamedExtensionManager):
new_ca_infos = cert_plugin.get_ca_info()
except Exception as e:
# The plugin gave an invalid CA, log and return
LOG.error(u._LE("ERROR getting CA from plugin: %s"),
LOG.error("ERROR getting CA from plugin: %s",
encodeutils.exception_to_unicode(e))
return
@@ -689,7 +689,7 @@ class CertificatePluginManager(named.NamedExtensionManager):
self._add_ca(plugin_name, add_id, new_ca_infos[add_id])
except Exception as e:
# The plugin gave an invalid CA, log and continue
LOG.error(u._LE("ERROR adding CA from plugin: %s"),
LOG.error("ERROR adding CA from plugin: %s",
encodeutils.exception_to_unicode(e))
def _add_ca(self, plugin_name, plugin_ca_id, ca_info):

View File

@@ -245,8 +245,8 @@ class KMIPSecretStore(ss.SecretStoreBase):
config = conf.kmip_plugin
if not getattr(ssl, config.ssl_version, None):
LOG.error(u._LE("The configured SSL version (%s) is not available"
" on the system."), config.ssl_version)
LOG.error("The configured SSL version (%s) is not available"
" on the system.", config.ssl_version)
self.client = client.ProxyKmipClient(
hostname=config.host,
@@ -292,7 +292,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
"uuid: %s", uuid)
return {KMIPSecretStore.KEY_UUID: uuid}
except Exception as e:
LOG.exception(u._LE("Error opening or writing to client"))
LOG.exception("Error opening or writing to client")
raise ss.SecretGeneralException(e)
def generate_asymmetric_key(self, key_spec):
@@ -341,7 +341,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
public_key_metadata,
passphrase_metadata)
except Exception as e:
LOG.exception(u._LE("Error opening or writing to client"))
LOG.exception("Error opening or writing to client")
raise ss.SecretGeneralException(e)
def store_secret(self, secret_dto):
@@ -375,7 +375,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
LOG.debug("SUCCESS: Key stored with uuid: %s", uuid)
return {KMIPSecretStore.KEY_UUID: uuid}
except Exception as e:
LOG.exception(u._LE("Error opening or writing to client"))
LOG.exception("Error opening or writing to client")
raise ss.SecretGeneralException(e)
def get_secret(self, secret_type, secret_metadata):
@@ -396,7 +396,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
managed_object = self.client.get(uuid)
return self._get_barbican_secret(managed_object, secret_type)
except Exception as e:
LOG.exception(u._LE("Error opening or writing to client"))
LOG.exception("Error opening or writing to client")
raise ss.SecretGeneralException(e)
def generate_supports(self, key_spec):
@@ -435,7 +435,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
LOG.debug("Opened connection to KMIP client")
self.client.destroy(uuid)
except Exception as e:
LOG.exception(u._LE("Error opening or writing to client"))
LOG.exception("Error opening or writing to client")
raise ss.SecretGeneralException(e)
def store_secret_supports(self, key_spec):
@@ -567,7 +567,7 @@ class KMIPSecretStore(ss.SecretStoreBase):
reason=result.result_reason,
message=result.result_message
)
LOG.error(u._LE("ERROR from KMIP server: %s"), msg)
LOG.error("ERROR from KMIP server: %s", msg)
raise ss.SecretGeneralException(msg)
def _validate_keyfile_permissions(self, path):

View File

@@ -15,7 +15,6 @@
Default implementation of Barbican certificate processing plugins and support.
"""
from barbican.common import utils
from barbican import i18n as u
from barbican.plugin.interface import certificate_manager as cert
LOG = utils.getLogger(__name__)
@@ -51,7 +50,7 @@ class SimpleCertificatePlugin(cert.CertificatePluginBase):
populated by the plugin implementation
:rtype: :class:`ResultDTO`
"""
LOG.info(u._LI('Invoking issue_certificate_request()'))
LOG.info('Invoking issue_certificate_request()')
return cert.ResultDTO(
cert.CertificateStatus.WAITING_FOR_CA,
retry_msec=MSEC_UNTIL_CHECK_STATUS)
@@ -71,7 +70,7 @@ class SimpleCertificatePlugin(cert.CertificatePluginBase):
populated by the plugin implementation
:rtype: :class:`ResultDTO`
"""
LOG.info(u._LI('Invoking modify_certificate_request()'))
LOG.info('Invoking modify_certificate_request()')
return cert.ResultDTO(cert.CertificateStatus.WAITING_FOR_CA)
def cancel_certificate_request(self, order_id, order_meta, plugin_meta,
@@ -89,7 +88,7 @@ class SimpleCertificatePlugin(cert.CertificatePluginBase):
populated by the plugin implementation
:rtype: :class:`ResultDTO`
"""
LOG.info(u._LI('Invoking cancel_certificate_request()'))
LOG.info('Invoking cancel_certificate_request()')
return cert.ResultDTO(cert.CertificateStatus.REQUEST_CANCELED)
def check_certificate_status(self, order_id, order_meta, plugin_meta,
@@ -107,7 +106,7 @@ class SimpleCertificatePlugin(cert.CertificatePluginBase):
populated by the plugin implementation
:rtype: :class:`ResultDTO`
"""
LOG.info(u._LI('Invoking check_certificate_status()'))
LOG.info('Invoking check_certificate_status()')
return cert.ResultDTO(cert.CertificateStatus.CERTIFICATE_GENERATED)
def supports(self, certificate_spec):
@@ -145,7 +144,7 @@ class SimpleCertificateEventPlugin(cert.CertificateEventPluginBase):
the certificate
:returns: None
"""
LOG.info(u._LI('Invoking notify_certificate_is_ready()'))
LOG.info('Invoking notify_certificate_is_ready()')
def notify_ca_is_unavailable(
self, project_id, order_ref, error_msg, retry_in_msec):
@@ -158,4 +157,4 @@ class SimpleCertificateEventPlugin(cert.CertificateEventPluginBase):
If this is 0, then no attempt will be made.
:returns: None
"""
LOG.info(u._LI('Invoking notify_ca_is_unavailable()'))
LOG.info('Invoking notify_ca_is_unavailable()')

View File

@@ -17,7 +17,6 @@
Utilities to support plugins and plugin managers.
"""
from barbican.common import utils
from barbican import i18n as u
LOG = utils.getLogger(__name__)
@@ -42,10 +41,7 @@ def instantiate_plugins(extension_manager, invoke_args=(), invoke_kwargs={}):
plugin_instance = ext.plugin(*invoke_args, **invoke_kwargs)
except Exception:
LOG.logger.disabled = False # Ensure not suppressing logs.
LOG.exception(
u._LE("Problem seen creating plugin: '%s'"),
ext.name
)
LOG.exception("Problem seen creating plugin: '%s'", ext.name)
else:
ext.obj = plugin_instance

View File

@@ -24,7 +24,6 @@ from oslo_service import service
from barbican.common import config
from barbican.common import utils
from barbican import i18n as u
from barbican.model import models
from barbican.model import repositories
from barbican.queue import client as async_client
@@ -74,11 +73,11 @@ class PeriodicServer(service.Service):
self.order_retry_repo = repositories.get_order_retry_tasks_repository()
def start(self):
LOG.info(u._LI("Starting the PeriodicServer"))
LOG.info("Starting the PeriodicServer")
super(PeriodicServer, self).start()
def stop(self, graceful=True):
LOG.info(u._LI("Halting the PeriodicServer"))
LOG.info("Halting the PeriodicServer")
super(PeriodicServer, self).stop(graceful=graceful)
@periodic_task.periodic_task
@@ -92,25 +91,22 @@ class PeriodicServer(service.Service):
try:
total_tasks_processed = self._process_retry_tasks()
except Exception:
LOG.exception(
u._LE("Problem seen processing scheduled retry tasks")
)
LOG.exception("Problem seen processing scheduled retry tasks")
# Return the next delay before this method is invoked again.
check_again_in_seconds = _compute_next_periodic_interval()
LOG.info(
u._LI("Done processing '%(total)s' tasks, will check again in "
"'%(next)s' seconds."),
{
'total': total_tasks_processed,
'next': check_again_in_seconds
}
)
LOG.info("Done processing '%(total)s' tasks, will check again in "
"'%(next)s' seconds.",
{
'total': total_tasks_processed,
'next': check_again_in_seconds
}
)
return check_again_in_seconds
def _process_retry_tasks(self):
"""Scan for and then re-queue tasks that are ready to retry."""
LOG.info(u._LI("Processing scheduled retry tasks:"))
LOG.info("Processing scheduled retry tasks:")
# Retrieve tasks to retry.
entities, total = self._retrieve_tasks()
@@ -160,16 +156,14 @@ class PeriodicServer(service.Service):
"kwargs '{2}')".format(
retry_task_name, retry_args, retry_kwargs))
except Exception:
LOG.exception(
u._LE(
"Problem enqueuing method '%(name)s' with args '%(args)s' "
"and kwargs '%(kwargs)s'."),
{
'name': retry_task_name,
'args': retry_args,
'kwargs': retry_kwargs
}
)
LOG.exception("Problem enqueuing method '%(name)s' with args "
"'%(args)s' and kwargs '%(kwargs)s'.",
{
'name': retry_task_name,
'args': retry_args,
'kwargs': retry_kwargs
}
)
repositories.rollback()
finally:
repositories.clear()

View File

@@ -29,7 +29,6 @@ except ImportError:
from oslo_service import service
from barbican.common import utils
from barbican import i18n as u
from barbican.model import models
from barbican.model import repositories
from barbican import queue
@@ -63,13 +62,10 @@ def retryable_order(fn):
retry_rpc_method = schedule_order_retry_tasks(
fn, result, *args, **kwargs)
if retry_rpc_method:
LOG.info(
u._LI("Scheduled RPC method for retry: '%s'"),
retry_rpc_method)
LOG.info("Scheduled RPC method for retry: '%s'", retry_rpc_method)
else:
LOG.info(
u._LI("Task '%s' did not have to be retried"),
find_function_name(fn, if_no_name='???'))
LOG.info("Task '%s' did not have to be retried",
find_function_name(fn, if_no_name='???'))
return wrapper
@@ -84,15 +80,13 @@ def transactional(fn):
if not queue.is_server_side():
# Non-server mode directly invokes tasks.
fn(*args, **kwargs)
LOG.info(u._LI("Completed worker task: '%s'"), fn_name)
LOG.info("Completed worker task: '%s'", fn_name)
else:
# Manage session/transaction.
try:
fn(*args, **kwargs)
repositories.commit()
LOG.info(
u._LI("Completed worker task (post-commit): '%s'"),
fn_name)
LOG.info("Completed worker task (post-commit): '%s'", fn_name)
except Exception:
"""NOTE: Wrapped functions must process with care!
@@ -100,10 +94,9 @@ def transactional(fn):
including any updates made to entities such as setting error
codes and error messages.
"""
LOG.exception(
u._LE("Problem seen processing worker task: '%s'"),
fn_name
)
LOG.exception("Problem seen processing worker task: '%s'",
fn_name
)
repositories.rollback()
finally:
repositories.clear()
@@ -212,10 +205,8 @@ class Tasks(object):
@retryable_order
def process_type_order(self, context, order_id, project_id, request_id):
"""Process TypeOrder."""
message = u._LI(
"Processing type order: "
"order ID is '%(order)s' and request ID is '%(request)s'"
)
message = "Processing type order: order ID is '%(order)s' and " \
"request ID is '%(request)s'"
LOG.info(message, {'order': order_id, 'request': request_id})
return resources.BeginTypeOrder().process_and_suppress_exceptions(
order_id, project_id)
@@ -226,10 +217,9 @@ class Tasks(object):
def update_order(self, context, order_id, project_id,
updated_meta, request_id):
"""Update Order."""
message = u._LI(
"Processing update order: "
"order ID is '%(order)s' and request ID is '%(request)s'"
)
message = "Processing update order: order ID is '%(order)s' and " \
"request ID is '%(request)s'"
LOG.info(message, {'order': order_id, 'request': request_id})
return resources.UpdateOrder().process_and_suppress_exceptions(
order_id, project_id, updated_meta)
@@ -240,10 +230,8 @@ class Tasks(object):
def check_certificate_status(self, context, order_id,
project_id, request_id):
"""Check the status of a certificate order."""
message = u._LI(
"Processing check certificate status on order: "
"order ID is '%(order)s' and request ID is '%(request)s'"
)
message = "Processing check certificate status on order: " \
"order ID is '%(order)s' and request ID is '%(request)s'"
LOG.info(message, {'order': order_id, 'request': request_id})
check_cert_order = resources.CheckCertificateStatusOrder()
@@ -277,11 +265,11 @@ class TaskServer(Tasks, service.Service):
endpoints=[self])
def start(self):
LOG.info(u._LI("Starting the TaskServer"))
LOG.info("Starting the TaskServer")
self._server.start()
super(TaskServer, self).start()
def stop(self):
LOG.info(u._LI("Halting the TaskServer"))
LOG.info("Halting the TaskServer")
super(TaskServer, self).stop()
self._server.stop()

View File

@@ -72,19 +72,16 @@ class KeystoneEventConsumer(resources.BaseTask):
def handle_error(self, project, status, message, exception,
project_id=None, resource_type=None, operation_type=None):
LOG.error(
u._LE(
'Error processing Keystone event, project_id=%(project_id)s, '
'event resource=%(resource)s, event operation=%(operation)s, '
'status=%(status)s, error message=%(message)s'
),
'Error processing Keystone event, project_id=%(project_id)s, '
'event resource=%(resource)s, event operation=%(operation)s, '
'status=%(status)s, error message=%(message)s',
{
'project_id': project.project_id,
'resource': resource_type,
'operation': operation_type,
'status': status,
'message': message
}
)
})
def handle_success(self, project, result, project_id=None,
resource_type=None, operation_type=None):
@@ -92,11 +89,9 @@ class KeystoneEventConsumer(resources.BaseTask):
# only pertains to long-running tasks. See the documentation for
# BaseTask for more details.
LOG.info(
u._LI(
'Successfully handled Keystone event, '
'project_id=%(project_id)s, event resource=%(resource)s, '
'event operation=%(operation)s'
),
'Successfully handled Keystone event, '
'project_id=%(project_id)s, event resource=%(resource)s, '
'event operation=%(operation)s',
{
'project_id': project_id,
'resource': resource_type,
@@ -118,9 +113,8 @@ class KeystoneEventConsumer(resources.BaseTask):
etc.) performed on Keystone resource.
"""
if project is None:
LOG.info(u._LI('No action is needed as there are no Barbican '
'resources present for Keystone '
'project_id=%s'), project_id)
LOG.info('No action is needed as there are no Barbican resources '
'present for Keystone project_id=%s', project_id)
return
# barbican entities use projects table 'id' field as foreign key.
@@ -132,5 +126,5 @@ class KeystoneEventConsumer(resources.BaseTask):
# reached here means there is no error so log the successful
# cleanup log entry.
LOG.info(u._LI('Successfully completed Barbican resources cleanup for '
'Keystone project_id=%s'), project_id)
LOG.info('Successfully completed Barbican resources cleanup for '
'Keystone project_id=%s', project_id)

View File

@@ -67,10 +67,8 @@ class BaseTask(object):
try:
return self.process(*args, **kwargs)
except Exception:
LOG.exception(
u._LE(
"Suppressing exception while trying to "
"process task '%s'."), self.get_name())
LOG.exception("Suppressing exception while trying to "
"process task '%s'.", self.get_name())
def process(self, *args, **kwargs):
"""A template method for all asynchronous tasks.
@@ -93,16 +91,15 @@ class BaseTask(object):
entity = self.retrieve_entity(*args, **kwargs)
except Exception:
# Serious error!
LOG.exception(u._LE("Could not retrieve information needed to "
"process task '%s'."), name)
LOG.exception("Could not retrieve information needed to "
"process task '%s'.", name)
raise
# Process the target entity.
try:
result = self.handle_processing(entity, *args, **kwargs)
except Exception as e_orig:
LOG.exception(u._LE("Could not perform processing for "
"task '%s'."), name)
LOG.exception("Could not perform processing for task '%s'.", name)
# Handle failure to process entity.
try:
@@ -111,19 +108,17 @@ class BaseTask(object):
self.handle_error(entity, status, message, e_orig,
*args, **kwargs)
except Exception:
LOG.exception(u._LE("Problem handling an error for task '%s', "
"raising original "
"exception."), name)
LOG.exception("Problem handling an error for task '%s', "
"raising original exception.", name)
raise e_orig
# Handle successful conclusion of processing.
try:
self.handle_success(entity, result, *args, **kwargs)
except Exception:
LOG.exception(u._LE("Could not process after successfully "
"executing task '%s'."), name)
LOG.exception("Could not process after successfully "
"executing task '%s'.", name)
raise
return result
@abc.abstractmethod