Fix string interpolation at logging calls

Skip creating the formatted log message
if the message is not going to be emitted
because of the log level.

See the oslo i18n guideline.

* http://docs.openstack.org/developer/oslo.i18n/guidelines.html#\
  adding-variables-to-log-messages

Change-Id: Ie9f3c9179cdae57ee298149f829811a5422fb9aa
Closes-Bug: #1596829
This commit is contained in:
Takashi NATSUME 2016-08-08 10:01:55 +09:00 committed by Eric K
parent a0f50f2fd9
commit fb921a86d3
18 changed files with 75 additions and 75 deletions

View File

@ -122,7 +122,7 @@ class RowModel(base.APIModel):
KeyError: table id doesn't exist
DataModelException: any error occurs during replacing rows.
"""
LOG.info("update_items(context=%s)" % context)
LOG.info("update_items(context=%s)", context)
# Note(thread-safety): blocking call
caller, source_id = api_utils.get_id_from_context(context,
self.datasource_mgr,
@ -141,13 +141,13 @@ class RowModel(base.APIModel):
# Note(thread-safety): blocking call
self.invoke_rpc(caller, 'update_entire_data', args)
except exception.CongressException as e:
m = ("Error occurred while processing updating rows for "
"source_id '%s' and table_id '%s'" % (source_id, table_id))
LOG.exception(m)
LOG.exception("Error occurred while processing updating rows "
"for source_id '%s' and table_id '%s'",
source_id, table_id)
raise webservice.DataModelException.create(e)
LOG.info("finish update_items(context=%s)" % context)
LOG.debug("updated table %s with row items: %s" %
(table_id, str(items)))
LOG.info("finish update_items(context=%s)", context)
LOG.debug("updated table %s with row items: %s",
table_id, str(items))
# TODO(thinrichs): It makes sense to sometimes allow users to create
# a new row for internal data sources. But since we don't have

View File

@ -125,7 +125,7 @@ class APIServer(service.ServiceBase):
self.application = deploy.loadapp('config:%s' % self.app_conf,
name='congress', **kwargs)
except Exception:
LOG.exception('Failed to Start %s server' % self.node.node_id)
LOG.exception('Failed to Start %s server', self.node.node_id)
raise exception.CongressException(
'Failed to Start initializing %s server' % self.node.node_id)

View File

@ -139,7 +139,7 @@ class Schema(object):
schema_change = (tablename, val, True, th)
else:
if tablename not in self:
LOG.warning("Attempt to delete a non-existant rule: %s" % item)
LOG.warning("Attempt to delete a non-existant rule: %s", item)
elif self.count[tablename] > 1:
self.count[tablename] -= 1
schema_change = (tablename, None, False, th)

View File

@ -184,7 +184,7 @@ class CeilometerDriver(datasource_driver.PollingDataSourceDriver,
LOG.debug("Ceilometer grabbing meters")
meters = self.ceilometer_client.meters.list()
self._translate_meters(meters)
LOG.debug("METERS: %s" % str(self.state[self.METERS]))
LOG.debug("METERS: %s", str(self.state[self.METERS]))
# TODO(ramineni): Ceilometer alarms is moved to separate
# project Aodh. It's not fully functional yet.
@ -200,26 +200,26 @@ class CeilometerDriver(datasource_driver.PollingDataSourceDriver,
LOG.debug("Ceilometer grabbing events")
events = self.ceilometer_client.events.list()
self._translate_events(events)
LOG.debug("EVENTS: %s" % str(self.state[self.EVENTS]))
LOG.debug("TRAITS: %s" % str(self.state[self.EVENT_TRAITS]))
LOG.debug("EVENTS: %s", str(self.state[self.EVENTS]))
LOG.debug("TRAITS: %s", str(self.state[self.EVENT_TRAITS]))
LOG.debug("Ceilometer grabbing statistics")
statistics = self._get_statistics(meters)
self._translate_statistics(statistics)
LOG.debug("STATISTICS: %s" % str(self.state[self.STATISTICS]))
LOG.debug("STATISTICS: %s", str(self.state[self.STATISTICS]))
def _get_statistics(self, meters):
statistics = []
names = set()
for m in meters:
LOG.debug("Adding meter %s" % m.name)
LOG.debug("Adding meter %s", m.name)
names.add(m.name)
for meter_name in names:
LOG.debug("Getting all Resource ID for meter: %s"
% meter_name)
LOG.debug("Getting all Resource ID for meter: %s",
meter_name)
stat_list = self.ceilometer_client.statistics.list(
meter_name, groupby=['resource_id'])
LOG.debug("Statistics List: %s" % stat_list)
LOG.debug("Statistics List: %s", stat_list)
if (stat_list):
for temp in stat_list:
temp_dict = copy.copy(temp.to_dict())
@ -232,7 +232,7 @@ class CeilometerDriver(datasource_driver.PollingDataSourceDriver,
"""Translate the meters represented by OBJ into tables."""
meters = [o.to_dict() for o in obj]
LOG.debug("METERS: %s" % str(meters))
LOG.debug("METERS: %s", str(meters))
row_data = CeilometerDriver.convert_objs(meters,
self.meters_translator)
@ -242,7 +242,7 @@ class CeilometerDriver(datasource_driver.PollingDataSourceDriver,
def _translate_alarms(self, obj):
"""Translate the alarms represented by OBJ into tables."""
alarms = [o.to_dict() for o in obj]
LOG.debug("ALARMS: %s" % str(alarms))
LOG.debug("ALARMS: %s", str(alarms))
row_data = CeilometerDriver.convert_objs(alarms,
self.alarms_translator)
@ -252,7 +252,7 @@ class CeilometerDriver(datasource_driver.PollingDataSourceDriver,
def _translate_events(self, obj):
"""Translate the events represented by OBJ into tables."""
events = [o.to_dict() for o in obj]
LOG.debug("EVENTS: %s" % str(events))
LOG.debug("EVENTS: %s", str(events))
row_data = CeilometerDriver.convert_objs(events,
self.events_translator)
@ -261,7 +261,7 @@ class CeilometerDriver(datasource_driver.PollingDataSourceDriver,
@ds_utils.update_state_on_changed(STATISTICS)
def _translate_statistics(self, obj):
"""Translate the statistics represented by OBJ into tables."""
LOG.debug("STATISTICS: %s" % str(obj))
LOG.debug("STATISTICS: %s", str(obj))
row_data = CeilometerDriver.convert_objs(obj,
self.statistics_translator)

View File

@ -336,8 +336,8 @@ class DataSourceDriver(deepsix.deepSix):
self.add_rpc_endpoint(DataSourceDriverEndpoints(self))
def get_snapshot(self, table_name):
LOG.debug("datasource_driver get_snapshot(%s); %s" % (
table_name, self.state))
LOG.debug("datasource_driver get_snapshot(%s); %s",
table_name, self.state)
return self.state.get(table_name, set())
def _make_tmp_state(self, root_table_name, row_data):
@ -1208,14 +1208,14 @@ class PushedDataSourceDriver(DataSourceDriver):
# Note (thread-safety): blocking function
def update_entire_data(self, table_id, objs):
LOG.info('update %s table in %s datasource' % (table_id, self.name))
LOG.info('update %s table in %s datasource', table_id, self.name)
translator = self.get_translator(table_id)
tablename = translator['table-name']
self.prior_state = dict(self.state)
self._update_state(
tablename, PushedDataSourceDriver.convert_objs(objs, translator))
LOG.debug('publish a new state %s in %s' %
(self.state[tablename], tablename))
LOG.debug('publish a new state %s in %s',
self.state[tablename], tablename)
# Note (thread-safety): blocking call
self.publish(tablename, self.state[tablename])
self.number_of_updates += 1
@ -1464,8 +1464,8 @@ class ExecutionDriver(object):
positional_args = action_args.get('positional', [])
named_args = action_args.get('named', {})
LOG.debug('Processing action execution: action = %s, '
'positional args = %s, named args = %s'
% (action, positional_args, named_args))
'positional args = %s, named args = %s',
action, positional_args, named_args)
try:
method = self._get_method(client, action)
# Note(thread-safety): blocking call (potentially)

View File

@ -131,7 +131,7 @@ class MonascaDriver(datasource_driver.PollingDataSourceDriver,
LOG.debug("Monasca grabbing metrics")
metrics = self.monasca.metrics.list()
self._translate_metric(metrics)
LOG.debug("METRICS: %s" % str(self.state[self.METRICS]))
LOG.debug("METRICS: %s", str(self.state[self.METRICS]))
LOG.debug("Monasca grabbing statistics")
# gather statistic for the last day
@ -150,7 +150,7 @@ class MonascaDriver(datasource_driver.PollingDataSourceDriver,
statistics = self.monasca.metrics.list_statistics(
**_query_args)
self._translate_statistics(statistics)
LOG.debug("STATISTICS: %s" % str(self.state[self.STATISTICS]))
LOG.debug("STATISTICS: %s", str(self.state[self.STATISTICS]))
except Exception as e:
raise e
@ -158,7 +158,7 @@ class MonascaDriver(datasource_driver.PollingDataSourceDriver,
@ds_utils.update_state_on_changed(METRICS)
def _translate_metric(self, obj):
"""Translate the metrics represented by OBJ into tables."""
LOG.debug("METRIC: %s" % str(obj))
LOG.debug("METRIC: %s", str(obj))
row_data = MonascaDriver.convert_objs(obj,
self.metric_translator)
@ -168,7 +168,7 @@ class MonascaDriver(datasource_driver.PollingDataSourceDriver,
def _translate_statistics(self, obj):
"""Translate the metrics represented by OBJ into tables."""
LOG.debug("STATISTICS: %s" % str(obj))
LOG.debug("STATISTICS: %s", str(obj))
row_data = MonascaDriver.convert_objs(obj,
self.statistics_translator)

View File

@ -71,13 +71,13 @@ class PushDriver(datasource_driver.PushedDataSourceDriver):
return result
def update_entire_data(self, table_id, objs):
LOG.info('update %s table in %s datasource' % (table_id, self.name))
LOG.info('update %s table in %s datasource', table_id, self.name)
tablename = 'data' # hard code
self.prior_state = dict(self.state)
self._update_state(tablename,
[tuple([table_id, tuple(x)]) for x in objs])
LOG.debug('publish a new state %s in %s' %
(self.state[tablename], tablename))
LOG.debug('publish a new state %s in %s',
self.state[tablename], tablename)
self.publish(tablename, self.state[tablename])
self.number_of_updates += 1
self.last_updated_time = datetime.datetime.now()

View File

@ -106,12 +106,12 @@ class SwiftDriver(datasource_driver.PollingDataSourceDriver,
'''
containers, objects = self._get_containers_and_objects()
LOG.debug("Containers Lists--->: %s" % containers)
LOG.debug("Object Lists--->: %s " % objects)
LOG.debug("Containers Lists--->: %s", containers)
LOG.debug("Object Lists--->: %s ", objects)
self._translate_containers(containers)
self._translate_objects(objects)
LOG.debug("CONTAINERS: %s" % str(self.state[self.CONTAINERS]))
LOG.debug("OBJECTS: %s" % str(self.state[self.OBJECTS]))
LOG.debug("CONTAINERS: %s", str(self.state[self.CONTAINERS]))
LOG.debug("OBJECTS: %s", str(self.state[self.OBJECTS]))
def _get_containers_and_objects(self):
container_list = self.swift_service.list()

View File

@ -222,7 +222,7 @@ class DseNode(object):
if self._running is False:
return
LOG.info("Stopping DSE node '%s'" % self.node_id)
LOG.info("Stopping DSE node '%s'", self.node_id)
for s in self._services:
s.stop()
self._rpc_server.stop()
@ -751,7 +751,7 @@ class DseNode(object):
service = getattr(module, class_name)(**kwargs)
except Exception:
msg = ("Error loading instance of module '%s'")
LOG.exception(msg % class_path)
LOG.exception(msg, class_path)
raise exception.DataServiceError(msg % class_path)
return service

View File

@ -371,7 +371,7 @@ def create_datasources(bus):
datasources = db_datasources.get_datasources()
services = []
for ds in datasources:
LOG.info("create configured datasource service %s." % ds.name)
LOG.info("create configured datasource service %s.", ds.name)
try:
service = bus.create_datasource_service(ds)
if service:
@ -384,7 +384,7 @@ def create_datasources(bus):
"clean up stale datasources in DB.")
sys.exit(1)
except Exception:
LOG.exception("datasource %s creation failed." % ds.name)
LOG.exception("datasource %s creation failed.", ds.name)
raise
return services

View File

@ -97,7 +97,7 @@ def removed_in_dse2(wrapped):
@functools.wraps(wrapped)
def wrapper(*args, **kwargs):
if cfg.CONF.distributed_architecture:
LOG.error('%s is called in dse2' % wrapped.__name__)
LOG.error('%s is called in dse2', wrapped.__name__)
raise Exception('inappropriate function is called.')
else:
return wrapped(*args, **kwargs)

View File

@ -285,7 +285,7 @@ def datasource_statuses_list(request):
try:
status = client.list_datasource_status(ds['id'])
except Exception as e:
LOG.info("Exception while getting the status: %s" % e)
LOG.info("Exception while getting the status: %s", e)
status = "not available"
raise e
wrapper = PolicyAPIDictWrapper(ds)

View File

@ -27,7 +27,7 @@ def _get_policy_tables(request):
# Get all the policies.
policies = congress.policies_list(request)
except Exception as e:
LOG.error('Unable to get list of policies: %s' % e.message)
LOG.error('Unable to get list of policies: %s', e.message)
else:
try:
for policy in policies:
@ -47,8 +47,8 @@ def _get_policy_tables(request):
all_tables.append({'datasource': policy_name,
'tables': datasource_tables})
except Exception as e:
LOG.error('Unable to get tables for policy "%s": %s' %
(policy_name, e.message))
LOG.error('Unable to get tables for policy "%s": %s',
policy_name, e.message)
return all_tables
@ -59,7 +59,7 @@ def _get_service_tables(request):
# Get all the services.
services = congress.datasources_list(request)
except Exception as e:
LOG.error('Unable to get list of data sources: %s' % e.message)
LOG.error('Unable to get list of data sources: %s', e.message)
else:
try:
for service in services:
@ -76,8 +76,8 @@ def _get_service_tables(request):
all_tables.append({'datasource': service['name'],
'tables': datasource_tables})
except Exception as e:
LOG.error('Unable to get tables for data source "%s": %s' %
(service_id, e.message))
LOG.error('Unable to get tables for data source "%s": %s',
service_id, e.message)
return all_tables
@ -154,14 +154,14 @@ def get_datasource_columns(request):
all_columns.append({'datasource': policy_name,
'tables': datasource_tables})
except Exception as e:
LOG.error('Unable to get schema for policy "%s" table "%s": %s' %
(policy_name, table_name, e.message))
LOG.error('Unable to get schema for policy "%s" table "%s": %s',
policy_name, table_name, e.message)
try:
# Get all the services.
services = congress.datasources_list(request)
except Exception as e:
LOG.error('Unable to get list of data sources: %s' % e.message)
LOG.error('Unable to get list of data sources: %s', e.message)
else:
try:
for service in services:
@ -181,7 +181,7 @@ def get_datasource_columns(request):
all_columns.append({'datasource': service_name,
'tables': datasource_tables})
except Exception as e:
LOG.error('Unable to get schema for data source "%s": %s' %
(service_id, e.message))
LOG.error('Unable to get schema for data source "%s": %s',
service_id, e.message)
return all_columns

View File

@ -45,9 +45,9 @@ class CreatePolicy(forms.SelfHandlingForm):
policy_name = data['name']
policy_description = data.get('description')
policy_kind = data.pop('kind')
LOG.info('User %s creating policy "%s" of type %s in tenant %s' %
(request.user.username, policy_name, policy_kind,
request.user.tenant_name))
LOG.info('User %s creating policy "%s" of type %s in tenant %s',
request.user.username, policy_name, policy_kind,
request.user.tenant_name)
try:
params = {
'name': policy_name,

View File

@ -63,12 +63,12 @@ class DeleteRule(policy.PolicyTargetMixin, tables.DeleteAction):
def delete(self, request, obj_id):
policy_name = self.table.kwargs['policy_name']
LOG.info('User %s deleting policy "%s" rule "%s" in tenant %s' %
(request.user.username, policy_name, obj_id,
request.user.tenant_name))
LOG.info('User %s deleting policy "%s" rule "%s" in tenant %s',
request.user.username, policy_name, obj_id,
request.user.tenant_name)
try:
congress.policy_rule_delete(request, policy_name, obj_id)
LOG.info('Deleted policy rule "%s"' % obj_id)
LOG.info('Deleted policy rule "%s"', obj_id)
except Exception as e:
msg_args = {'rule_id': obj_id, 'error': e.message}
msg = _('Failed to delete policy rule "%(rule_id)s": '

View File

@ -220,8 +220,8 @@ class CreateRule(workflows.Workflow):
except Exception as e:
# Nope.
LOG.error('Unable to get schema for table "%s", '
'datasource "%s": %s' % (table_name, datasource,
e.message))
'datasource "%s": %s',
table_name, datasource, e.message)
return e.message
return schema['columns']
@ -364,7 +364,7 @@ class CreateRule(workflows.Workflow):
column_variables[value] = variable
column_variables[negation_column] = variable
LOG.debug('column_variables for rule: %s' % column_variables)
LOG.debug('column_variables for rule: %s', column_variables)
# Form the literals for all the tables needed in the body. Make sure
# column that have no relation to any other columns are given a unique
@ -421,8 +421,8 @@ class CreateRule(workflows.Workflow):
# All together now.
rule = '%s(%s) %s %s' % (policy_table, ', '.join(head_columns),
congress.RULE_SEPARATOR, ', '.join(literals))
LOG.info('User %s creating policy "%s" rule "%s" in tenant %s: %s' %
(username, policy_name, rule_name, project_name, rule))
LOG.info('User %s creating policy "%s" rule "%s" in tenant %s: %s',
username, policy_name, rule_name, project_name, rule)
try:
params = {
'name': rule_name,
@ -431,11 +431,11 @@ class CreateRule(workflows.Workflow):
}
rule = congress.policy_rule_create(request, policy_name,
body=params)
LOG.info('Created rule %s' % rule['id'])
LOG.info('Created rule %s', rule['id'])
self.context['rule_id'] = rule['id']
except Exception as e:
LOG.error('Error creating policy "%s" rule "%s": %s' %
(policy_name, rule_name, e.message))
LOG.error('Error creating policy "%s" rule "%s": %s',
policy_name, rule_name, e.message)
self.context['error'] = e.message
return False
return True

View File

@ -60,11 +60,11 @@ class DeletePolicy(policy.PolicyTargetMixin, tables.DeleteAction):
redirect_url = 'horizon:admin:policies:index'
def delete(self, request, obj_id):
LOG.info('User %s deleting policy "%s" in tenant %s' %
(request.user.username, obj_id, request.user.tenant_name))
LOG.info('User %s deleting policy "%s" in tenant %s',
request.user.username, obj_id, request.user.tenant_name)
try:
congress.policy_delete(request, obj_id)
LOG.info('Deleted policy "%s"' % obj_id)
LOG.info('Deleted policy "%s"', obj_id)
except Exception as e:
msg_args = {'policy_id': obj_id, 'error': e.message}
msg = _('Failed to delete policy "%(policy_id)s": '

View File

@ -81,7 +81,7 @@ class TestHA(manager_congress.ScenarioPolicyBase):
conf[index:])
sindex = conf.find('signing_dir')
conf = conf[:sindex] + '#' + conf[sindex:]
LOG.debug("Configuration file for replica: %s\n" % conf)
LOG.debug("Configuration file for replica: %s\n", conf)
f.write(conf)
f.close()