Merge "Handle log message interpolation by the logger part 11"

Jenkins, 2017-06-07 02:52:55 +00:00, committed by Gerrit Code Review
commit c9b6127688
6 changed files with 30 additions and 31 deletions
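
The pattern applied throughout this change is the same mechanical conversion: rather than building the log message eagerly with the % operator, the format string and its arguments are handed to the logging call separately, so interpolation is deferred to the logger and only happens if the record is actually emitted (and handlers still see the original format string plus its arguments). A minimal sketch of the before/after styles, using an illustrative logger and values not taken from the patch:

    import logging

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    volume_id, status = "vol-123", "available"  # illustrative values

    # Before: the message is formatted even when DEBUG logging is disabled.
    LOG.debug("volume %s is %s" % (volume_id, status))

    # After: the logger interpolates only when the record is emitted.
    LOG.debug("volume %s is %s", volume_id, status)

    # With several values, a dict plus named placeholders keeps the call readable.
    LOG.debug("volume %(id)s is %(status)s",
              {'id': volume_id, 'status': status})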


@@ -176,7 +176,7 @@ class TestTroveMigrations(object):
         assert_equal(self.INIT_VERSION,
                      migration_api.db_version(engine, self.REPOSITORY))
-        LOG.debug('Latest version is %s' % self.REPOSITORY.latest)
+        LOG.debug('Latest version is %s', self.REPOSITORY.latest)
         versions = range(self.INIT_VERSION + 1, self.REPOSITORY.latest + 1)

         # Walk from version 1 to the latest, testing the upgrade paths.


@@ -35,11 +35,11 @@ class FakeDnsDriver(driver.DnsDriver):
         """
         entry.content = content
         assert_true(entry.name not in ENTRIES)
-        LOG.debug("Adding fake DNS entry for hostname %s." % entry.name)
+        LOG.debug("Adding fake DNS entry for hostname %s.", entry.name)
         ENTRIES[entry.name] = entry

     def delete_entry(self, name, type, dns_zone=None):
-        LOG.debug("Deleting fake DNS entry for hostname %s" % name)
+        LOG.debug("Deleting fake DNS entry for hostname %s", name)
         ENTRIES.pop(name, None)
@@ -48,8 +48,8 @@ class FakeDnsInstanceEntryFactory(driver.DnsInstanceEntryFactory):
     def create_entry(self, instance_id):
         # Construct hostname using pig-latin.
         hostname = "%s-lay" % instance_id
-        LOG.debug("Mapping instance_id %s to hostname %s"
-                  % (instance_id, hostname))
+        LOG.debug("Mapping instance_id %(id)s to hostname %(host)s",
+                  {'id': instance_id, 'host': hostname})
         return driver.DnsEntry(name=hostname, content=None,
                                type="A", ttl=42, dns_zone=None)


@@ -62,7 +62,7 @@ class FakeGuest(object):
         }

     def update_guest(self):
-        LOG.debug("Updating guest %s" % self.id)
+        LOG.debug("Updating guest %s", self.id)
         self.version += 1

     def _check_username(self, username):
@@ -226,8 +226,8 @@ class FakeGuest(object):
         from trove.guestagent.models import AgentHeartBeat
         from trove.instance.models import DBInstance
         from trove.instance.models import InstanceServiceStatus
-        LOG.debug("users... %s" % users)
-        LOG.debug("databases... %s" % databases)
+        LOG.debug("users... %s", users)
+        LOG.debug("databases... %s", databases)
         instance_name = DBInstance.find_by(id=self.id).name
         self.create_user(users)
         self.create_database(databases)


@@ -146,7 +146,7 @@ class FakeServer(object):
            self._current_status = "ACTIVE"

     def reboot(self):
-        LOG.debug("Rebooting server %s" % (self.id))
+        LOG.debug("Rebooting server %s", self.id)

         def set_to_active():
             self._current_status = "ACTIVE"
@@ -277,13 +277,13 @@ class FakeServers(object):
             while volume.status == "BUILD":
                 eventlet.sleep(0.1)
             if volume.status != "available":
-                LOG.info(_("volume status = %s") % volume.status)
+                LOG.info(_("volume status = %s"), volume.status)
                 raise nova_exceptions.ClientException("Volume was bad!")
             mapping = "%s::%s:%s" % (volume.id, volume.size, 1)
             block_device_mapping = {'vdb': mapping}
             volumes = [volume]
             LOG.debug("Fake Volume Create %(volumeid)s with "
-                      "status %(volumestatus)s" %
+                      "status %(volumestatus)s",
                       {'volumeid': volume.id, 'volumestatus': volume.status})
         else:
             volumes = self._get_volumes_from_bdm(block_device_mapping)
@@ -306,7 +306,7 @@ class FakeServers(object):
                                                  "available.")

         server.schedule_status("ACTIVE", 1)
-        LOG.info("FAKE_SERVERS_DB : %s" % str(FAKE_SERVERS_DB))
+        LOG.info("FAKE_SERVERS_DB : %s", str(FAKE_SERVERS_DB))
         return server

     def _get_volumes_from_bdm(self, block_device_mapping):
@@ -329,7 +329,7 @@ class FakeServers(object):
     def get(self, id):
         if id not in self.db:
-            LOG.error(_("Couldn't find server id %(id)s, collection=%(db)s") %
+            LOG.error(_("Couldn't find server id %(id)s, collection=%(db)s"),
                       {'id': id, 'db': self.db})
             raise nova_exceptions.NotFound(404, "Not found")
         else:
@@ -349,7 +349,7 @@ class FakeServers(object):
     def schedule_delete(self, id, time_from_now):
         def delete_server():
-            LOG.info(_("Simulated event ended, deleting server %s.") % id)
+            LOG.info(_("Simulated event ended, deleting server %s."), id)
             del self.db[id]

         eventlet.spawn_after(time_from_now, delete_server)
@@ -359,7 +359,7 @@ class FakeServers(object):
         def set_server_running():
             instance = DBInstance.find_by(compute_instance_id=id)
-            LOG.debug("Setting server %s to running" % instance.id)
+            LOG.debug("Setting server %s to running", instance.id)
             status = InstanceServiceStatus.find_by(instance_id=instance.id)
             status.status = rd_instance.ServiceStatuses.RUNNING
             status.save()
@@ -399,8 +399,7 @@ class FakeServerVolumes(object):
     def get_server_volumes(self, server_id):
         class ServerVolumes(object):
             def __init__(self, block_device_mapping):
-                LOG.debug("block_device_mapping = %s" %
-                          block_device_mapping)
+                LOG.debug("block_device_mapping = %s", block_device_mapping)
                 device = block_device_mapping['vdb']
                 (self.volumeId,
                  self.type,
@@ -492,7 +491,7 @@ class FakeVolumes(object):
     def get(self, id):
         if id not in self.db:
-            LOG.error(_("Couldn't find volume id %(id)s, collection=%(db)s") %
+            LOG.error(_("Couldn't find volume id %(id)s, collection=%(db)s"),
                       {'id': id, 'db': self.db})
             raise nova_exceptions.NotFound(404, "Not found")
         else:
@@ -513,16 +512,16 @@ class FakeVolumes(object):
         else:
             volume.schedule_status("available", 2)
         LOG.debug("Fake volume created %(volumeid)s with "
-                  "status %(volumestatus)s" %
+                  "status %(volumestatus)s",
                   {'volumeid': volume.id, 'volumestatus': volume.status})
-        LOG.info("FAKE_VOLUMES_DB : %s" % FAKE_VOLUMES_DB)
+        LOG.info("FAKE_VOLUMES_DB : %s", FAKE_VOLUMES_DB)
         return volume

     def list(self, detailed=True):
         return [self.db[key] for key in self.db]

     def extend(self, volume_id, new_size):
-        LOG.debug("Resize volume id (%(volumeid)s) to size (%(size)s)" %
+        LOG.debug("Resize volume id (%(volumeid)s) to size (%(size)s)",
                   {'volumeid': volume_id, 'size': new_size})
         volume = self.get(volume_id)


@@ -76,7 +76,7 @@ class FakeSwiftConnection(object):
                 'x-account-object-count': '0'}, [])

     def head_container(self, container):
-        LOG.debug("fake head_container(%s)" % container)
+        LOG.debug("fake head_container(%s)", container)
         if container == 'missing_container':
             raise swift.ClientException('fake exception',
                                         http_status=http_client.NOT_FOUND)
@@ -88,11 +88,11 @@ class FakeSwiftConnection(object):
         pass

     def put_container(self, container):
-        LOG.debug("fake put_container(%s)" % container)
+        LOG.debug("fake put_container(%s)", container)
         pass

     def get_container(self, container, **kwargs):
-        LOG.debug("fake get_container(%s)" % container)
+        LOG.debug("fake get_container(%s)", container)
         fake_header = None
         fake_body = [{'name': 'backup_001'},
                      {'name': 'backup_002'},
@@ -100,7 +100,7 @@ class FakeSwiftConnection(object):
         return fake_header, fake_body

     def head_object(self, container, name):
-        LOG.debug("fake put_container(%(container)s, %(name)s)" %
+        LOG.debug("fake put_container(%(container)s, %(name)s)",
                   {'container': container, 'name': name})
         checksum = md5()
         if self.manifest_name == name:
@@ -125,7 +125,7 @@ class FakeSwiftConnection(object):
         return {'etag': '"%s"' % checksum.hexdigest()}

     def get_object(self, container, name, resp_chunk_size=None):
-        LOG.debug("fake get_object(%(container)s, %(name)s)" %
+        LOG.debug("fake get_object(%(container)s, %(name)s)",
                   {'container': container, 'name': name})
         if container == 'socket_error_on_get':
             raise socket.error(111, 'ECONNREFUSED')
@@ -163,7 +163,7 @@ class FakeSwiftConnection(object):
         return (fake_header, fake_object_body)

     def put_object(self, container, name, contents, **kwargs):
-        LOG.debug("fake put_object(%(container)s, %(name)s)" %
+        LOG.debug("fake put_object(%(container)s, %(name)s)",
                   {'container': container, 'name': name})
         if container == 'socket_error_on_put':
             raise socket.error(111, 'ECONNREFUSED')
@@ -205,11 +205,11 @@ class FakeSwiftConnection(object):
         return object_checksum.hexdigest()

     def post_object(self, container, name, headers={}):
-        LOG.debug("fake post_object(%(container)s, %(name)s, %(head)s)" %
+        LOG.debug("fake post_object(%(container)s, %(name)s, %(head)s)",
                   {'container': container, 'name': name, 'head': str(headers)})

     def delete_object(self, container, name):
-        LOG.debug("fake delete_object(%(container)s, %(name)s)" %
+        LOG.debug("fake delete_object(%(container)s, %(name)s)",
                   {'container': container, 'name': name})
         if container == 'socket_error_on_delete':
             raise socket.error(111, 'ECONNREFUSED')


@@ -75,10 +75,10 @@ def notify(event_type, payload):
     """Simple test notify function which saves the messages to global list."""
     payload['event_type'] = event_type
     if 'instance_id' in payload and 'server_type' not in payload:
-        LOG.debug('Received Usage Notification: %s' % event_type)
+        LOG.debug('Received Usage Notification: %s', event_type)
         resource_id = payload['instance_id']
         global MESSAGE_QUEUE
         MESSAGE_QUEUE[resource_id].append(payload)
-        LOG.debug('Message Queue for %(id)s now has %(msg_count)d messages' %
+        LOG.debug('Message Queue for %(id)s now has %(msg_count)d messages',
                   {'id': resource_id,
                    'msg_count': len(MESSAGE_QUEUE[resource_id])})