pylint: fixed logging-not-lazy

Change-Id: Ic25366a9afdfc67ab2beddbe2b8d02544c51e480
Author: Sorin Sbarnea 2020-09-10 14:56:52 +01:00
parent ed5296999e
commit c41b9c6fa0
7 changed files with 29 additions and 30 deletions
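
Every hunk below applies the same mechanical fix: pylint's logging-not-lazy check (W1201) flags messages that are %-formatted before being handed to the logging call. A minimal before/after sketch of the pattern (logger name taken from the diff; the numeric values are illustrative):

import logging

log = logging.getLogger('recheckwatchbot')

# Before: flagged as logging-not-lazy (W1201). The '%' interpolation
# runs unconditionally, even when DEBUG is disabled and the message
# is discarded.
log.debug("Took %d seconds to run (uncached) query for bug %s" %
          (42, 1234567))  # 42 and 1234567 are illustrative values

# After: the arguments ride along on the LogRecord and are only
# interpolated if a handler actually emits the record.
log.debug("Took %d seconds to run (uncached) query for bug %s",
          42, 1234567)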

View File

@@ -15,7 +15,6 @@ disable =
     invalid-name,
     line-too-long,
     literal-comparison,
-    logging-not-lazy,
     missing-class-docstring,
     missing-function-docstring,
     missing-module-docstring,

View File

@@ -102,11 +102,11 @@ class RecheckWatchBot(irc.bot.SingleServerIRCBot):
         self.log.info('Identified with IRC server.')
         for channel in self.channel_list:
             c.join(channel)
-            self.log.info('Joined channel %s' % channel)
+            self.log.info('Joined channel %s', channel)
             time.sleep(0.5)
 
     def send(self, channel, msg):
-        self.log.info('Sending "%s" to %s' % (msg, channel))
+        self.log.info('Sending "%s" to %s', msg, channel)
         # Cheap way to attempt to send fewer than 512 bytes at a time.
         # TODO(clarkb) calculate actual irc overhead and split properly.
         for chunk in textwrap.wrap(msg, 400):
@@ -165,11 +165,11 @@ class RecheckWatch(threading.Thread):
                     self.print_msg(channel, msg)
                 else:
                     self.log.info("Didn't leave a message on channel %s for %s because"
-                                  " the bug doesn't target an appropriate project" % (
-                                      channel, event.url))
+                                  " the bug doesn't target an appropriate project",
+                                  channel, event.url)
 
     def print_msg(self, channel, msg):
-        self.log.info('Compiled Message %s: %s' % (channel, msg))
+        self.log.info('Compiled Message %s: %s', channel, msg)
         if self.ircbot:
             self.ircbot.send(channel, msg)
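
The wrapped call above is the one mildly tricky case: the message is split across adjacent string literals. Implicit concatenation still yields a single format string, so the fix is only swapping the trailing % (...) for plain arguments. A sketch with illustrative values:

import logging

log = logging.getLogger('recheckwatchbot')
channel = '#openstack-qa'                   # illustrative
url = 'https://launchpad.net/bugs/1234567'  # illustrative

# Adjacent literals are concatenated at compile time into one format
# string with two '%s' placeholders; the arguments simply follow it.
log.info("Didn't leave a message on channel %s for %s because"
         " the bug doesn't target an appropriate project",
         channel, url)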

View File

@@ -167,8 +167,8 @@ def collect_metrics(classifier, fails):
         start = time.time()
         results = classifier.hits_by_query(q['query'], size=30000)
         log = logging.getLogger('recheckwatchbot')
-        log.debug("Took %d seconds to run (uncached) query for bug %s" %
-                  (time.time() - start, q['bug']))
+        log.debug("Took %d seconds to run (uncached) query for bug %s",
+                  time.time() - start, q['bug'])
         hits = _status_count(results)
         data[q['bug']] = {
             'fails': _failure_count(hits),

View File

@@ -68,7 +68,7 @@ def get_launchpad_bug(bug):
         bugdata = dict(name='Unknown (Private Bug)',
                        affects='Unknown (Private Bug)', reviews=[])
     except requests.exceptions.RequestException:
-        LOG.exception("Failed to get Launchpad data for bug %s" % bug)
+        LOG.exception("Failed to get Launchpad data for bug %s", bug)
         bugdata = dict(name='Unable to get launchpad data',
                        affects='Unknown', reviews=[])
     return bugdata
@@ -93,7 +93,7 @@ def get_open_reviews(bug_number, attempt=0):
     try:
         result = json.loads(r.text[4:])
     except ValueError:
-        LOG.debug("gerrit response '%s' is not valid JSON" % r.text.strip())
+        LOG.debug("gerrit response '%s' is not valid JSON", r.text.strip())
         raise
     for review in result:
         reviews.append(review['_number'])
@@ -173,7 +173,7 @@ def main():
         if query.get('suppress-graph'):
             continue
         if args.verbose:
-            LOG.debug("Starting query for bug %s" % query['bug'])
+            LOG.debug("Starting query for bug %s", query['bug'])
         logstash_query = qb.encode_logstash_query(query['query'],
                                                   timeframe=timeframe)
         logstash_url = ("%s/#/dashboard/file/logstash.json?%s"
@@ -194,11 +194,11 @@ def main():
                                                 size=3000,
                                                 days=days)
         except pyelasticsearch.exceptions.InvalidJsonResponseError:
-            LOG.exception("Invalid Json while collecting metrics for query %s"
-                          % query['query'])
+            LOG.exception("Invalid Json while collecting metrics for query %s",
+                          query['query'])
             continue
         except requests.exceptions.ReadTimeout:
-            LOG.exception("Timeout while collecting metrics for query %s" %
+            LOG.exception("Timeout while collecting metrics for query %s",
                           query['query'])
             continue
         except pyelasticsearch.exceptions.ElasticHttpError as ex:

View File

@@ -327,7 +327,7 @@ def collect_metrics(classifier, fails, config=None):
                 'failed_jobs': _failed_jobs(results)
             }
         except requests.exceptions.ReadTimeout:
-            LOG.exception("Failed to collection metrics for query %s" %
+            LOG.exception("Failed to collection metrics for query %s",
                           q['query'])
     return data

View File

@@ -239,8 +239,8 @@ class Stream(object):
             msg = ("Console logs not ready for %s %s,%s,%s" %
                    (name, change, patch, build_short_uuid))
             raise ConsoleNotReady(msg)
-        self.log.debug("Console ready for %s %s,%s,%s" %
-                       (name, change, patch, build_short_uuid))
+        self.log.debug("Console ready for %s %s,%s,%s",
+                       name, change, patch, build_short_uuid)
 
     def _has_required_files(self, change, patch, name, build_short_uuid):
         query = qb.files_ready(change, patch, name, build_short_uuid)
@@ -308,11 +308,11 @@ class Stream(object):
                 raise ResultTimedOut(msg)
 
             self.log.debug(
-                "Found hits for change_number: %d, patch_number: %d"
-                % (event.change, event.rev))
+                "Found hits for change_number: %d, patch_number: %d",
+                event.change, event.rev)
             self.log.info(
-                "All files present for change_number: %d, patch_number: %d"
-                % (event.change, event.rev))
+                "All files present for change_number: %d, patch_number: %d",
+                event.change, event.rev)
             return True
 
     def get_failed_tempest(self):
@@ -333,9 +333,9 @@ class Stream(object):
             if not fevent.is_included_job():
                 continue
 
-            self.log.info("Looking for failures in %d,%d on %s" %
-                          (fevent.change, fevent.rev,
-                           ", ".join(fevent.failed_job_names())))
+            self.log.info("Looking for failures in %d,%d on %s",
+                          fevent.change, fevent.rev,
+                          ", ".join(fevent.failed_job_names()))
             if self._does_es_have_data(fevent):
                 return fevent
@@ -351,8 +351,8 @@ class Stream(object):
         else:
             parts.append(msgs['no_bugs_found'])
         msg = '\n'.join(parts)
-        self.log.debug("Compiled comment for commit %s:\n%s" %
-                       (event.name(), msg))
+        self.log.debug("Compiled comment for commit %s:\n%s",
+                       event.name(), msg)
 
         if not debug:
             self.gerrit.review(event.project, event.name(), msg)
@@ -413,8 +413,8 @@ class Classifier(object):
             if x.get('suppress-notification'):
                 continue
             self.log.debug(
-                "Looking for bug: https://bugs.launchpad.net/bugs/%s"
-                % x['bug'])
+                "Looking for bug: https://bugs.launchpad.net/bugs/%s",
+                x['bug'])
             query = qb.single_patch(x['query'], change_number, patch_number,
                                     build_short_uuid)
             results = self.es.search(query, size='10', recent=recent)
@@ -423,7 +423,7 @@ class Classifier(object):
                     test_ids = x['test_ids']
                     self.log.debug(
                         "For bug %s checking subunit2sql for failures on "
-                        "test_ids: %s" % (x['bug'], test_ids))
+                        "test_ids: %s", x['bug'], test_ids)
                     if check_failed_test_ids_for_job(build_short_uuid,
                                                      test_ids, session):
                         bug_matches.append(x['bug'])

View File

@@ -71,7 +71,7 @@ class Context():
         valid_query = len(results) > 0
 
         if not valid_query:
-            LOGGER.error("Didn't find any hits for bug %s" % x['bug'])
+            LOGGER.error("Didn't find any hits for bug %s", x['bug'])
             # don't fail tests if no hits for a bug
             return True
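
Beyond quieting pylint, the lazy form actually skips the formatting work when a record is filtered out. A self-contained check (class name hypothetical) showing that only the eager call pays the rendering cost for a suppressed DEBUG message:

import logging

class CountingStr:
    # Hypothetical stand-in for a value that is expensive to render.
    renders = 0

    def __str__(self):
        CountingStr.renders += 1
        return 'rendered'

logging.basicConfig(level=logging.INFO)     # DEBUG records are filtered
log = logging.getLogger('recheckwatchbot')

log.debug("eager: %s" % CountingStr())      # __str__ runs anyway
log.debug("lazy: %s", CountingStr())        # dropped before formatting

assert CountingStr.renders == 1             # only the eager call rendered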