Handle log filter exceptions more gracefully.

If there is an exception filtering a log event, handle that by removing
the filter and continuing to process the remaining log events for the
associated file. This prevents non-filter data from being lost when the
filters have an exception.

Change-Id: I65141daf21a873096829c41fdc2c77cbeecde2e3
This commit is contained in:
Clark Boylan 2014-02-10 10:20:12 -08:00
parent 7dea09588f
commit 7390aeb828
1 changed file with 17 additions and 5 deletions

View File

@ -52,6 +52,10 @@ def semi_busy_wait(seconds):
return
class FilterException(Exception):
    """Recoverable failure raised from inside a log filter.

    Catching this (rather than a bare ``Exception``) lets the log
    retriever drop only the misbehaving filter and keep processing the
    remaining log events for the associated file.
    """
class CRM114Filter(object):
def __init__(self, script, path, build_status):
self.p = None
@ -77,14 +81,14 @@ class CRM114Filter(object):
[self.p.stdin, self.p.stdout], 20)
if not r:
self.p.kill()
raise Exception('Timeout reading from CRM114')
raise FilterException('Timeout reading from CRM114')
r = self.p.stdout.readline()
if not r:
err = self.p.stderr.read()
if err:
raise Exception(err)
raise FilterException(err)
else:
raise Exception('Early EOF from CRM114')
raise FilterException('Early EOF from CRM114')
r = r.strip()
data['error_pr'] = float(r)
@ -143,6 +147,7 @@ class LogRetriever(threading.Thread):
for f in self.filters:
logging.debug("Adding filter: %s" % f.name)
filters.append(f.create(fields))
all_filters = filters
logging.debug("Pushing " + str(len(log_lines)) + " log lines.")
base_event = {}
@ -151,10 +156,17 @@ class LogRetriever(threading.Thread):
for line in log_lines:
out_event = base_event.copy()
out_event["message"] = line
new_filters = []
for f in filters:
f.process(out_event)
try:
f.process(out_event)
new_filters.append(f)
except FilterException:
logging.exception("Exception filtering event: "
"%s" % line.encode("utf-8"))
filters = new_filters
self.logq.put(out_event)
for f in filters:
for f in all_filters:
f.close()
job.sendWorkComplete()
except Exception as e: