Handle 404 and download errors properly

This commit adds proper error handling to the subunit gearman worker
for the case where a download error occurs. Previously, if a subunit
stream could not be downloaded for whatever reason, an empty row was
added to the subunit2sql db. This happened because, after logging the
exception, an empty stream file was still passed to subunit2sql,
which treated it as a run that didn't run anything. This wastes time
and is confusing to users of the DB, because the DB ends up with a
number of runs which didn't actually run anything.
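
The fix boils down to a guard pattern: if the download yields nothing,
report the failure on the gearman job and skip the subunit2sql import
instead of feeding it an empty stream. A minimal sketch of that flow,
where retrieve_stream, push_to_subunit2sql, and the job object are
hypothetical stand-ins for the worker's real helpers:

    # Sketch only; retrieve_stream and push_to_subunit2sql are
    # hypothetical stand-ins for the worker's actual helpers.
    def process_job(job, source_url):
        subunit_io = retrieve_stream(source_url)  # None on download failure
        if not subunit_io:
            # Report the failure instead of importing an empty run.
            job.sendWorkException(
                'Unable to retrieve subunit stream'.encode('utf8'))
            return
        push_to_subunit2sql(subunit_io)  # only real runs reach the DB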

Change-Id: I1f8fd7ffd9c16ce2dddd534d4c641e6d65249d91
Matthew Treinish 2015-12-07 15:43:17 -05:00
parent c38d9d37d4
commit 5327cc64a8
1 changed file with 5 additions and 0 deletions

@@ -82,6 +82,9 @@ class SubunitRetriever(threading.Thread):
 # Handle events ignoring aborted builds. These builds are
 # discarded by zuul.
 subunit_io = self._retrieve_subunit_v2(source_url, retry)
+if not subunit_io:
+    job.sendWorkException(
+        'Unable to retrieve subunit stream'.encode('utf8'))
 logging.debug("Pushing subunit files.")
 out_event = fields.copy()
 out_event["subunit"] = subunit_io
@@ -104,11 +107,13 @@
                 source_url)
     else:
         logging.exception("Unable to get log data.")
+        return None
 except Exception:
     # Silently drop fatal errors when retrieving logs.
     # TODO (clarkb): Handle these errors.
     # Perhaps simply add a log message to raw_buf?
     logging.exception("Unable to get log data.")
+    return None
 if gzipped:
     logging.debug("Decompressing gzipped source file.")
     raw_strIO = cStringIO.StringIO(raw_buf)
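
For reference, the retrieval side of this pattern reduces to returning
None on any download error and only decompressing once a payload is
actually in hand. A rough standalone sketch under Python 3, using
urllib.request and gzip in place of the worker's urllib2/cStringIO
calls; the function name and signature here are illustrative, not the
project's actual code:

    import gzip
    import io
    import logging
    import urllib.request

    def retrieve_subunit(source_url, gzipped=False):
        """Download a subunit stream, returning None on any failure."""
        try:
            raw_buf = urllib.request.urlopen(source_url).read()
        except Exception:
            # Mirror the worker's behaviour: log and bail out with None
            # so the caller never imports an empty run.
            logging.exception("Unable to get log data.")
            return None
        if gzipped:
            logging.debug("Decompressing gzipped source file.")
            raw_buf = gzip.decompress(raw_buf)
        return io.BytesIO(raw_buf)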