Merge "Use short build_uuids in elasticSearch queries"

Jenkins authored 2014-01-24 17:03:17 +00:00, committed by Gerrit Code Review
commit 3e4d066a30
4 changed files with 70 additions and 35 deletions
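
Taken together, the hunks below hinge on one detail called out in the new code comments: the last seven characters of a failed job's log URL are the first seven digits of its build_uuid, so they can be handed to Elasticsearch as a prefix wildcard (build_uuid:<short>*). A minimal sketch of that mapping, with my own variable names and a log URL borrowed from the unit tests in the last file:

# Sketch only: variable names here are illustrative, not from the commit.
# The log URL comes from this commit's unit tests; its trailing segment is
# the short build uuid that the new queries match with a wildcard.
url = ("http://logs.openstack.org/31/64831/1/check/"
       "check-requirements-integration-dsvm/135d0b4")

short_build_uuid = url[-7:]            # first 7 digits of the build_uuid
assert short_build_uuid == "135d0b4"

print('build_uuid:%s*' % short_build_uuid)   # -> build_uuid:135d0b4*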


@@ -145,7 +145,9 @@ class RecheckWatch(threading.Thread):
try:
event = stream.get_failed_tempest()
event.bugs = classifier.classify(event.change, event.rev)
for short_build_uuid in event.short_build_uuids:
event.bugs |= set(classifier.classify(
event.change, event.rev, short_build_uuid))
if not event.bugs:
self._read(event)
else:
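
In the watcher hunk above, classification now runs once per failed job and the per-job results are merged with a set union, so a bug hit by several jobs is reported only once. A toy illustration of that union; the classifier is a stub and the bug numbers are invented, while the change, revision, and short uuids come from the unit tests below:

# Toy illustration of the per-job union in the hunk above.  The classifier is
# a stub and the bug numbers are made up; only the short uuids are real test data.
def stub_classify(change, rev, short_build_uuid):
    fake_results = {"5dd41fe": ["1000001"],
                    "d3fd328": ["1000001", "1000002"]}
    return fake_results[short_build_uuid]

bugs = set()
for short_build_uuid in ["5dd41fe", "d3fd328"]:
    bugs |= set(stub_classify("64749", "6", short_build_uuid))

print(sorted(bugs))   # ['1000001', '1000002'] -- the shared bug appears once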


@@ -68,13 +68,20 @@ class FailEvent(object):
rev = None
project = None
url = None
bugs = []
bugs = set([])
short_build_uuids = []
comment = None
def __init__(self, event):
self.change = event['change']['number']
self.rev = event['patchSet']['number']
self.project = event['change']['project']
self.url = event['change']['url']
self.comment = event["comment"]
self.bugs = set([])
def is_openstack_project(self):
return "tempest-dsvm-full" in self.comment
def name(self):
return "%s,%s" % (self.change, self.rev)
@@ -120,22 +127,26 @@ class Stream(object):
for line in event['comment'].split("\n"):
m = re.search("- ([\w-]+)\s*(http://\S+)\s*:\s*FAILURE", line)
if m:
failed_tests[m.group(1)] = m.group(2)
# The last 7 characters of the URL are the first 7 digits
# of the build_uuid.
failed_tests[m.group(1)] = {'url': m.group(2),
'short_build_uuid':
m.group(2)[-7:]}
return failed_tests
def _job_console_uploaded(self, change, patch, name):
query = qb.result_ready(change, patch, name)
def _job_console_uploaded(self, change, patch, name, short_build_uuid):
query = qb.result_ready(change, patch, name, short_build_uuid)
r = self.es.search(query, size='10')
if len(r) == 0:
msg = ("Console logs not ready for %s %s,%s" %
(name, change, patch))
msg = ("Console logs not ready for %s %s,%s,%s" %
(name, change, patch, short_build_uuid))
raise ConsoleNotReady(msg)
else:
LOG.debug("Console ready for %s %s,%s" %
(name, change, patch))
LOG.debug("Console ready for %s %s,%s,%s" %
(name, change, patch, short_build_uuid))
def _has_required_files(self, change, patch, name):
query = qb.files_ready(change, patch)
def _has_required_files(self, change, patch, name, short_build_uuid):
query = qb.files_ready(change, patch, name, short_build_uuid)
r = self.es.search(query, size='80')
files = [x['term'] for x in r.terms]
required = required_files(name)
@@ -145,9 +156,6 @@ class Stream(object):
change, patch, name, missing_files))
raise FilesNotReady(msg)
def _is_openstack_project(self, event):
return "tempest-dsvm-full" in event["comment"]
def _does_es_have_data(self, change_number, patch_number, job_fails):
"""Wait till ElasticSearch is ready, but return False if timeout."""
NUMBER_OF_RETRIES = 20
@@ -158,8 +166,12 @@ class Stream(object):
for i in range(NUMBER_OF_RETRIES):
try:
for job_name in job_fails:
#TODO(jogo) if there are three failed jobs and only the
#last one isn't ready we don't need to keep rechecking
# the first two
self._job_console_uploaded(
change_number, patch_number, job_name)
change_number, patch_number, job_name,
job_fails[job_name]['short_build_uuid'])
break
except ConsoleNotReady as e:
@@ -177,8 +189,9 @@ class Stream(object):
if i == NUMBER_OF_RETRIES - 1:
elapsed = datetime.datetime.now() - started_at
msg = ("Console logs not available after %ss for %s %s,%s" %
(elapsed, job_name, change_number, patch_number))
msg = ("Console logs not available after %ss for %s %s,%s,%s" %
(elapsed, job_name, change_number, patch_number,
job_fails[job_name]['short_build_uuid']))
raise ResultTimedOut(msg)
LOG.debug(
@@ -189,7 +202,8 @@ class Stream(object):
try:
for job_name in job_fails:
self._has_required_files(
change_number, patch_number, job_name)
change_number, patch_number, job_name,
job_fails[job_name]['short_build_uuid'])
LOG.info(
"All files present for change_number: %s, patch_number: %s"
% (change_number, patch_number))
@@ -200,8 +214,9 @@ class Stream(object):
# if we get to the end, we're broken
elapsed = datetime.datetime.now() - started_at
msg = ("Required files not ready after %ss for %s %d,%d" %
(elapsed, job_name, change_number, patch_number))
msg = ("Required files not ready after %ss for %s %d,%d,%s" %
(elapsed, job_name, change_number, patch_number,
job_fails[job_name]['short_build_uuid']))
raise ResultTimedOut(msg)
def get_failed_tempest(self):
@@ -214,13 +229,16 @@ class Stream(object):
# nothing to see here, lets try the next event
continue
fevent = FailEvent(event)
# bail if it's not an openstack project
if not self._is_openstack_project(event):
if not fevent.is_openstack_project():
continue
fevent = FailEvent(event)
LOG.info("Looking for failures in %s,%s on %s" %
(fevent.change, fevent.rev, ", ".join(failed_jobs)))
fevent.short_build_uuids = [
v['short_build_uuid'] for v in failed_jobs.values()]
if self._does_es_have_data(fevent.change, fevent.rev, failed_jobs):
return fevent
@@ -267,7 +285,8 @@ class Classifier():
es_query = qb.generic(query, facet=facet)
return self.es.search(es_query, size=size)
def classify(self, change_number, patch_number, skip_resolved=True):
def classify(self, change_number, patch_number, short_build_uuid,
skip_resolved=True):
"""Returns either empty list or list with matched bugs."""
LOG.debug("Entering classify")
#Reload each time
@@ -277,7 +296,8 @@ class Classifier():
LOG.debug(
"Looking for bug: https://bugs.launchpad.net/bugs/%s"
% x['bug'])
query = qb.single_patch(x['query'], change_number, patch_number)
query = qb.single_patch(x['query'], change_number, patch_number,
short_build_uuid)
results = self.es.search(query, size='10')
if len(results) > 0:
bug_matches.append(x['bug'])
@@ -304,7 +324,11 @@ def main():
rev = event['patchSet']['number']
print "======================="
print "https://review.openstack.org/#/c/%(change)s/%(rev)s" % locals()
bug_numbers = classifier.classify(change, rev)
bug_numbers = []
for short_build_uuid in event.short_build_uuids:
bug_numbers = bug_numbers + classifier.classify(
change, rev, short_build_uuid)
bug_numbers = set(bug_numbers)
if not bug_numbers:
print "unable to classify failure"
else:
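
The short uuid itself comes from the hunk above that builds failed_tests: each FAILURE line in the Gerrit comment is matched with the regex shown, and the last seven characters of the log URL are stored next to it. A small standalone sketch of that parsing; the comment line is illustrative, shaped the way the regex expects, with the URL taken from the unit tests:

import re

# Illustrative Gerrit comment line in the shape the regex above expects; the
# log URL is taken from this commit's unit tests.
line = ("- check-requirements-integration-dsvm "
        "http://logs.openstack.org/31/64831/1/check/"
        "check-requirements-integration-dsvm/135d0b4 : FAILURE")

m = re.search(r"- ([\w-]+)\s*(http://\S+)\s*:\s*FAILURE", line)
failed_tests = {}
if m:
    failed_tests[m.group(1)] = {
        'url': m.group(2),
        # the last 7 characters of the URL are the first 7 of the build_uuid
        'short_build_uuid': m.group(2)[-7:],
    }

print(failed_tests['check-requirements-integration-dsvm']['short_build_uuid'])
# -> 135d0b4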


@@ -59,7 +59,7 @@ def generic(raw_query, facet=None):
return query
def result_ready(review=None, patch=None, name=None):
def result_ready(review=None, patch=None, name=None, short_build_uuid=None):
"""A query to determine if we have a failure for a particular patch.
This is looking for a particular FAILURE line in the console log, which
@@ -70,11 +70,12 @@ def result_ready(review=None, patch=None, name=None):
'AND build_status:"FAILURE" '
'AND build_change:"%s" '
'AND build_patchset:"%s" '
'AND build_name:"%s"' %
(review, patch, name))
'AND build_name:"%s"'
'AND build_uuid:%s*' %
(review, patch, name, short_build_uuid))
def files_ready(review, patch):
def files_ready(review, patch, name, short_build_uuid):
"""A facetted query to ensure all the required files exist.
When changes are uploaded to elastic search there is a delay in
@@ -84,11 +85,14 @@ def files_ready(review, patch):
"""
return generic('build_status:"FAILURE" '
'AND build_change:"%s" '
'AND build_patchset:"%s"' % (review, patch),
'AND build_patchset:"%s"'
'AND build_name:"%s"'
'AND build_uuid:%s*' %
(review, patch, name, short_build_uuid),
facet='filename')
def single_patch(query, review, patch):
def single_patch(query, review, patch, short_build_uuid):
"""A query for a single patch (review + revision).
This is used to narrow down a particular kind of failure found in a
@@ -96,5 +100,6 @@ def single_patch(query, review, patch):
"""
return generic('%s '
'AND build_change:"%s" '
'AND build_patchset:"%s"' %
(query, review, patch))
'AND build_patchset:"%s"'
'AND build_uuid:%s*' %
(query, review, patch, short_build_uuid))
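
With the added clause, each query builder now appends a build_uuid:<short>* prefix wildcard so Elasticsearch only matches the one build whose UUID starts with those seven characters. A hypothetical rendering of the kind of query string result_ready() ends up expressing; the layout and joining are mine, and only the field names shown in the hunks above are used:

# Hypothetical rendering of a result_ready()-style query; the layout is mine,
# only the field names and the build_uuid wildcard come from the hunks above.
def render_result_ready(review, patch, name, short_build_uuid):
    clauses = [
        'build_status:"FAILURE"',
        'build_change:"%s"' % review,
        'build_patchset:"%s"' % patch,
        'build_name:"%s"' % name,
        'build_uuid:%s*' % short_build_uuid,   # prefix match on the short uuid
    ]
    return ' AND '.join(clauses)

print(render_result_ready("64831", "1",
                          "check-requirements-integration-dsvm", "135d0b4"))
# -> build_status:"FAILURE" AND build_change:"64831" AND ... AND build_uuid:135d0b4*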


@@ -45,6 +45,7 @@ class TestStream(tests.TestCase):
self.assertEqual(event.project, "openstack/keystone")
self.assertEqual(event.name(), "64749,6")
self.assertEqual(event.url, "https://review.openstack.org/64749")
self.assertEqual(event.short_build_uuids, ["5dd41fe", "d3fd328"])
event = stream.get_failed_tempest()
self.assertEqual(event.change, "63078")
@@ -52,6 +53,7 @@ class TestStream(tests.TestCase):
self.assertEqual(event.project, "openstack/horizon")
self.assertEqual(event.name(), "63078,19")
self.assertEqual(event.url, "https://review.openstack.org/63078")
self.assertEqual(event.short_build_uuids, ["ab07162"])
event = stream.get_failed_tempest()
self.assertEqual(event.change, "65361")
@@ -59,6 +61,7 @@ class TestStream(tests.TestCase):
self.assertEqual(event.project, "openstack/requirements")
self.assertEqual(event.name(), "65361,2")
self.assertEqual(event.url, "https://review.openstack.org/65361")
self.assertEqual(event.short_build_uuids, ["8209fb4"])
self.assertRaises(
fg.GerritDone,
@@ -81,8 +84,9 @@ class TestStream(tests.TestCase):
self.assertIn('check-tempest-dsvm-neutron', jobs)
self.assertEqual(jobs['check-requirements-integration-dsvm'],
"http://logs.openstack.org/31/64831/1/check/"
"check-requirements-integration-dsvm/135d0b4")
{'url': "http://logs.openstack.org/31/64831/1/check/"
"check-requirements-integration-dsvm/135d0b4",
'short_build_uuid': '135d0b4'})
self.assertNotIn('gate-requirements-pep8', jobs)
self.assertNotIn('gate-requirements-python27', jobs)
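
The assertions above pin down the new short_build_uuids attribute and the richer failed-jobs dict. A minimal, self-contained test in the same spirit, exercising only the URL-slicing convention (it does not touch the real Stream class or its fixtures):

import unittest

# Minimal check of the short-uuid convention used throughout this commit;
# the URL is the same one the real tests assert against.
class TestShortBuildUuid(unittest.TestCase):
    def test_short_uuid_is_url_tail(self):
        url = ("http://logs.openstack.org/31/64831/1/check/"
               "check-requirements-integration-dsvm/135d0b4")
        self.assertEqual(url[-7:], "135d0b4")

if __name__ == '__main__':
    unittest.main()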