Take advantage of the new build_short_uuid field.

Recently the elasticsearch schema was updated to include a
build_short_uuid field, which indexes the first 7 characters of the
build_uuid. This field is useful because it allows e-r to filter on an
exact, indexed value instead of running a wildcard search against
build_uuid.

Update e-r to filter on build_short_uuid, which should make these
queries significantly faster. As part of this change, rename variables
from short_build_uuid to build_short_uuid for consistency with the
elasticsearch schema.
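
For illustration, the heart of the change is the query clause below.
This is a minimal sketch only; the UUID is a made-up example value and
is not taken from this change:

    # Hypothetical full build UUID; only its first 7 characters are
    # indexed in the new build_short_uuid field.
    build_uuid = "5dd41fe294bf4d6f9373ceb91bbbbfd2"
    build_short_uuid = build_uuid[:7]  # "5dd41fe"

    # Old clause: prefix wildcard against the full UUID field.
    old_clause = 'build_uuid:%s*' % build_short_uuid

    # New clause: exact match against the indexed short-uuid field,
    # which elasticsearch can answer without expanding a wildcard.
    new_clause = 'build_short_uuid:%s' % build_short_uuid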

Change-Id: Iae5323f3f5d2fd01f2c69f78b9403baf5ebafe85
Clark Boylan 2014-03-26 12:14:32 -07:00
parent a744dbba41
commit fea4d1f7ee
4 changed files with 35 additions and 35 deletions


@@ -195,7 +195,7 @@ class RecheckWatch(threading.Thread):
for job in event.failed_jobs:
job.bugs = set(classifier.classify(
-event.change, event.rev, job.short_build_uuid))
+event.change, event.rev, job.build_short_uuid))
if not event.get_all_bugs():
self._read(event)
else:


@@ -75,7 +75,7 @@ class FailJob(object):
A job is a zuul job.
"""
bugs = []
-short_build_uuid = None
+build_short_uuid = None
url = None
name = None
@@ -84,7 +84,7 @@ class FailJob(object):
self.url = url
# The last 7 characters of the URL are the first 7 digits
# of the build_uuid.
-self.short_build_uuid = url[-7:]
+self.build_short_uuid = url[-7:]
def __str__(self):
return self.name
@@ -99,7 +99,7 @@ class FailEvent(object):
rev = None
project = None
url = None
-short_build_uuids = []
+build_short_uuids = []
comment = None
failed_jobs = []
@@ -165,8 +165,8 @@ class FailEvent(object):
return None
return self.failed_jobs[0].url.split('/')[6]
-def short_build_uuids(self):
-return [job.short_build_uuid for job in self.failed_jobs]
+def build_short_uuids(self):
+return [job.build_short_uuid for job in self.failed_jobs]
def failed_job_names(self):
return [job.name for job in self.failed_jobs]
@@ -216,26 +216,26 @@ class Stream(object):
failed_tests.append(FailJob(m.group(1), m.group(2)))
return failed_tests
-def _job_console_uploaded(self, change, patch, name, short_build_uuid):
-query = qb.result_ready(change, patch, name, short_build_uuid)
+def _job_console_uploaded(self, change, patch, name, build_short_uuid):
+query = qb.result_ready(change, patch, name, build_short_uuid)
r = self.es.search(query, size='10')
if len(r) == 0:
msg = ("Console logs not ready for %s %s,%s,%s" %
-(name, change, patch, short_build_uuid))
+(name, change, patch, build_short_uuid))
raise ConsoleNotReady(msg)
else:
self.log.debug("Console ready for %s %s,%s,%s" %
-(name, change, patch, short_build_uuid))
+(name, change, patch, build_short_uuid))
-def _has_required_files(self, change, patch, name, short_build_uuid):
-query = qb.files_ready(change, patch, name, short_build_uuid)
+def _has_required_files(self, change, patch, name, build_short_uuid):
+query = qb.files_ready(change, patch, name, build_short_uuid)
r = self.es.search(query, size='80')
files = [x['term'] for x in r.terms]
required = required_files(name)
missing_files = [x for x in required if x not in files]
if len(missing_files) != 0:
msg = ("%s missing for %s %s,%s,%s" % (
-missing_files, name, change, patch, short_build_uuid))
+missing_files, name, change, patch, build_short_uuid))
raise FilesNotReady(msg)
def _does_es_have_data(self, event):
@@ -253,7 +253,7 @@ class Stream(object):
# the first two
self._job_console_uploaded(
event.change, event.rev, job.name,
-job.short_build_uuid)
+job.build_short_uuid)
break
except ConsoleNotReady as e:
@@ -272,7 +272,7 @@ class Stream(object):
elapsed = format_timedelta(datetime.datetime.now() - started_at)
msg = ("Console logs not available after %ss for %s %d,%d,%s" %
(elapsed, job.name, event.change, event.rev,
-job.short_build_uuid))
+job.build_short_uuid))
raise ResultTimedOut(msg)
self.log.debug(
@@ -284,7 +284,7 @@ class Stream(object):
for job in event.failed_jobs:
self._has_required_files(
event.change, event.rev, job.name,
-job.short_build_uuid)
+job.build_short_uuid)
self.log.info(
"All files present for change_number: %d, patch_number: %d"
% (event.change, event.rev))
@@ -298,7 +298,7 @@ class Stream(object):
elapsed = format_timedelta(datetime.datetime.now() - started_at)
msg = ("Required files not ready after %ss for %s %d,%d,%s" %
(elapsed, job.name, event.change, event.rev,
-job.short_build_uuid))
+job.build_short_uuid))
raise ResultTimedOut(msg)
def get_failed_tempest(self):
@@ -374,7 +374,7 @@ class Classifier():
es_query = qb.generic(query, facet=facet)
return self.es.search(es_query, size=size)
-def classify(self, change_number, patch_number, short_build_uuid):
+def classify(self, change_number, patch_number, build_short_uuid):
"""Returns either empty list or list with matched bugs."""
self.log.debug("Entering classify")
#Reload each time
@@ -385,7 +385,7 @@ class Classifier():
"Looking for bug: https://bugs.launchpad.net/bugs/%s"
% x['bug'])
query = qb.single_patch(x['query'], change_number, patch_number,
-short_build_uuid)
+build_short_uuid)
results = self.es.search(query, size='10')
if len(results) > 0:
bug_matches.append(x['bug'])


@@ -66,7 +66,7 @@ def single_queue(query, queue, facet=None):
(query, queue), facet=facet)
-def result_ready(review, patch, name, short_build_uuid):
+def result_ready(review, patch, name, build_short_uuid):
"""A query to determine if we have a failure for a particular patch.
This is looking for a particular FAILURE line in the console log, which
@@ -78,11 +78,11 @@ def result_ready(review, patch, name, short_build_uuid):
'AND build_change:"%s" '
'AND build_patchset:"%s" '
'AND build_name:"%s"'
-'AND build_uuid:%s*' %
-(review, patch, name, short_build_uuid))
+'AND build_short_uuid:%s' %
+(review, patch, name, build_short_uuid))
-def files_ready(review, patch, name, short_build_uuid):
+def files_ready(review, patch, name, build_short_uuid):
"""A facetted query to ensure all the required files exist.
When changes are uploaded to elastic search there is a delay in
@@ -94,12 +94,12 @@ def files_ready(review, patch, name, short_build_uuid):
'AND build_change:"%s" '
'AND build_patchset:"%s"'
'AND build_name:"%s"'
-'AND build_uuid:%s*' %
-(review, patch, name, short_build_uuid),
+'AND build_short_uuid:%s' %
+(review, patch, name, build_short_uuid),
facet='filename')
-def single_patch(query, review, patch, short_build_uuid):
+def single_patch(query, review, patch, build_short_uuid):
"""A query for a single patch (review + revision).
This is used to narrow down a particular kind of failure found in a
@@ -108,5 +108,5 @@ def single_patch(query, review, patch, short_build_uuid):
return generic('%s '
'AND build_change:"%s" '
'AND build_patchset:"%s"'
-'AND build_uuid:%s*' %
-(query, review, patch, short_build_uuid))
+'AND build_short_uuid:%s' %
+(query, review, patch, build_short_uuid))
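
A rough usage sketch of the updated builder follows. It assumes the
module is importable as elastic_recheck.query_builder and aliased to qb
as in the Stream code above; the change number, patch set, job name,
and short uuid are example values only:

    import elastic_recheck.query_builder as qb

    # Build the "result ready" query for one failed job, passing the
    # short uuid (last 7 characters of the job's log URL) rather than a
    # build_uuid wildcard.
    query = qb.result_ready(64750, 6, "gate-tempest-dsvm-full", "5dd41fe")
    # The query is then handed to the elasticsearch client, e.g.
    # results = es.search(query, size='10'), as the Stream code does above.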


@@ -45,7 +45,7 @@ class TestStream(tests.TestCase):
self.assertEqual(event.project, "openstack/keystone")
self.assertEqual(event.name(), "64750,6")
self.assertEqual(event.url, "https://review.openstack.org/64750")
-self.assertEqual(sorted(event.short_build_uuids()),
+self.assertEqual(sorted(event.build_short_uuids()),
["5dd41fe", "d3fd328"])
self.assertTrue(event.is_openstack_project())
self.assertEqual(event.queue(), "gate")
@@ -63,7 +63,7 @@ class TestStream(tests.TestCase):
self.assertEqual(event.project, "openstack/keystone")
self.assertEqual(event.name(), "64749,6")
self.assertEqual(event.url, "https://review.openstack.org/64749")
-self.assertEqual(sorted(event.short_build_uuids()),
+self.assertEqual(sorted(event.build_short_uuids()),
["5dd41fe", "d3fd328"])
self.assertTrue(event.is_openstack_project())
self.assertEqual(event.queue(), "check")
@@ -81,7 +81,7 @@ class TestStream(tests.TestCase):
self.assertEqual(event.project, "openstack/horizon")
self.assertEqual(event.name(), "63078,19")
self.assertEqual(event.url, "https://review.openstack.org/63078")
-self.assertEqual(event.short_build_uuids(), ["ab07162"])
+self.assertEqual(event.build_short_uuids(), ["ab07162"])
event = stream.get_failed_tempest()
self.assertEqual(event.change, 65361)
@@ -89,7 +89,7 @@ class TestStream(tests.TestCase):
self.assertEqual(event.project, "openstack/requirements")
self.assertEqual(event.name(), "65361,2")
self.assertEqual(event.url, "https://review.openstack.org/65361")
-self.assertEqual(event.short_build_uuids(), ["8209fb4"])
+self.assertEqual(event.build_short_uuids(), ["8209fb4"])
self.assertRaises(
fg.GerritDone,
@@ -119,7 +119,7 @@ class TestStream(tests.TestCase):
self.assertEqual(job.url,
("http://logs.openstack.org/31/64831/1/check/"
"check-requirements-integration-dsvm/135d0b4"))
-self.assertEqual(job.short_build_uuid, '135d0b4')
+self.assertEqual(job.build_short_uuid, '135d0b4')
self.assertNotIn('gate-requirements-pep8', job_names)
self.assertNotIn('gate-requirements-python27', job_names)
@@ -145,7 +145,7 @@ class TestStream(tests.TestCase):
self.assertEqual(event.project, "openstack/keystone")
self.assertEqual(event.name(), "64750,6")
self.assertEqual(event.url, "https://review.openstack.org/64750")
-self.assertEqual(sorted(event.short_build_uuids()),
+self.assertEqual(sorted(event.build_short_uuids()),
["5dd41fe", "d3fd328"])
self.assertTrue(event.is_openstack_project())
self.assertEqual(event.queue(), "gate")
@@ -171,7 +171,7 @@ class TestStream(tests.TestCase):
self.assertEqual(event.project, "openstack/keystone")
self.assertEqual(event.name(), "64749,6")
self.assertEqual(event.url, "https://review.openstack.org/64749")
-self.assertEqual(sorted(event.short_build_uuids()),
+self.assertEqual(sorted(event.build_short_uuids()),
["5dd41fe", "d3fd328"])
self.assertTrue(event.is_openstack_project())
self.assertEqual(event.queue(), "check")