Fix log_url parsing for new job types

Previously, the log_dir was determined by taking the dirname() of the
log_url twice, which works when all collected 'testrepository.subunit'
files were located under 'logs/'. Now that the worker is collecting
subunit files from all jobs, this assumption can result in the Zuul
UUID being cut off from the generated log_dir. This change adds a
check to make sure path segments are only removed when the log_dir
is 'logs/' or 'logs/old/'.

Change-Id: I75c53c498261e44989cdb7bf49d909ebde2b2699
This commit is contained in:
Tim Buckley 2016-03-03 20:14:26 -07:00
parent e12fc216ed
commit 3663cc2be0
1 changed file with 11 additions and 1 deletion

View File

@@ -180,7 +180,17 @@ class Subunit2SQLProcessor(object):
# Set run metadata from gearman
log_url = subunit.pop('log_url', None)
if log_url:
log_dir = os.path.dirname(os.path.dirname(log_url))
log_dir = os.path.dirname(log_url)
# log_dir should be the top-level directory containing a job run,
# but the subunit file may be nested in 0 - 2 subdirectories (top,
# logs/, or logs/old/), so we need to safely correct the path here
log_base = os.path.basename(log_dir)
if log_base == 'logs':
log_dir = os.path.dirname(log_dir)
elif log_base == 'old':
log_dir = os.path.dirname(os.path.dirname(log_dir))
shell.CONF.set_override('artifacts', log_dir)
shell.CONF.set_override('run_meta', subunit)
# Parse subunit stream and store in DB