Work on generating indexes

Joshua Hesketh 2013-08-12 13:47:04 +10:00
parent c76ecde10b
commit cdda8272a9
2 changed files with 36 additions and 22 deletions

handle_results.py

@@ -19,37 +19,50 @@ Primarily place the log files somewhere useful and optionally email
 somebody """
 from lib.utils import push_file
+import tempfile
+import os
 
 
 def generate_log_index(datasets):
     """ Create an index of logfiles and links to them """
-    # Loop over logfile URLs
-    # Create summary and links
-    pass
+    output = '<html><head><title>Index of results</title></head><body>'
+    output += '<ul>'
+    for dataset in datasets:
+        output += '<li><a href="%s">%s</a></li>' % (dataset['result_uri'],
+                                                    dataset['name'])
+    output += '</ul>'
+    output += '</body></html>'
+    return output
 
 
-def make_index_file(datasets):
+def make_index_file(datasets, index_filename):
     """ Writes an index into a file for pushing """
-    generate_log_index(datasets)
-    # write out to file
+    index_content = generate_log_index(datasets)
+    tempdir = tempfile.mkdtemp()
+    fd = open(os.path.join(tempdir, index_filename), 'w')
+    fd.write(index_content)
+    return os.path.join(tempdir, index_filename)
 
 
-def generate_push_results(datasets):
+def generate_push_results(datasets, job_unique_number):
     """ Generates and pushes results """
     for i, dataset in enumerate(datasets):
-        files = []
-        if 'publish_to' in dataset['config']:
-            for publish_config in dataset['config']['publish_to']:
-                files.append(push_file(dataset['name'],
-                                       dataset['log_file_path'],
-                                       publish_config))
-        datasets[i]['files'] = files
+        result_uri = push_file(job_unique_number,
+                               dataset['log_file_path'],
+                               dataset['config']['publish_to'])
+        datasets[i]['result_uri'] = result_uri
 
-    #index_file = make_index_file(datasets)
-    #index_file_url = push_file(index_file)
+    index_file = make_index_file(datasets, 'index.html')
+    index_file_url = push_file(job_unique_number,
+                               index_file,
+                               publish_config)
 
-    return files[0]
+    return index_file_url
 
 
 def check_log_for_errors(logfile):
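
A note on the new generate_push_results: the final push_file call still names publish_config, which was the loop variable of the deleted inner loop and is no longer bound anywhere in the function, so the index push would raise NameError when reached. make_index_file also leaves its file handle open. A minimal sketch of the apparently intended flow, with the unbound name replaced by a dataset's publish_to config (an assumption on my part) and the handle closed before pushing:

import os
import tempfile

from lib.utils import push_file  # same import as the module above


def generate_log_index(datasets):
    """ Create an index of logfiles and links to them """
    output = '<html><head><title>Index of results</title></head><body>'
    output += '<ul>'
    for dataset in datasets:
        output += '<li><a href="%s">%s</a></li>' % (dataset['result_uri'],
                                                    dataset['name'])
    output += '</ul>'
    output += '</body></html>'
    return output


def make_index_file(datasets, index_filename):
    """ Writes an index into a file for pushing """
    index_content = generate_log_index(datasets)
    tempdir = tempfile.mkdtemp()
    index_path = os.path.join(tempdir, index_filename)
    with open(index_path, 'w') as fd:
        # close the handle before the file is pushed elsewhere
        fd.write(index_content)
    return index_path


def generate_push_results(datasets, job_unique_number):
    """ Generates and pushes results """
    for i, dataset in enumerate(datasets):
        result_uri = push_file(job_unique_number,
                               dataset['log_file_path'],
                               dataset['config']['publish_to'])
        datasets[i]['result_uri'] = result_uri

    index_file = make_index_file(datasets, 'index.html')
    # assumption: reuse a dataset's publish_to config for the index;
    # the committed code's bare 'publish_config' is unbound at this point
    index_file_url = push_file(job_unique_number,
                               index_file,
                               datasets[-1]['config']['publish_to'])
    return index_file_url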

(second changed file: the module defining class Runner; its filename is not shown)

@@ -141,7 +141,8 @@ class Runner(threading.Thread):
     def _handle_results(self):
         """ pass over the results to handle_results.py for post-processing """
         self.log.debug("Process the resulting files (upload/push)")
-        index_url = handle_results.generate_push_results(self._get_datasets())
+        index_url = handle_results.generate_push_results(self._get_datasets(),
+                                                         self.job.unique)
         self.log.debug("Index URL found at %s" % index_url)
         self.work_data['url'] = index_url
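
push_file itself lives in lib.utils and is outside this commit, so its signature (job prefix, file path, publish config) can only be inferred from the call sites. For orientation, a hypothetical local-copy publisher consistent with those calls; the publish_config keys 'path' and 'prepend_url' are invented for the illustration:

import os
import shutil


def push_file(job_unique_number, file_path, publish_config):
    """ Hypothetical publisher: copies file_path into a per-job
    directory and returns a URL for it.  The real lib.utils.push_file
    is not part of this diff. """
    dest_dir = os.path.join(publish_config['path'], str(job_unique_number))
    if not os.path.isdir(dest_dir):
        os.makedirs(dest_dir)
    shutil.copy(file_path, dest_dir)
    return '%s/%s/%s' % (publish_config['prepend_url'],
                         job_unique_number,
                         os.path.basename(file_path))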
@@ -175,12 +176,9 @@ class Runner(threading.Thread):
             dataset = {}
             dataset['name'] = ent
             dataset['path'] = os.path.join(datasets_path, ent)
-            dataset['job_working_dir'] = os.path.join(
-                self.config['jobs_working_dir'],
-                self.job.unique
-            )
             dataset['log_file_path'] = os.path.join(
-                dataset['job_working_dir'],
+                self.config['jobs_working_dir'],
+                self.job.unique,
                 dataset['name'] + '.log'
             )
             with open(os.path.join(dataset['path'], 'config.json'),
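
With the intermediate job_working_dir key dropped from the dataset dict, the log path is now composed in a single os.path.join. Illustrated with made-up values standing in for the config entry and job id:

import os

jobs_working_dir = '/var/lib/jobs'  # stand-in for self.config['jobs_working_dir']
job_unique = 'abc123'               # stand-in for self.job.unique
name = 'my_dataset'                 # stand-in for dataset['name']

log_file_path = os.path.join(jobs_working_dir, job_unique, name + '.log')
print(log_file_path)  # /var/lib/jobs/abc123/my_dataset.log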
@@ -214,7 +212,10 @@ class Runner(threading.Thread):
                      ' %(dataset_path)s %(pip_cache_dir)s')
                     % {
                         'unique_id': self.job.unique,
-                        'job_working_dir': dataset['job_working_dir'],
+                        'job_working_dir': os.path.join(
+                            self.config['jobs_working_dir'],
+                            self.job.unique
+                        ),
                         'git_path': git_path,
                         'dbuser': dataset['config']['db_user'],
                         'dbpassword': dataset['config']['db_pass'],
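
The same join now also feeds the %(job_working_dir)s placeholder in the migration command template. A small self-contained example of Python's named-placeholder substitution as used here (the template and values are invented, not the real command):

import os

# invented stand-ins for self.config['jobs_working_dir'] and self.job.unique
jobs_working_dir = '/var/lib/jobs'
job_unique = 'abc123'

cmd = ('./run_migration.sh %(unique_id)s %(job_working_dir)s'
       ' %(git_path)s'
       % {
           'unique_id': job_unique,
           'job_working_dir': os.path.join(jobs_working_dir, job_unique),
           'git_path': '/tmp/git',
       })
print(cmd)  # ./run_migration.sh abc123 /var/lib/jobs/abc123 /tmp/git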