Merge "Keep going on network timeouts or service errors"

This commit is contained in:
Jenkins 2016-01-23 03:17:19 +00:00 committed by Gerrit Code Review
commit 0ded0c866c
2 changed files with 37 additions and 16 deletions

View File

@@ -22,6 +22,7 @@ import os
import sys
from launchpadlib import launchpad
import pyelasticsearch
import pytz
import requests
@@ -50,10 +51,10 @@ LOG = logging.getLogger('ergraph')
def get_launchpad_bug(bug):
lp = launchpad.Launchpad.login_anonymously('grabbing bugs',
'production',
LPCACHEDIR)
try:
lp = launchpad.Launchpad.login_anonymously('grabbing bugs',
'production',
LPCACHEDIR)
lp_bug = lp.bugs[bug]
bugdata = {'name': lp_bug.title}
projects = ", ".join(map(lambda x: "(%s - %s)" %
@@ -65,6 +66,10 @@ def get_launchpad_bug(bug):
# if someone makes a bug private, we lose access to it.
bugdata = dict(name='Unknown (Private Bug)',
affects='Unknown (Private Bug)', reviews=[])
except requests.exceptions.RequestException:
LOG.exception("Failed to get Launchpad data for bug %s" % bug)
bugdata = dict(name='Unable to get launchpad data',
affects='Unknown', reviews=[])
return bugdata
@@ -175,9 +180,18 @@ def main():
fails24=0,
data=[])
buglist.append(bug)
results = classifier.hits_by_query(query['query'],
args.queue,
size=3000)
try:
results = classifier.hits_by_query(query['query'],
args.queue,
size=3000)
except pyelasticsearch.exceptions.InvalidJsonResponseError:
LOG.exception("Invalid Json while collecting metrics for query %s"
% query['query'])
continue
except requests.exceptions.ReadTimeout:
LOG.exception("Timeout while collecting metrics for query %s" %
query['query'])
continue
facets_for_fail = er_results.FacetSet()
facets_for_fail.detect_facets(results,

View File

@@ -18,8 +18,10 @@ import argparse
import collections
import ConfigParser
import datetime
import logging
import operator
import re
import requests
import dateutil.parser as dp
import jinja2
@@ -42,6 +44,8 @@ EXCLUDED_JOBS = (
EXCLUDED_JOBS_REGEX = re.compile('(' + '|'.join(EXCLUDED_JOBS) + ')')
LOG = logging.getLogger('eruncategorized')
def get_options():
parser = argparse.ArgumentParser(
@@ -253,16 +257,19 @@ def _failure_percentage(hits, fails):
def collect_metrics(classifier, fails):
    """Collect per-bug hit metrics for every query known to the classifier.

    A query whose Elasticsearch lookup times out is logged and skipped, so
    one slow or unreachable backend does not abort the whole metrics run
    (this is the point of the "keep going on network timeouts" change).

    :param classifier: object exposing ``queries`` (list of dicts with
        ``'query'`` and ``'bug'`` keys) and ``hits_by_query()``.
    :param fails: overall failure count used to compute failure percentages.
    :returns: dict mapping bug id -> dict with keys ``fails``, ``hits``,
        ``percentages``, ``query`` and ``failed_jobs``.
    """
    data = {}
    for q in classifier.queries:
        try:
            # Network call to Elasticsearch; may time out on a slow cluster.
            results = classifier.hits_by_query(q['query'], size=30000)
        except requests.exceptions.ReadTimeout:
            # Skip just this query; the remaining ones still get collected.
            LOG.exception("Failed to collect metrics for query %s" %
                          q['query'])
            continue
        hits = _status_count(results)
        data[q['bug']] = {
            'fails': _failure_count(hits),
            'hits': hits,
            'percentages': _failure_percentage(results, fails),
            'query': q['query'],
            'failed_jobs': _failed_jobs(results)
        }
    return data