Enable configurable uris in uncategorized_fails.py

Currently it is not possible to point the script at a different
database or Elasticsearch instance. Make these configurable via the
same configuration file that bot.py uses.

Also add a logstash URL setting so that it can be configured
separately from the Elasticsearch URL.

Change-Id: I77e4215765e32c34b67c38e37e5764c6c0e45c84
Ramy Asselin 2015-10-15 00:39:51 +00:00
parent 918f3fd8dd
commit 96dca00b19
3 changed files with 28 additions and 6 deletions
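To illustrate the motivation for a separate logstash URL: es_url is the Elasticsearch API endpoint that queries are executed against, while ls_url is only the base of the web dashboard that the generated report links to, so the two must be overridable independently. A minimal Python 2 sketch of the link-building step touched in the change below; the query text is made up and urllib.quote_plus stands in for qb.encode_logstash_query:

# Illustrative sketch only, not part of this change.
import urllib

# The API endpoint that hit queries are executed against
# (es_url itself is consumed by the classifier, not shown here):
es_url = 'http://logstash.openstack.org/elasticsearch'
# The dashboard base URL that report links point at:
ls_url = 'http://logstash.openstack.org'

# Made-up query text; the real code builds it per failed build_uuid and
# encodes it with qb.encode_logstash_query() rather than quote_plus().
query = 'build_uuid:"abc123" AND error_pr:["-1000.0" TO "-10.0"]'
logstash_url = ('%s/#/dashboard/file/logstash.json?%s'
                % (ls_url, urllib.quote_plus(query)))
print logstash_url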

Changed file 1 of 3 (configuration file):

@@ -14,4 +14,5 @@ key=/home/mtreinish/.ssh/id_rsa
 [data_source]
 es_url=http://logstash.openstack.org/elasticsearch
+ls_url=http://logstash.openstack.org
 db_uri=mysql+pymysql://query:query@logstash.openstack.org/subunit2sql

Changed file 2 of 3 (uncategorized_fails.py):

@@ -16,6 +16,7 @@
 import argparse
 import collections
+import ConfigParser
 import datetime
 import operator
 import re
@@ -51,6 +52,10 @@ def get_options():
                         default="queries")
     parser.add_argument('-t', '--templatedir', help="Template Directory")
     parser.add_argument('-o', '--output', help="Output File")
+    parser.add_argument('-c', '--conf', help="Elastic Recheck Configuration "
+                        "file to use for data_source options such as "
+                        "elastic search url, logstash url, and database "
+                        "uri.")
     return parser.parse_args()
@@ -105,7 +110,7 @@ def num_fails_per_build_name(all_jobs):
     return counts


-def classifying_rate(fails, data, engine, classifier):
+def classifying_rate(fails, data, engine, classifier, ls_url):
     """Builds and prints the classification rate.

     It's important to know how good a job we are doing, so this
@@ -148,8 +153,8 @@ def classifying_rate(fails, data, engine, classifier):
                      'AND error_pr:["-1000.0" TO "-10.0"] '
                      % url['build_uuid'])
             logstash_query = qb.encode_logstash_query(query)
-            logstash_url = 'http://logstash.openstack.org' \
-                '/#/dashboard/file/logstash.json?%s' % logstash_query
+            logstash_url = ('%s/#/dashboard/file/logstash.json?%s'
+                            % (ls_url, logstash_query))
             results = classifier.hits_by_query(query, size=1)
             if results:
                 url['crm114'] = logstash_url
@@ -262,11 +267,26 @@ def collect_metrics(classifier, fails):
 def main():
     opts = get_options()
-    classifier = er.Classifier(opts.dir)
+
+    # Start with defaults
+    es_url = er.ES_URL
+    ls_url = er.LS_URL
+    db_uri = er.DB_URI
+
+    if opts.conf:
+        config = ConfigParser.ConfigParser({'es_url': er.ES_URL,
+                                            'ls_url': er.LS_URL,
+                                            'db_uri': er.DB_URI})
+        config.read(opts.conf)
+        if config.has_section('data_source'):
+            es_url = config.get('data_source', 'es_url')
+            ls_url = config.get('data_source', 'ls_url')
+            db_uri = config.get('data_source', 'db_uri')
+
+    classifier = er.Classifier(opts.dir, es_url=es_url, db_uri=db_uri)
     fails = all_fails(classifier)
     data = collect_metrics(classifier, fails)
     engine = setup_template_engine(opts.templatedir)
-    html = classifying_rate(fails, data, engine, classifier)
+    html = classifying_rate(fails, data, engine, classifier, ls_url)
     if opts.output:
         with open(opts.output, "w") as f:
             f.write(html)

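As a follow-up to the main() hunk above: options missing from an existing [data_source] section fall back to the defaults passed to the ConfigParser constructor, while a missing file or a missing section leaves the module-level defaults untouched because of the has_section() guard. A minimal runnable Python 2 sketch of that behaviour (not part of this change; the conf contents are made up):

# Illustrative sketch only, not part of this change.
import ConfigParser
import tempfile

ES_URL = 'http://logstash.openstack.org/elasticsearch'
LS_URL = 'http://logstash.openstack.org'
DB_URI = 'mysql+pymysql://query:query@logstash.openstack.org/subunit2sql'

# A made-up conf file that only overrides the database URI.
conf = tempfile.NamedTemporaryFile(suffix='.conf')
conf.write('[data_source]\n'
           'db_uri=mysql+pymysql://ci:secret@db.example.org/subunit2sql\n')
conf.flush()

es_url, ls_url, db_uri = ES_URL, LS_URL, DB_URI
config = ConfigParser.ConfigParser({'es_url': ES_URL,
                                    'ls_url': LS_URL,
                                    'db_uri': DB_URI})
config.read(conf.name)
if config.has_section('data_source'):
    es_url = config.get('data_source', 'es_url')  # constructor default kept
    ls_url = config.get('data_source', 'ls_url')  # constructor default kept
    db_uri = config.get('data_source', 'db_uri')  # overridden by the file

print es_url, ls_url, db_uri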
Changed file 3 of 3 (module-level URL defaults):

@@ -28,7 +28,8 @@ import elastic_recheck.loader as loader
 import elastic_recheck.query_builder as qb
 from elastic_recheck import results

-ES_URL = "http://logstash.openstack.org/elasticsearch"
+ES_URL = 'http://logstash.openstack.org/elasticsearch'
+LS_URL = 'http://logstash.openstack.org'
 DB_URI = 'mysql+pymysql://query:query@logstash.openstack.org/subunit2sql'