Add a REST API to get a test_runs time series by test

This commit adds a new REST API method that returns a time series of
test_runs for a given test_id. The intent is to enable a per-test view
with a graph of successes and failures over time, as well as a graph of
test run_time over time.

Change-Id: Id7fe36c3e1ca069d942fe246d688648f719d3168
Matthew Treinish 2015-10-05 21:16:32 -04:00
parent 5cd6b14e7a
commit 4fd490d924
4 changed files with 62 additions and 4 deletions
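
For reference, a client-side call to the new endpoint might look roughly like the following sketch. The host, port, test_id, and date strings are illustrative placeholders; only the response shape follows from the code in this commit.

    # Illustrative request against the new endpoint; URL and parameter
    # values are placeholders, not part of this commit.
    import requests

    resp = requests.get(
        'http://localhost:5000/test_runs/tempest.api.compute.test_example',
        params={'start_date': '2015-10-01', 'stop_date': '2015-10-06'})
    # Expected shape: ISO 8601 start times mapped to per-run details, e.g.
    # {'test_runs': {'2015-10-05T21:16:32': {'run_time': 1.0,
    #                                        'status': 'success',
    #                                        'run_id': 'fake_run_id'}}}
    print(resp.json()['test_runs'])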


@@ -29,7 +29,7 @@ from sqlalchemy.orm import sessionmaker
 from subunit2sql.db import api
 
 from run_aggregator import RunAggregator
-from test_run_aggregator import TestRunAggregator
+import test_run_aggregator
 
 app = flask.Flask(__name__)
 app.config['PROPAGATE_EXCEPTIONS'] = True
@@ -156,8 +156,8 @@ def get_test_runs_by_build_name(build_name):
                 ' choice' % datetime_resolution), 400
     tests = api.get_test_run_dict_by_run_meta_key_value(key, value, start_date,
                                                         stop_date, session)
-    tests = (TestRunAggregator(tests)
-             .aggregate(datetime_resolution=datetime_resolution))
+    tests = test_run_aggregator.TestRunAggregator(tests).aggregate(
+        datetime_resolution=datetime_resolution)
     return jsonify({'tests': tests})
@@ -303,6 +303,20 @@ def parse_command_line_args():
     return parser.parse_args()
 
 
+@app.route('/test_runs/<string:test_id>', methods=['GET'])
+def get_test_runs_for_test(test_id):
+    global Session
+    session = Session()
+    start_date = _parse_datetimes(flask.request.args.get('start_date', None))
+    stop_date = _parse_datetimes(flask.request.args.get('stop_date', None))
+    db_test_runs = api.get_test_runs_by_test_test_id(test_id, session=session,
+                                                     start_date=start_date,
+                                                     stop_date=stop_date)
+    test_runs = test_run_aggregator.convert_test_runs_list_to_time_series_dict(
+        db_test_runs)
+    return jsonify({'test_runs': test_runs})
+
+
 def main():
     global config
     args = parse_command_line_args()
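
The new route can also be exercised in-process with Flask's test client, much as the unit test below does, but without mocking the database layer. A minimal sketch, assuming the API module above is importable as rest_api (an assumed name) and that its Session factory is already configured:

    # In-process sketch using Flask's test client; `rest_api` is an assumed
    # import name for the module changed above, with Session already set up.
    import json

    import rest_api

    client = rest_api.app.test_client()
    res = client.get('/test_runs/sample.test.id')
    assert res.status_code == 200
    data = json.loads(res.data)
    # data['test_runs'] maps each run's ISO 8601 start time to a dict with
    # 'run_time', 'status', and 'run_id' keys.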


@@ -17,6 +17,28 @@ from subunit2sql import read_subunit
 from base_aggregator import BaseAggregator
 
 
+def convert_test_runs_list_to_time_series_dict(test_runs_list):
+    test_runs = {}
+    for test_run in test_runs_list:
+        # Populate dict
+        start_time = test_run.start_time
+        if start_time and test_run.start_time_microsecond:
+            start_time = start_time.replace(
+                microsecond=test_run.start_time_microsecond)
+        if test_run.stop_time:
+            stop_time = test_run.stop_time
+        if test_run.stop_time_microsecond:
+            stop_time = stop_time.replace(
+                microsecond=test_run.stop_time_microsecond)
+        test_run_dict = {
+            'run_time': read_subunit.get_duration(start_time, stop_time),
+            'status': test_run.status,
+            'run_id': test_run.run_id
+        }
+        test_runs[start_time.isoformat()] = test_run_dict
+    return test_runs
+
+
 class Status(object):
     def __init__(self, status):
         self.status = status
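
To make the behaviour of the new helper concrete, here is a small sketch that feeds it a stand-in object in place of a subunit2sql TestRun row. FakeTestRun and its values are illustrative and not part of the commit.

    import datetime

    from test_run_aggregator import convert_test_runs_list_to_time_series_dict


    class FakeTestRun(object):
        # Stand-in for subunit2sql's TestRun model; only the attributes the
        # helper reads are defined, with illustrative values.
        start_time = datetime.datetime(2015, 10, 5, 21, 16, 32)
        start_time_microsecond = 0
        stop_time = datetime.datetime(2015, 10, 5, 21, 16, 33)
        stop_time_microsecond = 0
        status = 'success'
        run_id = 'fake_run_id'


    result = convert_test_runs_list_to_time_series_dict([FakeTestRun()])
    # Assuming read_subunit.get_duration() reports the duration in seconds
    # (which is what the unit test below expects), result would be:
    # {'2015-10-05T21:16:32': {'run_time': 1.0,
    #                          'status': 'success',
    #                          'run_id': 'fake_run_id'}}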


@@ -661,3 +661,25 @@ class TestRestAPI(base.TestCase):
         ]
         successful_runs = api._calc_amount_of_successful_runs(runs)
         self.assertEqual(successful_runs, 1)
+
+    @mock.patch('subunit2sql.db.api.get_test_runs_by_test_test_id',
+                return_value=[models.TestRun(
+                    id='fake_id', test_id='test.id', run_id='fake_run_id',
+                    status='success', start_time=timestamp_a,
+                    stop_time=timestamp_b)])
+    def test_get_test_runs_for_test(self, api_mock):
+        api.Session = mock.MagicMock()
+        res = self.app.get('/test_runs/fake.test.id')
+        self.assertEqual(200, res.status_code)
+        exp_result = {'test_runs': {
+            timestamp_a.isoformat(): {
+                'run_time': 1.0,
+                'status': 'success',
+                'run_id': 'fake_run_id',
+            }
+        }}
+        response_data = json.loads(res.data)
+        self.assertEqual(exp_result, response_data)
+        api_mock.assert_called_once_with('fake.test.id', start_date=None,
+                                         stop_date=None,
+                                         session=api.Session())
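
The timestamp_a and timestamp_b fixtures referenced by the mock are defined elsewhere in the test module and are not shown in this hunk; for the expected run_time of 1.0 they would need to be exactly one second apart, for example (hypothetical values):

    import datetime

    # Hypothetical module-level fixtures; the actual values are not shown in
    # this hunk, but they must differ by exactly one second for the expected
    # run_time of 1.0.
    timestamp_a = datetime.datetime(2015, 10, 5, 21, 16, 32)
    timestamp_b = timestamp_a + datetime.timedelta(seconds=1)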


@@ -3,7 +3,7 @@
 # process, which may cause wedges in the gate later.
 pbr<2.0,>=1.6
 flask
-subunit2sql>=0.11.0
+subunit2sql>=1.0.2
 sqlalchemy
 flask-jsonpify
 PyMySQL>=0.6.2