Merge "Add subunit output support"
This commit is contained in:
commit
8661bfa69e
|
@ -15,12 +15,14 @@
|
|||
# You should have received a copy of the GNU General Public License
|
||||
# along with ARA. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import six
|
||||
import sys
|
||||
|
||||
from ara import models
|
||||
from ara import utils
|
||||
from cliff.command import Command
|
||||
from flask_frozen import Freezer, walk_directory
|
||||
from flask_frozen import MissingURLGeneratorWarning
|
||||
|
@ -28,6 +30,8 @@ from junit_xml import TestCase
|
|||
from junit_xml import TestSuite
|
||||
from oslo_utils import encodeutils
|
||||
from oslo_serialization import jsonutils
|
||||
from subunit import iso8601
|
||||
from subunit.v2 import StreamResultToBytes
|
||||
from warnings import filterwarnings
|
||||
|
||||
|
||||
|
@ -152,3 +156,121 @@ class GenerateJunit(Command):
|
|||
else:
|
||||
with open(args.output_file, 'wb') as f:
|
||||
f.write(encodeutils.safe_encode(xml_string))
|
||||
|
||||
|
||||
class GenerateSubunit(Command):
    """ Generate subunit binary stream from ARA data """
    log = logging.getLogger(__name__)

    def get_parser(self, prog_name):
        """ Builds the argument parser: a positional output file (or '-'
        for stdout) and an optional list of playbook ids to filter on. """
        parser = super(GenerateSubunit, self).get_parser(prog_name)
        parser.add_argument(
            'output_file',
            metavar='<output file>',
            help='The file to write the subunit binary stream to. '
                 'Use "-" for stdout.',
        )
        parser.add_argument(
            '--playbook',
            metavar='<playbook>',
            nargs='+',
            help='Only include the specified playbooks in the generation.',
            required=False,
            default=None,
        )

        return parser

    def take_action(self, args):
        """ Streams every (optionally playbook-filtered) task result as a
        subunit v2 event pair (start, end) to the requested output. """
        # Setup where the output stream must go
        if args.output_file == '-':
            # The subunit v2 writer emits bytes; on Python 3 sys.stdout is
            # a text stream, so use its underlying binary buffer when it
            # exists (Python 2 stdout has no .buffer attribute).
            output_stream = getattr(sys.stdout, 'buffer', sys.stdout)
            close_stream = False
        else:
            output_stream = open(args.output_file, 'wb')
            close_stream = True

        try:
            # Create the output stream
            output = StreamResultToBytes(output_stream)

            # Create the test run
            output.startTestRun()

            if args.playbook is not None:
                playbooks = args.playbook
                results = (models.TaskResult().query
                           .join(models.Task)
                           .filter(models.TaskResult.task_id == models.Task.id)
                           .filter(models.Task.playbook_id.in_(playbooks)))
            else:
                results = models.TaskResult().query.all()

            for result in results:
                # Generate a fixed length identifier for the task
                test_id = utils.generate_identifier(result)

                # Assign the test_status value.
                # A failure tolerated through ignore_errors is an expected
                # failure ('xfail'); an unhandled failure is a genuine
                # 'fail'. (The original mapping had these two inverted.)
                if result.status in ('failed', 'unreachable'):
                    if result.ignore_errors:
                        test_status = 'xfail'
                    else:
                        test_status = 'fail'
                elif result.status == 'skipped':
                    test_status = 'skip'
                else:
                    test_status = 'success'

                # Determine the play file path
                if result.task.playbook and result.task.playbook.path:
                    playbook_path = result.task.playbook.path
                else:
                    playbook_path = ''

                # Determine the task file path
                if result.task.file and result.task.file.path:
                    task_path = result.task.file.path
                else:
                    task_path = ''

                # Assign the file_bytes value: a JSON document attached to
                # the subunit event describing where this task came from.
                test_data = {
                    'host': result.host.name,
                    'playbook_id': result.task.playbook.id,
                    'playbook_path': playbook_path,
                    'play_name': result.task.play.name,
                    'task_action': result.task.action,
                    'task_action_lineno': result.task.lineno,
                    'task_id': result.task.id,
                    'task_name': result.task.name,
                    'task_path': task_path
                }
                file_bytes = encodeutils.safe_encode(jsonutils.dumps(test_data))

                # Assign the start_time and stop_time value
                # The timestamp needs to be an epoch, so we need
                # to convert it and pin the result to UTC.
                start_time = datetime.datetime.fromtimestamp(
                    float(result.time_start.strftime('%s'))
                ).replace(tzinfo=iso8601.UTC)
                end_time = datetime.datetime.fromtimestamp(
                    float(result.time_end.strftime('%s'))
                ).replace(tzinfo=iso8601.UTC)

                # Output the start of the event
                output.status(
                    test_id=test_id,
                    timestamp=start_time
                )

                # Output the end of the event
                output.status(
                    test_id=test_id,
                    test_status=test_status,
                    test_tags=None,
                    runnable=False,
                    file_name=test_id,
                    file_bytes=file_bytes,
                    timestamp=end_time,
                    eof=True,
                    mime_type='text/plain; charset=UTF8'
                )

            output.stopTestRun()
        finally:
            # Don't leak the file handle when writing to a real file
            if close_stream:
                output_stream.close()
|
||||
|
|
|
@ -23,8 +23,10 @@ import tempfile
|
|||
|
||||
from distutils.version import LooseVersion
|
||||
from flask_frozen import MissingURLGeneratorWarning
|
||||
from glob import glob
|
||||
from lxml import etree
|
||||
from oslo_serialization import jsonutils
|
||||
from subunit._to_disk import to_disk
|
||||
|
||||
import ara.shell
|
||||
import ara.cli.data
|
||||
|
@ -859,3 +861,67 @@ class TestCLIGenerate(TestAra):
|
|||
self.assertEqual(tree.getroot()[0].tag, "testsuite")
|
||||
self.assertEqual(tree.getroot()[0][0].tag, "testcase")
|
||||
self.assertEqual(int(tree.getroot().get('tests')), len(tasks))
|
||||
|
||||
def test_generate_subunit(self):
    """ Roughly ensures the expected subunit is generated properly """
    tdir = self.generate_dir

    ansible_run()
    cmd = ara.cli.generate.GenerateSubunit(None, None)
    parser = cmd.get_parser('test')

    subunit_file = os.path.join(tdir, 'test.subunit')
    subunit_dir = os.path.join(tdir, 'subunit_dir')
    args = parser.parse_args([subunit_file])
    cmd.take_action(args)

    self.assertTrue(os.path.exists(subunit_file))
    # Dump the subunit binary stream to some files we can read and assert.
    # The stream is binary, so it must be opened in 'rb' mode: opening it
    # in text mode breaks decoding on Python 3.
    with open(subunit_file, 'rb') as f:
        to_disk(['-d', subunit_dir], stdin=f)

    # Get *.json files, load them and test them
    data = []
    testfiles = glob("%s/%s" % (subunit_dir, '*/*.json'))
    for testfile in testfiles:
        with open(testfile, 'rb') as f:
            data.append(jsonutils.load(f))

    keys = ['status', 'tags', 'stop', 'start', 'details', 'id']
    for result in data:
        # Test that we have the expected keys, no more, no less
        self.assertEqual(set(result.keys()), set(keys))

    # Get non-json files, load them and test them
    data = []
    testfiles = [fn for fn in glob("%s/%s" % (subunit_dir, '*/*'))
                 if not os.path.basename(fn).endswith('json')]
    for testfile in testfiles:
        with open(testfile, 'rb') as f:
            data.append(jsonutils.load(f))

    keys = ['host', 'playbook_id', 'playbook_path', 'play_name',
            'task_action', 'task_action_lineno', 'task_id', 'task_name',
            'task_path']
    for result in data:
        # Test that we have the expected keys, no more, no less
        self.assertEqual(set(result.keys()), set(keys))

        # Test that we have matching data for playbook records
        playbook = m.Playbook.query.get(result['playbook_id'])
        self.assertEqual(playbook.id, result['playbook_id'])
        self.assertEqual(playbook.path, result['playbook_path'])

        # Test that we have matching data for task records
        task = m.Task.query.get(result['task_id'])
        self.assertEqual(task.id, result['task_id'])
        self.assertEqual(task.action, result['task_action'])
        self.assertEqual(task.file.path, result['task_path'])
        self.assertEqual(task.lineno, result['task_action_lineno'])
        self.assertEqual(task.name, result['task_name'])
|
||||
|
|
31
ara/utils.py
31
ara/utils.py
|
@ -17,11 +17,42 @@
|
|||
|
||||
from ara import models
|
||||
from oslo_serialization import jsonutils
|
||||
from oslo_utils import encodeutils
|
||||
from sqlalchemy import func
|
||||
|
||||
import hashlib
|
||||
import pyfakefs.fake_filesystem as fake_filesystem
|
||||
|
||||
|
||||
def generate_identifier(result):
    """
    Returns a fixed length identifier based on a hash of a combined set of
    playbook/task values which are as close as we can guess to unique for each
    task.

    :param result: a task result object exposing ``result.task`` with
                   ``playbook.path``, ``play.name``, ``file.path`` and
                   ``name`` attributes (e.g. an ARA TaskResult model)
    :return: a 40-character hexadecimal SHA1 digest string
    """
    # Determine the playbook file path to use for the ID
    if result.task.playbook and result.task.playbook.path:
        playbook_file = result.task.playbook.path
    else:
        playbook_file = ''
    play_path = u'%s.%s' % (playbook_file, result.task.play.name)

    # Determine the task file path to use for the ID
    if result.task.file and result.task.file.path:
        task_file = result.task.file.path
    else:
        task_file = ''
    task_path = u'%s.%s' % (task_file, result.task.name)

    # Combine both of the above for a full path
    identifier_path = u'%s.%s' % (play_path, task_path)

    # identifier_path is always a text string here (built with u'%s.%s'),
    # so a plain UTF-8 encode is equivalent to encodeutils.to_utf8() and
    # keeps this helper free of the third-party call.
    return hashlib.sha1(identifier_path.encode('utf-8')).hexdigest()
|
||||
|
||||
|
||||
def get_summary_stats(items, attr):
|
||||
"""
|
||||
Returns a dictionary of aggregated statistics for 'items' filtered by
|
||||
|
|
|
@ -415,3 +415,42 @@ This is done by retrieving the playbook IDs you are interested in with
|
|||
<testcase classname="localhost._home_dev_ara_ara_tests_integration_smoke_yml.ARA_Tasks_test_play" name="Remove a file if it doesn't exist"/>
|
||||
<testcase classname="localhost._home_dev_ara_ara_tests_integration_smoke_yml.ARA_Tasks_test_play" name="Remove a file if it exists">
|
||||
[...]
|
||||
|
||||
Generating a static subunit version of the task results
|
||||
-------------------------------------------------------
|
||||
|
||||
ARA is able to generate a subunit report that contains task results and their
|
||||
status.
|
||||
|
||||
This is done with the ``ara generate subunit`` command.
|
||||
|
||||
By default, ARA will generate a report on all task results across all the
|
||||
recorded playbook runs in its database.
|
||||
It is also possible to generate a report for one or many specific playbooks.
|
||||
This is done by retrieving the playbook IDs you are interested in with
|
||||
``ara playbook list`` and then using the ``ara generate subunit`` command with the
|
||||
``--playbook`` parameter::
|
||||
|
||||
$ ara help generate subunit
|
||||
usage: ara generate subunit [-h] [--playbook <playbook> [<playbook> ...]]
|
||||
<output file>
|
||||
|
||||
Generate subunit binary stream from ARA data
|
||||
|
||||
positional arguments:
|
||||
<output file> The file to write the subunit binary stream to. Use
|
||||
"-" for stdout.
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--playbook <playbook> [<playbook> ...]
|
||||
Only include the specified playbooks in the
|
||||
generation.
|
||||
|
||||
$ ara generate subunit - | subunit2csv
|
||||
test,status,start_time,stop_time
|
||||
50d4e04fe034bea7479bc4a3fa3703254298baa8,success,2017-07-28 03:07:21+00:00,2017-07-28 03:07:21+00:00
|
||||
a62f7a36683972efe1ef6e51e389417521502153,success,2017-07-28 03:07:22+00:00,2017-07-28 03:07:22+00:00
|
||||
8902778f958439806aee2a22c26d8b79dc61c964,success,2017-07-28 03:07:22+00:00,2017-07-28 03:07:22+00:00
|
||||
fd2d199b22b635ed82b41d5edf8c1774f64484dc,success,2017-07-28 03:07:22+00:00,2017-07-28 03:07:22+00:00
|
||||
[...]
|
||||
|
|
|
@ -9,6 +9,7 @@ Flask-Script
|
|||
Frozen-Flask
|
||||
decorator>=4.0.0
|
||||
cliff
|
||||
python-subunit
|
||||
setuptools>=11.3
|
||||
pygments
|
||||
debtcollector>=1.2.0
|
||||
|
|
|
@ -173,11 +173,17 @@ export ARA_PLAYBOOK_PER_PAGE=3
|
|||
export ARA_RESULT_PER_PAGE=20
|
||||
ara generate html ${LOGDIR}/build
|
||||
ara generate html ${LOGDIR}/build-playbook --playbook $pbid
|
||||
|
||||
ara generate junit ${LOGDIR}/junit.xml
|
||||
ara generate junit ${LOGDIR}/junit-playbook.xml --playbook $pbid
|
||||
ara generate junit -
|
||||
python ara/tests/integration/helpers/junit_check.py ${LOGDIR}/junit.xml
|
||||
|
||||
ara generate subunit ${LOGDIR}/results.subunit
|
||||
ara generate subunit ${LOGDIR}/results-playbook.subunit --playbook $pbid
|
||||
ara generate subunit - > ${LOGDIR}/results-stdout.subunit
|
||||
subunit2pyunit ${LOGDIR}/results.subunit 2>&1 | cat > ${LOGDIR}/subunit2pyunit.txt
|
||||
|
||||
# It's important that ARA behaves well when gzipped
|
||||
gzip --best --recursive ${LOGDIR}/build
|
||||
|
||||
|
|
Loading…
Reference in New Issue