Fix even more pep8 violations

This commit is contained in:
Austin Clark 2015-08-04 14:27:28 -06:00
parent 07b8a3246f
commit aed4cba45e
8 changed files with 65 additions and 78 deletions

View File

@ -25,7 +25,7 @@ def inject_extra_context(request):
if repos:
ret.update({
'tempest_latest_run': get_repositories()[0].latest_id(),
'tempest_runs': xrange(get_repositories()[0].count()),
'tempest_runs': range(get_repositories()[0].count()),
})
return ret

View File

@ -42,10 +42,9 @@ TIMESTAMP_FORMAT = '%Y-%m-%d %H:%M:%S.%f'
def extract_date(line):
"""Extracts a date from the given line
"""
Extracts a date from the given line, returning the parsed date and
remaining contents of the line.
Returns the parsed date and remaining contents of the line.
:param line: the line to extract a date from
:return: a tuple of the parsed date and remaining line contents
@ -58,11 +57,11 @@ def extract_date(line):
def parse_summary(summary_path):
"""Parses a summary logfile
"""
Parses a summary logfile. Summary entries are prefixed with identical
datestamps to those in the main log, but have only explicit log messages
denoting the overall execution progress.
Summary entries are prefixed with identical datestamps to those in the
main log, but have only explicit log messages denoting the overall
execution progress.
While summary entries are also printed into the main log, the explicit
summary file is used to simplify parsing.
@ -90,10 +89,10 @@ def parse_summary(summary_path):
def parse_log(log_path):
"""Parses a general `stack.sh` logfile, forming a full log tree
"""
Parses a general `stack.sh` logfile, forming a full log tree based on the
hierarchy of nested commands as presented in the log.
The log tree is based on the hierarchy of nested commands as presented
in the log.
Note that command output (that is, lines not prefixed with one or more '+'
symbols) is ignored and will not be included in the returned list of log
@ -151,8 +150,8 @@ def parse_log(log_path):
def merge(summary, log):
"""Merges log entries into parent categories based on timestamp.
"""
Merges general log entries into parent categories based on their timestamp
relative to the summary output timestamp.
@ -189,11 +188,10 @@ def merge(summary, log):
def bootstrap(log_path, summary_path=None):
"""Loads, parses, and merges the given log and summary files.
"""
Loads, parses, and merges the given log and summary files. The path to the
summary file will be determined automatically based on the path to the
general logfile, but it must exist within the same directory.
The path to the summary file will be determined automatically based on the
path to the general logfile, but it must exist within the same directory.
If the log file names are changed from their default values, a summary path
can be explicitly provided using the optional `summary_path` parameter.

View File

@ -12,6 +12,8 @@
# License for the specific language governing permissions and limitations
# under the License.
import six
from datetime import datetime
from datetime import timedelta
@ -23,9 +25,9 @@ DEFAULT_PRUNE_CUTOFF = 0.05
class LogNode(object):
"""Represents an entry in an ordered event log.
"""
Represents an entry in an ordered event log, consisting of a date, message,
Represents an entry in an ordered event log, consisting of a date, message,
and an arbitrary set of child nodes.
Note that entries are assumed to be strictly sequential and linear, and all
@ -44,8 +46,8 @@ class LogNode(object):
@property
def duration(self):
"""Determines aggregate duration for this node
"""
Determines the overall duration for this node, beginning at this parent
node's start time through the final child's ending time.
"""
@ -74,11 +76,7 @@ class LogNode(object):
return self.next_sibling.date - self.date
def traverse(self):
"""
A generator that will traverse all child nodes of this log tree
sequentially.
"""
"""A generator that traverses all nodes of this tree sequentially"""
for child in self.children:
yield child
@ -149,7 +147,7 @@ class LogNode(object):
else:
if value is None:
p.text(tc.Blue)
elif isinstance(value, basestring):
elif isinstance(value, six.string_types):
p.text(tc.Red)
elif isinstance(value, Number):
p.text(tc.DarkGray)
@ -161,8 +159,8 @@ class LogNode(object):
def prune(nodes, cutoff=DEFAULT_PRUNE_CUTOFF, fill=None):
"""Prunes given list of `LogNode` instances.
"""
Prunes the given list of `LogNode` instances, removing nodes whose duration
is less than the given cutoff value. If a `fill` value is provided, removed
nodes will be replaced with a single filler value accounting for the lost

View File

@ -33,11 +33,11 @@ NAME_TAGS_PATTERN = re.compile(r'^(.+)\[(.+)\]$')
def get_repositories():
"""Loads all test repositories from locations configured in settings
"""
Loads all test repositories from locations configured in
`settings.TEST_REPOSITORIES`. Only locations with a valid `.testrepository`
subdirectory containing valid test entries will be returned.
These locations are found in `settings.TEST_REPOSITORIES`. Only locations
with a valid `.testrepository` subdirectory containing valid test entries
will be returned.
:return: a list of loaded :class:`Repository` instances
:rtype: list[Repository]
@ -96,11 +96,10 @@ def _read_test(test, out, strip_details):
def convert_run(test_run, strip_details=False):
"""Converts the given test run into a raw list of test dicts.
"""
Converts the given test run into a raw list of test dicts, using the
subunit stream as an intermediate format.(see: read_subunit.py from
subunit2sql)
Uses the subunit stream as an intermediate format. (See: read_subunit.py
from subunit2sql)
:param test_run: the test run to convert
:type test_run: AbstractTestRun
@ -154,8 +153,8 @@ def _descend_recurse(parent, parts_remaining):
def _descend(root, path):
"""Retrieves the node within the 'root' dict
"""
Retrieves the node within the `root` dict denoted by the series of
'.'-separated children as specified in `path`. Children for each node must
be contained in a list `children`, and name comparison will be
@ -179,8 +178,8 @@ def _descend(root, path):
def reorganize(converted_test_run):
"""Reorganizes test run, forming trees based on module paths
"""
Reorganizes and categorizes the given test run, forming tree of tests
categorized by their module paths.

View File

@ -22,7 +22,7 @@
<div class="panel panel-heading" id="runs-panel-heading">Analyze Run</div>
<div class="panel panel-body">
<div id="gerrit-panel">
Enter a Gerrit ChangeID: <input type="text" id="gerrit-id">
Enter a Gerrit Change ID (six-digit): <input type="text" id="gerrit-id">
<input id="gerrit-id-button" type="button" value="Submit">
</div>
<div id="runs-panel"></div>

View File

@ -26,25 +26,26 @@ from api import TempestRunTreeEndpoint
urlpatterns = patterns('',
url(r'^results_(?P<run_id>\d+).html$',
ResultsView.as_view(),
name='tempest_results'),
url(r'^timeline_(?P<run_id>\d+).html$',
TimelineView.as_view(),
name='tempest_timeline'),
url(r'^results_(?P<run_id>\d+).html$',
ResultsView.as_view(),
name='tempest_results'),
url(r'^timeline_(?P<run_id>\d+).html$',
TimelineView.as_view(),
name='tempest_timeline'),
url(r'^api_tree_(?P<run_id>\d+).json$',
TempestRunTreeEndpoint.as_view(),
name='tempest_api_tree'),
url(r'^api_raw_(?P<run_id>\d+).json$',
TempestRunRawEndpoint.as_view(),
name='tempest_api_raw'),
url(r'^api_details_(?P<run_id>\d+).json$',
TempestRunDetailsEndpoint.as_view()),
url(r'^api_details_(?P<run_id>\d+)_(?P<test_name>[^/]+).json$',
TempestRunDetailsEndpoint.as_view()),
url(r'^api_tree_(?P<run_id>\d+).json$',
TempestRunTreeEndpoint.as_view(),
name='tempest_api_tree'),
url(r'^api_raw_(?P<run_id>\d+).json$',
TempestRunRawEndpoint.as_view(),
name='tempest_api_raw'),
url(r'^api_details_(?P<run_id>\d+).json$',
TempestRunDetailsEndpoint.as_view()),
url(r'^api_details_(?P<run_id>\d+)_(?P<test_name>[^/]+)'
r'.json$',
TempestRunDetailsEndpoint.as_view()),
url(r'^aggregate.html$',
AggregateResultsView.as_view(),
name='tempest_aggregate_results'),
)
url(r'^aggregate.html$',
AggregateResultsView.as_view(),
name='tempest_aggregate_results'),
)

View File

@ -21,13 +21,13 @@ from sqlalchemy.orm import sessionmaker
def _get_runs(change_id):
"""Returns the dict of run objects associated with a changeID
'''
When given the change_id of a Gerrit change, a connection will be made to
the upstream subunit2sql db and query all run meta having that change_id
:param change_id: the Gerrit change_id to query
:return: a json dict of run_meta objects
'''
"""
engine = create_engine('mysql://query:query@logstash.openstack.org' +
':3306/subunit2sql')
@ -51,12 +51,4 @@ def _get_runs(change_id):
class GerritURLEndpoint(Endpoint):
def get(self, request, change_id):
'''
:param request:
:param change_id:
:return: Collection of run objects associated with a
specific CID
'''
return _get_runs(change_id)

View File

@ -21,16 +21,15 @@ from test import TestView
from api import GerritURLEndpoint
urlpatterns = patterns('',
url(r'^run.html$',
RunView.as_view(),
name='run_metadata'),
url(r'^run.html$',
RunView.as_view(),
name='run_metadata'),
url(r'^test.html$',
TestView.as_view(),
name='test_data'),
url(r'^test.html$',
TestView.as_view(),
name='test_data'),
url(r'^api_changeid_(?P<change_id>\d+).json$',
GerritURLEndpoint.as_view(),
name='gerrit_url')
)
url(r'^api_changeid_(?P<change_id>\d+).json$',
GerritURLEndpoint.as_view(),
name='gerrit_url')
)