Fix JSON generation when scenario has hooks
Patch the code to match the hooks' internal structure, and generate a JSON report in the `self` verification job. Change-Id: Ia431460439473ab93fc2384054ede91bd02713e3 Closes-Bug: #1734336
This commit is contained in:
parent
f8757e412f
commit
a6dfe34b97
|
@ -1,109 +1,113 @@
|
|||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import collections
|
||||
import datetime as dt
|
||||
import json
|
||||
|
||||
from oslo_utils import timeutils
|
||||
|
||||
from rally.common import version as rally_version
|
||||
from rally.task import exporter
|
||||
|
||||
TIMEFORMAT = "%Y-%m-%dT%H:%M:%S"
|
||||
|
||||
|
||||
@exporter.configure("json")
class JSONExporter(exporter.TaskExporter):
    """Generates task report in JSON format."""

    def _generate_tasks(self):
        """Convert ``self.tasks_results`` into JSON-serializable structures.

        Returns a list with one ``collections.OrderedDict`` per task;
        OrderedDicts are used so the key order in the emitted JSON is
        stable and readable.
        """
        tasks = []
        for task in self.tasks_results:
            subtasks = []
            for subtask in task["subtasks"]:
                workloads = []
                for workload in subtask["workloads"]:
                    # BUG FIX: a hook record stores its "action" and
                    # "trigger" (name, args) pairs under h["config"], not at
                    # the top level, so the previous h["action"]/h["trigger"]
                    # lookups raised KeyError for any scenario with hooks.
                    # Also include the hook description, per-run results and
                    # summary instead of silently dropping them.
                    hooks = [{
                        "config": {
                            "action": dict([h["config"]["action"]]),
                            "trigger": dict([h["config"]["trigger"]]),
                            "description": h["config"]["description"]},
                        "results": h["results"],
                        "summary": h["summary"],
                    } for h in workload["hooks"]]
                    workloads.append(
                        collections.OrderedDict(
                            [("uuid", workload["uuid"]),
                             ("description", workload["description"]),
                             ("runner", {
                                 workload["runner_type"]: workload["runner"]}),
                             ("hooks", hooks),
                             ("scenario", {
                                 workload["name"]: workload["args"]}),
                             ("min_duration", workload["min_duration"]),
                             ("max_duration", workload["max_duration"]),
                             ("start_time", workload["start_time"]),
                             ("load_duration", workload["load_duration"]),
                             ("full_duration", workload["full_duration"]),
                             ("statistics", workload["statistics"]),
                             ("data", workload["data"]),
                             ("failed_iteration_count",
                              workload["failed_iteration_count"]),
                             ("total_iteration_count",
                              workload["total_iteration_count"]),
                             ("created_at", workload["created_at"]),
                             ("updated_at", workload["updated_at"]),
                             ("contexts", workload["context"]),
                             ("position", workload["position"]),
                             ("pass_sla", workload["pass_sla"]),
                             ("sla_results", workload["sla_results"]),
                             ("sla", workload["sla"])]
                        )
                    )
                subtasks.append(
                    collections.OrderedDict(
                        [("uuid", subtask["uuid"]),
                         ("title", subtask["title"]),
                         ("description", subtask["description"]),
                         ("status", subtask["status"]),
                         ("created_at", subtask["created_at"]),
                         ("updated_at", subtask["updated_at"]),
                         ("sla", subtask["sla"]),
                         ("workloads", workloads)]
                    )
                )
            tasks.append(
                collections.OrderedDict(
                    [("uuid", task["uuid"]),
                     ("title", task["title"]),
                     ("description", task["description"]),
                     ("status", task["status"]),
                     ("tags", task["tags"]),
                     ("created_at", task["created_at"]),
                     ("updated_at", task["updated_at"]),
                     ("pass_sla", task["pass_sla"]),
                     ("subtasks", subtasks)]
                )
            )
        return tasks

    def generate(self):
        """Render the report.

        Returns either ``{"files": ..., "open": ...}`` when an output
        destination was configured, or ``{"print": ...}`` for stdout.
        """
        results = {"info": {"rally_version": rally_version.version_string(),
                            "generated_at": dt.datetime.strftime(
                                timeutils.utcnow(), TIMEFORMAT),
                            "format_version": "1"},
                   "tasks": self._generate_tasks()}

        results = json.dumps(results, sort_keys=False, indent=4)

        if self.output_destination:
            return {"files": {self.output_destination: results},
                    "open": "file://" + self.output_destination}
        else:
            return {"print": results}
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import collections
|
||||
import datetime as dt
|
||||
import json
|
||||
|
||||
from oslo_utils import timeutils
|
||||
|
||||
from rally.common import version as rally_version
|
||||
from rally.task import exporter
|
||||
|
||||
TIMEFORMAT = "%Y-%m-%dT%H:%M:%S"
|
||||
|
||||
|
||||
@exporter.configure("json")
class JSONExporter(exporter.TaskExporter):
    """Generates task report in JSON format."""

    def _generate_tasks(self):
        """Serialize ``self.tasks_results`` into plain report structures.

        Every level of the hierarchy is rendered as an OrderedDict so the
        key order of the resulting JSON document is deterministic.
        """

        def _hook(h):
            # A hook keeps its (name, args) pairs under h["config"].
            cfg = h["config"]
            return {"config": {"action": dict([cfg["action"]]),
                               "trigger": dict([cfg["trigger"]]),
                               "description": cfg["description"]},
                    "results": h["results"],
                    "summary": h["summary"], }

        def _workload(w):
            return collections.OrderedDict([
                ("uuid", w["uuid"]),
                ("description", w["description"]),
                ("runner", {w["runner_type"]: w["runner"]}),
                ("hooks", [_hook(h) for h in w["hooks"]]),
                ("scenario", {w["name"]: w["args"]}),
                ("min_duration", w["min_duration"]),
                ("max_duration", w["max_duration"]),
                ("start_time", w["start_time"]),
                ("load_duration", w["load_duration"]),
                ("full_duration", w["full_duration"]),
                ("statistics", w["statistics"]),
                ("data", w["data"]),
                ("failed_iteration_count", w["failed_iteration_count"]),
                ("total_iteration_count", w["total_iteration_count"]),
                ("created_at", w["created_at"]),
                ("updated_at", w["updated_at"]),
                ("contexts", w["context"]),
                ("position", w["position"]),
                ("pass_sla", w["pass_sla"]),
                ("sla_results", w["sla_results"]),
                ("sla", w["sla"]),
            ])

        def _subtask(s):
            return collections.OrderedDict([
                ("uuid", s["uuid"]),
                ("title", s["title"]),
                ("description", s["description"]),
                ("status", s["status"]),
                ("created_at", s["created_at"]),
                ("updated_at", s["updated_at"]),
                ("sla", s["sla"]),
                ("workloads", [_workload(w) for w in s["workloads"]]),
            ])

        return [collections.OrderedDict([
            ("uuid", t["uuid"]),
            ("title", t["title"]),
            ("description", t["description"]),
            ("status", t["status"]),
            ("tags", t["tags"]),
            ("created_at", t["created_at"]),
            ("updated_at", t["updated_at"]),
            ("pass_sla", t["pass_sla"]),
            ("subtasks", [_subtask(s) for s in t["subtasks"]]),
        ]) for t in self.tasks_results]

    def generate(self):
        """Render the report and route it to a file or to stdout."""
        info = {"rally_version": rally_version.version_string(),
                "generated_at": dt.datetime.strftime(timeutils.utcnow(),
                                                     TIMEFORMAT),
                "format_version": "1"}
        rendered = json.dumps({"info": info,
                               "tasks": self._generate_tasks()},
                              sort_keys=False, indent=4)

        if not self.output_destination:
            return {"print": rendered}
        return {"files": {self.output_destination: rendered},
                "open": "file://" + self.output_destination}
|
||||
|
|
|
@ -16,8 +16,10 @@ TASK_FILE=$1
|
|||
PLUGIN_PATHS=rally-jobs/plugins
# Under Zuul the reports use the testr naming convention; locally they
# are named after the self job. Both reports share one basename.
if [ -n "$ZUUL_PROJECT" ]; then
    REPORT_BASE=testr_results
else
    REPORT_BASE=rally_self_results
fi
HTML_REPORT=${REPORT_BASE}.html
JSON_REPORT=${REPORT_BASE}.json
# Random suffix so concurrent runs do not share the temp config.
RND=$(head /dev/urandom | tr -dc a-z0-9 | head -c 5)
TMP_RALLY_CONF="/tmp/self-rally-$RND.conf"
||||
|
@ -36,9 +38,11 @@ $RALLY -d deployment create --name=self
|
|||
# Run task
$RALLY -d --plugin-paths=$PLUGIN_PATHS task start $TASK_FILE
$RALLY task report --html-static --out $HTML_REPORT
$RALLY task report --json --out $JSON_REPORT

# In the gate, compress both reports before they are collected as artifacts.
if [ -n "$ZUUL_PROJECT" ]; then
    for report in $HTML_REPORT $JSON_REPORT; do
        gzip -9 -f $report
    done
fi

# Check sla (this may fail the job)
|
||||
|
|
Loading…
Reference in New Issue