Merge "jsonToRst add support for 1.x schema"

Zuul 2022-01-28 02:57:20 +00:00 committed by Gerrit Code Review
commit 87ebc0e86f
1 changed file with 114 additions and 49 deletions

tools/jsonToRst_schema_1.py (Normal file → Executable file)

@@ -1,6 +1,7 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Copyright 2015 Alexander Hirschfeld
+# Copyright 2021 Red Hat, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
 # not use this file except in compliance with the License. You may obtain
@ -14,13 +15,36 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
# #
"""
This script will convert .json guideline to .rst guideline.
Only schema 1.x is supported (guidelines prior to 2018.02 version)
This script takes filename (not file path) as argument.
(File has to be located in interop/guidelines directory)
It is possible to specify output directory with --outdir option.
This option takes path to output directory as argument.
If this option isn't used file will be stored in interop/doc/source/guidelines
Examples:
[Generating out.2017.09.rst file to interop/doc/source/guidelines directory]
python3 jsonToRst.py 2017.09
[Generating out.2017.09.rst file to current directory]
python3 jsonToRst.py 2017.09.json --outdir .
"""
import argparse
import json import json
from json.decoder import JSONDecodeError
import os
import sys import sys
import textwrap import textwrap
def printHelpArrays(input): def print_help_arrays(input):
if not input: if not input:
return None return None
output = "" output = ""
@@ -35,41 +59,46 @@ def print_error(msg):
     sys.exit(1)
 
 
-wrapper = textwrap.TextWrapper(width=79, subsequent_indent=' ')
-
-inFileName = None
-for potentialFile in sys.argv:
-    if ".json" in potentialFile:
-        inFileName = potentialFile
-
-if not inFileName:
-    print_error("Please pass the JSON file")
-
-print("Reading from: " + inFileName)
-with open(inFileName) as f:
-    data = json.load(f)
-
-if not isinstance(data, dict):
-    print_error('Make sure this is a valid file')
-
-outFileName = 'doc/source/guidelines/' + inFileName.replace("json", "rst")
-
-print("Writing to: " + outFileName)
-
-# intro
-with open(outFileName, "w") as outFile:
+def add_extension(filename, extension):
+    if extension not in filename:
+        filename += extension
+    return filename
+
+
+def parse_arguments():
+    default_outdir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
+                                  "../doc/source/guidelines/")
+    parser = argparse.ArgumentParser(__doc__)
+    parser.add_argument('--outdir',
+                        help='Path to output file',
+                        metavar='FILENAME',
+                        default=default_outdir)
+    parser.add_argument('file')
+    return parser.parse_args()
+
+
+def get_file_path(in_file_name):
+    # filename
+    fname = __file__.split('/')[-1]
+    interop_path = (os.path.realpath(__file__)).replace('/tools/' + fname, '')
+    # check if file exists
+    if os.path.isfile(interop_path + '/guidelines/' + in_file_name):
+        return interop_path + '/guidelines/' + in_file_name
+    else:
+        return None
+
+
+def write_intro(data, out_file):
     if data.get('id') is None:
         print_error('Make sure there is a valid id')
 
     line01 = "OpenStack Interoperability Guideline %s" % data["id"]
-    outFile.write('=' * len(line01) + '\n')
-    outFile.write(line01 + '\n')
-    outFile.write('=' * len(line01) + '\n')
+    out_file.write('=' * len(line01) + '\n')
+    out_file.write(line01 + '\n')
+    out_file.write('=' * len(line01) + '\n')
 
     # Nonlooping
     if data.get('platform') is None:
@@ -78,13 +107,14 @@ with open(outFileName, "w") as outFile:
     # Correct Source
     if data.get('source') not in (
             'http://opendev.org/openstack/defcore/',
-            'http://opendev.org/openinfra/interop/'):
+            'http://opendev.org/openstack/interop/',
+            'https://opendev.org/openinfra/interop/'):
         print_error("The expected interoperability guideline source not found")
 
-    outFile.write("""
+    out_file.write("""
 :Status: {status}
 :Replaces: {replaces}
-:JSON Master: http://opendev.org/openinfra/interop/raw/branch/master/{id}.json
+:JSON Master: http://opendev.org/openstack/interop/raw/branch/master/{id}.json
 
 This document outlines the mandatory capabilities and designated
 sections required to exist in a software installation in order to
@@ -108,12 +138,14 @@ Platform Components
 """.format(status=data.get("status"),
            replaces=data.get("replaces"),
            id=data.get("id"),
-           releases=printHelpArrays(data.get("releases")),
-           platformRequired=printHelpArrays(data["platform"].get("required")),
-           platformAdvisory=printHelpArrays(data["platform"].get("advisory")),
-           platformDepric=printHelpArrays(data["platform"].get("deprecated")),
-           platformRemoved=printHelpArrays(data["platform"].get("removed"))))
+           releases=print_help_arrays(data.get("releases")),
+           platformRequired=print_help_arrays(data["platform"]["required"]),
+           platformAdvisory=print_help_arrays(data["platform"]["advisory"]),
+           platformDepric=print_help_arrays(data["platform"]["deprecated"]),
+           platformRemoved=print_help_arrays(data["platform"]["removed"])))
 
+
+def write_components(data, out_file):
     # looping
     if data.get('components') is None:
         print_error("No components found")
@@ -122,35 +154,37 @@ Platform Components
     order = ["required", "advisory", "deprecated", "removed"]
 
     for component in components:
-        outFile.write("""
+        out_file.write("""
 {component} Component Capabilities
 """.format(component=component.capitalize()))
-        outFile.write('=' * (len(component) + 23))  # footer
+        out_file.write('=' * (len(component) + 23))  # footer
 
         for event in order:
-            outFile.write("\n{event} Capabilities\n".format(
+            out_file.write("\n{event} Capabilities\n".format(
                 event=event.capitalize()))
-            outFile.write("-" * (len(event) + 15) + "\n")
+            out_file.write("-" * (len(event) + 15) + "\n")
             if(len(data['components'][component][event]) == 0):
-                outFile.write("None\n")
+                out_file.write("None\n")
             for req in data['components'][component][event]:
-                outFile.write("* {name} ({project})\n".format(
+                out_file.write("* {name} ({project})\n".format(
                     name=req,
                     project=data["capabilities"][req].get(
                         "project").capitalize()))
 
-    # Designated -Sections
+
+def write_designated_sections(data, out_file):
+    wrapper = textwrap.TextWrapper(width=79, subsequent_indent=' ')
     if 'designated-sections' not in data:
         print_error("designated-sections not in json file")
-    outFile.write("""
+    out_file.write("""
 Designated Sections
 =====================================
@@ -161,15 +195,14 @@ this specification.""")
 
     desig = data.get("designated-sections")
     for event in order:
-        outFile.write('\n\n{event} Designated Sections\n'.format(
+        out_file.write('\n\n{event} Designated Sections\n'.format(
             event=event.capitalize()))
         # +20 is for length of header
-        outFile.write('-' * (len(event) + 20) + '\n\n')
+        out_file.write('-' * (len(event) + 20) + '\n\n')
         names = sorted(desig[event].keys())
         if len(names) == 0:
-            outFile.write('None')
+            out_file.write('None')
         outlines = []
         for name in names:
             outlines.append(
@@ -177,6 +210,38 @@ this specification.""")
                     "* {name} : {guide}".format(
                         name=name.capitalize(),
                         guide=desig[event][name].get('guidance'))))
-        outFile.write("\n".join(outlines))
-
-    outFile.write('\n')
+        out_file.write("\n".join(outlines))
+        out_file.write('\n')
+
+
+def run(in_file_name, out_file_path):
+    in_file_path = get_file_path(in_file_name)
+    if in_file_path is None:
+        print_error("[ERROR] " + in_file_name + " doesn't exist")
+
+    print("[INFO] Reading from: " + in_file_path)
+    with open(in_file_path) as f:
+        try:
+            data = json.load(f)
+        except JSONDecodeError:
+            print_error('[ERROR] Make sure this is a valid file')
+
+    print("[INFO] Writing to: " + out_file_path)
+    # intro
+    with open(out_file_path, "w") as out_file:
+        write_intro(data, out_file)
+        write_components(data, out_file)
+        write_designated_sections(data, out_file)
+
+
+if __name__ == '__main__':
+    args = parse_arguments()
+    # add extension, if only version of guideline was specified as file
+    args.file = add_extension(args.file, ".json")
+    out_file_path = os.path.join(args.outdir,
+                                 "out." + args.file.replace("json", "rst"))
+    run(args.file, out_file_path)
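
For reference, a minimal usage sketch of the refactored helpers (not part of the commit). It assumes tools/ is on sys.path so the script is importable as jsonToRst_schema_1, and that a schema-1.x guideline such as guidelines/2017.09.json exists in the repository root; it simply mirrors what the new __main__ block does.

# Hypothetical driver mirroring the new __main__ block; the module name,
# path handling and guideline version here are assumptions for illustration.
import os
import sys

sys.path.insert(0, "tools")  # make tools/jsonToRst_schema_1.py importable
import jsonToRst_schema_1 as j2r

in_file = j2r.add_extension("2017.09", ".json")  # -> "2017.09.json"
out_path = os.path.join(".", "out." + in_file.replace("json", "rst"))
j2r.run(in_file, out_path)  # reads guidelines/2017.09.json, writes ./out.2017.09.rst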