Merge " jsonToRst add support for 1.x schema"
This commit is contained in:
commit
87ebc0e86f
|
@ -1,6 +1,7 @@
|
|||
#!/usr/bin/env python
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright 2015 Alexander Hirschfeld
|
||||
# Copyright 2021 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
|
@ -14,13 +15,36 @@
|
|||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
"""
|
||||
This script will convert .json guideline to .rst guideline.
|
||||
|
||||
Only schema 1.x is supported (guidelines prior to 2018.02 version)
|
||||
|
||||
This script takes filename (not file path) as argument.
|
||||
(File has to be located in interop/guidelines directory)
|
||||
|
||||
It is possible to specify output directory with --outdir option.
|
||||
This option takes path to output directory as argument.
|
||||
If this option isn't used file will be stored in interop/doc/source/guidelines
|
||||
|
||||
Examples:
|
||||
|
||||
[Generating out.2017.09.rst file to interop/doc/source/guidelines directory]
|
||||
python3 jsonToRst.py 2017.09
|
||||
|
||||
[Generating out.2017.09.rst file to current directory]
|
||||
python3 jsonToRst.py 2017.09.json --outdir .
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
from json.decoder import JSONDecodeError
|
||||
import os
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
|
||||
def printHelpArrays(input):
|
||||
def print_help_arrays(input):
|
||||
if not input:
|
||||
return None
|
||||
output = ""
|
||||
|
@ -35,41 +59,46 @@ def print_error(msg):
|
|||
sys.exit(1)
|
||||
|
||||
|
||||
wrapper = textwrap.TextWrapper(width=79, subsequent_indent=' ')
|
||||
|
||||
inFileName = None
|
||||
for potentialFile in sys.argv:
|
||||
if ".json" in potentialFile:
|
||||
inFileName = potentialFile
|
||||
|
||||
if not inFileName:
|
||||
print_error("Please pass the JSON file")
|
||||
|
||||
print("Reading from: " + inFileName)
|
||||
def add_extension(filename, extension):
    """Return *filename* with *extension* appended when it is missing.

    Lets the guideline be given either as a bare version ("2017.09") or
    as a full file name ("2017.09.json") on the command line.

    :param filename: file name or bare guideline version
    :param extension: extension to ensure, including the dot (".json")
    :returns: filename guaranteed to end with the extension
    """
    # endswith is stricter than the previous substring test: a name such
    # as "2017.json.bak" no longer counts as already having ".json".
    if not filename.endswith(extension):
        filename += extension
    return filename
|
||||
|
||||
|
||||
with open(inFileName) as f:
|
||||
data = json.load(f)
|
||||
|
||||
if not isinstance(data, dict):
|
||||
print_error('Make sure this is a valid file')
|
||||
|
||||
outFileName = 'doc/source/guidelines/' + inFileName.replace("json", "rst")
|
||||
def parse_arguments():
    """Parse the command-line arguments of this script.

    :returns: argparse.Namespace with:
        file   -- guideline file name (or bare version) to convert
        outdir -- directory the generated .rst is written to; defaults to
                  ../doc/source/guidelines relative to this script
    """
    default_outdir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                  "../doc/source/guidelines/")
    # The first positional argument of ArgumentParser is ``prog``; passing
    # the module docstring there mangles --help.  Use description= instead.
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--outdir',
                        # --outdir takes a directory, not a file (see the
                        # module docstring), so describe it as one.
                        help='Path to output directory',
                        metavar='DIR',
                        default=default_outdir)
    parser.add_argument('file')
    return parser.parse_args()
|
||||
|
||||
|
||||
print("Writing to: " + outFileName)
|
||||
def get_file_path(in_file_name):
    """Resolve *in_file_name* inside the interop/guidelines directory.

    :param in_file_name: file name (not a path) of the guideline
    :returns: full path to the guideline when it exists there, else None
    """
    # Name of this script, used to locate the repository root below.
    script_name = __file__.split('/')[-1]
    # Strip the trailing "/tools/<script>" from the real path to get the
    # interop repository root — assumes this script lives under tools/.
    repo_root = (os.path.realpath(__file__)).replace('/tools/' + script_name,
                                                     '')
    candidate = repo_root + '/guidelines/' + in_file_name
    return candidate if os.path.isfile(candidate) else None
|
||||
|
||||
|
||||
# intro
|
||||
with open(outFileName, "w") as outFile:
|
||||
def write_intro(data, out_file):
|
||||
if data.get('id') is None:
|
||||
print_error('Make sure there is a valid id')
|
||||
|
||||
line01 = "OpenStack Interoperability Guideline %s" % data["id"]
|
||||
|
||||
outFile.write('=' * len(line01) + '\n')
|
||||
outFile.write(line01 + '\n')
|
||||
outFile.write('=' * len(line01) + '\n')
|
||||
out_file.write('=' * len(line01) + '\n')
|
||||
out_file.write(line01 + '\n')
|
||||
out_file.write('=' * len(line01) + '\n')
|
||||
|
||||
# Nonlooping
|
||||
if data.get('platform') is None:
|
||||
|
@ -78,13 +107,14 @@ with open(outFileName, "w") as outFile:
|
|||
# Correct Source
|
||||
if data.get('source') not in (
|
||||
'http://opendev.org/openstack/defcore/',
|
||||
'http://opendev.org/openinfra/interop/'):
|
||||
'http://opendev.org/openstack/interop/',
|
||||
'https://opendev.org/openinfra/interop/'):
|
||||
print_error("The expected interoperability guideline source not found")
|
||||
|
||||
outFile.write("""
|
||||
out_file.write("""
|
||||
:Status: {status}
|
||||
:Replaces: {replaces}
|
||||
:JSON Master: http://opendev.org/openinfra/interop/raw/branch/master/{id}.json
|
||||
:JSON Master: http://opendev.org/openstack/interop/raw/branch/master/{id}.json
|
||||
|
||||
This document outlines the mandatory capabilities and designated
|
||||
sections required to exist in a software installation in order to
|
||||
|
@ -108,12 +138,14 @@ Platform Components
|
|||
""".format(status=data.get("status"),
|
||||
replaces=data.get("replaces"),
|
||||
id=data.get("id"),
|
||||
releases=printHelpArrays(data.get("releases")),
|
||||
platformRequired=printHelpArrays(data["platform"].get("required")),
|
||||
platformAdvisory=printHelpArrays(data["platform"].get("advisory")),
|
||||
platformDepric=printHelpArrays(data["platform"].get("deprecated")),
|
||||
platformRemoved=printHelpArrays(data["platform"].get("removed"))))
|
||||
releases=print_help_arrays(data.get("releases")),
|
||||
platformRequired=print_help_arrays(data["platform"]["required"]),
|
||||
platformAdvisory=print_help_arrays(data["platform"]["advisory"]),
|
||||
platformDepric=print_help_arrays(data["platform"]["deprecated"]),
|
||||
platformRemoved=print_help_arrays(data["platform"]["removed"])))
|
||||
|
||||
|
||||
def write_components(data, out_file):
|
||||
# looping
|
||||
if data.get('components') is None:
|
||||
print_error("No components found")
|
||||
|
@ -122,35 +154,37 @@ Platform Components
|
|||
order = ["required", "advisory", "deprecated", "removed"]
|
||||
for component in components:
|
||||
|
||||
outFile.write("""
|
||||
out_file.write("""
|
||||
|
||||
|
||||
|
||||
{component} Component Capabilities
|
||||
""".format(component=component.capitalize()))
|
||||
outFile.write('=' * (len(component) + 23)) # footer
|
||||
out_file.write('=' * (len(component) + 23)) # footer
|
||||
|
||||
for event in order:
|
||||
|
||||
outFile.write("\n{event} Capabilities\n".format(
|
||||
out_file.write("\n{event} Capabilities\n".format(
|
||||
event=event.capitalize()))
|
||||
outFile.write("-" * (len(event) + 15) + "\n")
|
||||
out_file.write("-" * (len(event) + 15) + "\n")
|
||||
|
||||
if(len(data['components'][component][event]) == 0):
|
||||
outFile.write("None\n")
|
||||
out_file.write("None\n")
|
||||
|
||||
for req in data['components'][component][event]:
|
||||
outFile.write("* {name} ({project})\n".format(
|
||||
out_file.write("* {name} ({project})\n".format(
|
||||
name=req,
|
||||
project=data["capabilities"][req].get(
|
||||
"project").capitalize()))
|
||||
|
||||
# Designated -Sections
|
||||
|
||||
def write_designated_sections(data, out_file):
|
||||
wrapper = textwrap.TextWrapper(width=79, subsequent_indent=' ')
|
||||
|
||||
if 'designated-sections' not in data:
|
||||
print_error("designated-sections not in json file")
|
||||
|
||||
outFile.write("""
|
||||
out_file.write("""
|
||||
|
||||
Designated Sections
|
||||
=====================================
|
||||
|
@ -161,15 +195,14 @@ this specification.""")
|
|||
desig = data.get("designated-sections")
|
||||
for event in order:
|
||||
|
||||
outFile.write('\n\n{event} Designated Sections\n'.format(
|
||||
out_file.write('\n\n{event} Designated Sections\n'.format(
|
||||
event=event.capitalize()))
|
||||
# +20 is for length of header
|
||||
outFile.write('-' * (len(event) + 20) + '\n\n')
|
||||
out_file.write('-' * (len(event) + 20) + '\n\n')
|
||||
|
||||
names = sorted(desig[event].keys())
|
||||
if len(names) == 0:
|
||||
outFile.write('None')
|
||||
|
||||
out_file.write('None')
|
||||
outlines = []
|
||||
for name in names:
|
||||
outlines.append(
|
||||
|
@ -177,6 +210,38 @@ this specification.""")
|
|||
"* {name} : {guide}".format(
|
||||
name=name.capitalize(),
|
||||
guide=desig[event][name].get('guidance'))))
|
||||
outFile.write("\n".join(outlines))
|
||||
out_file.write("\n".join(outlines))
|
||||
out_file.write('\n')
|
||||
|
||||
outFile.write('\n')
|
||||
|
||||
def _load_guideline(path):
    # Parse the guideline JSON at *path*, aborting the script when the
    # file is not valid JSON.
    with open(path) as handle:
        try:
            return json.load(handle)
        except JSONDecodeError:
            print_error('[ERROR] Make sure this is a valid file')


def run(in_file_name, out_file_path):
    """Convert one schema-1.x JSON guideline into a reST document.

    :param in_file_name: file name inside interop/guidelines
    :param out_file_path: full path of the generated .rst file
    """
    in_file_path = get_file_path(in_file_name)

    if in_file_path is None:
        print_error("[ERROR] " + in_file_name + " doesn't exist")

    print("[INFO] Reading from: " + in_file_path)

    data = _load_guideline(in_file_path)

    print("[INFO] Writing to: " + out_file_path)

    # Emit the three guideline sections in document order.
    with open(out_file_path, "w") as out_file:
        write_intro(data, out_file)
        write_components(data, out_file)
        write_designated_sections(data, out_file)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    cli = parse_arguments()
    # Allow passing only the guideline version (e.g. "2017.09"): the
    # ".json" extension is appended when it is missing.
    in_file = add_extension(cli.file, ".json")
    # Output name mirrors the input with an "out." prefix and .rst suffix.
    out_name = "out." + in_file.replace("json", "rst")
    run(in_file, os.path.join(cli.outdir, out_name))
|
||||
|
|
Loading…
Reference in New Issue