Updates jsonToRst.py tool to support schema 2.0

* works with python3
* argparse used for argument handling
* script can be run from the tools dir as well as from the repo's top dir
* support for schema 2.0 added (see the schema sketch below)
* support for add-ons added
* creates a single guideline for core + add-ons released on the same date
* added --file and --all options
* the old script is kept as jsonToRst_schema_1.py

  Task: 42762
  Story: 2009028

Change-Id: I6deb70fa39e8a51fcff0372217bbfc46282d7237
Roman Popelka 2021-08-13 10:27:37 +02:00 committed by Martin Kopec
parent dd7ae2660d
commit c336cf3cea
2 changed files with 380 additions and 81 deletions
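
For orientation before the diff: the updated script expects a schema 2.0 guideline whose JSON roughly follows the sketch below, written as the Python dict that json.load() returns. The key names are the ones the new code reads; the values are illustrative only and not taken from a real guideline.

    # Rough shape of a schema 2.0 guideline as consumed by the updated script
    # (key names from the diff below; values are made up).
    guideline = {
        "metadata": {
            "id": "2020.11",
            "source": "...",  # used for the :JSON Master: field
            "os_trademark_approval": {
                "status": "approved",
                "replaces": "2020.06",
                "releases": ["victoria"],
            },
        },
        "capabilities": {
            "compute-servers-list": {"project": "nova"},
        },
        "components": {
            "compute": {
                "capabilities": {
                    "required": ["compute-servers-list"],
                    "advisory": [],
                    "deprecated": [],
                    "removed": [],
                },
            },
        },
        "designated_sections": {
            "compute": {"required": {"guidance": "..."}},
        },
    }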

tools/jsonToRst.py

@@ -1,6 +1,7 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Copyright 2015 Alexander Hirschfeld
+# Copyright 2021 Red Hat, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
 # not use this file except in compliance with the License. You may obtain
@@ -14,19 +15,55 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 #
+"""
+This script will convert .json guideline to .rst guideline.
+Only schema 2.0 is supported
+1. It is possible to convert single file with --file option.
+   This option takes filename (not file path) as argument.
+   (File has to be located either in add-ons interop/add-ons/guidelines
+   directory or interop/guidelines directory)
+2. It is possible to convert core guidelines + add-ons guidelines into
+   single file with --all option.
+   This option takes date of guideline release as argument.
+3. It is possible to specify output directory with --outdir option.
+   This option takes path to output directory as argument.
+   If this option isn't used file will be stored in interop/doc/source/guidelines
+Examples:
+[Generating out.2020.11.rst file to interop/doc/source/guidelines directory]
+python3 jsonToRst.py --file 2020.11.json
+[Generating all.2020.11.rst file to interop/doc/source/guidelines directory
+(core + add-ons)]
+python3 jsonToRst.py --all 2020.11
+[Generating out.2020.11.rst file and out.dns.2020.11.rst file to
+interop/doc/source/guidelines directory]
+python3 jsonToRst.py --file 2020.11.json --file dns.2020.11.json
+[Generating out.2020.11.rst file to current directory]
+python3 jsonToRst.py --file 2020.11.json --outdir .
+"""
+import argparse
 import json
+from json.decoder import JSONDecodeError
+import os
 import sys
 import textwrap
-def printHelpArrays(input):
+def print_help_arrays(input):
     if not input:
         return None
     output = ""
     for i in input:
         output = output + i.capitalize() + ', '
     return output[0:-2]
@@ -35,85 +72,76 @@ def print_error(msg):
     sys.exit(1)
-wrapper = textwrap.TextWrapper(width=79, subsequent_indent='    ')
+def parse_arguments():
-inFileName = None
+    default_outdir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
+                                  "../doc/source/guidelines/")
-for potentialFile in sys.argv:
+    parser = argparse.ArgumentParser(__doc__)
-    if ".json" in potentialFile:
+    parser.add_argument('--file', help='Creates guideline for single file',
-        inFileName = potentialFile
+                        action='append')
+    parser.add_argument('--all',
-if not inFileName:
+                        help='Creates complete guideline(core + add-ons)',
-    print_error("Please pass the JSON file")
+                        metavar='DATE')
+    parser.add_argument('--outdir',
-print("Reading from: " + inFileName)
+                        help='Path to output file',
+                        metavar='FILENAME',
+                        default=default_outdir)
+    return parser.parse_args()
-with open(inFileName) as f:
+def get_file_path(in_file_name):
-    data = json.load(f)
-    if not isinstance(data, dict):
+    # get interop repo path
-        print_error('Make sure this is a valid file')
+    interop_path = os.path.realpath(__file__).replace('/tools/jsonToRst.py',
+                                                      '')
+    possible_paths = {
+        'platform': interop_path + '/guidelines/',
+        'add-ons': interop_path + '/add-ons/guidelines/',
+    }
-outFileName = 'doc/source/guidelines/' + inFileName.replace("json", "rst")
+    # check if file exists
+    if os.path.isfile(possible_paths['platform'] + in_file_name):
+        return possible_paths['platform'] + in_file_name
+    elif os.path.isfile(possible_paths['add-ons'] + in_file_name):
+        return possible_paths['add-ons'] + in_file_name
+    else:
+        return None
-print("Writing to: " + outFileName)
+def write_intro(data, out_file):
+    metadata = data.get('metadata')
+    if metadata.get('id') is None:
-# intro
-with open(outFileName, "w") as outFile:
-    if data.get('id') is None:
         print_error('Make sure there is a valid id')
-    line01 = "OpenStack Interoperability Guideline %s" % data["id"]
+    line01 = "OpenStack Interoperability Guideline %s" % metadata["id"]
-    outFile.write('=' * len(line01) + '\n')
+    out_file.write('=' * len(line01) + '\n')
-    outFile.write(line01 + '\n')
+    out_file.write(line01 + '\n')
-    outFile.write('=' * len(line01) + '\n')
+    out_file.write('=' * len(line01) + '\n')
+    out_file.write("""
-    # Nonlooping
-    if data.get('platform') is None:
-        print_error("The platform section is not found")
-    # Correct Source
-    if data.get('source') not in (
-            'http://opendev.org/openstack/defcore/',
-            'http://opendev.org/openstack/interop/'):
-        print_error("The expected interoperability guideline source not found")
-    outFile.write("""
 :Status: {status}
 :Replaces: {replaces}
-:JSON Master: http://opendev.org/openstack/interop/raw/branch/master/{id}.json
+:JSON Master: {source}
 This document outlines the mandatory capabilities and designated
 sections required to exist in a software installation in order to
 be eligible to use marks controlled by the OpenStack Foundation.
-This document was generated from the `master JSON version <{id}.json>`_.
+This document was generated from the `<{id}.json>`_.
 Releases Covered
 ==============================
 Applies to {releases}
-Platform Components
+""".format(status=metadata["os_trademark_approval"].get("status"),
-==============================
+           replaces=metadata["os_trademark_approval"].get("replaces"),
-:Required: {platformRequired}
+           source=metadata.get("source"),
+           id=metadata.get("id"),
+           releases=print_help_arrays(
+               metadata["os_trademark_approval"].get("releases"))))
-:Advisory: {platformAdvisory}
-:Deprecated: {platformDepric}
-:Removed: {platformRemoved}
-""".format(status=data.get("status"),
-           replaces=data.get("replaces"),
-           id=data.get("id"),
-           releases=printHelpArrays(data.get("releases")),
-           platformRequired=printHelpArrays(data["platform"].get("required")),
-           platformAdvisory=printHelpArrays(data["platform"].get("advisory")),
-           platformDepric=printHelpArrays(data["platform"].get("deprecated")),
-           platformRemoved=printHelpArrays(data["platform"].get("removed"))))
+def write_components(data, out_file):
     # looping
     if data.get('components') is None:
         print_error("No components found")
@@ -121,36 +149,44 @@ Platform Components
     components = sorted(data["components"].keys())
     order = ["required", "advisory", "deprecated", "removed"]
     for component in components:
-        outFile.write("""
+        out_file.write("""
 {component} Component Capabilities
 """.format(component=component.capitalize()))
-        outFile.write('=' * (len(component) + 23))  # footer
+        out_file.write('=' * (len(component) + 23))  # footer
         for event in order:
-            outFile.write("\n{event} Capabilities\n".format(
+            out_file.write("\n{event} Capabilities\n".format(
                 event=event.capitalize()))
-            outFile.write("-" * (len(event) + 15) + "\n")
+            out_file.write("-" * (len(event) + 15) + "\n")
-            if(len(data['components'][component][event]) == 0):
+            if(len(data['components'][component]['capabilities'][event]) == 0):
-                outFile.write("None\n")
+                out_file.write("None\n")
-            for req in data['components'][component][event]:
+            for req in data['components'][component]['capabilities'][event]:
-                outFile.write("* {name} ({project})\n".format(
+                try:
+                    data['capabilities'][req]
+                except KeyError:
+                    print("[WARNING] " + event + " section doesn't exist in " +
+                          "capabilities")
+                    continue
+                out_file.write("* {name} ({project})\n".format(
                     name=req,
                     project=data["capabilities"][req].get(
                         "project").capitalize()))
-    # Designated -Sections
-    if 'designated-sections' not in data:
+def write_designated_sections(data, out_file):
-        print_error("designated-sections not in json file")
+    wrapper = textwrap.TextWrapper(width=79, subsequent_indent='    ')
-    outFile.write("""
+    if 'designated_sections' not in data:
+        print_error("designated_sections not in json file")
+    out_file.write("""
 Designated Sections
 =====================================
@@ -158,25 +194,106 @@ Designated Sections
 The following designated sections apply to the same releases as
 this specification.""")
     order = ['required', 'advisory', 'deprecated', 'removed']
-    desig = data.get("designated-sections")
+    components = data.get("designated_sections")
+    sections_components = {}
+    for component in components:
+        section = list(data["designated_sections"].get(component).keys())[0]
+        if section not in sections_components.keys():
+            sections_components[section] = [component]
+        else:
+            sections_components[section].append(component)
     for event in order:
-        outFile.write('\n\n{event} Designated Sections\n'.format(
+        out_file.write('\n\n{event} Designated Sections\n'.format(
             event=event.capitalize()))
         # +20 is for length of header
-        outFile.write('-' * (len(event) + 20) + '\n\n')
+        out_file.write('-' * (len(event) + 20) + '\n\n')
-        names = sorted(desig[event].keys())
+        if event not in sections_components:
-        if len(names) == 0:
+            out_file.write('None')
-            outFile.write('None')
+            continue
+        names = sorted(sections_components[event])
         outlines = []
         for name in names:
             outlines.append(
                 wrapper.fill(
                     "* {name} : {guide}".format(
                         name=name.capitalize(),
-                        guide=desig[event][name].get('guidance'))))
+                        guide=components[name][event].get('guidance'))))
-        outFile.write("\n".join(outlines))
+        out_file.write("\n".join(outlines))
-    outFile.write('\n')
+    out_file.write('\n')
+def run(in_file_names, out_file_path):
+    with open(out_file_path, "w") as out_file:
+        for in_file_name in in_file_names:
+            in_file_path = get_file_path(in_file_name)
+            if in_file_path is None:
+                print('[WARNING] File ' +
+                      in_file_name +
+                      ' does not exist! SKIPPING')
+                continue
+            print('[ INFO ] Reading from: ' + in_file_path)
+            with open(in_file_path) as f:
+                try:
+                    data = json.load(f)
+                except JSONDecodeError:
+                    print('[WARNING] Make sure ' +
+                          in_file_path +
+                          ' is a valid JSON file! SKIPPING')
+                    continue
+            print('[ INFO ] Writing to: ' + out_file_path)
+            # intro
+            write_intro(data, out_file)
+            # components
+            write_components(data, out_file)
+            # Designated -Sections
+            write_designated_sections(data, out_file)
+    # check whether output file contains anything
+    if os.path.getsize(out_file_path) == 0:
+        print('[ ERROR ] Output file is empty. REMOVING FILE')
+        os.remove(out_file_path)
+if __name__ == '__main__':
+    args = parse_arguments()
+    # create guideline for single file
+    if args.file is not None:
+        for file in args.file:
+            out_file_path = os.path.join(args.outdir,
+                                         "out." + file.replace("json", "rst"))
+            run([file], out_file_path)
+    # create single guideline for core and all add-ons
+    if args.all is not None:
+        date = args.all
+        # input files names
+        files = [
+            date + ".json",
+            "dns." + date + ".json",
+            "orchestration." + date + ".json",
+            "shared_file_system." + date + ".json",
+        ]
+        out_file_name = "all." + date + ".rst"
+        out_file_path = os.path.join(args.outdir, out_file_name)
+        run(files, out_file_path)
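
Note on the designated-sections change above: schema 2.0 keys designated_sections by component name rather than by status, so write_designated_sections() first inverts the mapping into status -> [components] before printing. A minimal sketch of that inversion under the same assumption the script makes (each component carries a single status key); the component names and guidance strings here are made up, and setdefault() is used in place of the script's if/else:

    # Hypothetical input in the schema 2.0 shape read by the script.
    designated_sections = {
        "compute": {"required": {"guidance": "see nova designated sections"}},
        "networking": {"advisory": {"guidance": "see neutron designated sections"}},
        "object-storage": {"required": {"guidance": "see swift designated sections"}},
    }

    # Invert component -> status into status -> [component, ...],
    # taking the first (only) status key of each component.
    sections_components = {}
    for component in designated_sections:
        status = list(designated_sections[component].keys())[0]
        sections_components.setdefault(status, []).append(component)

    print(sections_components)
    # {'required': ['compute', 'object-storage'], 'advisory': ['networking']}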

tools/jsonToRst_schema_1.py (new file, 182 lines)

@@ -0,0 +1,182 @@
#!/usr/bin/env python
#
# Copyright 2015 Alexander Hirschfeld
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import json
import sys
import textwrap


def printHelpArrays(input):
    if not input:
        return None
    output = ""
    for i in input:
        output = output + i.capitalize() + ', '
    return output[0:-2]


def print_error(msg):
    print(msg)
    sys.exit(1)


wrapper = textwrap.TextWrapper(width=79, subsequent_indent='    ')

inFileName = None
for potentialFile in sys.argv:
    if ".json" in potentialFile:
        inFileName = potentialFile

if not inFileName:
    print_error("Please pass the JSON file")

print("Reading from: " + inFileName)

with open(inFileName) as f:
    data = json.load(f)
    if not isinstance(data, dict):
        print_error('Make sure this is a valid file')

outFileName = 'doc/source/guidelines/' + inFileName.replace("json", "rst")

print("Writing to: " + outFileName)

# intro
with open(outFileName, "w") as outFile:
    if data.get('id') is None:
        print_error('Make sure there is a valid id')

    line01 = "OpenStack Interoperability Guideline %s" % data["id"]
    outFile.write('=' * len(line01) + '\n')
    outFile.write(line01 + '\n')
    outFile.write('=' * len(line01) + '\n')

    # Nonlooping
    if data.get('platform') is None:
        print_error("The platform section is not found")

    # Correct Source
    if data.get('source') not in (
            'http://opendev.org/openstack/defcore/',
            'http://opendev.org/openstack/interop/'):
        print_error("The expected interoperability guideline source not found")

    outFile.write("""
:Status: {status}
:Replaces: {replaces}
:JSON Master: http://opendev.org/openstack/interop/raw/branch/master/{id}.json
This document outlines the mandatory capabilities and designated
sections required to exist in a software installation in order to
be eligible to use marks controlled by the OpenStack Foundation.
This document was generated from the `master JSON version <{id}.json>`_.
Releases Covered
==============================
Applies to {releases}
Platform Components
==============================
:Required: {platformRequired}
:Advisory: {platformAdvisory}
:Deprecated: {platformDepric}
:Removed: {platformRemoved}
""".format(status=data.get("status"),
           replaces=data.get("replaces"),
           id=data.get("id"),
           releases=printHelpArrays(data.get("releases")),
           platformRequired=printHelpArrays(data["platform"].get("required")),
           platformAdvisory=printHelpArrays(data["platform"].get("advisory")),
           platformDepric=printHelpArrays(data["platform"].get("deprecated")),
           platformRemoved=printHelpArrays(data["platform"].get("removed"))))

    # looping
    if data.get('components') is None:
        print_error("No components found")

    components = sorted(data["components"].keys())
    order = ["required", "advisory", "deprecated", "removed"]
    for component in components:
        outFile.write("""
{component} Component Capabilities
""".format(component=component.capitalize()))
        outFile.write('=' * (len(component) + 23))  # footer
        for event in order:
            outFile.write("\n{event} Capabilities\n".format(
                event=event.capitalize()))
            outFile.write("-" * (len(event) + 15) + "\n")
            if(len(data['components'][component][event]) == 0):
                outFile.write("None\n")
            for req in data['components'][component][event]:
                outFile.write("* {name} ({project})\n".format(
                    name=req,
                    project=data["capabilities"][req].get(
                        "project").capitalize()))

    # Designated -Sections
    if 'designated-sections' not in data:
        print_error("designated-sections not in json file")

    outFile.write("""
Designated Sections
=====================================
The following designated sections apply to the same releases as
this specification.""")

    order = ['required', 'advisory', 'deprecated', 'removed']
    desig = data.get("designated-sections")
    for event in order:
        outFile.write('\n\n{event} Designated Sections\n'.format(
            event=event.capitalize()))
        # +20 is for length of header
        outFile.write('-' * (len(event) + 20) + '\n\n')
        names = sorted(desig[event].keys())
        if len(names) == 0:
            outFile.write('None')

        outlines = []
        for name in names:
            outlines.append(
                wrapper.fill(
                    "* {name} : {guide}".format(
                        name=name.capitalize(),
                        guide=desig[event][name].get('guidance'))))
        outFile.write("\n".join(outlines))
    outFile.write('\n')