From c336cf3ceaf7821dffaf926ff84dd1b3c4cff16d Mon Sep 17 00:00:00 2001
From: Roman Popelka
Date: Fri, 13 Aug 2021 10:27:37 +0200
Subject: [PATCH] Updates jsonToRst.py tool to support schema 2.0

* works with python3
* argparse used for argument handling
* script can be run from tools dir and from repo's top dir as well
* support for schema 2.0 added
* support for add-ons added
* creates single guideline for core + add-ons on same date
* added --file and --all options
* keep old script in file jsonToRst_schema_1.py

Task: 42762
Story: 2009028
Change-Id: I6deb70fa39e8a51fcff0372217bbfc46282d7237
---
 tools/jsonToRst.py          | 279 +++++++++++++++++++++++++-----------
 tools/jsonToRst_schema_1.py | 182 +++++++++++++++++++++++
 2 files changed, 380 insertions(+), 81 deletions(-)
 create mode 100644 tools/jsonToRst_schema_1.py

diff --git a/tools/jsonToRst.py b/tools/jsonToRst.py
index 2d1c4e13..4529168a 100755
--- a/tools/jsonToRst.py
+++ b/tools/jsonToRst.py
@@ -1,6 +1,7 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Copyright 2015 Alexander Hirschfeld
+# Copyright 2021 Red Hat, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
 # not use this file except in compliance with the License. You may obtain
@@ -14,19 +15,55 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 #
+"""
+This script converts a .json guideline to a .rst guideline.
+Only schema 2.0 is supported.
+
+1. A single file can be converted with the --file option.
+This option takes a filename (not a file path) as its argument.
+(The file has to be located either in the interop/add-ons/guidelines
+directory or in the interop/guidelines directory.)
+
+2. The core guideline and the add-on guidelines can be converted into a
+single file with the --all option.
+This option takes the guideline release date as its argument.
+
+3. The output directory can be specified with the --outdir option.
+This option takes the path to the output directory as its argument.
+If this option isn't used, the file is stored in interop/doc/source/guidelines.
+
+Examples:
+
+[Generate out.2020.11.rst in the interop/doc/source/guidelines directory]
+    python3 jsonToRst.py --file 2020.11.json
+
+[Generate all.2020.11.rst in the interop/doc/source/guidelines directory
+(core + add-ons)]
+    python3 jsonToRst.py --all 2020.11
+
+[Generate out.2020.11.rst and out.dns.2020.11.rst in the
+interop/doc/source/guidelines directory]
+    python3 jsonToRst.py --file 2020.11.json --file dns.2020.11.json
+
+[Generate out.2020.11.rst in the current directory]
+    python3 jsonToRst.py --file 2020.11.json --outdir .
+""" + +import argparse import json +from json.decoder import JSONDecodeError +import os import sys import textwrap -def printHelpArrays(input): +def print_help_arrays(input): if not input: return None output = "" for i in input: output = output + i.capitalize() + ', ' - return output[0:-2] @@ -35,85 +72,76 @@ def print_error(msg): sys.exit(1) -wrapper = textwrap.TextWrapper(width=79, subsequent_indent=' ') - -inFileName = None -for potentialFile in sys.argv: - if ".json" in potentialFile: - inFileName = potentialFile - -if not inFileName: - print_error("Please pass the JSON file") - -print("Reading from: " + inFileName) +def parse_arguments(): + default_outdir = os.path.join(os.path.dirname(os.path.realpath(__file__)), + "../doc/source/guidelines/") + parser = argparse.ArgumentParser(__doc__) + parser.add_argument('--file', help='Creates guideline for single file', + action='append') + parser.add_argument('--all', + help='Creates complete guideline(core + add-ons)', + metavar='DATE') + parser.add_argument('--outdir', + help='Path to output file', + metavar='FILENAME', + default=default_outdir) + return parser.parse_args() -with open(inFileName) as f: - data = json.load(f) +def get_file_path(in_file_name): -if not isinstance(data, dict): - print_error('Make sure this is a valid file') + # get interop repo path + interop_path = os.path.realpath(__file__).replace('/tools/jsonToRst.py', + '') + possible_paths = { + 'platform': interop_path + '/guidelines/', + 'add-ons': interop_path + '/add-ons/guidelines/', + } -outFileName = 'doc/source/guidelines/' + inFileName.replace("json", "rst") + # check if file exists + if os.path.isfile(possible_paths['platform'] + in_file_name): + return possible_paths['platform'] + in_file_name + elif os.path.isfile(possible_paths['add-ons'] + in_file_name): + return possible_paths['add-ons'] + in_file_name + else: + return None -print("Writing to: " + outFileName) +def write_intro(data, out_file): + metadata = data.get('metadata') - -# intro -with open(outFileName, "w") as outFile: - if data.get('id') is None: + if metadata.get('id') is None: print_error('Make sure there is a valid id') - line01 = "OpenStack Interoperability Guideline %s" % data["id"] + line01 = "OpenStack Interoperability Guideline %s" % metadata["id"] - outFile.write('=' * len(line01) + '\n') - outFile.write(line01 + '\n') - outFile.write('=' * len(line01) + '\n') - - # Nonlooping - if data.get('platform') is None: - print_error("The platform section is not found") - - # Correct Source - if data.get('source') not in ( - 'http://opendev.org/openstack/defcore/', - 'http://opendev.org/openstack/interop/'): - print_error("The expected interoperability guideline source not found") - - outFile.write(""" + out_file.write('=' * len(line01) + '\n') + out_file.write(line01 + '\n') + out_file.write('=' * len(line01) + '\n') + out_file.write(""" :Status: {status} :Replaces: {replaces} -:JSON Master: http://opendev.org/openstack/interop/raw/branch/master/{id}.json +:JSON Master: {source} This document outlines the mandatory capabilities and designated sections required to exist in a software installation in order to be eligible to use marks controlled by the OpenStack Foundation. -This document was generated from the `master JSON version <{id}.json>`_. +This document was generated from the `<{id}.json>`_. 
 
 Releases Covered
 ==============================
 Applies to {releases}
 
-Platform Components
-==============================
-:Required: {platformRequired}
+""".format(status=metadata["os_trademark_approval"].get("status"),
+           replaces=metadata["os_trademark_approval"].get("replaces"),
+           source=metadata.get("source"),
+           id=metadata.get("id"),
+           releases=print_help_arrays(
+               metadata["os_trademark_approval"].get("releases"))))
 
-:Advisory: {platformAdvisory}
-
-:Deprecated: {platformDepric}
-
-:Removed: {platformRemoved}
-""".format(status=data.get("status"),
-           replaces=data.get("replaces"),
-           id=data.get("id"),
-           releases=printHelpArrays(data.get("releases")),
-           platformRequired=printHelpArrays(data["platform"].get("required")),
-           platformAdvisory=printHelpArrays(data["platform"].get("advisory")),
-           platformDepric=printHelpArrays(data["platform"].get("deprecated")),
-           platformRemoved=printHelpArrays(data["platform"].get("removed"))))
 
+
+def write_components(data, out_file):
     # looping
     if data.get('components') is None:
         print_error("No components found")
@@ -121,36 +149,44 @@ Platform Components
     components = sorted(data["components"].keys())
     order = ["required", "advisory", "deprecated", "removed"]
     for component in components:
-
-        outFile.write("""
+        out_file.write("""
 
 
 
 {component} Component Capabilities
 """.format(component=component.capitalize()))
-        outFile.write('=' * (len(component) + 23))  # footer
+        out_file.write('=' * (len(component) + 23))  # footer
 
         for event in order:
-
-            outFile.write("\n{event} Capabilities\n".format(
+            out_file.write("\n{event} Capabilities\n".format(
                 event=event.capitalize()))
-            outFile.write("-" * (len(event) + 15) + "\n")
+            out_file.write("-" * (len(event) + 15) + "\n")
 
-            if(len(data['components'][component][event]) == 0):
-                outFile.write("None\n")
+            if(len(data['components'][component]['capabilities'][event]) == 0):
+                out_file.write("None\n")
 
-            for req in data['components'][component][event]:
-                outFile.write("* {name} ({project})\n".format(
+            for req in data['components'][component]['capabilities'][event]:
+                try:
+                    data['capabilities'][req]
+                except KeyError:
+                    print("[WARNING] capability " + req + " not found "
+                          + "in capabilities section")
+                    continue
+
+                out_file.write("* {name} ({project})\n".format(
                     name=req,
                     project=data["capabilities"][req].get(
                         "project").capitalize()))
 
-    # Designated -Sections
 
-    if 'designated-sections' not in data:
-        print_error("designated-sections not in json file")
+def write_designated_sections(data, out_file):
+    wrapper = textwrap.TextWrapper(width=79, subsequent_indent='    ')
 
-    outFile.write("""
+    if 'designated_sections' not in data:
+        print_error("designated_sections not in json file")
+
+    out_file.write("""
 
 Designated Sections
 =====================================
 
@@ -158,25 +194,106 @@ Designated Sections
 The following designated sections apply to the same releases as
 this specification.""")
     order = ['required', 'advisory', 'deprecated', 'removed']
-    desig = data.get("designated-sections")
+    components = data.get("designated_sections")
+
+    sections_components = {}
+
+    for component in components:
+        section = list(data["designated_sections"].get(component).keys())[0]
+        if section not in sections_components.keys():
+            sections_components[section] = [component]
+        else:
+            sections_components[section].append(component)
+
     for event in order:
-        outFile.write('\n\n{event} Designated Sections\n'.format(
-            event=event.capitalize()))
+        out_file.write('\n\n{event} Designated Sections\n'.format(
+            event=event.capitalize()))
         # +20 is for length of header
- outFile.write('-' * (len(event) + 20) + '\n\n') + out_file.write('-' * (len(event) + 20) + '\n\n') - names = sorted(desig[event].keys()) - if len(names) == 0: - outFile.write('None') + if event not in sections_components: + out_file.write('None') + continue + + names = sorted(sections_components[event]) outlines = [] for name in names: + outlines.append( wrapper.fill( "* {name} : {guide}".format( name=name.capitalize(), - guide=desig[event][name].get('guidance')))) - outFile.write("\n".join(outlines)) + guide=components[name][event].get('guidance')))) + out_file.write("\n".join(outlines)) - outFile.write('\n') + out_file.write('\n') + + +def run(in_file_names, out_file_path): + with open(out_file_path, "w") as out_file: + for in_file_name in in_file_names: + + in_file_path = get_file_path(in_file_name) + + if in_file_path is None: + print('[WARNING] File ' + + in_file_name + + ' does not exist! SKIPPING') + continue + + print('[ INFO ] Reading from: ' + in_file_path) + + with open(in_file_path) as f: + try: + data = json.load(f) + except JSONDecodeError: + print('[WARNING] Make sure ' + + in_file_path + + ' is a valid JSON file! SKIPPING') + continue + + print('[ INFO ] Writing to: ' + out_file_path) + + # intro + write_intro(data, out_file) + + # components + write_components(data, out_file) + + # Designated -Sections + write_designated_sections(data, out_file) + + # check whether output file contains anything + if os.path.getsize(out_file_path) == 0: + print('[ ERROR ] Output file is empty. REMOVING FILE') + os.remove(out_file_path) + + +if __name__ == '__main__': + args = parse_arguments() + + # create guideline for single file + if args.file is not None: + for file in args.file: + out_file_path = os.path.join(args.outdir, + "out." + file.replace("json", "rst")) + run([file], out_file_path) + + # create single guideline for core and all add-ons + if args.all is not None: + date = args.all + + # input files names + files = [ + date + ".json", + "dns." + date + ".json", + "orchestration." + date + ".json", + "shared_file_system." + date + ".json", + ] + + out_file_name = "all." + date + ".rst" + out_file_path = os.path.join(args.outdir, out_file_name) + + run(files, out_file_path) diff --git a/tools/jsonToRst_schema_1.py b/tools/jsonToRst_schema_1.py new file mode 100644 index 00000000..2d1c4e13 --- /dev/null +++ b/tools/jsonToRst_schema_1.py @@ -0,0 +1,182 @@ +#!/usr/bin/env python +# +# Copyright 2015 Alexander Hirschfeld +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# + +import json +import sys +import textwrap + + +def printHelpArrays(input): + if not input: + return None + output = "" + for i in input: + output = output + i.capitalize() + ', ' + + return output[0:-2] + + +def print_error(msg): + print(msg) + sys.exit(1) + + +wrapper = textwrap.TextWrapper(width=79, subsequent_indent=' ') + +inFileName = None +for potentialFile in sys.argv: + if ".json" in potentialFile: + inFileName = potentialFile + +if not inFileName: + print_error("Please pass the JSON file") + +print("Reading from: " + inFileName) + + +with open(inFileName) as f: + data = json.load(f) + +if not isinstance(data, dict): + print_error('Make sure this is a valid file') + +outFileName = 'doc/source/guidelines/' + inFileName.replace("json", "rst") + + +print("Writing to: " + outFileName) + + +# intro +with open(outFileName, "w") as outFile: + if data.get('id') is None: + print_error('Make sure there is a valid id') + + line01 = "OpenStack Interoperability Guideline %s" % data["id"] + + outFile.write('=' * len(line01) + '\n') + outFile.write(line01 + '\n') + outFile.write('=' * len(line01) + '\n') + + # Nonlooping + if data.get('platform') is None: + print_error("The platform section is not found") + + # Correct Source + if data.get('source') not in ( + 'http://opendev.org/openstack/defcore/', + 'http://opendev.org/openstack/interop/'): + print_error("The expected interoperability guideline source not found") + + outFile.write(""" +:Status: {status} +:Replaces: {replaces} +:JSON Master: http://opendev.org/openstack/interop/raw/branch/master/{id}.json + +This document outlines the mandatory capabilities and designated +sections required to exist in a software installation in order to +be eligible to use marks controlled by the OpenStack Foundation. + +This document was generated from the `master JSON version <{id}.json>`_. 
+ +Releases Covered +============================== +Applies to {releases} + +Platform Components +============================== +:Required: {platformRequired} + +:Advisory: {platformAdvisory} + +:Deprecated: {platformDepric} + +:Removed: {platformRemoved} +""".format(status=data.get("status"), + replaces=data.get("replaces"), + id=data.get("id"), + releases=printHelpArrays(data.get("releases")), + platformRequired=printHelpArrays(data["platform"].get("required")), + platformAdvisory=printHelpArrays(data["platform"].get("advisory")), + platformDepric=printHelpArrays(data["platform"].get("deprecated")), + platformRemoved=printHelpArrays(data["platform"].get("removed")))) + + # looping + if data.get('components') is None: + print_error("No components found") + + components = sorted(data["components"].keys()) + order = ["required", "advisory", "deprecated", "removed"] + for component in components: + + outFile.write(""" + + + +{component} Component Capabilities +""".format(component=component.capitalize())) + outFile.write('=' * (len(component) + 23)) # footer + + for event in order: + + outFile.write("\n{event} Capabilities\n".format( + event=event.capitalize())) + outFile.write("-" * (len(event) + 15) + "\n") + + if(len(data['components'][component][event]) == 0): + outFile.write("None\n") + + for req in data['components'][component][event]: + outFile.write("* {name} ({project})\n".format( + name=req, + project=data["capabilities"][req].get( + "project").capitalize())) + + # Designated -Sections + + if 'designated-sections' not in data: + print_error("designated-sections not in json file") + + outFile.write(""" + +Designated Sections +===================================== + +The following designated sections apply to the same releases as +this specification.""") + order = ['required', 'advisory', 'deprecated', 'removed'] + desig = data.get("designated-sections") + for event in order: + + outFile.write('\n\n{event} Designated Sections\n'.format( + event=event.capitalize())) + # +20 is for length of header + outFile.write('-' * (len(event) + 20) + '\n\n') + + names = sorted(desig[event].keys()) + if len(names) == 0: + outFile.write('None') + + outlines = [] + for name in names: + outlines.append( + wrapper.fill( + "* {name} : {guide}".format( + name=name.capitalize(), + guide=desig[event][name].get('guidance')))) + outFile.write("\n".join(outlines)) + + outFile.write('\n')
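
Reviewer note: a minimal sketch of the schema 2.0 guideline structure that the
rewritten functions read (write_intro, write_components,
write_designated_sections). The keys are inferred from the parsing code in this
patch and the values are purely illustrative assumptions, not an authoritative
schema reference.

    # Illustrative only: keys inferred from the code above, values invented
    # for the example. Not part of the patch.
    example_guideline = {
        "metadata": {
            "id": "2020.11",
            "source": "https://opendev.org/openstack/interop/",
            "os_trademark_approval": {
                "status": "approved",
                "replaces": "2020.06",
                "releases": ["ussuri", "victoria"],
            },
        },
        "capabilities": {
            "compute-servers-list": {"project": "nova"},
        },
        "components": {
            "compute": {
                "capabilities": {
                    "required": ["compute-servers-list"],
                    "advisory": [],
                    "deprecated": [],
                    "removed": [],
                },
            },
        },
        "designated_sections": {
            "compute": {
                "required": {"guidance": "Designated sections guidance text."},
            },
        },
    }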