Add validation check for divergent parameter definitions

Many of our parameters are defined in multiple templates, but
currently there is no easy way of checking that all of those
definitions match.  It can be confusing when a parameter is defined
one way in one file and another way in a different file.  For example,
the NovaWorkers description is:

Number of workers for Nova API service.

and

Number of workers for Nova Placement API service.

and

Number of workers for Nova Conductor service.

Which is it actually?  All of them.  That one parameter controls
the workers for all of the nova services, and its description should
reflect that, no matter which template you happen to look at.
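
For illustration, this is roughly what those divergent definitions
look like once collected side by side (the file names and the 'type'
values here are illustrative; only the descriptions are quoted from
the templates):

    # Hypothetical snapshot of one parameter as defined in three templates
    nova_workers_definitions = [
        {'filename': './puppet/services/nova-api.yaml',
         'data': {'type': 'number',
                  'description': 'Number of workers for Nova API service.'}},
        {'filename': './puppet/services/nova-placement.yaml',
         'data': {'type': 'number',
                  'description': 'Number of workers for Nova Placement API service.'}},
        {'filename': './puppet/services/nova-conductor.yaml',
         'data': {'type': 'number',
                  'description': 'Number of workers for Nova Conductor service.'}},
    ]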

This change adds a check to yaml-validate.py to catch these sorts of
inconsistencies and allow us to eventually prevent new ones from
getting into the templates.
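
In outline, the check builds a map from each parameter name to every
definition found across the templates and then compares them.  A
simplified, standalone sketch of that comparison (the actual code is
in the diff below):

    def find_mismatches(param_map):
        # param_map: {'ParamName': [{'filename': ..., 'data': {...}}, ...]}
        mismatched = []
        for name, defs in param_map.items():
            # A parameter defined only once cannot diverge
            if len(defs) < 2:
                continue
            data = [d['data'] for d in defs]
            # Every definition must be identical to the first one
            if data.count(data[0]) != len(data):
                mismatched.append(name)
        return mismatched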

An exclusion mechanism is included because there are some parameter
definitions we probably can't/shouldn't change.  In particular, this
includes the network CIDRs, which default to IPv4 addresses in the
IPv4 net-iso templates and to IPv6 addresses in the IPv6 templates.
It's possible a user is relying on one of those defaults in their
configuration, so changing them could break existing deployments.

To get around that, the tool explicitly ignores the default field of
those parameters, while still checking the description and type fields
so we maintain some sanity.  There may be other parameters where this
is an issue, but those can be added later as they are found.
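
The masking itself is simple: before comparing, every excluded field
is overwritten with the same placeholder in each copy of the
definition, so only the remaining fields (description, type, and so
on) can trigger a mismatch.  A simplified sketch of that step, using
the same structure as above:

    def mask_excluded_fields(name, defs, exclusions):
        # exclusions maps parameter names to the fields to ignore,
        # e.g. {'ExternalNetCidr': ['default']}
        for field in exclusions.get(name, []):
            for d in defs:
                d['data'][field] = 'IGNORED'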

For the moment any inconsistencies are soft failures.  A failure
message will be printed, but the return value will not be affected,
so we can add the check without first having to fix every divergent
parameter definition in tripleo-heat-templates (and there appear to
be plenty).  This will allow us to gradually fix the parameters over
time, and once that is done we can make this a hard failure.
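
Concretely, a mismatch currently results in output along these lines
(paths and contents abbreviated for illustration) while the script's
exit code is left untouched:

    Mismatched parameter definitions found for "NovaWorkers"
    Definitions found:
      ./puppet/services/nova-api.yaml:
        {'description': 'Number of workers for Nova API service.', ...}
      ./puppet/services/nova-conductor.yaml:
        {'description': 'Number of workers for Nova Conductor service.', ...}
    Mismatched parameter definitions: 1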

Change-Id: Ib8b2cb5e610022d2bbcec9f2e2d30d9a7c2be511
Partial-Bug: 1700664
Ben Nemec 2017-06-26 16:58:13 -05:00
parent f7062d83d7
commit f572b764f1
1 changed file with 71 additions and 4 deletions

tools/yaml-validate.py

@@ -39,6 +39,19 @@ OPTIONAL_DOCKER_SECTIONS = ['docker_puppet_tasks', 'upgrade_tasks',
 REQUIRED_DOCKER_PUPPET_CONFIG_SECTIONS = ['config_volume', 'step_config',
                                           'config_image']
 OPTIONAL_DOCKER_PUPPET_CONFIG_SECTIONS = [ 'puppet_tags' ]
+# Mapping of parameter names to a list of the fields we should _not_ enforce
+# consistency across files on. This should only contain parameters whose
+# definition we cannot change for backwards compatibility reasons. New
+# parameters to the templates should not be added to this list.
+PARAMETER_DEFINITION_EXCLUSIONS = {'ManagementNetCidr': ['default'],
+                                   'ManagementAllocationPools': ['default'],
+                                   'ExternalNetCidr': ['default'],
+                                   'ExternalAllocationPools': ['default'],
+                                   'StorageNetCidr': ['default'],
+                                   'StorageAllocationPools': ['default'],
+                                   'StorageMgmtNetCidr': ['default'],
+                                   'StorageMgmtAllocationPools': ['default'],
+                                   }


 def exit_usage():
@@ -211,7 +224,30 @@ def validate_service(filename, tpl):
     return 0


-def validate(filename):
+def validate(filename, param_map):
+    """Validate a Heat template
+
+    :param filename: The path to the file to validate
+    :param param_map: A dict which will be populated with the details of the
+                      parameters in the template. The dict will have the
+                      following structure:
+
+                          {'ParameterName': [
+                               {'filename': ./file1.yaml,
+                                'data': {'description': '',
+                                         'type': string,
+                                         'default': '',
+                                         ...}
+                                },
+                               {'filename': ./file2.yaml,
+                                'data': {'description': '',
+                                         'type': string,
+                                         'default': '',
+                                         ...}
+                                },
+                               ...
+                           ]}
+    """
     print('Validating %s' % filename)
     retval = 0
     try:
@@ -240,7 +276,9 @@ def validate(filename):
         return 1
     # yaml is OK, now walk the parameters and output a warning for unused ones
     if 'heat_template_version' in tpl:
-        for p in tpl.get('parameters', {}):
+        for p, data in tpl.get('parameters', {}).items():
+            definition = {'data': data, 'filename': filename}
+            param_map.setdefault(p, []).append(definition)
             if p in required_params:
                 continue
             str_p = '\'%s\'' % p
@@ -260,6 +298,7 @@ exit_val = 0
 failed_files = []
 base_endpoint_map = None
 env_endpoint_maps = list()
+param_map = {}

 for base_path in path_args:
     if os.path.isdir(base_path):
@@ -267,7 +306,7 @@ for base_path in path_args:
             for f in files:
                 if f.endswith('.yaml') and not f.endswith('.j2.yaml'):
                     file_path = os.path.join(subdir, f)
-                    failed = validate(file_path)
+                    failed = validate(file_path, param_map)
                     if failed:
                         failed_files.append(file_path)
                     exit_val |= failed
@@ -278,7 +317,7 @@ for base_path in path_args:
                         if env_endpoint_map:
                             env_endpoint_maps.append(env_endpoint_map)
     elif os.path.isfile(base_path) and base_path.endswith('.yaml'):
-        failed = validate(base_path)
+        failed = validate(base_path, param_map)
         if failed:
             failed_files.append(base_path)
         exit_val |= failed
@@ -310,6 +349,34 @@ else:
     failed_files.extend(set(envs_containing_endpoint_map) - matched_files)
     exit_val |= 1

+# Validate that duplicate parameters defined in multiple files all have the
+# same definition.
+mismatch_count = 0
+for p, defs in param_map.items():
+    # Nothing to validate if the parameter is only defined once
+    if len(defs) == 1:
+        continue
+    check_data = [d['data'] for d in defs]
+    # Override excluded fields so they don't affect the result
+    exclusions = PARAMETER_DEFINITION_EXCLUSIONS.get(p, [])
+    ex_dict = {}
+    for field in exclusions:
+        ex_dict[field] = 'IGNORED'
+    for d in check_data:
+        d.update(ex_dict)
+    # If all items in the list are not == the first, then the check fails
+    if check_data.count(check_data[0]) != len(check_data):
+        mismatch_count += 1
+        # TODO(bnemec): Make this a hard failure once all the templates have
+        # been fixed.
+        #exit_val |= 1
+        #failed_files.extend([d['filename'] for d in defs])
+        print('Mismatched parameter definitions found for "%s"' % p)
+        print('Definitions found:')
+        for d in defs:
+            print('  %s:\n    %s' % (d['filename'], d['data']))
+print('Mismatched parameter definitions: %d' % mismatch_count)
+
 if failed_files:
     print('Validation failed on:')
     for f in failed_files: