Perform validations on site documents as part of "collect" command

Pegleg should have the ability to perform validations as part of
"collect" (eventually by default) and to bypass validation via an
optional parameter.

This PS adds document validation to Pegleg's "collect" command behind
a new --validate flag. For backwards compatibility the flag is off by
default, so validation only runs when --validate is provided. The
"collect" command also accepts the same optional -x/-w arguments as
"lint" for controlling which lint checks are excluded or only warned
about.

Change-Id: Iee9f27f1133d780d507f7a13cce42076768a0c1c
Felipe Monteiro 2018-04-25 23:45:03 +01:00
parent c7dc0e71dc
commit cc000c66ab
11 changed files with 358 additions and 62 deletions

View File

@ -73,7 +73,6 @@ Path to the root of an auxiliary repo.
Collect
-------
Output complete config for one site.
It is assumed that all lint errors have been corrected already.
**site_name**
@ -81,16 +80,44 @@ Name of the site. (Required).
**-s / --save-location**
Where to output.
Where to output collected documents.
**-x <code>** (Optional, validation only).
Will exclude the specified lint option. -w takes priority over -x.
**-w <code>** (Optional, validation only).
Will warn of lint failures from the specified lint options.
**--validate** (Optional). False by default.
Perform validation of documents prior to collection. See :ref:`linting` for
additional information on document linting. It is recommended that document
linting be executed prior to document collection. However, ``--validate``
is False by default due to backwards compatibility concerns.
Usage:
::
./pegleg.sh <command> <options> collect site_name -s save_location
-x P001 -w P002 --validate
Example:
Example without validation:
::
./pegleg.sh site -p /workspace/repo_1 -a /workspace/repo_2
collect site_name -s /workspace
Example with validation:
::
./pegleg.sh site -p /workspace/repo_1 -a /workspace/repo_2
collect site_name -s /workspace -x P004 --validate
Impacted
--------
Find sites impacted by changed files.
@ -141,7 +168,7 @@ Where to output.
Example:
./pegleg site -p /workspace/repo_1 show site_name -o /workspace
.. _linting:
Lint
----
@ -173,7 +200,7 @@ Raise Deckhand exception on missing substitution sources. Defaults to True.
**-x <code>**
Will excluded the specified lint option. -w takes priority over -x.
Will exclude the specified lint option. -w takes priority over -x.
**-w <code>**

View File

@ -12,13 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from . import engine
from pegleg import config
import click
import logging
import sys
import click
from pegleg import config
from pegleg import engine
LOG = logging.getLogger(__name__)
LOG_FORMAT = '%(asctime)s %(levelname)-8s %(name)s:%(funcName)s [%(lineno)3d] %(message)s' # noqa
@ -69,9 +70,44 @@ def site(primary_repo, aux_repo):
'save_location',
type=click.Path(
file_okay=False, dir_okay=True, writable=True, resolve_path=True),
help='Where to output')
help='Where to output the complete site definition.')
@click.option(
'--validate',
'validate',
is_flag=True,
# TODO(felipemonteiro): Potentially set this to True in the future. This
# is currently set to False to skip validation by default for backwards
# compatibility concerns.
default=False,
help='Perform validations on documents prior to collection.')
@click.option(
'-x',
'--exclude',
'exclude_lint',
multiple=True,
help='Excludes specified linting checks. Warnings will still be issued. '
'-w takes priority over -x.')
@click.option(
'-w',
'--warn',
'warn_lint',
multiple=True,
help='Warn if linting check fails. -w takes priority over -x.')
@click.argument('site_name')
def collect(*, save_location, site_name):
def collect(*, save_location, validate, exclude_lint, warn_lint, site_name):
"""Collects documents into a single site-definition.yaml file, which
defines the entire site definition and contains all documents required
for ingestion by Airship.
Collect can lint documents prior to collection if the ``--validate``
flag is optionally included.
"""
if validate:
# Lint the primary repo prior to document collection.
_lint(
fail_on_missing_sub_src=True,
exclude_lint=exclude_lint,
warn_lint=warn_lint)
engine.site.collect(site_name, save_location)
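
As a rough sketch of how the new flag can be exercised end to end (using Click's
test runner; the repository path and site name below are placeholders, and P001
is simply the lint code used in the documentation example above):

from click.testing import CliRunner

from pegleg import cli

runner = CliRunner()
# Lint the primary repo first, downgrading check P001 to a warning,
# then collect the site documents into /workspace.
result = runner.invoke(
    cli.site,
    ['-p', '/workspace/repo_1', 'collect', 'site_name',
     '-s', '/workspace', '-w', 'P001', '--validate'])
# If linting only produces warnings they are echoed
# ("Linting passed, but produced some warnings.") and collection still
# runs; without --validate, collect behaves exactly as before.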
@ -188,6 +224,14 @@ def site_type(*, revision, site_type):
engine.stub.site_type(revision, site_type)
def _lint(*, fail_on_missing_sub_src, exclude_lint, warn_lint):
warns = engine.lint.full(fail_on_missing_sub_src, exclude_lint, warn_lint)
if warns:
click.echo("Linting passed, but produced some warnings.")
for w in warns:
click.echo(w)
@LINT_OPTION
@main.command(help='Sanity checks for repository content')
@click.option(
@ -220,8 +264,7 @@ def lint(*, fail_on_missing_sub_src, primary_repo, aux_repo, exclude_lint,
warn_lint):
config.set_primary_repo(primary_repo)
config.set_auxiliary_repo_list(aux_repo or [])
warns = engine.lint.full(fail_on_missing_sub_src, exclude_lint, warn_lint)
if warns:
click.echo("Linting passed, but produced some warnings.")
for w in warns:
click.echo(w)
_lint(
fail_on_missing_sub_src=fail_on_missing_sub_src,
exclude_lint=exclude_lint,
warn_lint=warn_lint)

View File

@ -19,6 +19,7 @@ except NameError:
GLOBAL_CONTEXT = {
'primary_repo': './',
'aux_repos': [],
'site_path': 'site'
}
@ -51,3 +52,12 @@ def all_repos():
repos = [get_primary_repo()]
repos.extend(get_auxiliary_repo_list())
return repos
def get_rel_site_path():
return GLOBAL_CONTEXT.get('site_path', 'site')
def set_rel_site_path(p):
p = p or 'site'
GLOBAL_CONTEXT['site_path'] = p
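
A minimal sketch of how the new helpers behave (the 'deployment_files' value is
just a hypothetical alternative directory name):

from pegleg import config

assert config.get_rel_site_path() == 'site'    # the default
config.set_rel_site_path('deployment_files')   # point at another layout
assert config.get_rel_site_path() == 'deployment_files'
config.set_rel_site_path(None)                 # None falls back to 'site'
assert config.get_rel_site_path() == 'site'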

View File

@ -39,18 +39,20 @@ DECKHAND_SCHEMAS = {
def full(fail_on_missing_sub_src=False, exclude_lint=None, warn_lint=None):
exclude_lint = exclude_lint or []
warn_lint = warn_lint or []
messages = []
# If policy is cleartext and an error is added for it under
# SCHEMA_STORAGE_POLICY_MISMATCH_FLAG, that particular message is put
# into the warns list; all other messages are added to the errors list.
messages.extend(_verify_file_contents())
# Deckhand Rendering completes without error
messages.extend(_verify_deckhand_render(fail_on_missing_sub_src))
# All repos contain expected directories
messages.extend(_verify_no_unexpected_files())
# Deckhand Rendering completes without error
messages.extend(_verify_deckhand_render(fail_on_missing_sub_src))
errors = []
warns = []
for code, message in messages:
@ -160,9 +162,15 @@ def _verify_document(document, schemas, filename):
return errors
def _verify_deckhand_render(fail_on_missing_sub_src=False):
def _gather_relevant_documents_per_site():
"""Gathers all relevant documents per site, which includes all type and
global documents that are needed to render each site document.
:returns: Dictionary of documents, keyed by each site name.
:rtype: dict
"""
sitenames = list(util.files.list_sites())
documents_by_site = {s: [] for s in sitenames}
documents_to_render = {s: [] for s in sitenames}
for sitename in sitenames:
params = util.definition.load_as_params(sitename)
@ -170,11 +178,21 @@ def _verify_deckhand_render(fail_on_missing_sub_src=False):
filenames = set(util.files.search(paths))
for filename in filenames:
with open(filename) as f:
documents_by_site[sitename].extend(list(yaml.safe_load_all(f)))
documents_to_render[sitename].extend(
list(yaml.safe_load_all(f)))
return documents_to_render
def _verify_deckhand_render(fail_on_missing_sub_src=False):
"""Verify Deckhand render works by using all relevant deployment files.
:returns: List of errors generated during rendering.
"""
all_errors = []
documents_to_render = _gather_relevant_documents_per_site()
for sitename, documents in documents_by_site.items():
for sitename, documents in documents_to_render.items():
LOG.debug('Rendering documents for site: %s.', sitename)
_, errors = util.deckhand.deckhand_render(
documents=documents,
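
To illustrate the data this rendering path now consumes, a hedged sketch of the
per-site mapping for the two fixture sites used in the tests below (not output
captured from this change):

# Hypothetical shape of _gather_relevant_documents_per_site(): every
# global, type and site document needed to render each site, keyed by
# site name.
documents_to_render = {
    'cicd': [
        {
            'schema': 'deckhand/Passphrase/v1',
            'metadata': {
                'schema': 'metadata/Document/v1',
                'name': 'global-common',
                'storagePolicy': 'cleartext',
                'layeringDefinition': {'abstract': False, 'layer': 'global'},
            },
            'data': 'global-common-password',
        },
        # ...the remaining type- and site-layer documents for 'cicd'
    ],
    'lab': [
        # ...the corresponding documents for 'lab'
    ],
}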

View File

@ -21,6 +21,7 @@ import yaml
import logging
from pegleg.engine import util
__all__ = ['collect', 'impacted', 'list_', 'show', 'render']
LOG = logging.getLogger(__name__)
@ -33,17 +34,22 @@ def collect(site_name, save_location):
raise ValueError('Missing param: save-location')
elif not os.path.exists(save_location):
raise FileNotFoundError('Invalid save-location path')
for (repo_base,
filename) in util.definition.site_files_by_repo(site_name):
for repo_base, filename in util.definition.site_files_by_repo(
site_name):
repo_name = os.path.normpath(repo_base).split(os.sep)[-1]
save_file = os.path.join(save_location, repo_name + '.yaml')
if repo_name not in save_files:
save_files[repo_name] = open(
os.path.join(save_location, repo_name + ".yaml"), "w")
LOG.debug("Collecting file %s to file %s" %
(filename,
os.path.join(save_location, repo_name + '.yaml')))
save_files[repo_name] = open(save_file, "w")
LOG.debug("Collecting file %s to file %s" % (filename, save_file))
with open(filename) as f:
save_files[repo_name].writelines(f.readlines())
lines_to_write = f.readlines()
if lines_to_write[0] != '---\n':
lines_to_write = ['---\n'] + lines_to_write
if lines_to_write[-1] != '...\n':
lines_to_write.append('...\n')
save_files[repo_name].writelines(lines_to_write)
except Exception as ex:
raise click.ClickException("Error saving output: %s" % str(ex))
finally:
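
The '---'/'...' handling above matters because each output file concatenates
many YAML documents into a single stream; a standalone sketch (not part of this
change) of the round trip it enables:

import yaml

# Two source files' contents, neither carrying its own document markers.
doc_a = "name: global-common\n"
doc_b = "name: cicd-chart\n"

# With '---' prepended and '...' appended per file, as collect() now
# ensures, the concatenated stream parses back into two documents.
merged = "---\n" + doc_a + "...\n" + "---\n" + doc_b + "...\n"
assert len(list(yaml.safe_load_all(merged))) == 2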

View File

@ -12,6 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import click
from pegleg import config
@ -24,6 +26,7 @@ __all__ = [
'path',
'pluck',
'site_files',
'site_files_by_repo',
]
@ -61,7 +64,8 @@ def load_as_params(site_name, primary_repo_base=None):
def path(site_name, primary_repo_base=None):
if not primary_repo_base:
primary_repo_base = config.get_primary_repo()
return '%s/site/%s/site-definition.yaml' % (primary_repo_base, site_name)
return os.path.join(primary_repo_base, 'site', site_name,
'site-definition.yaml')
def pluck(site_definition, key):

View File

@ -163,15 +163,17 @@ def _site_type_revision_path(site_type, revision):
def _site_path(site_name):
return 'site/%s' % site_name
return os.path.join(config.get_rel_site_path(), site_name)
def list_sites(primary_repo_base=None):
"""Get a list of site definition directories in the primary repo."""
if not primary_repo_base:
primary_repo_base = config.get_primary_repo()
for path in os.listdir(os.path.join(primary_repo_base, 'site')):
joined_path = os.path.join(primary_repo_base, 'site', path)
full_site_path = os.path.join(primary_repo_base,
config.get_rel_site_path())
for path in os.listdir(full_site_path):
joined_path = os.path.join(full_site_path, path)
if os.path.isdir(joined_path):
yield path
@ -198,8 +200,8 @@ def existing_directories():
def slurp(path):
if not os.path.exists(path):
raise click.ClickException(
'%s not found. pegleg must be run from '
'the root of a configuration repostiory.' % path)
'%s not found. Pegleg must be run from the root of a configuration'
' repository.' % path)
with open(path) as f:
try:

View File

@ -17,6 +17,7 @@ from setuptools import find_packages
setup(
name='pegleg',
python_requires='>=3.5.0',
version='0.1.0',
packages=find_packages(),
entry_points={

View File

@ -0,0 +1,73 @@
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import yaml
from pegleg import cli
from pegleg import config
from pegleg.engine import errorcodes
from pegleg.engine import lint
from pegleg.engine import site
from pegleg.engine.util import deckhand
from pegleg.engine.util import files
from tests.unit.fixtures import create_tmp_deployment_files
def _test_site_collect(expected_site, collection_path):
site_definition = {
'schema': 'pegleg/SiteDefinition/v1',
'metadata': {
'storagePolicy': 'cleartext',
'schema': 'metadata/Document/v1',
'layeringDefinition': {
'abstract': False,
'layer': 'site'
},
'name': expected_site
},
'data': {
'site_type': expected_site,
'revision': 'v1.0'
}
}
expected_document_names = [
'global-common',
'global-v1.0',
'%s-type-common' % expected_site,
'%s-type-v1.0' % expected_site,
site_definition['metadata']['name'],
'%s-chart' % expected_site,
'%s-passphrase' % expected_site,
]
deployment_files = collection_path.join('deployment_files.yaml')
assert deployment_files.isfile()
with open(str(deployment_files), 'r') as f:
lines = f.read()
deployment_documents = list(yaml.safe_load_all(lines))
assert sorted(expected_document_names) == sorted(
[x['metadata']['name'] for x in deployment_documents])
def test_site_collect(tmpdir, create_tmp_deployment_files):
collection_path = tmpdir.mkdir("cicd_path")
site.collect("cicd", str(collection_path))
_test_site_collect("cicd", collection_path)
collection_path = tmpdir.mkdir("lab_path")
site.collect("lab", str(collection_path))
_test_site_collect("lab", collection_path)

View File

@ -40,29 +40,113 @@ def test_verify_deckhand_render_site_documents_separately(
result = lint._verify_deckhand_render()
assert result == []
expected_sitenames = ['cicd', 'lab']
sites = files.list_sites()
# Verify that both expected site types are listed.
assert sorted(sites) == ['cicd', 'lab']
assert sorted(sites) == expected_sitenames
# Verify that Deckhand called render twice, once for each site.
assert mock_render.call_count == 2
expected_documents = []
for sitename in expected_sitenames:
documents = [{
'data': 'global-common-password',
'metadata': {
'layeringDefinition': {
'abstract': False,
'layer': 'global'
},
'name': 'global-common',
'schema': 'metadata/Document/v1',
'storagePolicy': 'cleartext'
},
'schema': 'deckhand/Passphrase/v1'
}, {
'data': 'global-v1.0-password',
'metadata': {
'layeringDefinition': {
'abstract': False,
'layer': 'global'
},
'name': 'global-v1.0',
'schema': 'metadata/Document/v1',
'storagePolicy': 'cleartext'
},
'schema': 'deckhand/Passphrase/v1'
}, {
'data': '%s-type-common-password' % sitename,
'metadata': {
'layeringDefinition': {
'abstract': False,
'layer': 'type'
},
'name': '%s-type-common' % sitename,
'schema': 'metadata/Document/v1',
'storagePolicy': 'cleartext'
},
'schema': 'deckhand/Passphrase/v1'
}, {
'data': '%s-type-v1.0-password' % sitename,
'metadata': {
'layeringDefinition': {
'abstract': False,
'layer': 'type'
},
'name': '%s-type-v1.0' % sitename,
'schema': 'metadata/Document/v1',
'storagePolicy': 'cleartext'
},
'schema': 'deckhand/Passphrase/v1'
}, {
'data': '%s-chart-password' % sitename,
'metadata': {
'layeringDefinition': {
'abstract': False,
'layer': 'site'
},
'name': '%s-chart' % sitename,
'schema': 'metadata/Document/v1',
'storagePolicy': 'cleartext'
},
'schema': 'deckhand/Passphrase/v1'
}, {
'data': '%s-passphrase-password' % sitename,
'metadata': {
'layeringDefinition': {
'abstract': False,
'layer': 'site'
},
'name': '%s-passphrase' % sitename,
'schema': 'metadata/Document/v1',
'storagePolicy': 'cleartext'
},
'schema': 'deckhand/Passphrase/v1'
}, {
'data': {
'revision': 'v1.0',
'site_type': sitename
},
'metadata': {
'layeringDefinition': {
'abstract': False,
'layer': 'site'
},
'name': sitename,
'schema': 'metadata/Document/v1',
'storagePolicy': 'cleartext'
},
'schema': 'pegleg/SiteDefinition/v1'
}]
expected_documents.extend(documents)
mock_calls = list(mock_render.mock_calls)
actual_documents = []
for mock_call in mock_calls:
documents = mock_call[2]['documents']
assert len(documents) == 7
actual_documents.extend(documents)
# Verify one site_definition.yaml per site.
site_definitions = [x for x in documents if isinstance(x, dict)]
assert len(site_definitions) == 1
site_definition = site_definitions[0]
site_type = site_definition['data']['site_type']
assert site_definition['data']['revision'] == 'v1.0'
assert site_type in expected_documents
# Verify expected documents collected per site.
other_documents = expected_documents[site_type]
for other_document in other_documents:
assert other_document in documents
sort_func = lambda x: x['metadata']['name']
assert sorted(
expected_documents, key=sort_func) == sorted(
actual_documents, key=sort_func)

View File

@ -13,6 +13,7 @@
# limitations under the License.
from __future__ import absolute_import
import copy
import os
import tempfile
@ -22,11 +23,29 @@ import yaml
from pegleg import config
from pegleg.engine.util import files
TEST_DOCUMENT = """
---
schema: deckhand/Passphrase/v1
metadata:
schema: metadata/Document/v1
name: %(name)s
storagePolicy: cleartext
layeringDefinition:
abstract: False
layer: %(layer)s
data: %(name)s-password
...
"""
def _gen_document(**kwargs):
test_document = TEST_DOCUMENT % kwargs
return yaml.load(test_document)
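
For reference, _gen_document(name='global-common', layer='global') yields
roughly the following dictionary (derived from the TEST_DOCUMENT template
above):

{
    'schema': 'deckhand/Passphrase/v1',
    'metadata': {
        'schema': 'metadata/Document/v1',
        'name': 'global-common',
        'storagePolicy': 'cleartext',
        'layeringDefinition': {'abstract': False, 'layer': 'global'},
    },
    'data': 'global-common-password',
}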
@pytest.fixture()
def create_tmp_deployment_files(tmpdir):
"""Fixture that creates a temporary directory structure."""
orig_primary_repo = config.get_primary_repo()
sitenames = ['cicd', 'lab']
SITE_TEST_STRUCTURE = {
@ -59,12 +78,14 @@ def create_tmp_deployment_files(tmpdir):
'directories': {
'common': {
'files': {
'global-common.yaml': 'global-common'
'global-common.yaml':
_gen_document(name="global-common", layer='global')
}
},
'v1.0': {
'files': {
'global-v1.0.yaml': 'global-v1.0'
'global-v1.0.yaml':
_gen_document(name="global-v1.0", layer='global')
}
}
}
@ -80,13 +101,15 @@ def create_tmp_deployment_files(tmpdir):
'common': {
'files': {
'%s-type-common.yaml' % site:
('%s-type-common' % site)
_gen_document(
name="%s-type-common" % site, layer='type')
}
},
'v1.0': {
'files': {
'%s-type-v1.0.yaml' % site:
('%s-type-v1.0' % site)
_gen_document(
name="%s-type-v1.0" % site, layer='type')
}
}
}
@ -108,17 +131,18 @@ metadata:
schema: metadata/Document/v1
storagePolicy: cleartext
schema: pegleg/SiteDefinition/v1
...
""" % (site, site)
test_structure = SITE_TEST_STRUCTURE.copy()
test_structure['directories']['secrets']['directories']['passphrases'][
'files'] = {
'%s-passphrase.yaml' % site: '%s-passphrase' % site
'%s-passphrase.yaml' % site:
_gen_document(name="%s-passphrase" % site, layer='site')
}
test_structure['directories']['software']['directories']['charts'][
'files'] = {
'%s-chart.yaml' % site: '%s-chart' % site
'%s-chart.yaml' % site:
_gen_document(name="%s-chart" % site, layer='site')
}
test_structure['files']['site-definition.yaml'] = yaml.safe_load(
site_definition)
@ -128,4 +152,8 @@ schema: pegleg/SiteDefinition/v1
yield
config.set_primary_repo(orig_primary_repo)
config.GLOBAL_CONTEXT = {
'primary_repo': './',
'aux_repos': [],
'site_path': 'site'
}