Twigleg to Pegleg transition

Change-Id: Ice500c7e7e30609827d69dc0ae178b627ea046d9
Author: mb874d@att.com
This commit is contained in:
Craig Anderson 2018-01-31 12:20:42 -08:00
parent 85e23436eb
commit e59c4fbf2a
24 changed files with 1036 additions and 0 deletions

4
.gitreview Normal file
View File

@ -0,0 +1,4 @@
[gerrit]
host=review.gerrithub.io
port=29418
project=att-comdev/pegleg

3
docs/requirements.txt Normal file
View File

@ -0,0 +1,3 @@
# Documentation
sphinx>=1.6.2
sphinx_rtd_theme==0.2.4

130
docs/source/conf.py Normal file
View File

@ -0,0 +1,130 @@
# -*- coding: utf-8 -*-
#
# pegleg documentation build configuration file, originally generated by
# sphinx-quickstart on Sat Sep 16 03:40:50 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import sphinx_rtd_theme

# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.todo',
    'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
# templates_path = []
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'pegleg'
copyright = u'2017 AT&T Intellectual Property.'
author = u'pegleg Authors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.1.0'
# The full version, including alpha/beta/rc tags.
release = u'0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False

# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []

# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
# NOTE(review): 'ucpintdoc' looks inherited from the UCP integration docs
# this file was copied from — presumably should be 'peglegdoc'; confirm.
htmlhelp_basename = 'ucpintdoc'

# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

44
docs/source/index.rst Normal file
View File

@ -0,0 +1,44 @@
..
Copyright 2017 AT&T Intellectual Property.
All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
.. tip::
The Undercloud Platform is part of the AIC CP (AT&T Integrated Cloud
Containerized Platform). More details may be found in the `Treasuremap`_ repository.
Building this Documentation
---------------------------
Use of ``sphinx-build -b html docs/source docs/build`` will build a html
version of this documentation that can be viewed using a browser at
docs/build/index.html on the local filesystem.
Conventions and Standards
-------------------------
.. toctree::
:maxdepth: 2
authoring_strategy
artifacts
.. _Helm: https://helm.sh/
.. _Kubernetes: https://kubernetes.io/
.. _Openstack: https://www.openstack.org/
.. _Openstack Helm: https://github.com/openstack/openstack-helm
.. _Treasuremap: https://github.com/att-comdev/treasuremap
.. _yaml: http://yaml.org/

21
tools/pegleg.sh Normal file
View File

@ -0,0 +1,21 @@
#!/usr/bin/env bash
# Build the pegleg image (honoring any proxy settings) and run the pegleg
# CLI in a container with the repository mounted at /var/pegleg.
set -eu

SCRIPT_DIR=$(realpath "$(dirname "${0}")")
SOURCE_DIR=${SCRIPT_DIR}/pegleg
WORKSPACE=$(realpath "${SCRIPT_DIR}/..")
IMAGE_PEGLEG=${IMAGE_PEGLEG:-quay.io/attcomdev/pegleg:latest}

# Use ${var:-} so `set -u` does not abort when the proxy variables are unset
# (the common case), which was a fatal "unbound variable" error before.
if [[ -z ${http_proxy:-} && -z ${https_proxy:-} ]]
then
    docker build -q --rm -t "${IMAGE_PEGLEG}" "${SOURCE_DIR}" > /dev/null
else
    docker build -q --rm -t "${IMAGE_PEGLEG}" \
        --build-arg http_proxy="${http_proxy}" \
        --build-arg https_proxy="${https_proxy}" \
        "${SOURCE_DIR}" > /dev/null
fi

docker run --rm -t \
    -v "${WORKSPACE}:/var/pegleg" \
    "${IMAGE_PEGLEG}" \
    pegleg "${@}"

View File

@ -0,0 +1,4 @@
__pycache__
.tox
.eggs
pegleg.egg-info

6
tools/pegleg/.gitignore vendored Normal file
View File

@ -0,0 +1,6 @@
__pycache__
/.tox
/.eggs
/pegleg.egg-info
/ChangeLog
/AUTHORS

10
tools/pegleg/Dockerfile Normal file
View File

@ -0,0 +1,10 @@
FROM python:3.6
VOLUME /var/pegleg
WORKDIR /var/pegleg
COPY requirements.txt /opt/pegleg/requirements.txt
RUN pip3 install --no-cache-dir -r /opt/pegleg/requirements.txt
COPY . /opt/pegleg
RUN pip3 install -e /opt/pegleg

View File

141
tools/pegleg/pegleg/cli.py Normal file
View File

@ -0,0 +1,141 @@
from . import engine
import click
import logging
import sys
LOG = logging.getLogger(__name__)
LOG_FORMAT = '%(asctime)s %(levelname)-8s %(name)s:%(funcName)s [%(lineno)3d] %(message)s' # noqa
CONTEXT_SETTINGS = {
'help_option_names': ['-h', '--help'],
}
@click.group(context_settings=CONTEXT_SETTINGS)
@click.pass_context
@click.option(
    '-v',
    '--verbose',
    # Fixed: the original passed ``is_flag=bool`` (the type object).  It
    # happened to behave as a flag only because ``bool`` is truthy.
    is_flag=True,
    default=False,
    help='Enable debug logging')
def main(ctx, *, verbose):
    """Pegleg CLI entry point; configures logging before any subcommand."""
    if verbose:
        log_level = logging.DEBUG
    else:
        log_level = logging.INFO
    logging.basicConfig(format=LOG_FORMAT, level=log_level)
@main.group(help='Commands related to sites')
def site():
    """Container group for the site-level subcommands below."""
    pass
@site.command(help='Output complete config for one site')
@click.option(
    '-o', '--output', 'output_stream',
    type=click.File(mode='w'), default=sys.stdout,
    help='Where to output')
@click.argument('site_name')
def collect(*, output_stream, site_name):
    """Concatenate every document for SITE_NAME into the output stream."""
    engine.site.collect(site_name, output_stream)
@site.command(help='Find sites impacted by changed files')
@click.option(
    '-i', '--input', 'input_stream',
    type=click.File(mode='r'), default=sys.stdin,
    help='List of impacted files')
@click.option(
    '-o', '--output', 'output_stream',
    type=click.File(mode='w'), default=sys.stdout)
def impacted(*, input_stream, output_stream):
    """Read changed file paths and report which sites they affect."""
    engine.site.impacted(input_stream, output_stream)
@site.command('list', help='List known sites')
@click.option(
    '-o', '--output', 'output_stream',
    type=click.File(mode='w'), default=sys.stdout,
    help='Where to output')
def list_(*, output_stream):
    """Print a one-line summary for every site in the repository."""
    engine.site.list_(output_stream)
@site.command(help='Show details for one site')
@click.option(
    '-o', '--output', 'output_stream',
    type=click.File(mode='w'), default=sys.stdout,
    help='Where to output')
@click.argument('site_name')
def show(*, output_stream, site_name):
    """Print definition parameters and file list for SITE_NAME."""
    engine.site.show(site_name, output_stream)
def _validate_revision_callback(_ctx, _param, value):
if value is not None and value.startswith('v'):
return value
else:
raise click.BadParameter('revisions must start with "v"')
@main.group(help='Create directory structure and stubs')
def stub():
    """Container group for stub-generation subcommands."""
    pass
# Shared option decorators reused by the stub subcommands below.
RELEASE_OPTION = click.option(
    '-r',
    '--aic-revision',
    callback=_validate_revision_callback,
    required=True,
    help='AIC revision to use (e.g. v4.0)')
SITE_TYPE_OPTION = click.option(
    '-t',
    '--site-type',
    required=True,
    # Fixed: the help text was missing its closing parenthesis.
    help='Site type to use (e.g. "medium" or "large")')
@stub.command('global', help='Add global structure for a new revision')
@RELEASE_OPTION
def global_(*, aic_revision):
    """Create the global directory skeleton for *aic_revision*."""
    engine.stub.global_(aic_revision)
# NOTE: named explicitly so the function can avoid shadowing the ``site``
# command group defined above — the original ``def site`` rebound the
# module-level name ``site`` to this command function after import.
@stub.command('site', help='Add a new site + revision')
@click.argument('site_name')
@RELEASE_OPTION
@SITE_TYPE_OPTION
def stub_site(*, aic_revision, site_type, site_name):
    """Create the definition document and directory skeleton for a new site."""
    engine.stub.site(aic_revision, site_type, site_name)
@stub.command('site-type', help='Add a new site-type + revision')
@RELEASE_OPTION
@SITE_TYPE_OPTION
def site_type(*, aic_revision, site_type):
    """Create the directory skeleton for a new site type + revision."""
    engine.stub.site_type(aic_revision, site_type)
@main.command(help='Sanity checks for repository content')
def lint():
    """Run all repository lint checks; exits non-zero on any failure."""
    engine.lint.full()

View File

@ -0,0 +1,4 @@
# flake8: noqa
from . import lint
from . import site
from . import stub

View File

@ -0,0 +1,148 @@
from pegleg.engine import util
import click
import jsonschema
import logging
import os
import pkg_resources
import yaml
__all__ = ['full']
LOG = logging.getLogger(__name__)
DECKHAND_SCHEMAS = {
'root': 'schemas/deckhand-root.yaml',
'metadata/Control/v1': 'schemas/deckhand-metadata-control.yaml',
'metadata/Document/v1': 'schemas/deckhand-metadata-document.yaml',
}
def full():
    """Run every lint check; raise a ClickException listing all failures."""
    errors = _verify_no_unexpected_files() + _verify_file_contents()
    if errors:
        raise click.ClickException('\n'.join(['Linting failed:'] + errors))
def _verify_no_unexpected_files():
    """Compare on-disk layer directories against those the manifests expect."""
    expected = set()
    for site_name in util.files.list_sites():
        params = util.definition.load_as_params(site_name)
        expected.update(util.files.directories_for(**params))
    LOG.debug('expected_directories: %s', expected)
    found = util.files.existing_directories()
    LOG.debug('found_directories: %s', found)
    errors = [
        '%s exists, but is unused' % unused
        for unused in sorted(found - expected)
    ]
    # "common" directories are optional, so their absence is not an error.
    errors.extend(
        '%s was not found, but expected by manifest' % missing
        for missing in sorted(expected - found)
        if not missing.endswith('common'))
    return errors
def _verify_file_contents():
    """Validate every repository file against the Deckhand schemas."""
    schemas = _load_schemas()
    return [
        error
        for filename in util.files.all()
        for error in _verify_single_file(filename, schemas)
    ]
def _verify_single_file(filename, schemas):
    """Check one YAML file: start-of-document marker and per-document schemas."""
    errors = []
    LOG.debug("Validating file %s." % filename)
    with open(filename) as f:
        # Peek at the first four bytes, then rewind for the real parse.
        if f.read(4) != '---\n':
            errors.append('%s does not begin with YAML beginning of document '
                          'marker "---".' % filename)
        f.seek(0)
        try:
            for document in yaml.safe_load_all(f):
                errors.extend(_verify_document(document, schemas, filename))
        except Exception as e:
            errors.append('%s is not valid yaml: %s' % (filename, e))
    return errors
# Secret document types that must carry "storagePolicy: encrypted".
MANDATORY_ENCRYPTED_TYPES = {
    'deckhand/CertificateAuthorityKey/v1',
    'deckhand/CertificateKey/v1',
    'deckhand/Passphrase/v1',
    'deckhand/PrivateKey/v1',
}


def _verify_document(document, schemas, filename):
    """Validate a single parsed document; return a list of error strings.

    Checks, in order: the Deckhand root schema, the metadata schema named
    by the document, the layer/directory convention, and the secret
    storage-policy and location rules.
    """
    name = ':'.join([
        document.get('schema', ''),
        document.get('metadata', {}).get('name', '')
    ])
    errors = []
    try:
        jsonschema.validate(document, schemas['root'])
        # Metadata validation is only attempted once the root schema holds,
        # since it indexes schemas by document['metadata']['schema'].
        try:
            jsonschema.validate(document['metadata'],
                                schemas[document['metadata']['schema']])
        except Exception as e:
            errors.append('%s (document %s) failed Deckhand metadata schema '
                          'validation: %s' % (filename, name, e))
    except Exception as e:
        errors.append(
            '%s (document %s) failed Deckhand root schema validation: %s' %
            (filename, name, e))
    # A document's declared layer must match the top-level directory
    # (global/type/site) the file lives in.
    layer = _layer(document)
    if layer is not None and layer != _expected_layer(filename):
        errors.append(
            '%s (document %s) had unexpected layer "%s", expected "%s"' %
            (filename, name, layer, _expected_layer(filename)))
    # secrets must live in the appropriate directory, and must be
    # "storagePolicy: encrypted".
    if document.get('schema') in MANDATORY_ENCRYPTED_TYPES:
        storage_policy = document.get('metadata', {}).get('storagePolicy')
        if storage_policy != 'encrypted':
            errors.append(
                '%s (document %s) is a secret, but has unexpected storagePolicy: "%s"'
                % (filename, name, storage_policy))
        if not _filename_in_section(filename, 'secrets/'):
            errors.append(
                '%s (document %s) is a secret, is not stored in a secrets path'
                % (filename, name))
    return errors
def _layer(data):
if hasattr(data, 'get'):
return data.get('metadata', {}).get('layeringDefinition',
{}).get('layer')
def _expected_layer(filename):
parts = os.path.normpath(filename).split(os.sep)
return parts[0]
def _load_schemas():
    """Read each Deckhand schema bundled with the pegleg package."""
    return {
        key: util.files.slurp(
            pkg_resources.resource_filename('pegleg', filename))
        for key, filename in DECKHAND_SCHEMAS.items()
    }
def _filename_in_section(filename, section):
    """Return True when *filename* sits under *section* inside its layer dir."""
    directory = util.files.directory_for(path=filename)
    if directory is None:
        # directory_for returns None for paths outside any known layer;
        # the original code crashed here (len(None)).  Such a file cannot
        # be in the requested section.
        return False
    rest = filename[len(directory) + 1:]
    # (The original also tested ``rest is not None``, which was always
    # true since slicing a str returns a str.)
    return rest.startswith(section)

View File

@ -0,0 +1,52 @@
from pegleg.engine import util
import collections
import csv
import json
__all__ = ['collect', 'impacted', 'list_', 'show']
def collect(site_name, output_stream):
    """Write the contents of every file belonging to *site_name*."""
    for filename in util.definition.site_files(site_name):
        with open(filename) as f:
            output_stream.write(f.read())
def impacted(input_stream, output_stream):
    """Read changed paths from *input_stream*; emit affected site names."""
    mapping = _build_impact_mapping()
    sites = set()
    for raw_line in input_stream:
        directory = util.files.directory_for(path=raw_line.strip())
        if directory is not None:
            sites.update(mapping[directory])
    output_stream.writelines('%s\n' % name for name in sorted(sites))
def list_(output_stream):
    """Write one space-delimited row (name, type, revision) per known site."""
    writer = csv.DictWriter(
        output_stream,
        fieldnames=['site_name', 'site_type', 'aic_revision'],
        delimiter=' ')
    for site_name in util.files.list_sites():
        writer.writerow(util.definition.load_as_params(site_name))
def show(site_name, output_stream):
    """Dump the site's definition parameters and file list as pretty JSON."""
    data = dict(
        util.definition.load_as_params(site_name),
        files=list(util.definition.site_files(site_name)))
    json.dump(data, output_stream, indent=2, sort_keys=True)
def _build_impact_mapping():
    """Map each layer directory to the set of sites that consume it."""
    mapping = collections.defaultdict(set)
    for name in util.files.list_sites():
        directories = util.files.directories_for(
            **util.definition.load_as_params(name))
        for directory in directories:
            mapping[directory].add(name)
    return mapping

View File

@ -0,0 +1,19 @@
from pegleg.engine import util
__all__ = ['global_', 'site', 'site_type']
def global_(aic_revision):
    """Create the global directory skeleton for *aic_revision*."""
    util.files.create_global_directories(aic_revision)


def site(aic_revision, site_type, site_name):
    """Create a site-definition document plus the site directory skeleton."""
    util.definition.create(
        aic_revision=aic_revision, site_name=site_name, site_type=site_type)
    params = util.definition.load_as_params(site_name)
    util.files.create_site_directories(**params)


def site_type(aic_revision, site_type):
    """Create the directory skeleton for a new site type + revision."""
    util.files.create_site_type_directories(
        aic_revision=aic_revision, site_type=site_type)

View File

@ -0,0 +1,3 @@
# flake8: noqa
from . import definition
from . import files

View File

@ -0,0 +1,63 @@
from . import files
import click
__all__ = [
'create',
'load',
'load_as_params',
'path',
'pluck',
'site_files',
]
def create(*, site_name, site_type, aic_revision):
    """Write a new SiteDefinition document for *site_name* to disk.

    Raises (via files.dump) if the definition file already exists.
    """
    definition = {
        'schema': 'pegleg/SiteDefinition/v1',
        'metadata': {
            'schema': 'metadata/Document/v1',
            'name': site_name,
            'storagePolicy': 'cleartext',
            'layeringDefinition': {
                'abstract': False,
                'layer': 'site',
            },
        },
        'data': {
            'aic_revision': aic_revision,
            'site_type': site_type,
        }
    }
    files.dump(path(site_name), definition)
def load(site):
    """Parse and return the site-definition document for *site*."""
    return files.slurp(path(site))
def load_as_params(site_name):
    """Return the definition's ``data`` section plus the site name itself."""
    params = load(site_name).get('data', {})
    params['site_name'] = site_name
    return params
def path(site_name):
    """Relative path of the site-definition document for *site_name*."""
    return 'site/{}/site-definition.yaml'.format(site_name)
def pluck(site_definition, key):
    """Fetch ``data[key]`` from a site definition, with a friendly error.

    Raises click.ClickException naming the site and key on any failure.
    """
    try:
        return site_definition['data'][key]
    except Exception as e:
        site_name = site_definition.get('metadata', {}).get('name')
        # Fixed: the format string was missing its ``%`` operator, so this
        # path raised ``TypeError: 'str' object is not callable`` instead
        # of the intended ClickException.
        raise click.ClickException(
            'failed to get "%s" from site definition "%s": %s' %
            (key, site_name, e))
def site_files(site_name):
    """Yield every file belonging to *site_name*; the definition file last."""
    params = load_as_params(site_name)
    yield from files.search(files.directories_for(**params))
    yield path(site_name)

View File

@ -0,0 +1,175 @@
import click
import os
import yaml
__all__ = [
'all',
'create_global_directories',
'create_site_directories',
'create_site_type_directories',
'directories_for',
'directory_for',
'dump',
'existing_directories',
'search',
'slurp',
]
# How many path components below each top-level tree identify a layer
# directory (e.g. type/<site_type>/<revision> is two levels deep).
DIR_DEPTHS = {
    'global': 1,
    'type': 2,
    'site': 1,
}


# NOTE: intentionally shadows the ``all`` builtin within this module;
# exported via __all__ as part of the public API.
def all():
    """Yield every file under the global, type, and site trees."""
    return search(DIR_DEPTHS.keys())
def create_global_directories(aic_revision):
    """Create the full directory skeleton for global common + revision."""
    _create_tree(_global_common_path())
    _create_tree(_global_revision_path(aic_revision))


# aic_revision is accepted (and ignored) so callers can splat site params.
def create_site_directories(*, site_name, aic_revision, **_kwargs):
    """Create the full directory skeleton for one site."""
    _create_tree(_site_path(site_name))


def create_site_type_directories(*, aic_revision, site_type):
    """Create the full directory skeleton for a site type common + revision."""
    _create_tree(_site_type_common_path(site_type))
    _create_tree(_site_type_revision_path(site_type, aic_revision))
FULL_STRUCTURE = {
'directories': {
'baremetal': {},
'networks': {
'directories': {
'physical': {},
},
},
'pki': {},
'profiles': {
'directories': {
'hardware': {},
'host': {},
}
},
'schemas': {},
'secrets': {
'directories': {
'certificate-authorities': {},
'certificates': {},
'keypairs': {},
'passphrases': {},
},
},
'software': {
'directories': {
'charts': {},
'config': {},
'manifests': {},
},
},
},
}
def _create_tree(root_path, *, tree=FULL_STRUCTURE):
for name, data in tree.get('directories', {}).items():
path = os.path.join(root_path, name)
os.makedirs(path, mode=0o775, exist_ok=True)
_create_tree(path, tree=data)
def directories_for(*, site_name, aic_revision, site_type):
    """All layer directories that contribute documents to *site_name*."""
    return [
        _global_common_path(),
        _global_revision_path(aic_revision),
        _site_type_common_path(site_type),
        _site_type_revision_path(site_type, aic_revision),
        _site_path(site_name),
    ]
def _global_common_path():
return 'global/common'
def _global_revision_path(aic_revision):
return 'global/%s' % aic_revision
def _site_type_common_path(site_type):
return 'type/%s/common' % site_type
def _site_type_revision_path(site_type, aic_revision):
return 'type/%s/%s' % (site_type, aic_revision)
def _site_path(site_name):
return 'site/%s' % site_name
def list_sites():
    """Yield the name of every site directory under ``site/`` in the cwd."""
    for entry in os.listdir('site'):
        if os.path.isdir(os.path.join('site', entry)):
            yield entry
def directory_for(*, path):
    """Map *path* to its owning layer directory, or None if unrecognized."""
    parts = os.path.normpath(path).split(os.sep)
    depth = DIR_DEPTHS.get(parts[0])
    if depth is None:
        return None
    return os.path.join(*parts[:depth + 1])
def existing_directories():
    """Set of all layer directories currently present on disk."""
    found = set()
    for root, depth in DIR_DEPTHS.items():
        found |= _recurse_subdirs(root, depth)
    return found
def slurp(path):
    """Parse the YAML document at *path*; die with a CLI error if absent/bad."""
    if not os.path.exists(path):
        raise click.ClickException(
            # Fixed typo: "repostiory" -> "repository".
            '%s not found. pegleg must be run from '
            'the root of an AIC cLCP configuration repository.' % path)
    with open(path) as f:
        try:
            # safe_load instead of the original bare yaml.load: these are
            # repo-local config files, but yaml.load without a Loader can
            # construct arbitrary Python objects from tagged input.
            return yaml.safe_load(f)
        except Exception as e:
            raise click.ClickException('Failed to parse %s:\n%s' % (path, e))
def dump(path, data):
    """Serialize *data* as YAML at *path*; refuse to overwrite existing files."""
    if os.path.exists(path):
        raise click.ClickException('%s already exists, aborting' % path)
    # Ensure the parent directory exists before writing.
    os.makedirs(os.path.dirname(path), mode=0o775, exist_ok=True)
    with open(path, 'w') as f:
        yaml.dump(data, f, explicit_start=True)
def _recurse_subdirs(search_path, depth):
directories = set()
for path in os.listdir(search_path):
joined_path = os.path.join(search_path, path)
if os.path.isdir(joined_path):
if depth == 1:
directories.add(joined_path)
else:
directories.update(_recurse_subdirs(joined_path, depth - 1))
return directories
def search(search_paths):
    """Yield every file found (recursively) under each of *search_paths*."""
    for base in search_paths:
        for root, _dirs, names in os.walk(base):
            for name in names:
                yield os.path.join(root, name)

View File

@ -0,0 +1,19 @@
$schema: http://json-schema.org/schema#
definitions:
labels:
type: object
type: object
properties:
schema:
type: string
pattern: '^metadata/Control/v1$'
name:
type: string
labels:
$ref: '#/definitions/labels'
additionalProperties: false
required:
- schema
- name

View File

@ -0,0 +1,103 @@
$schema: http://json-schema.org/schema#
definitions:
action:
type: object
properties:
method:
type: string
enum:
- delete
- merge
- replace
path:
$ref: '#/definitions/path'
additionalProperties: false
required:
- method
- path
labels:
type: object
path:
type: string
schema:
type: string
pattern: '^.+/.+/v[0-9](\.[0-9])?$'
substitution:
type: object
properties:
dest:
type: object
properties:
path:
$ref: '#/definitions/path'
pattern:
type: string
additionalProperties: false
required:
- path
src:
type: object
properties:
name:
type: string
path:
$ref: '#/definitions/path'
schema:
$ref: '#/definitions/schema'
required:
- name
- path
- schema
additionalProperties: false
required:
- dest
- src
type: object
properties:
schema:
type: string
pattern: '^metadata/Document/v1$'
name:
type: string
labels:
$ref: '#/definitions/labels'
layeringDefinition:
type: object
properties:
abstract:
type: boolean
layer:
type: string
parentSelector:
$ref: '#/definitions/labels'
actions:
type: array
items:
$ref: '#/definitions/action'
additionalProperties: false
required:
- abstract
- layer
storagePolicy:
type: string
enum:
- cleartext
- encrypted
substitutions:
type: array
items:
$ref: '#/definitions/substitution'
additionalProperties: false
required:
- schema
- name
- layeringDefinition
- storagePolicy

View File

@ -0,0 +1,30 @@
$schema: http://json-schema.org/schema#
definitions:
labels:
type: object
schema:
type: string
pattern: '^.+/.+/v[0-9](\.[0-9])?$'
type: object
properties:
schema:
$ref: '#/definitions/schema'
metadata:
properties:
schema:
type: string
enum:
- metadata/Control/v1
- metadata/Document/v1
additionalProperties: true
required:
- schema
data: {}
additionalProperties: false
required:
- schema
- metadata
- data

View File

@ -0,0 +1,22 @@
$schema: http://json-schema.org/schema#
definitions:
site:
type: object
properties:
type:
type: string
version:
type: string
pattern: '^v.+$'
type: object
properties:
sites:
type: object
additionalProperties:
$ref: '#/definitions/site'
additionalProperties: false
required:
- sites

View File

@ -0,0 +1,3 @@
click==6.7
jsonschema==2.6.0
pyyaml==3.12

17
tools/pegleg/setup.py Normal file
View File

@ -0,0 +1,17 @@
from setuptools import setup

setup(
    name='pegleg',
    version='0.1.0',
    packages=['pegleg'],
    entry_points={
        'console_scripts': [
            'pegleg=pegleg.cli:main',
        ]},
    include_package_data=True,
    # package_data keys must be package names.  The original used
    # 'schemas', which matches no package, so the bundled schema files
    # were never installed with the wheel/sdist.
    package_data={
        'pegleg': [
            'schemas/*.yaml',
        ],
    },
)

15
tools/pegleg/tox.ini Normal file
View File

@ -0,0 +1,15 @@
[tox]
envlist = lint
[testenv:fmt]
deps = yapf==0.20.0
commands =
yapf -ir {toxinidir}/pegleg
[testenv:lint]
deps =
yapf==0.20.0
flake8==3.5.0
commands =
yapf -rd {toxinidir}/pegleg
flake8 {toxinidir}/pegleg