Add deployment group validation to shipyard

Adds the validation of the existence of a deployment
configuration and a deployment strategy document to
the checks before submitting a site action for
processing.

Change-Id: I61bf67759bd919dcc31208370cb1be5a777baf54
This commit is contained in:
Bryan Strassner 2018-04-27 17:16:24 -05:00
parent afc2ea501d
commit 23fd081ece
48 changed files with 2047 additions and 403 deletions

View File

@ -20,8 +20,9 @@ psycopg2==2.7.3.1
docker-py==1.6.0
apache-airflow[crypto,celery,postgres,hive,hdfs,jdbc]==1.9.0
python-openstackclient==3.11.0
kubernetes>=6.0.0
# Dependencies for other UCP components
git+https://github.com/att-comdev/deckhand.git@3cdf3d2d896d43c6e3bc26170522c3eee0d7158f#egg=deckhand
git+https://github.com/att-comdev/drydock.git@42aa3c486ee4c495c2377d31481df5ab681f84f2#egg=drydock_provisioner
git+https://github.com/att-comdev/drydock.git@8af92eaf29ca0dd6a129748c132ea7f6593eae83#egg=drydock_provisioner
git+https://github.com/att-comdev/armada.git@7a2ba22ab12a3f1f180b6af4085972ba44853377#egg=armada

View File

@ -21,7 +21,7 @@ falcon==1.2.0
jsonschema==2.6.0
keystoneauth1==3.4.0
keystonemiddleware==4.21.0
networkx==2.1
networkx==2.1 # common/deployment_group
oslo.config==5.2.0
oslo.policy==1.33.1
PasteDeploy==1.5.2
@ -33,3 +33,7 @@ setuptools==39.0.1
SQLAlchemy==1.1.13
ulid==1.1
uwsgi==2.0.15
# Dependencies for other UCP components
git+https://github.com/att-comdev/deckhand.git@3cdf3d2d896d43c6e3bc26170522c3eee0d7158f#egg=deckhand
git+https://github.com/att-comdev/drydock.git@8af92eaf29ca0dd6a129748c132ea7f6593eae83#egg=drydock_provisioner

View File

@ -0,0 +1,28 @@
..
Copyright 2018 AT&T Intellectual Property.
All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
.. _common_modules:
Common Modules
==============
The various packages in this common package should each be stand-alone
modules having no dependencies on prior logic running in Shipyard (e.g.
Setup of configuration files, Shipyard/Airflow database access, etc...). It is
ok if these modules use imports found in requirements.txt
These modules are intended to be safe for reuse outside of the context of
the Shipyard_Airflow/Api service as well as within.

View File

@ -17,11 +17,11 @@
Encapsulates classes and functions that provide core deployment group
functionality used during baremetal provisioning.
"""
import collections
from enum import Enum
import logging
import operator
from .errors import DeploymentGroupLabelFormatError
from .errors import DeploymentGroupStageError
from .errors import InvalidDeploymentGroupError
from .errors import InvalidDeploymentGroupNodeLookupError
@ -29,6 +29,27 @@ from .errors import InvalidDeploymentGroupNodeLookupError
LOG = logging.getLogger(__name__)
def check_label_format(label_string):
    """Ensure that ``label_string`` follows the key:value convention.

    Raises DeploymentGroupLabelFormatError when the string is not a
    single colon-separated pair, or when either side of the colon is
    empty (after stripping whitespace).
    """
    parts = label_string.split(":")
    if len(parts) != 2:
        raise DeploymentGroupLabelFormatError(
            "Label {} is formatted incorrectly. One : (colon) character is "
            "required, and the label must be in key:value format".format(
                label_string)
        )
    if any(part.strip() == "" for part in parts):
        raise DeploymentGroupLabelFormatError(
            "Label {} is formatted incorrectly. The values on either side "
            "of the colon character must not be empty.".format(
                label_string)
        )
class Stage(Enum):
"""Valid values for baremetal node and deployment group stages of
deployment
@ -83,12 +104,19 @@ class GroupNodeSelector:
self.node_tags = selector_dict.get('node_tags', [])
self.rack_names = selector_dict.get('rack_names', [])
for label in self.node_labels:
check_label_format(label)
# A selector is an "all_selector" if there are no criteria specified.
self.all_selector = not any([self.node_names, self.node_labels,
self.node_tags, self.rack_names])
if self.all_selector:
LOG.debug("Selector values select all available nodes")
def get_node_labels_as_dict(self):
    """Convert the key:value label strings into a dict.

    Whitespace around keys and values is stripped.
    """
    labels_dict = {}
    for label in self.node_labels:
        parts = label.split(':')
        labels_dict[parts[0].strip()] = parts[1].strip()
    return labels_dict
class SuccessCriteria:
"""Defines the success criteria for a deployment group
@ -180,7 +208,8 @@ class DeploymentGroup:
:param group_dict: dictionary representing a group
:param node_lookup: an injected function that will perform node lookup for
a group. Function must accept an iterable of GroupNodeSelector and
return a string list of node names
return a string iterable of node names (or empty iterable if there are
no node names)
Example group_dict::
@ -280,18 +309,20 @@ class DeploymentGroup:
not useful as the results are stored in self.full_nodes
"""
LOG.debug("Beginning lookup of nodes for group %s", self.name)
node_list = self.node_lookup(self.selectors)
if node_list is None:
node_list = []
if not isinstance(node_list, collections.Sequence):
nodes = self.node_lookup(self.selectors)
if nodes is None:
nodes = []
try:
node_list = list(nodes)
except TypeError:
raise InvalidDeploymentGroupNodeLookupError(
"The node lookup function supplied to the DeploymentGroup "
"does not return a valid result of an iterable"
"is not an iterable"
)
if not all(isinstance(node, str) for node in node_list):
raise InvalidDeploymentGroupNodeLookupError(
"The node lookup function supplied to the DeploymentGroup "
"has returned an iterable, but not all strings"
"is not all strings"
)
LOG.info("Group %s selectors have resolved to nodes: %s",
self.name, ", ".join(node_list))

View File

@ -231,7 +231,7 @@ def _generate_group_graph(groups):
LOG.debug("%s has parent %s", group.name, parent)
graph.add_edge(parent, group.name)
else:
LOG.debug("%s is not dependent upon any other groups")
LOG.debug("%s is not dependent upon any other groups", group.name)
_detect_cycles(graph)
return graph
@ -258,5 +258,5 @@ def _detect_cycles(graph):
involved_nodes.update(dep)
raise DeploymentGroupCycleError(
"The following are involved in a circular dependency:"
" %s", ", ".join(involved_nodes)
" {}".format(", ".join(involved_nodes))
)

View File

@ -22,7 +22,7 @@ class InvalidDeploymentGroupError(Exception):
pass
class InvalidDeploymentGroupNodeLookupError(InvalidDeploymentGroupError):
class InvalidDeploymentGroupNodeLookupError(Exception):
"""InvalidDeploymentGroupNodeLookupError
Indicates that there is a problem with the node lookup function
@ -31,6 +31,15 @@ class InvalidDeploymentGroupNodeLookupError(InvalidDeploymentGroupError):
pass
class DeploymentGroupLabelFormatError(Exception):
    """Raised when a value intended to be a key:value label is not
    formatted correctly.
    """
class DeploymentGroupCycleError(Exception):
"""DeploymentGroupCycleError

View File

@ -0,0 +1,106 @@
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A node_lookup class with a lookup method that can be used to access Drydock
to retrieve nodes based on a list of GroupNodeSelector objects
"""
import logging
from .deployment_group import GroupNodeSelector
from .errors import (
InvalidDeploymentGroupNodeLookupError
)
LOG = logging.getLogger(__name__)
class NodeLookup:
    """Resolves GroupNodeSelectors to node names using Drydock.

    Wraps a Drydock client and a design reference so that selector-based
    lookups can be performed against a single design.

    :param drydock_client: a Drydock Client (Api Client from Drydock)
    :param design_ref: the design ref that will be used to perform a lookup
    """
    def __init__(self, drydock_client, design_ref):
        # Empty dictionary or none for design ref will not work.
        if not design_ref:
            raise InvalidDeploymentGroupNodeLookupError(
                "An incomplete design ref was supplied to the NodeLookup: "
                " {}".format(str(design_ref))
            )
        if drydock_client is None:
            raise TypeError('Drydock client is required.')
        self.design_ref = design_ref
        self.drydock_client = drydock_client

    def lookup(self, selectors):
        """Resolve the selectors to a set of node names via Drydock.

        :param selectors: list of GroupNodeSelector objects used to construct
            a request against Drydock to get a list of nodes
        """
        validated = _validate_selectors(selectors)
        drydock_filter = _generate_node_filter(validated)
        return _get_nodes_for_filter(self.drydock_client,
                                     self.design_ref,
                                     drydock_filter)
def _validate_selectors(selectors):
    """Validate that the selectors are in a valid format and return a list.

    Raises InvalidDeploymentGroupNodeLookupError when the input is not
    iterable or contains anything other than GroupNodeSelector objects.
    """
    try:
        sel_list = list(selectors)
    except TypeError:
        raise InvalidDeploymentGroupNodeLookupError(
            "The node lookup function requires an iterable of "
            "GroupNodeSelectors as input"
        )
    for sel in sel_list:
        if not isinstance(sel, GroupNodeSelector):
            raise InvalidDeploymentGroupNodeLookupError(
                "The node lookup function requires all input elements in the "
                "selectors be GroupNodeSelectors"
            )
    return sel_list
def _generate_node_filter(selectors):
"""Create a Drydock node_filter based on the input selectors"""
node_filter = {}
node_filter['filter_set_type'] = 'union'
node_filter['filter_set'] = []
for sel in selectors:
if sel.all_selector:
# Drydock regards the lack of a selector as being 'all',
# and an intersection of all with other criteria is the same as
# just the other criteria.
continue
filter_ = {'filter_type': 'intersection'}
filter_['node_names'] = sel.node_names
filter_['node_tags'] = sel.node_tags
filter_['node_labels'] = sel.get_node_labels_as_dict()
filter_['rack_names'] = sel.rack_names
node_filter['filter_set'].append(filter_)
if not node_filter['filter_set']:
# if there have been no filters added to the filter set, we want
# an empty filter object (all) instead of having one that has no
# criteria (none)
node_filter = None
return node_filter
def _get_nodes_for_filter(client, design_ref, node_filter):
return set(client.get_nodes_for_filter(
design_ref=design_ref,
node_filter=node_filter
))

View File

@ -0,0 +1,70 @@
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for use by document validators."""
import logging
from .errors import DocumentLookupError, DocumentNotFoundError
LOG = logging.getLogger(__name__)
class DocumentValidationUtils:
    """Helpers for retrieving rendered documents from Deckhand.

    :param deckhand_client: an instantiated Deckhand client used for all
        document lookups; required (a TypeError is raised if None)
    """
    def __init__(self, deckhand_client):
        if deckhand_client is None:
            raise TypeError('Deckhand client is required.')
        self.deckhand_client = deckhand_client

    def get_unique_doc(self, revision_id, name, schema):
        """Retrieve a single, unique document as a dictionary

        :param revision_id: the revision to fetch the rendered document from
        :param name: the name of the document
        :param schema: the schema for the document
        Returns the data of the matching document. Raises
        DocumentNotFoundError unless exactly one document matches and it
        has data; raises DocumentLookupError if the Deckhand query fails.
        """
        filters = {
            "schema": schema,
            "metadata.name": name
        }
        docs = self.get_docs_by_filter(revision_id, filters)
        LOG.info("Found %s documents", len(docs))
        if len(docs) == 1 and docs[0].data:
            return docs[0].data
        raise DocumentNotFoundError

    def get_docs_by_filter(self, revision_id, filters):
        """Get the dictionary form of documents from Deckhand using a filter

        :param revision_id: The revision to use
        :param filters: a dictionary containing the needed filters to get the
            needed documents
        Returns a list of the rendered documents matching the filter, or an
        empty list if none match. Raises DocumentLookupError when the
        Deckhand query itself fails.
        """
        LOG.info("Attempting to retrieve %s from revision %s", str(filters),
                 revision_id)
        try:
            docs = self.deckhand_client.revisions.documents(revision_id,
                                                            rendered=True,
                                                            **filters)
        except Exception as ex:
            # If we looked for a document, it's either not there ([] response)
            # or it's there. Anything else is a DocumentLookupError.
            LOG.exception(ex)
            raise DocumentLookupError("Exception during lookup of a document "
                                      "for validation: {}".format(str(ex)))
        return docs or []

View File

@ -0,0 +1,186 @@
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base class for document validators"""
import abc
import logging
from .errors import (
DeckhandClientRequiredError, DocumentLookupError, DocumentNotFoundError
)
from .document_validation_utils import DocumentValidationUtils
LOG = logging.getLogger(__name__)
class DocumentValidator(metaclass=abc.ABCMeta):
    """Document validator base class

    Subclasses declare the schema they validate, the severity of the
    document being absent, and the specific checks to run (do_validate).

    :param deckhand_client: An instance of a Deckhand client that can be used
        to interact with Deckhand during the validation
    :param revision: The numeric Deckhand revision of document under test
    :param doc_name: The name of the document under test
    """
    def __init__(self, deckhand_client, revision, doc_name):
        if deckhand_client is None:
            raise DeckhandClientRequiredError()
        self.deckhand_client = deckhand_client
        self.docutils = DocumentValidationUtils(self.deckhand_client)
        self.doc_name = doc_name
        # self.error_status is False if no validations fail. It becomes
        # True for any validation failure (including missing docs that are
        # not error level, because it interrupts the flow from proceeding with
        # further validation.)
        self.error_status = False
        self.revision = revision
        # (validator_class, doc_name) tuples queued by do_validate
        # implementations; consumed by the DocumentValidationManager.
        self._triggered_validations = []
        # accumulated ValidationMessage dicts produced during validate()
        self.val_msg_list = []

    @property
    @abc.abstractmethod
    def schema(self):
        """The schema name of the document being validated by this validator"""
        pass

    @property
    @abc.abstractmethod
    def missing_severity(self):
        """The severity level if this document is missing

        Error, Warning, or Info
        """
        pass

    @property
    def triggered_validations(self):
        return self._triggered_validations

    def add_triggered_validation(self, validator_class, doc_name):
        """The validation to add to the list of triggered validations

        :param validator_class: The class of the validator to use
        :param doc_name: the document name to validate
        """
        self._triggered_validations.append((validator_class, doc_name))

    def val_msg(self, message, name, error=True, level='Error',
                documents=None, diagnostic=None):
        """Generate a ValidationMessage

        :param error: True or False
        :param level: "Error", "Warning", "Info"
        :param message: The explanation of the validation message
        :param name: The short name of the message, e.g.: DocumentMissing
        :param documents: list of {"schema": <schema name>,
                                   "name": <document name>}
            defaults to the current document under test
        :param diagnostic: Possible solutions or troubleshooting. Defaults to
            a generic message about being generated by Shipyard

        In accordance with:
        https://github.com/att-comdev/ucp-integration/blob/master/docs/source/api-conventions.rst#validationmessage-message-type
        """
        if documents is None:
            documents = [{"schema": self.schema, "name": self.doc_name}]
        if diagnostic is None:
            diagnostic = "Message generated by Shipyard."
        return {
            "error": error,
            "level": level,
            "message": message,
            "name": name,
            "documents": documents,
            "diagnostic": diagnostic,
            "kind": "ValidationMessage"
        }

    @abc.abstractmethod
    def do_validate(self):
        """Run Validations"""
        pass

    def validate(self):
        """Triggers the validations for this validator

        Triggers the specific checks after any common checks
        """
        if self.missing_severity not in ["Error", "Warning", "Info"]:
            # LOG.warn is deprecated; use LOG.warning
            LOG.warning("Document Validator for {}, {} does not have a valid "
                        "value set for missing_severity. Assuming Error".format(
                            self.schema, self.doc_name
                        ))
            # NOTE(review): this assumes concrete subclasses expose
            # missing_severity as a plain attribute; a read-only property
            # would make this assignment raise AttributeError - confirm.
            self.missing_severity = "Error"
        try:
            LOG.debug("Looking up document %s: %s from revision %s",
                      self.schema,
                      self.doc_name,
                      self.revision)
            self.doc_dict = self.docutils.get_unique_doc(self.revision,
                                                         self.doc_name,
                                                         self.schema)
            # only proceed to validating the document if it is present.
            LOG.debug("Generic document validation complete. Proceeding to "
                      "specific validation")
            self.do_validate()
        except DocumentLookupError as dle:
            self.val_msg_list.append(self.val_msg(
                name=dle.__class__.__name__,
                error=True,
                level="Error",
                message="Document Lookup failed for {}".format(self.schema),
                diagnostic=str(dle)))
        except DocumentNotFoundError as dnfe:
            name = dnfe.__class__.__name__
            if self.missing_severity == "Error":
                diagnostic = (
                    "The configuration documents must include a document with "
                    "schema: {} and name: {}".format(
                        self.schema,
                        self.doc_name
                    )
                )
                message = "Missing required document {}".format(self.schema)
                error = True
                self.error_status = True
            elif self.missing_severity == "Warning":
                diagnostic = (
                    "It is recommended, but not required that the "
                    "configuration documents include a document with "
                    "schema: {} and name: {}".format(
                        self.schema,
                        self.doc_name
                    )
                )
                message = "Missing recommended document {}".format(self.schema)
                error = False
                self.error_status = True
            elif self.missing_severity == "Info":
                # fix: the two string literals previously concatenated to
                # "was notfound"; a separating space is required.
                diagnostic = (
                    "Optional document with schema: {} and name: {} was not "
                    "found among the configuration documents.".format(
                        self.schema,
                        self.doc_name
                    )
                )
                message = "Optional document {} not found".format(self.schema)
                error = False
                # a missing doc halts further validation even at Info level,
                # so the overall status is still marked as errored.
                self.error_status = True
            self.val_msg_list.append(self.val_msg(
                name=name, error=error, level=self.missing_severity,
                message=message, diagnostic=diagnostic
            ))

View File

@ -0,0 +1,90 @@
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Coordination and running of the documents to validate for Shipyard"""
import logging
LOG = logging.getLogger(__name__)
class _DocValidationDef:
    """Tracks a single validation: its validator class, target document
    name, and execution state/results.

    :param validator: the class of the validator
    :param name: the name of the document to be validated
    """
    def __init__(self, validator, name):
        LOG.info("Setting up validation for %s", name)
        self.validator = validator
        self.name = name
        # execution state, maintained by the DocumentValidationManager
        self.results = []
        self.errored = False
        self.finished = False
class DocumentValidationManager:
    """Coordinates the validation of Shipyard documents

    Runs each supplied validator, and any validations those validators
    trigger, until none remain unfinished.

    :param deckhand_client: An instance of a Deckhand client that can be used
        to interact with Deckhand during the validation
    :param revision: The numeric Deckhand revision of document under test
    :param validations: The list of tuples containing a Validator (extending
        DocumentValidator) and a document name.
    """
    def __init__(self, deckhand_client, revision, validations):
        self.deckhand_client = deckhand_client
        self.revision = revision
        self.validations = self._parse_validations(validations)
        # True once any executed validator reports an error status
        self.errored = False
        # count of validators actually run, including triggered ones
        self.validations_run = 0

    def _parse_validations(self, validations):
        # Turn tuples into DocValidationDefs
        defs = []
        for val, name in validations:
            defs.append(_DocValidationDef(val, name))
        return defs

    def validate(self):
        """Run the validations

        Runs through the validations until all are finished. Validators may
        trigger additional validations; those are appended to
        self.validations and picked up on the next pass of the while loop.
        Returns the combined list of ValidationMessage dicts from all
        validators run.
        """
        unfinished = [v for v in self.validations if not v.finished]
        while unfinished:
            # find the next doc to validate
            for val_def in unfinished:
                vldtr = val_def.validator(deckhand_client=self.deckhand_client,
                                          revision=self.revision,
                                          doc_name=val_def.name)
                LOG.info("Validating document %s: %s ",
                         vldtr.schema, vldtr.doc_name)
                vldtr.validate()
                self.validations_run += 1
                # set the validation status from the status of the validator
                val_def.errored = vldtr.error_status
                val_def.results.extend(vldtr.val_msg_list)
                val_def.finished = True
                # acquire any new validations that should be run
                new_vals = self._parse_validations(vldtr.triggered_validations)
                self.validations.extend(new_vals)
            # re-snapshot: new validations may have been triggered above
            unfinished = [v for v in self.validations if not v.finished]
        # gather the results
        final_result = []
        for v in self.validations:
            if v.errored:
                self.errored = True
            final_result.extend(v.results)
        return final_result

View File

@ -11,14 +11,18 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Common Modules
"""Errors raised by the document validators"""
The various packages in this common package should each be stand-alone
modules having no dependencies on prior logic running in Shipyard (e.g.
Setup of configuration files, Shipyard/Airflow database access, etc...). It is
ok if these modules use imports found in requirements.txt
These modules are intended to be safe for reuse outside of the context of
the Shipyard_Airflow/Api service as well as within.
"""
class DeckhandClientRequiredError(Exception):
    """Raised when a Deckhand client was required but was not provided"""
class DocumentLookupError(Exception):
    """Raised when an error occurs while looking up a document"""
class DocumentNotFoundError(Exception):
    """Raised when a document that was expected to be found was not found"""

View File

@ -0,0 +1,111 @@
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Action validators module
Validators are run as part of action creation and will raise an ApiError if
there are any validation failures.
"""
import logging
import falcon
from shipyard_airflow.common.document_validators.document_validator_manager \
import DocumentValidationManager
from shipyard_airflow.control import service_clients
from shipyard_airflow.control.validators.validate_deployment_configuration \
import ValidateDeploymentConfiguration
from shipyard_airflow.errors import ApiError
LOG = logging.getLogger(__name__)
def validate_site_action(action):
    """Validates that the deployment configuration is correctly set up

    Checks:
    - The deployment configuration from Deckhand using the design version
      - If the deployment configuration is missing, error
    - The deployment strategy from the deployment configuration.
      - If the deployment strategy is specified, but is missing, error.
      - Check that there are no cycles in the groups
    """
    _SiteActionValidator(
        dh_client=service_clients.deckhand_client(),
        action=action
    ).validate()
class _SiteActionValidator:
"""The validator object setup and used by the validate_site_action function
"""
def __init__(self, dh_client, action):
self.action = action
self.doc_revision = self._get_doc_revision()
self.cont_on_fail = str(self._action_param(
'continue-on-fail')).lower() == 'true'
self.doc_val_mgr = DocumentValidationManager(
dh_client,
self.doc_revision,
[(ValidateDeploymentConfiguration, 'deployment-configuration')]
)
def validate(self):
results = self.doc_val_mgr.validate()
if self.doc_val_mgr.errored:
if self.cont_on_fail:
LOG.warn("Validation failures occured, but 'continue-on-fail' "
"is set to true. Processing continues")
else:
raise ApiError(
title='Document validation failed',
description='InvalidConfigurationDocuments',
status=falcon.HTTP_400,
error_list=results,
retry=False,
)
def _action_param(self, p_name):
"""Retrieve the value of the specified parameter or None if it doesn't
exist
"""
try:
return self.action['parameters'][p_name]
except KeyError:
return None
def _get_doc_revision(self):
"""Finds the revision id for the committed revision"""
doc_revision = self.action.get('committed_rev_id')
if doc_revision is None:
raise ApiError(
title='Invalid document revision',
description='InvalidDocumentRevision',
status=falcon.HTTP_400,
error_list=[{
'message': (
'Action {} with id {} was unable to find a valid '
'committed document revision'.format(
self.action.get('name'),
self.action.get('id')
)
)
}],
retry=False,
)
return doc_revision

View File

@ -22,11 +22,14 @@ from oslo_config import cfg
import ulid
from shipyard_airflow import policy
from shipyard_airflow.control.action.action_helper import (determine_lifecycle,
format_action_steps)
from shipyard_airflow.control.helpers.action_helper import (
determine_lifecycle,
format_action_steps
)
from shipyard_airflow.control.action import action_validators
from shipyard_airflow.control.base import BaseResource
from shipyard_airflow.control.configdocs import configdocs_helper
from shipyard_airflow.control.configdocs.configdocs_helper import (
from shipyard_airflow.control.helpers import configdocs_helper
from shipyard_airflow.control.helpers.configdocs_helper import (
ConfigdocsHelper)
from shipyard_airflow.control.json_schemas import ACTION
from shipyard_airflow.db.db import AIRFLOW_DB, SHIPYARD_DB
@ -35,24 +38,23 @@ from shipyard_airflow.errors import ApiError
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
# Mappings of actions to dags
SUPPORTED_ACTION_MAPPINGS = {
# action : dag, validation
'deploy_site': {
'dag': 'deploy_site',
'validator': None
},
'update_site': {
'dag': 'update_site',
'validator': None
},
'redeploy_server': {
'dag': 'redeploy_server',
# TODO (Bryan Strassner) This should have a validator method
# Needs to be revisited when defined
'validator': None
def _action_mappings():
# Return dictionary mapping actions to their dags and validators
return {
'deploy_site': {
'dag': 'deploy_site',
'validators': [action_validators.validate_site_action]
},
'update_site': {
'dag': 'update_site',
'validators': [action_validators.validate_site_action]
},
'redeploy_server': {
'dag': 'redeploy_server',
'validators': []
}
}
}
# /api/v1.0/actions
@ -93,6 +95,7 @@ class ActionsResource(BaseResource):
resp.location = '/api/v1.0/actions/{}'.format(action['id'])
def create_action(self, action, context, allow_intermediate_commits=False):
action_mappings = _action_mappings()
# use uuid assigned for this request as the id of the action.
action['id'] = ulid.ulid()
# the invoking user
@ -101,12 +104,12 @@ class ActionsResource(BaseResource):
action['timestamp'] = str(datetime.utcnow())
# validate that action is supported.
LOG.info("Attempting action: %s", action['name'])
if action['name'] not in SUPPORTED_ACTION_MAPPINGS:
if action['name'] not in action_mappings:
raise ApiError(
title='Unable to start action',
description='Unsupported Action: {}'.format(action['name']))
dag = SUPPORTED_ACTION_MAPPINGS.get(action['name'])['dag']
dag = action_mappings.get(action['name'])['dag']
action['dag_id'] = dag
# Set up configdocs_helper
@ -121,9 +124,9 @@ class ActionsResource(BaseResource):
# populate action parameters if they are not set
if 'parameters' not in action:
action['parameters'] = {}
# validate if there is any validation to do
validator = SUPPORTED_ACTION_MAPPINGS.get(action['name'])['validator']
if validator is not None:
for validator in action_mappings.get(action['name'])['validators']:
# validators will raise ApiError if they are not validated.
validator(action)

View File

@ -14,9 +14,11 @@
import falcon
from shipyard_airflow import policy
from shipyard_airflow.control.action.action_helper import (determine_lifecycle,
format_action_steps)
from shipyard_airflow.control.base import BaseResource
from shipyard_airflow.control.helpers.action_helper import (
determine_lifecycle,
format_action_steps
)
from shipyard_airflow.db.db import AIRFLOW_DB, SHIPYARD_DB
from shipyard_airflow.errors import ApiError

View File

@ -19,8 +19,8 @@ import requests
from oslo_config import cfg
from shipyard_airflow import policy
from shipyard_airflow.control.action.action_helper import ActionsHelper
from shipyard_airflow.control.base import BaseResource
from shipyard_airflow.control.helpers.action_helper import ActionsHelper
CONF = cfg.CONF
LOG = logging.getLogger(__name__)

View File

@ -16,7 +16,7 @@ from oslo_config import cfg
from shipyard_airflow import policy
from shipyard_airflow.control.base import BaseResource
from shipyard_airflow.control.af_monitoring.workflow_helper import (
from shipyard_airflow.control.helpers.workflow_helper import (
WorkflowHelper
)
from shipyard_airflow.errors import ApiError

View File

@ -18,10 +18,10 @@ import falcon
from oslo_config import cfg
from shipyard_airflow import policy
from shipyard_airflow.control.configdocs import configdocs_helper
from shipyard_airflow.control.api_lock import (api_lock, ApiLockType)
from shipyard_airflow.control.base import BaseResource
from shipyard_airflow.control.configdocs.configdocs_helper import (
from shipyard_airflow.control.helpers import configdocs_helper
from shipyard_airflow.control.helpers.configdocs_helper import (
ConfigdocsHelper)
from shipyard_airflow.errors import ApiError
@ -201,7 +201,9 @@ class CommitConfigDocsResource(BaseResource):
description='There are no documents in the buffer to commit',
status=falcon.HTTP_409,
retry=True)
validations = helper.get_validations_for_buffer()
validations = helper.get_validations_for_revision(
helper.get_revision_id(configdocs_helper.BUFFER)
)
if dryrun:
validations['code'] = falcon.HTTP_200
if 'message' in validations:

View File

@ -20,7 +20,7 @@ from oslo_config import cfg
from shipyard_airflow import policy
from shipyard_airflow.control.base import BaseResource
from shipyard_airflow.control.configdocs.configdocs_helper import \
from shipyard_airflow.control.helpers.configdocs_helper import \
ConfigdocsHelper
from shipyard_airflow.errors import ApiError

View File

@ -0,0 +1,36 @@
..
Copyright 2018 AT&T Intellectual Property.
All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
.. _helper_modules:
Helper Modules
==============
A home for the helper modules used by the various apis. While mostly the
helpers are used by the api that encompasses the function - e.g. configdocs
uses the configdocs helper, there are multiple cases where there's a need
to cross between functions. One such example is the need for the action
api's to need to use functionality related to configdocs. Rather than having
dependencies between the functional sections, this package serves as a place
for the common dependencies encompassed into helper modules.
One major difference between the helpers and the api controllers is that
helpers should never raise API errors, but rather App Errors or other non-http
focused errors.
Note: The deckhand client module found in this package is intended to be
(largely) replaced by use of the Deckhand client, when that refactoring can
be accomplished.

View File

@ -17,7 +17,6 @@ Deckhand, providing a representation of a buffer and a committed
bucket for Shipyard
"""
import enum
import json
import logging
import threading
@ -25,11 +24,18 @@ import falcon
from oslo_config import cfg
import requests
from shipyard_airflow.control.configdocs.deckhand_client import (
DeckhandClient, DeckhandError, DeckhandPaths, DeckhandRejectedInputError,
from shipyard_airflow.common.document_validators.document_validator_manager \
import DocumentValidationManager
from shipyard_airflow.control import service_clients
from shipyard_airflow.control.helpers.design_reference_helper import \
DesignRefHelper
from shipyard_airflow.control.helpers.deckhand_client import (
DeckhandClient, DeckhandError, DeckhandRejectedInputError,
DeckhandResponseError, DocumentExistsElsewhereError, NoRevisionsExistError)
from shipyard_airflow.control.service_endpoints import (
Endpoints, get_endpoint, get_token)
from shipyard_airflow.control.validators.validate_deployment_configuration \
import ValidateDeploymentConfiguration
from shipyard_airflow.errors import ApiError, AppError
CONF = cfg.CONF
@ -468,132 +474,14 @@ class ConfigdocsHelper(object):
status=falcon.HTTP_404,
retry=False)
def get_validations_for_buffer(self):
"""
Convenience method to do validations for buffer version.
"""
buffer_rev_id = self.get_revision_id(BUFFER)
if buffer_rev_id:
return self.get_validations_for_revision(buffer_rev_id)
raise AppError(
title='Unable to start validation of buffer',
description=('Buffer revision id could not be determined from'
'Deckhand'),
status=falcon.HTTP_500,
retry=False)
@staticmethod
def _get_design_reference(revision_id):
# Constructs the design reference as json for use by other components
design_reference = {
"rel": "design",
"href": "deckhand+{}".format(
DeckhandClient.get_path(DeckhandPaths.RENDERED_REVISION_DOCS)
.format(revision_id)),
"type": "application/x-yaml"
}
return json.dumps(design_reference)
@staticmethod
def _get_validation_endpoints():
# returns the list of validation endpoint supported
val_ep = '{}/validatedesign'
return [
{
'name': 'Drydock',
'url': val_ep.format(get_endpoint(Endpoints.DRYDOCK))
},
{
'name': 'Armada',
'url': val_ep.format(get_endpoint(Endpoints.ARMADA))
},
]
@staticmethod
def _get_validation_threads(validation_endpoints, revision_id, ctx):
# create a list of validation threads from the endpoints
validation_threads = []
for endpoint in validation_endpoints:
# create a holder for things we need back from the threads
response = {'response': None}
exception = {'exception': None}
design_ref = ConfigdocsHelper._get_design_reference(revision_id)
validation_threads.append({
'thread':
threading.Thread(
target=ConfigdocsHelper._get_validations_for_component,
kwargs={
'url': endpoint['url'],
'design_reference': design_ref,
'response': response,
'exception': exception,
'context_marker': ctx.external_marker,
'thread_name': endpoint['name'],
'log_extra': {
'req_id': ctx.request_id,
'external_ctx': ctx.external_marker,
'user': ctx.user
}
}),
'name': endpoint['name'],
'url': endpoint['url'],
'response': response,
'exception': exception
})
return validation_threads
@staticmethod
def _get_validations_for_component(url, design_reference, response,
exception, context_marker, thread_name,
**kwargs):
# Invoke the POST for validation
try:
headers = {
'X-Context-Marker': context_marker,
'X-Auth-Token': get_token(),
'content-type': 'application/json'
}
http_resp = requests.post(
url,
headers=headers,
data=design_reference,
timeout=(
CONF.requests_config.validation_connect_timeout,
CONF.requests_config.validation_read_timeout))
# 400 response is "valid" failure to validate. > 400 is a problem.
if http_resp.status_code > 400:
http_resp.raise_for_status()
response_dict = http_resp.json()
response['response'] = response_dict
except Exception as ex:
# catch anything exceptional as a failure to run validations
unable_str = '{} unable to validate configdocs'.format(thread_name)
LOG.error("%s. Exception follows.", unable_str)
LOG.error(str(ex))
response['response'] = {
'details': {
'messageList': [{
'message': unable_str,
'kind': 'SimpleMessage',
'error': True
}, {
'message': str(ex),
'kind': 'SimpleMessage',
'error': True
}]
}
}
exception['exception'] = ex
def _get_validations_from_ucp_components(self, revision_id):
"""Invoke other UCP components to retrieve their validations"""
resp_msgs = []
error_count = 0
design_ref = DesignRefHelper().get_design_reference(revision_id)
validation_threads = ConfigdocsHelper._get_validation_threads(
ConfigdocsHelper._get_validation_endpoints(), revision_id,
self.ctx)
validation_threads = _get_validation_threads(
_get_validation_endpoints(), self.ctx, design_ref)
# trigger each validation in parallel
for validation_thread in validation_threads:
if validation_thread.get('thread'):
@ -623,10 +511,8 @@ class ConfigdocsHelper(object):
if msg.get('error'):
error_count = error_count + 1
default_level = 'Error'
val_msg = ConfigdocsHelper._generate_validation_message(
msg,
level=default_level,
source=th_name
val_msg = _generate_validation_message(
msg, level=default_level, source=th_name
)
resp_msgs.append(val_msg)
return (error_count, resp_msgs)
@ -660,6 +546,13 @@ class ConfigdocsHelper(object):
# Only invoke the other validations if Deckhand has not returned any.
if (error_count == 0):
# Start with Shipyard's own validations
results = self._get_shipyard_validations(revision_id)
err_results = [r for r in results if r['error']]
error_count += len(err_results)
resp_msgs.extend(results)
# And then the other ucp components
(cpnt_ec, cpnt_msgs) = self._get_validations_from_ucp_components(
revision_id)
resp_msgs.extend(cpnt_msgs)
@ -667,8 +560,27 @@ class ConfigdocsHelper(object):
LOG.debug("UCP component validations: %s", cpnt_ec)
# return the formatted status response
return ConfigdocsHelper._format_validations_to_status(
resp_msgs, error_count)
return _format_validations_to_status(resp_msgs, error_count)
def _get_shipyard_validations(self, revision_id):
    """Run Shipyard's own document validations against a revision.

    :param revision_id: the numeric Deckhand revision whose documents
        are to be validated
    Returns a list of validation message dicts. Never raises: any
    failure while running the validations is converted into a single
    error-level message so overall processing can continue.
    """
    # Run Shipyard's own validations
    try:
        sy_val_mgr = DocumentValidationManager(
            service_clients.deckhand_client(),
            revision_id,
            [(ValidateDeploymentConfiguration, 'deployment-configuration')]
        )
        return sy_val_mgr.validate()
    except Exception as ex:
        # Don't let any exceptions here prevent subsequent processing,
        # but make sure we register an error to prevent success.
        return [_generate_validation_message({
            "error": True,
            "message": ("Shipyard has encountered an unexpected error "
                        "while processing document validations"),
            "name": "DocumentValidationProcessingError",
            "diagnostic": str(ex),
        })]
def get_deckhand_validation_status(self, revision_id):
"""Retrieve Deckhand validation status
@ -677,8 +589,7 @@ class ConfigdocsHelper(object):
"""
dh_validations = self._get_deckhand_validation_errors(revision_id)
error_count = len(dh_validations)
return ConfigdocsHelper._format_validations_to_status(
dh_validations, error_count)
return _format_validations_to_status(dh_validations, error_count)
def _get_deckhand_validation_errors(self, revision_id):
# Returns stored validation errors that deckhand has for this revision.
@ -689,102 +600,12 @@ class ConfigdocsHelper(object):
if dh_result.get('errors'):
for error in dh_result.get('errors'):
resp_msgs.append(
ConfigdocsHelper._generate_dh_val_msg(
error,
dh_result_name=dh_result.get('name')
_generate_dh_val_msg(
error, dh_result_name=dh_result.get('name')
)
)
return resp_msgs
@staticmethod
def _generate_dh_val_msg(msg, dh_result_name):
# Maps a deckhand validation response to a ValidationMessage.
# Result name is used if the msg doesn't specify a name field.
# Deckhand may provide the following fields:
# 'validation_schema', 'schema_path', 'name', 'schema', 'path',
# 'error_section', 'message'
not_spec = 'not specified'
if 'diagnostic' not in msg:
# format path, error_section, validation_schema, and schema_path
# into diagnostic
msg['diagnostic'] = 'Section: {} at {} (schema {} at {})'.format(
msg.get('error_section', not_spec),
msg.get('path', not_spec),
msg.get('validation_schema', not_spec),
msg.get('schema_path', not_spec)
)
if 'documents' not in msg:
msg['documents'] = [{
'name': msg.get('name', not_spec),
'schema': msg.get('schema', not_spec)
}]
return ConfigdocsHelper._generate_validation_message(
msg,
name=dh_result_name,
error=True,
level='Error',
source='Deckhand'
)
@staticmethod
def _generate_validation_message(msg, **kwargs):
# Special note about kwargs: the values provided via kwargs are used
# as defaults, not overrides. Values in the msg will take precedence.
#
# Using a compatible message, transform it into a ValidationMessage.
# By combining it with the default values passed via kwargs. The values
# used from kwargs match the fields listed below.
fields = ['message', 'error', 'name', 'documents', 'level',
'diagnostic', 'source']
if 'documents' not in kwargs:
kwargs['documents'] = []
valmsg = {}
for key in fields:
valmsg[key] = msg.get(key, kwargs.get(key, None))
valmsg['kind'] = 'ValidationMessage'
valmsg['level'] = (
valmsg.get('level') or ConfigdocsHelper._error_to_level(
valmsg.get('error'))
)
return valmsg
@staticmethod
def _error_to_level(error):
"""Convert a boolean error field to 'Error' or 'Info' """
if error:
return 'Error'
else:
return 'Info'
@staticmethod
def _format_validations_to_status(val_msgs, error_count):
# Using a list of validation messages and an error count,
# formulates and returns a status response dict
status = 'Success'
message = 'Validations succeeded'
code = falcon.HTTP_200
if error_count > 0:
status = 'Failure'
message = 'Validations failed'
code = falcon.HTTP_400
return {
"kind": "Status",
"apiVersion": "v1.0",
"metadata": {},
"status": status,
"message": message,
"reason": "Validation",
"details": {
"errorCount": error_count,
"messageList": val_msgs,
},
"code": code
}
def tag_buffer(self, tag):
"""
Convenience method to tag the buffer version.
@ -872,3 +693,183 @@ class ConfigdocsHelper(object):
return True
return False
def _get_validation_endpoints():
    """Return the list of supported validation endpoints.

    Each entry pairs a UCP component name with that component's
    /validatedesign URL.
    """
    url_template = '{}/validatedesign'
    components = [
        ('Drydock', Endpoints.DRYDOCK),
        ('Armada', Endpoints.ARMADA),
    ]
    return [
        {'name': name, 'url': url_template.format(get_endpoint(endpoint))}
        for name, endpoint in components
    ]
def _get_validation_threads(validation_endpoints, ctx, design_ref):
# create a list of validation threads from the endpoints
validation_threads = []
for endpoint in validation_endpoints:
# create a holder for things we need back from the threads
response = {'response': None}
exception = {'exception': None}
validation_threads.append({
'thread':
threading.Thread(
target=_get_validations_for_component,
kwargs={
'url': endpoint['url'],
'design_reference': design_ref,
'response': response,
'exception': exception,
'context_marker': ctx.external_marker,
'thread_name': endpoint['name'],
'log_extra': {
'req_id': ctx.request_id,
'external_ctx': ctx.external_marker,
'user': ctx.user
}
}),
'name': endpoint['name'],
'url': endpoint['url'],
'response': response,
'exception': exception
})
return validation_threads
def _get_validations_for_component(url, design_reference, response,
                                   exception, context_marker, thread_name,
                                   **kwargs):
    """Thread target: POST the design reference to one component's
    validation endpoint and record the outcome.

    Results are communicated through the mutable ``response`` and
    ``exception`` holder dicts rather than a return value, because this
    function runs on a worker thread.
    """
    # Invoke the POST for validation
    try:
        headers = {
            'X-Context-Marker': context_marker,
            'X-Auth-Token': get_token(),
            'content-type': 'application/json'
        }
        http_resp = requests.post(
            url,
            headers=headers,
            data=design_reference,
            timeout=(
                CONF.requests_config.validation_connect_timeout,
                CONF.requests_config.validation_read_timeout))
        # 400 response is "valid" failure to validate. > 400 is a problem.
        if http_resp.status_code > 400:
            http_resp.raise_for_status()
        response_dict = http_resp.json()
        response['response'] = response_dict
    except Exception as ex:
        # catch anything exceptional as a failure to run validations
        unable_str = '{} unable to validate configdocs'.format(thread_name)
        LOG.error("%s. Exception follows.", unable_str)
        LOG.error(str(ex))
        # Craft a SimpleMessage-shaped failure response so the caller can
        # fold this into the overall validation result like any other.
        response['response'] = {
            'details': {
                'messageList': [{
                    'message': unable_str,
                    'kind': 'SimpleMessage',
                    'error': True
                }, {
                    'message': str(ex),
                    'kind': 'SimpleMessage',
                    'error': True
                }]
            }
        }
        exception['exception'] = ex
def _generate_dh_val_msg(msg, dh_result_name):
    """Map a Deckhand validation error to a ValidationMessage.

    ``dh_result_name`` is used when the message has no name of its own.
    Deckhand may provide: 'validation_schema', 'schema_path', 'name',
    'schema', 'path', 'error_section', 'message'. Missing 'diagnostic'
    and 'documents' entries are filled in on the msg dict in place.
    """
    unspecified = 'not specified'
    if 'diagnostic' not in msg:
        # Fold path, error_section, validation_schema and schema_path
        # into a single human-readable diagnostic string.
        msg['diagnostic'] = 'Section: {} at {} (schema {} at {})'.format(
            msg.get('error_section', unspecified),
            msg.get('path', unspecified),
            msg.get('validation_schema', unspecified),
            msg.get('schema_path', unspecified))
    if 'documents' not in msg:
        msg['documents'] = [{
            'name': msg.get('name', unspecified),
            'schema': msg.get('schema', unspecified),
        }]
    return _generate_validation_message(
        msg, name=dh_result_name, error=True, level='Error',
        source='Deckhand')
def _generate_validation_message(msg, **kwargs):
# Special note about kwargs: the values provided via kwargs are used
# as defaults, not overrides. Values in the msg will take precedence.
#
# Using a compatible message, transform it into a ValidationMessage.
# By combining it with the default values passed via kwargs. The values
# used from kwargs match the fields listed below.
fields = ['message', 'error', 'name', 'documents', 'level', 'diagnostic',
'source']
if 'documents' not in kwargs:
kwargs['documents'] = []
valmsg = {}
for key in fields:
valmsg[key] = msg.get(key, kwargs.get(key, None))
valmsg['kind'] = 'ValidationMessage'
valmsg['level'] = (
valmsg.get('level') or _error_to_level(
valmsg.get('error'))
)
return valmsg
def _error_to_level(error):
"""Convert a boolean error field to 'Error' or 'Info' """
if error:
return 'Error'
else:
return 'Info'
def _format_validations_to_status(val_msgs, error_count):
    """Formulate a Status response dict from validation messages.

    :param val_msgs: list of ValidationMessage dicts
    :param error_count: count of error-level messages
    Returns a "Status" body; HTTP 400 when any errors were recorded,
    HTTP 200 otherwise.
    """
    if error_count > 0:
        status = 'Failure'
        message = 'Validations failed'
        code = falcon.HTTP_400
    else:
        status = 'Success'
        message = 'Validations succeeded'
        code = falcon.HTTP_200
    return {
        "kind": "Status",
        "apiVersion": "v1.0",
        "metadata": {},
        "status": status,
        "message": message,
        "reason": "Validation",
        "details": {
            "errorCount": error_count,
            "messageList": val_msgs,
        },
        "code": code
    }

View File

@ -11,9 +11,8 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Enacapsulates a deckhand API client
"""
"""Enacapsulates a deckhand API client"""
# TODO(bryan-strassner) replace this functionality with a real Deckhand client
import enum
import logging

View File

@ -0,0 +1,49 @@
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The design reference is a commonly used object across most of the undercloud
platform, particularly for use during validations of documents by each
component.
"""
import json
from shipyard_airflow.control.helpers.deckhand_client import (
DeckhandClient, DeckhandPaths
)
class DesignRefHelper:
    """Builds design references targeting Deckhand rendered documents.

    The design reference is a commonly used object across the undercloud
    platform components, particularly during document validation.
    """

    def __init__(self):
        # Path template for Deckhand's rendered-revision-documents
        # endpoint; the revision id is substituted in at lookup time.
        self._path = DeckhandClient.get_path(
            DeckhandPaths.RENDERED_REVISION_DOCS
        )

    def get_design_reference(self, revision_id):
        """Return the design reference serialized as a JSON string.

        :param revision_id: the numeric Deckhand revision
        """
        return json.dumps(self.get_design_reference_dict(revision_id))

    def get_design_reference_dict(self, revision_id):
        """Return the Deckhand-specific design reference as a dict.

        :param revision_id: the numeric Deckhand revision
        """
        href = "deckhand+{}".format(self._path.format(revision_id))
        return {
            "rel": "design",
            "href": href,
            "type": "application/x-yaml"
        }

View File

@ -0,0 +1,50 @@
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generates clients and client-like objects and functions"""
from urllib.parse import urlparse
from deckhand.client import client as dh_client
import drydock_provisioner.drydock_client.client as dd_client
import drydock_provisioner.drydock_client.session as dd_session
from shipyard_airflow.control.service_endpoints import Endpoints
from shipyard_airflow.control import service_endpoints as svc_endpoints
#
# Deckhand Client
#
def deckhand_client():
    """Retrieve a Deckhand client

    Builds the client on Shipyard's Keystone session, targeting the
    internal endpoint.
    """
    return dh_client.Client(session=svc_endpoints.get_session(),
                            endpoint_type='internal')
#
# Drydock Client
#
def _auth_gen():
    # Auth-header generator for the Drydock session; fetches a fresh
    # Keystone token each time it is invoked.
    return [('X-Auth-Token', svc_endpoints.get_token())]
def drydock_client():
    """Retrieve a Drydock client

    Parses the Drydock service endpoint from the service catalog and
    builds a DrydockClient using a token-based auth generator.
    """
    # Setup the drydock session
    endpoint = svc_endpoints.get_endpoint(Endpoints.DRYDOCK)
    dd_url = urlparse(endpoint)
    session = dd_session.DrydockSession(dd_url.hostname,
                                        port=dd_url.port,
                                        auth_gen=_auth_gen)
    return dd_client.DrydockClient(session)

View File

@ -104,6 +104,11 @@ def get_token():
return _get_ks_session().get_auth_headers().get('X-Auth-Token')
def get_session():
    """Return the Keystone Session for Shipyard

    Exposes the Keystone session established by this module for use when
    constructing clients for other UCP components.
    """
    return _get_ks_session()
def _get_ks_session():
# Establishes a keystone session
keystone_auth = {}

View File

@ -0,0 +1,58 @@
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes and functions to support Shipyard specific document validation
Only validates that which is not already covered by schema validation, which
is performed by Deckhand on Shipyard's behalf.
"""
import logging
from shipyard_airflow.common.document_validators.document_validator import (
DocumentValidator
)
from .validate_deployment_strategy import ValidateDeploymentStrategy
LOG = logging.getLogger(__name__)
class ValidateDeploymentConfiguration(DocumentValidator):
    """Validates the DeploymentConfiguration.

    Schema validation is performed by Deckhand on Shipyard's behalf;
    this validator covers only Shipyard-specific semantics and chains
    the DeploymentStrategy validation when a strategy is referenced.
    """
    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    schema = "shipyard/DeploymentConfiguration/v1"
    missing_severity = "Error"

    def do_validate(self):
        """Check for a referenced deployment strategy document.

        When the deployment configuration names a deployment strategy,
        queue it for validation; otherwise record an informational (not
        error) message that the all-at-once strategy is assumed.
        """
        try:
            dep_strat_nm = (
                self.doc_dict['physical_provisioner']['deployment_strategy']
            )
            self.add_triggered_validation(ValidateDeploymentStrategy,
                                          dep_strat_nm)
        except KeyError:
            # A missing key anywhere along the path means no strategy was
            # specified; that is valid and defaults to all-at-once.
            # Fixed typo in the user-facing message: "Beacuse" -> "Because".
            self.val_msg_list.append(self.val_msg(
                name="DeploymentStrategyNotSpecified",
                error=False,
                level="Info",
                message=("A deployment strategy document was not specified "
                         "in the deployment configuration. Because of this, "
                         "the strategy used will be all-at-once.")
            ))
            LOG.info("No deployment strategy document specified, "
                     "'all-at-once' is assumed, and deployment strategy will "
                     "not be further validated")
        self.error_status = False

View File

@ -0,0 +1,102 @@
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes and functions to support Shipyard specific document validation
Only validates that which is not already covered by schema validation, which
is performed by Deckhand on Shipyard's behalf.
"""
import logging
from shipyard_airflow.common.deployment_group.deployment_group_manager import (
DeploymentGroupManager
)
from shipyard_airflow.common.deployment_group.errors import (
DeploymentGroupCycleError,
InvalidDeploymentGroupError,
InvalidDeploymentGroupNodeLookupError
)
from shipyard_airflow.common.deployment_group.node_lookup import NodeLookup
from shipyard_airflow.common.document_validators.document_validator import (
DocumentValidator
)
from shipyard_airflow.control import service_clients
from shipyard_airflow.control.helpers.design_reference_helper import (
DesignRefHelper
)
LOG = logging.getLogger(__name__)
def _get_node_lookup(revision_id):
    """Return a Drydock-backed node lookup function for the revision.

    :param revision_id: the numeric Deckhand revision used to build the
        design reference handed to Drydock
    """
    design_ref = DesignRefHelper().get_design_reference_dict(revision_id)
    lookup = NodeLookup(service_clients.drydock_client(), design_ref)
    return lookup.lookup
class ValidateDeploymentStrategy(DocumentValidator):
    """Validates the deployment strategy"""
    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    schema = "shipyard/DeploymentStrategy/v1"
    missing_severity = "Error"

    def _record_failure(self, name, message, diagnostic):
        # Append an error-level ValidationMessage and flag this
        # validator as failed.
        self.val_msg_list.append(self.val_msg(
            name=name,
            error=True,
            level="Error",
            message=message,
            diagnostic=diagnostic
        ))
        self.error_status = True

    def do_validate(self):
        """Validate the groups by constructing a DeploymentGroupManager.

        Construction performs the group/cycle/node-lookup checks; each
        known failure mode maps to a specific validation message.
        """
        groups = self.doc_dict['groups']
        try:
            DeploymentGroupManager(groups, _get_node_lookup(self.revision))
        except DeploymentGroupCycleError as dgce:
            self._record_failure(
                dgce.__class__.__name__,
                ("The deployment groups specified in the Deployment "
                 "Strategy have groups that form a "
                 "cycle."),
                str(dgce))
        except InvalidDeploymentGroupError as idge:
            self._record_failure(
                idge.__class__.__name__,
                ("A deployment group specified in the Deployment "
                 "Strategy is invalid"),
                str(idge))
        except InvalidDeploymentGroupNodeLookupError as idgnle:
            self._record_failure(
                idgnle.__class__.__name__,
                ("Shipyard does not have a valid node lookup to "
                 "validate the deployment strategy"),
                str(idgnle))
        except Exception as ex:
            # all other exceptions are an error
            self._record_failure(
                "DocumentValidationProcessingError",
                ("Shipyard has encountered an unexpected error "
                 "while processing document validations"),
                str(ex))

View File

@ -8,7 +8,7 @@ apache-airflow[crypto,celery,postgres,hive,hdfs,jdbc]==1.9.0
# Testing - Client libraries for UCP components
git+https://github.com/att-comdev/deckhand.git@3cdf3d2d896d43c6e3bc26170522c3eee0d7158f#egg=deckhand
git+https://github.com/att-comdev/drydock.git@42aa3c486ee4c495c2377d31481df5ab681f84f2#egg=drydock_provisioner
git+https://github.com/att-comdev/drydock.git@8af92eaf29ca0dd6a129748c132ea7f6593eae83#egg=drydock_provisioner
git+https://github.com/att-comdev/armada.git@7a2ba22ab12a3f1f180b6af4085972ba44853377#egg=armada
# TODO(bryan-strassner) Pin to version for airflow when added to the
# requirements.txt in the airflow images directory

View File

@ -1,4 +1,4 @@
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@ -67,7 +67,7 @@ def node_lookup(selectors):
nl_list.append(get_nodes(_RACK_NAMES, selector.rack_names))
nodes = set.intersection(*nl_list)
nodes_full.extend(nodes)
return nodes_full
return set(nodes_full)
def crummy_node_lookup(selectors):
@ -76,8 +76,8 @@ def crummy_node_lookup(selectors):
def broken_node_lookup_1(selectors):
"""Doesn't return a list"""
return {"this": "that"}
"""Doesn't return an iterable """
return True
def broken_node_lookup_2(selectors):

View File

@ -1,4 +1,4 @@
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@ -16,11 +16,11 @@ import pytest
import yaml
from shipyard_airflow.common.deployment_group.deployment_group import (
DeploymentGroup, Stage
DeploymentGroup, Stage, check_label_format
)
from shipyard_airflow.common.deployment_group.errors import (
DeploymentGroupStageError, InvalidDeploymentGroupError,
InvalidDeploymentGroupNodeLookupError
DeploymentGroupLabelFormatError, DeploymentGroupStageError,
InvalidDeploymentGroupError, InvalidDeploymentGroupNodeLookupError
)
from .node_lookup_stubs import node_lookup
@ -170,6 +170,10 @@ class TestDeploymentGroup:
dg = DeploymentGroup(yaml.safe_load(_GROUP_YAML_MULTI_SELECTOR),
node_lookup)
assert set(dg.full_nodes) == {'node7', 'node8', 'node9', 'node11'}
assert dg.selectors[0].get_node_labels_as_dict() == {}
assert dg.selectors[1].get_node_labels_as_dict() == {
'label1': 'label1'
}
def test_basic_class_missing_req(self):
with pytest.raises(InvalidDeploymentGroupError):
@ -221,23 +225,23 @@ class TestDeploymentGroup:
def test_selector_excludes_all(self):
dg = DeploymentGroup(yaml.safe_load(_GROUP_YAML_EXCLUDES_ALL),
node_lookup)
assert dg.full_nodes == []
assert len(dg.full_nodes) == 0
def test_handle_none_node_lookup(self):
dg = DeploymentGroup(yaml.safe_load(_GROUP_YAML_1),
crummy_node_lookup)
assert dg.full_nodes == []
assert len(dg.full_nodes) == 0
def test_handle_broken_node_lookup(self):
with pytest.raises(InvalidDeploymentGroupNodeLookupError) as err:
dg = DeploymentGroup(yaml.safe_load(_GROUP_YAML_1),
broken_node_lookup_1)
assert str(err).endswith("iterable")
DeploymentGroup(yaml.safe_load(_GROUP_YAML_1),
broken_node_lookup_1)
assert str(err).endswith("is not an iterable")
with pytest.raises(InvalidDeploymentGroupNodeLookupError) as err:
dg = DeploymentGroup(yaml.safe_load(_GROUP_YAML_1),
broken_node_lookup_2)
assert str(err).endswith("but not all strings")
DeploymentGroup(yaml.safe_load(_GROUP_YAML_1),
broken_node_lookup_2)
assert str(err).endswith("is not all strings")
def test_set_stage(self):
dg = DeploymentGroup(yaml.safe_load(_GROUP_YAML_ALL_SELECTOR),
@ -266,3 +270,31 @@ class TestStage:
with pytest.raises(DeploymentGroupStageError) as de:
Stage.previous_stage('Chickens and Turkeys')
assert str(de).endswith("Chickens and Turkeys is not a valid stage")
class TestCheckLabelFormat:
    """Tests for the check_label_format validation helper."""
    def test_check_label_format(self):
        # Rejected: no colon separator at all.
        with pytest.raises(DeploymentGroupLabelFormatError) as dglfe:
            check_label_format("thisthat")
        assert "thisthat is formatted incorrectly. One" in str(dglfe.value)

        # Rejected: empty string.
        with pytest.raises(DeploymentGroupLabelFormatError) as dglfe:
            check_label_format("")
        assert " is formatted incorrectly. One" in str(dglfe.value)

        # Rejected: too many colons.
        with pytest.raises(DeploymentGroupLabelFormatError) as dglfe:
            check_label_format(":::")
        assert "::: is formatted incorrectly. One" in str(dglfe.value)

        with pytest.raises(DeploymentGroupLabelFormatError) as dglfe:
            check_label_format("this:that:another")
        assert ("this:that:another is formatted incorrectly. "
                "One") in str(dglfe.value)

        # Rejected: value side is blank after stripping.
        with pytest.raises(DeploymentGroupLabelFormatError) as dglfe:
            check_label_format("this: ")
        assert "this: is formatted incorrectly. The" in str(dglfe.value)

        # no exceptions - these are good
        check_label_format("this:that")
        check_label_format(" this : that ")

View File

@ -216,20 +216,7 @@ class TestDeploymentGroupManager:
def test_get_group_failures_for_stage(self):
dgm = DeploymentGroupManager(yaml.safe_load(_GROUPS_YAML), node_lookup)
dgm._all_nodes = {
'node1': Stage.DEPLOYED,
'node2': Stage.DEPLOYED,
'node3': Stage.DEPLOYED,
'node4': Stage.DEPLOYED,
'node5': Stage.DEPLOYED,
'node6': Stage.DEPLOYED,
'node7': Stage.DEPLOYED,
'node8': Stage.DEPLOYED,
'node9': Stage.DEPLOYED,
'node10': Stage.DEPLOYED,
'node11': Stage.DEPLOYED,
'node12': Stage.DEPLOYED,
}
dgm._all_nodes = {'node%d' % x: Stage.DEPLOYED for x in range(1, 13)}
for group_name in dgm._all_groups:
assert not dgm.get_group_failures_for_stage(group_name,
@ -237,20 +224,7 @@ class TestDeploymentGroupManager:
assert not dgm.get_group_failures_for_stage(group_name,
Stage.PREPARED)
dgm._all_nodes = {
'node1': Stage.PREPARED,
'node2': Stage.PREPARED,
'node3': Stage.PREPARED,
'node4': Stage.PREPARED,
'node5': Stage.PREPARED,
'node6': Stage.PREPARED,
'node7': Stage.PREPARED,
'node8': Stage.PREPARED,
'node9': Stage.PREPARED,
'node10': Stage.PREPARED,
'node11': Stage.PREPARED,
'node12': Stage.PREPARED,
}
dgm._all_nodes = {'node%d' % x: Stage.PREPARED for x in range(1, 13)}
for group_name in dgm._all_groups:
assert not dgm.get_group_failures_for_stage(group_name,

View File

@ -0,0 +1,151 @@
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the default node_lookup provided with the deployment group
functionality.
"""
import mock
import pytest
from shipyard_airflow.common.deployment_group.deployment_group import (
GroupNodeSelector
)
from shipyard_airflow.common.deployment_group.errors import (
InvalidDeploymentGroupNodeLookupError
)
from shipyard_airflow.common.deployment_group.node_lookup import (
NodeLookup, _generate_node_filter, _validate_selectors
)
class TestNodeLookup:
def test_validate_selectors(self):
    """Tests the _validate_selectors function"""
    # Valid inputs must not raise. The previous bare `except: assert
    # False` swallowed the real exception (including SystemExit and
    # KeyboardInterrupt) and reported a bare failure with no
    # diagnostics; calling the function directly lets pytest report
    # any unexpected exception with its traceback.
    _validate_selectors([GroupNodeSelector({})])
    _validate_selectors([])

    # Invalid: not an iterable of selectors at all.
    with pytest.raises(InvalidDeploymentGroupNodeLookupError) as idgnle:
        _validate_selectors(None)
    assert "iterable of GroupNodeSelectors" in str(idgnle.value)

    # Invalid: elements that are not GroupNodeSelector instances.
    with pytest.raises(InvalidDeploymentGroupNodeLookupError) as idgnle:
        _validate_selectors(["bad!"])
    assert "all input elements in the selectors" in str(idgnle.value)

    with pytest.raises(InvalidDeploymentGroupNodeLookupError) as idgnle:
        _validate_selectors(["bad!", "also bad!"])
    assert "all input elements in the selectors" in str(idgnle.value)

    # Invalid: even one bad element among good ones is rejected.
    with pytest.raises(InvalidDeploymentGroupNodeLookupError) as idgnle:
        _validate_selectors([GroupNodeSelector({}), "bad!"])
    assert "all input elements in the selectors" in str(idgnle.value)
def test_generate_node_filter(self):
"""Tests the _generate_node_filter function"""
sel = GroupNodeSelector({
'node_names': [],
'node_labels': ['label1:label1'],
'node_tags': ['tag1', 'tag2'],
'rack_names': ['rack3', 'rack1'],
})
nf = _generate_node_filter([sel])
assert nf == {
'filter_set': [{
'filter_type': 'intersection',
'node_names': [],
'node_tags': ['tag1', 'tag2'],
'rack_names': ['rack3', 'rack1'],
'node_labels': {'label1': 'label1'}}
],
'filter_set_type': 'union'
}
sel2 = GroupNodeSelector({
'node_names': ['node1', 'node2', 'node3', 'node4', 'node5'],
'node_labels': ['label1:label1', 'label2:label2'],
'node_tags': ['tag1', 'tag2'],
'rack_names': ['rack3', 'rack1'],
})
nf = _generate_node_filter([sel, sel2])
assert nf == {
'filter_set': [
{
'filter_type': 'intersection',
'node_names': [],
'node_tags': ['tag1', 'tag2'],
'rack_names': ['rack3', 'rack1'],
'node_labels': {'label1': 'label1'}
},
{
'filter_type': 'intersection',
'node_names': ['node1', 'node2', 'node3', 'node4',
'node5'],
'node_tags': ['tag1', 'tag2'],
'rack_names': ['rack3', 'rack1'],
'node_labels': {'label1': 'label1', 'label2': 'label2'}
}
],
'filter_set_type': 'union'
}
sel3 = GroupNodeSelector({})
sel4 = GroupNodeSelector({
'node_names': [],
'node_labels': [],
'node_tags': [],
'rack_names': [],
})
nf = _generate_node_filter([sel, sel3, sel4])
assert nf == {
'filter_set': [{
'filter_type': 'intersection',
'node_names': [],
'node_tags': ['tag1', 'tag2'],
'rack_names': ['rack3', 'rack1'],
'node_labels': {'label1': 'label1'}}
],
'filter_set_type': 'union'
}
nf = _generate_node_filter([sel3, sel4])
assert nf is None
@mock.patch('shipyard_airflow.common.deployment_group.node_lookup'
'._get_nodes_for_filter', return_value=['node1', 'node2'])
def test_NodeLookup_lookup(self, *args):
"""Test the functionality of the setup and lookup functions"""
nl = NodeLookup(mock.MagicMock(), {"design": "ref"})
assert nl.design_ref == {"design": "ref"}
assert nl.drydock_client
sel = GroupNodeSelector({
'node_names': [],
'node_labels': ['label1:label1'],
'node_tags': ['tag1', 'tag2'],
'rack_names': ['rack3', 'rack1'],
})
resp = nl.lookup([sel])
assert resp == ['node1', 'node2']
def test_NodeLookup_lookup_missing_design_ref(self):
"""Test the functionality of the setup and lookup functions"""
with pytest.raises(InvalidDeploymentGroupNodeLookupError) as idgnle:
NodeLookup(mock.MagicMock(), {})
assert 'An incomplete design ref' in str(idgnle.value)

View File

@ -0,0 +1,283 @@
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the DocumentValidationManager"""
import mock
from mock import MagicMock
import pytest
from shipyard_airflow.common.document_validators.document_validator import (
DocumentValidator
)
from shipyard_airflow.common.document_validators.document_validator_manager \
import DocumentValidationManager
from shipyard_airflow.common.document_validators.errors import (
DeckhandClientRequiredError
)
def get_doc_returner():
    """Build a stand-in for Deckhand's revision document lookup.

    The returned callable mimics ``revisions.documents``: any document
    name containing 'document-placeholder' yields a single shared
    placeholder document; revision 99 (or any other name) yields nothing.
    """
    stub_doc = MagicMock()
    stub_doc.data = {"nothing": "here"}

    def doc_returner(revision_id, rendered, **filters):
        # Revision 99 simulates a revision with no documents at all.
        if revision_id == 99:
            return []
        name = filters['metadata.name']
        return [stub_doc] if 'document-placeholder' in name else []
    return doc_returner
def _dh_doc_client():
    """Create a mock Deckhand client whose ``revisions.documents``
    lookup is wired to the placeholder document returner."""
    client = MagicMock()
    client.revisions.documents = get_doc_returner()
    return client
class ValidatorA(DocumentValidator):
    """Test validator that always fails with one error-level message."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    schema = "schema/Schema/v1"
    missing_severity = "Error"

    def do_validate(self):
        # Unconditionally mark this validation as errored and record a
        # single error-level message.
        self.error_status = True
        self.val_msg_list.append(self.val_msg(
            name="DeploymentGroupCycle",
            error=True,
            level="Error",
            message="Message Here",
            diagnostic="diags"
        ))
class ValidatorB(DocumentValidator):
    """Test validator that succeeds without producing any messages."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    schema = "schema/Schema/v1"
    missing_severity = "Error"

    def do_validate(self):
        pass
class ValidatorB2(DocumentValidator):
    """Succeeding validator whose missing document is only a Warning."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    schema = "schema/Schema/v1"
    missing_severity = "Warning"

    def do_validate(self):
        pass
class ValidatorB3(DocumentValidator):
    """Succeeding validator whose missing document is only Info level."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    schema = "schema/Schema/v1"
    missing_severity = "Info"

    def do_validate(self):
        pass
class ValidatorC(DocumentValidator):
    """Test validator that fans out to chained validations.

    Triggers four ValidatorB runs plus one ValidatorC2, which fans out
    further; the whole chain is expected to succeed (26 total runs per
    TestValidatorManager.test_chained_success).
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    schema = "schema/Schema/v1"
    missing_severity = "Error"

    def do_validate(self):
        # all should succeed.
        self.add_triggered_validation(ValidatorB,
                                      'document-placeholder-A')
        self.add_triggered_validation(ValidatorB,
                                      'document-placeholder-B')
        self.add_triggered_validation(ValidatorB,
                                      'document-placeholder-C')
        self.add_triggered_validation(ValidatorB,
                                      'document-placeholder-D')
        self.add_triggered_validation(ValidatorC2,
                                      'document-placeholder-E')
class ValidatorC2(DocumentValidator):
    """Middle link of the validation chain; triggers four ValidatorC3s."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    schema = "schema/Schema/v1"
    missing_severity = "Error"

    def do_validate(self):
        # all should succeed.
        self.add_triggered_validation(ValidatorC3,
                                      'document-placeholder-F')
        self.add_triggered_validation(ValidatorC3,
                                      'document-placeholder-G')
        self.add_triggered_validation(ValidatorC3,
                                      'document-placeholder-H')
        self.add_triggered_validation(ValidatorC3,
                                      'document-placeholder-I')
class ValidatorC3(DocumentValidator):
    """Last fan-out link of the chain; triggers four ValidatorB runs."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    schema = "schema/Schema/v1"
    missing_severity = "Error"

    def do_validate(self):
        # all should succeed.
        self.add_triggered_validation(ValidatorB,
                                      'document-placeholder-J')
        self.add_triggered_validation(ValidatorB,
                                      'document-placeholder-K')
        self.add_triggered_validation(ValidatorB,
                                      'document-placeholder-L')
        self.add_triggered_validation(ValidatorB,
                                      'document-placeholder-M')
class ValidatorD(DocumentValidator):
    """Fans out to three validations, one of which (ValidatorA) errors."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    schema = "schema/Schema/v1"
    missing_severity = "Error"

    def do_validate(self):
        # one should have errors
        self.add_triggered_validation(ValidatorB,
                                      'document-placeholder-A')
        self.add_triggered_validation(ValidatorB,
                                      'document-placeholder-B')
        self.add_triggered_validation(ValidatorA,
                                      'document-placeholder-C')
class ValidatorBadMissingSeverity(DocumentValidator):
    """Validator with a nonsense missing_severity value; used to check
    that an unrecognized severity is treated as an Error."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    schema = "schema/Schema/v1"
    missing_severity = "Pancake Syrup"

    def do_validate(self):
        pass
class TestValidatorManager:
    """Tests for the DocumentValidationManager orchestration."""

    @mock.patch("shipyard_airflow.control.service_clients.deckhand_client",
                return_value=_dh_doc_client())
    def test_simple_success(self, fake_client):
        """A single succeeding validator runs once with no errors."""
        validations = [(ValidatorB, 'document-placeholder01')]
        dvm = DocumentValidationManager(fake_client(), 1, validations)
        dvm.validate()
        assert not dvm.errored
        assert dvm.validations_run == 1

    @mock.patch("shipyard_airflow.control.service_clients.deckhand_client",
                return_value=_dh_doc_client())
    def test_simple_failure(self, fake_client):
        """A single failing validator marks the manager as errored."""
        validations = [(ValidatorA, 'document-placeholder02')]
        dvm = DocumentValidationManager(fake_client(), 1, validations)
        dvm.validate()
        assert dvm.errored
        assert dvm.validations_run == 1

    @mock.patch("shipyard_airflow.control.service_clients.deckhand_client",
                return_value=_dh_doc_client())
    def test_chained_success(self, fake_client):
        """Chained (triggered) validations all run: C fans out to 26 runs."""
        validations = [(ValidatorC, 'document-placeholder03')]
        dvm = DocumentValidationManager(fake_client(), 1, validations)
        dvm.validate()
        assert not dvm.errored
        assert dvm.validations_run == 26

    @mock.patch("shipyard_airflow.control.service_clients.deckhand_client",
                return_value=_dh_doc_client())
    def test_chained_failure(self, fake_client):
        """A failure in one triggered validation errors the whole run."""
        validations = [(ValidatorD, 'document-placeholder04')]
        dvm = DocumentValidationManager(fake_client(), 1, validations)
        dvm.validate()
        assert dvm.errored
        assert dvm.validations_run == 4

    @mock.patch("shipyard_airflow.control.service_clients.deckhand_client",
                return_value=_dh_doc_client())
    def test_missing_doc_failure_warn(self, fake_client):
        """A missing doc with Warning severity reports a non-error msg."""
        validations = [(ValidatorB2, 'missing-error')]
        dvm = DocumentValidationManager(fake_client(), 1, validations)
        results = dvm.validate()
        assert dvm.errored
        assert len(results) == 1
        for r in results:
            assert r['level'] == "Warning"
            assert not r['error']

    @mock.patch("shipyard_airflow.control.service_clients.deckhand_client",
                return_value=_dh_doc_client())
    def test_missing_doc_failure_info(self, fake_client):
        """A missing doc with Info severity reports a non-error msg."""
        validations = [(ValidatorB3, 'missing-error')]
        dvm = DocumentValidationManager(fake_client(), 1, validations)
        results = dvm.validate()
        assert dvm.errored
        assert len(results) == 1
        for r in results:
            assert r['level'] == "Info"
            assert not r['error']

    @mock.patch("shipyard_airflow.control.service_clients.deckhand_client",
                return_value=_dh_doc_client())
    def test_missing_doc_failure(self, fake_client):
        """A missing doc with Error severity reports an error message."""
        validations = [(ValidatorB, 'missing-error')]
        dvm = DocumentValidationManager(fake_client(), 1, validations)
        results = dvm.validate()
        assert dvm.errored
        assert len(results) == 1
        for r in results:
            assert r['level'] == "Error"
            assert r['error']

    @mock.patch("shipyard_airflow.control.service_clients.deckhand_client",
                return_value=_dh_doc_client())
    def test_missing_doc_bad_severity(self, fake_client):
        """An unrecognized missing_severity falls back to Error."""
        validations = [(ValidatorBadMissingSeverity, 'missing-error')]
        dvm = DocumentValidationManager(fake_client(), 1, validations)
        results = dvm.validate()
        assert dvm.errored
        assert len(results) == 1
        for r in results:
            assert r['level'] == "Error"
            assert r['error']

    def test_missing_dh_client(self):
        """Constructing a validator without a Deckhand client raises."""
        with pytest.raises(DeckhandClientRequiredError):
            ValidatorB(deckhand_client=None, revision=1, doc_name="no")

    def test_val_msg_defaults(self):
        """val_msg fills in default level, diagnostic, and documents."""
        vb = ValidatorB(deckhand_client=MagicMock(), revision=1, doc_name="no")
        msg = vb.val_msg("hi", "nm")
        assert msg['error']
        assert msg['level'] == "Error"
        assert msg['diagnostic'] == "Message generated by Shipyard."
        assert msg['documents'] == [{"name": "no",
                                     "schema": "schema/Schema/v1"}]

View File

@ -14,7 +14,7 @@
from mock import patch
import pytest
from shipyard_airflow.control.af_monitoring.workflow_helper import (
from shipyard_airflow.control.helpers.workflow_helper import (
WorkflowHelper)
from shipyard_airflow.control.af_monitoring.workflows_api import (
WorkflowResource, WorkflowIdResource)

View File

@ -15,7 +15,7 @@ from datetime import datetime
import arrow
from shipyard_airflow.control.af_monitoring.workflow_helper import (
from shipyard_airflow.control.helpers.workflow_helper import (
WorkflowHelper
)

View File

@ -13,7 +13,7 @@
# limitations under the License.
""" Tests for the action_helper.py module """
from shipyard_airflow.control.action import action_helper
from shipyard_airflow.control.helpers import action_helper
def test_determine_lifecycle():

View File

@ -0,0 +1,171 @@
# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the action validators run when an action is created"""
import mock
from mock import MagicMock
import yaml
import pytest
from shipyard_airflow.control.action.action_validators import (
validate_site_action
)
from shipyard_airflow.errors import ApiError
from tests.unit.common.deployment_group.node_lookup_stubs import node_lookup
import tests.unit.common.deployment_group.test_deployment_group_manager as tdgm
def get_doc_returner(style, ds_name):
    """Build a stand-in for Deckhand's revision document lookup.

    :param style: 'cycle' or 'clean' selects which deployment-group
        fixture backs the deployment strategy document; other values
        leave the strategy document's data unset.
    :param ds_name: name recorded as the deployment strategy in the
        deployment-configuration document. The special value 'defaulted'
        clears the physical_provisioner section entirely.

    Fix: removed a leftover debug ``print(dc.__dict__)`` that spammed
    test output on every deployment-configuration lookup.
    """
    strategy = MagicMock()
    if style == 'cycle':
        strategy.data = {"groups": yaml.safe_load(tdgm._CYCLE_GROUPS_YAML)}
    elif style == 'clean':
        strategy.data = {"groups": yaml.safe_load(tdgm._GROUPS_YAML)}

    def doc_returner(revision_id, rendered, **filters):
        # Revision 99 simulates a revision with no documents at all.
        if not revision_id == 99:
            doc = filters['metadata.name']
            if doc == 'deployment-configuration':
                dc = MagicMock()
                dc.data = {
                    "physical_provisioner": {
                        "deployment_strategy": ds_name
                    },
                    "armada": {
                        "manifest": "full-site"
                    }
                }
                # if passed a name of 'defaulted' clear the section
                if ds_name == 'defaulted':
                    dc.data["physical_provisioner"] = None
                return [dc]
            elif doc == 'dep-strat':
                return [strategy]
        return []
    return doc_returner
def fake_dh_doc_client(style, ds_name='dep-strat'):
    """Create a mock Deckhand client whose ``revisions.documents``
    lookup serves the fixture documents for the given style/strategy."""
    client = MagicMock()
    client.revisions.documents = get_doc_returner(style, ds_name)
    return client
class TestActionValidator:
    """Tests for validate_site_action, run when a site action is created.

    Fixes in this revision:
    * In test_validate_site_action_default_dep_strat the keyword
      ``ds_name='defaulted'`` was passed to ``mock.patch`` instead of to
      ``fake_dh_doc_client``; mock.patch silently accepts extra kwargs as
      mock attribute configuration, so the test was exercising the
      'dep-strat' fixture rather than the defaulted one.
    * Bare ``except:`` blocks replaced with ``except Exception as ex`` so
      the real failure is surfaced in the assertion message.
    * Two copy-pasted docstrings corrected to describe the actual test.
    """

    @mock.patch("shipyard_airflow.control.service_clients.deckhand_client",
                return_value=fake_dh_doc_client('clean'))
    @mock.patch("shipyard_airflow.control.validators."
                "validate_deployment_strategy._get_node_lookup",
                return_value=node_lookup)
    def test_validate_site_action(self, *args):
        """Test the function that runs the validator class"""
        try:
            validate_site_action({
                'id': '123',
                'name': 'deploy_site',
                'committed_rev_id': 1
            })
        except Exception as ex:
            # any exception is a failure
            assert False, str(ex)

    @mock.patch("shipyard_airflow.control.service_clients.deckhand_client",
                return_value=fake_dh_doc_client('cycle'))
    @mock.patch("shipyard_airflow.control.validators."
                "validate_deployment_strategy._get_node_lookup",
                return_value=node_lookup)
    def test_validate_site_action_cycle(self, *args):
        """Test the function that runs the validator class with a
        deployment strategy that has a cycle in the groups
        """
        with pytest.raises(ApiError) as apie:
            validate_site_action({
                'id': '123',
                'name': 'deploy_site',
                'committed_rev_id': 1
            })
        assert apie.value.description == 'InvalidConfigurationDocuments'
        assert (
            'The following are involved in a circular dependency:'
        ) in apie.value.error_list[0]['diagnostic']

    @mock.patch("shipyard_airflow.control.service_clients.deckhand_client",
                return_value=fake_dh_doc_client('clean', ds_name='not-there'))
    @mock.patch("shipyard_airflow.control.validators."
                "validate_deployment_strategy._get_node_lookup",
                return_value=node_lookup)
    def test_validate_site_action_missing_dep_strat(self, *args):
        """Test the function that runs the validator class with a missing
        deployment strategy - specified, but not present
        """
        with pytest.raises(ApiError) as apie:
            validate_site_action({
                'id': '123',
                'name': 'deploy_site',
                'committed_rev_id': 1
            })
        assert apie.value.description == 'InvalidConfigurationDocuments'
        assert apie.value.error_list[0]['name'] == 'DocumentNotFoundError'

    @mock.patch("shipyard_airflow.control.service_clients.deckhand_client",
                return_value=fake_dh_doc_client('clean', ds_name='defaulted'))
    @mock.patch("shipyard_airflow.control.validators."
                "validate_deployment_strategy._get_node_lookup",
                return_value=node_lookup)
    def test_validate_site_action_default_dep_strat(self, *args):
        """Test the function that runs the validator class with a defaulted
        deployment strategy (not specified)
        """
        try:
            validate_site_action({
                'id': '123',
                'name': 'deploy_site',
                'committed_rev_id': 1
            })
        except Exception as ex:
            # any exception is a failure
            assert False, str(ex)

    def test_validate_site_missing_rev(self):
        """Test the function that runs the validator class when the action
        carries no committed revision id
        """
        with pytest.raises(ApiError) as apie:
            validate_site_action({
                'id': '123',
                'name': 'deploy_site'
            })
        assert apie.value.description == 'InvalidDocumentRevision'

    @mock.patch("shipyard_airflow.control.service_clients.deckhand_client",
                return_value=fake_dh_doc_client('clean', ds_name='not-there'))
    @mock.patch("shipyard_airflow.control.validators."
                "validate_deployment_strategy._get_node_lookup",
                return_value=node_lookup)
    def test_validate_site_action_continue_failure(self, *args):
        """Test that validation failures are bypassed when the action is
        created with the continue-on-fail parameter
        """
        try:
            validate_site_action({
                'id': '123',
                'name': 'deploy_site',
                'committed_rev_id': 1,
                'parameters': {'continue-on-fail': 'true'}
            })
        except Exception as ex:
            # any exception is a failure
            assert False, str(ex)

View File

@ -26,6 +26,9 @@ import responses
from shipyard_airflow.control.action import actions_api
from shipyard_airflow.control.action.actions_api import ActionsResource
from shipyard_airflow.control.base import ShipyardRequestContext
from shipyard_airflow.control.helpers.configdocs_helper import (
ConfigdocsHelper
)
from shipyard_airflow.errors import ApiError
from shipyard_airflow.policy import ShipyardPolicy
@ -309,51 +312,93 @@ def test_create_action():
CHECK_INTERMEDIATE_COMMIT)
# with invalid input. fail.
try:
action = action_resource.create_action(
action={'name': 'broken',
'parameters': {
'a': 'aaa'
}},
context=context,
allow_intermediate_commits=False)
assert False, 'Should throw an ApiError'
except ApiError:
# expected
pass
with mock.patch('shipyard_airflow.control.action.action_validators'
'.validate_site_action') as validator:
try:
action = action_resource.create_action(
action={'name': 'broken',
'parameters': {
'a': 'aaa'
}},
context=context,
allow_intermediate_commits=False)
assert False, 'Should throw an ApiError'
except ApiError:
# expected
pass
assert not validator.called
# with valid input and some parameters
try:
action = action_resource.create_action(
action={'name': 'deploy_site',
'parameters': {
'a': 'aaa'
}},
context=context,
allow_intermediate_commits=False)
assert action['timestamp']
assert action['id']
assert len(action['id']) == 26
assert action['dag_execution_date'] == '2017-09-06 14:10:08.528402'
assert action['dag_status'] == 'SCHEDULED'
assert action['committed_rev_id'] == 1
except ApiError:
assert False, 'Should not raise an ApiError'
with mock.patch('shipyard_airflow.control.action.action_validators'
'.validate_site_action') as validator:
try:
action = action_resource.create_action(
action={'name': 'deploy_site',
'parameters': {
'a': 'aaa'
}},
context=context,
allow_intermediate_commits=False)
assert action['timestamp']
assert action['id']
assert len(action['id']) == 26
assert action['dag_execution_date'] == '2017-09-06 14:10:08.528402'
assert action['dag_status'] == 'SCHEDULED'
assert action['committed_rev_id'] == 1
except ApiError:
assert False, 'Should not raise an ApiError'
validator.assert_called_once_with(action)
# with valid input and no parameters
try:
action = action_resource.create_action(
action={'name': 'deploy_site'},
context=context,
allow_intermediate_commits=False)
assert action['timestamp']
assert action['id']
assert len(action['id']) == 26
assert action['dag_execution_date'] == '2017-09-06 14:10:08.528402'
assert action['dag_status'] == 'SCHEDULED'
assert action['committed_rev_id'] == 1
except ApiError:
assert False, 'Should not raise an ApiError'
with mock.patch('shipyard_airflow.control.action.action_validators'
'.validate_site_action') as validator:
try:
action = action_resource.create_action(
action={'name': 'deploy_site'},
context=context,
allow_intermediate_commits=False)
assert action['timestamp']
assert action['id']
assert len(action['id']) == 26
assert action['dag_execution_date'] == '2017-09-06 14:10:08.528402'
assert action['dag_status'] == 'SCHEDULED'
assert action['committed_rev_id'] == 1
except ApiError:
assert False, 'Should not raise an ApiError'
validator.assert_called_once_with(action)
def test_create_action_validator_error():
    """An ApiError raised by validate_site_action propagates out of
    create_action unchanged (the action is not scheduled)."""
    action_resource = ActionsResource()
    action_resource.get_all_actions_db = actions_db
    action_resource.get_all_dag_runs_db = dag_runs_db
    action_resource.get_all_tasks_db = tasks_db
    action_resource.invoke_airflow_dag = airflow_stub
    action_resource.insert_action = insert_action_stub
    action_resource.audit_control_command_db = audit_control_command_db
    action_resource.get_committed_design_version = lambda: DESIGN_VERSION
    action_resource.check_intermediate_commit_revision = (
        CHECK_INTERMEDIATE_COMMIT)

    # with valid input and some parameters
    with mock.patch('shipyard_airflow.control.action.action_validators'
                    '.validate_site_action',
                    side_effect=ApiError(title='bad')):
        with pytest.raises(ApiError) as apie:
            action = action_resource.create_action(
                action={'name': 'deploy_site',
                        'parameters': {
                            'a': 'aaa'
                        }},
                context=context,
                allow_intermediate_commits=False)
            # NOTE(review): the asserts below are unreachable because
            # create_action raises before they run; they appear to be
            # copied from the success-path test.
            assert action['timestamp']
            assert action['id']
            assert len(action['id']) == 26
            assert action['dag_execution_date'] == '2017-09-06 14:10:08.528402'
            assert action['dag_status'] == 'SCHEDULED'
            assert action['committed_rev_id'] == 1
    assert apie.value.title == 'bad'
@patch('shipyard_airflow.db.shipyard_db.ShipyardDbAccess.'
@ -484,3 +529,23 @@ def test_exhume_date():
assert (
'Airflow has not responded with parseable output. Shipyard is unable '
'to determine run timestamp') in str(expected_exc)
@mock.patch.object(ConfigdocsHelper, 'get_revision_id', return_value=7)
def test_get_committed_design_version(*args):
    """The committed revision id reported by Deckhand is returned as-is."""
    act_resource = ActionsResource()
    act_resource.configdocs_helper = ConfigdocsHelper(ShipyardRequestContext())
    assert act_resource.get_committed_design_version() == 7
@mock.patch.object(ConfigdocsHelper, 'get_revision_id', return_value=None)
def test_get_committed_design_version_missing(*args):
    """When Deckhand reports no committed revision, a 404 ApiError is
    raised with a descriptive title."""
    with pytest.raises(ApiError) as apie:
        act_resource = ActionsResource()
        act_resource.configdocs_helper = ConfigdocsHelper(
            ShipyardRequestContext()
        )
        act_resource.get_committed_design_version()
    # assumes falcon is imported at module level - TODO confirm
    assert apie.value.status == falcon.HTTP_404
    assert apie.value.title == ('Unable to locate any committed revision in '
                                'Deckhand')

View File

@ -138,7 +138,7 @@ class TestActionsStepsLogsEndpoint():
headers=common.AUTH_HEADERS)
assert result.status_code == 200
@patch('shipyard_airflow.control.action.action_helper.ActionsHelper',
@patch('shipyard_airflow.control.helpers.action_helper.ActionsHelper',
autospec=True)
def test_generate_log_endpoint(self, mock_actions_helper):
"""Tests log endpoint generation"""

View File

@ -23,7 +23,8 @@ from shipyard_airflow.control.configdocs.configdocs_api import (
CommitConfigDocsResource,
ConfigDocsResource
)
from shipyard_airflow.control.configdocs.configdocs_helper import \
from shipyard_airflow.control.helpers import configdocs_helper
from shipyard_airflow.control.helpers.configdocs_helper import \
ConfigdocsHelper
from shipyard_airflow.control.api_lock import ApiLock
from shipyard_airflow.errors import ApiError
@ -111,7 +112,8 @@ class TestConfigDocsResource():
helper = ConfigdocsHelper(CTX)
helper.is_buffer_valid_for_bucket = lambda a, b: True
helper.get_deckhand_validation_status = (
lambda a: ConfigdocsHelper._format_validations_to_status([], 0)
lambda a: configdocs_helper._format_validations_to_status([],
0)
)
cdr.post_collection(helper=helper,
collection_id=collection_id,
@ -135,7 +137,8 @@ class TestConfigDocsResource():
helper = ConfigdocsHelper(CTX)
# not valid for bucket
helper.get_deckhand_validation_status = (
lambda a: ConfigdocsHelper._format_validations_to_status([], 0)
lambda a: configdocs_helper._format_validations_to_status([],
0)
)
cdr.post_collection(helper=helper,
collection_id=collection_id,
@ -157,7 +160,8 @@ class TestConfigDocsResource():
cdr = ConfigDocsResource()
helper = ConfigdocsHelper(CTX)
helper.get_deckhand_validation_status = (
lambda a: ConfigdocsHelper._format_validations_to_status([], 0)
lambda a: configdocs_helper._format_validations_to_status([],
0)
)
with pytest.raises(ApiError) as apie:
cdr.post_collection(helper=helper,
@ -196,7 +200,10 @@ class TestCommitConfigDocsResource():
with patch.object(ConfigdocsHelper, 'tag_buffer') as mock_method:
helper = ConfigdocsHelper(CTX)
helper.is_buffer_empty = lambda: False
helper.get_validations_for_buffer = lambda: {'status': 'Success'}
helper.get_validations_for_revision = lambda x: {
'status': 'Success'
}
helper.get_revision_id = lambda x: 1
commit_resp = ccdr.commit_configdocs(helper, False, False)
mock_method.assert_called_once_with('committed')
@ -206,13 +213,14 @@ class TestCommitConfigDocsResource():
with patch.object(ConfigdocsHelper, 'tag_buffer') as mock_method:
helper = ConfigdocsHelper(CTX)
helper.is_buffer_empty = lambda: False
helper.get_validations_for_buffer = (
lambda: {
helper.get_validations_for_revision = (
lambda x: {
'status': 'Failure',
'code': '400 Bad Request',
'message': 'this is a mock response'
}
)
helper.get_revision_id = lambda x: 1
commit_resp = ccdr.commit_configdocs(helper, False, False)
assert '400' in commit_resp['code']
assert commit_resp['message'] is not None
@ -227,7 +235,10 @@ class TestCommitConfigDocsResource():
with patch.object(ConfigdocsHelper, 'tag_buffer') as mock_method:
helper = ConfigdocsHelper(CTX)
helper.is_buffer_empty = lambda: False
helper.get_validations_for_buffer = lambda: {'status': 'Failure'}
helper.get_validations_for_revision = lambda x: {
'status': 'Failure'
}
helper.get_revision_id = lambda x: 1
commit_resp = ccdr.commit_configdocs(helper, True, False)
mock_method.assert_called_once_with('committed')
@ -244,7 +255,9 @@ class TestCommitConfigDocsResource():
with pytest.raises(ApiError):
helper = ConfigdocsHelper(CTX)
helper.is_buffer_empty = lambda: True
helper.get_validations_for_buffer = lambda: {'status': 'Success'}
helper.get_validations_for_revision = lambda x: {
'status': 'Success'
}
ccdr.commit_configdocs(helper, False, False)
def test_commit_configdocs_dryrun(self):
@ -256,7 +269,10 @@ class TestCommitConfigDocsResource():
with patch.object(ConfigdocsHelper, 'tag_buffer') as mock_method:
helper = ConfigdocsHelper(CTX)
helper.is_buffer_empty = lambda: False
helper.get_validations_for_buffer = lambda: {'status': 'Success'}
helper.get_validations_for_revision = lambda x: {
'status': 'Success'
}
helper.get_revision_id = lambda x: 1
commit_resp = ccdr.commit_configdocs(helper, False, True)
assert '200' in commit_resp['code']

View File

@ -20,10 +20,10 @@ import pytest
from .fake_response import FakeResponse
from shipyard_airflow.control.base import ShipyardRequestContext
from shipyard_airflow.control.configdocs import configdocs_helper
from shipyard_airflow.control.configdocs.configdocs_helper import (
from shipyard_airflow.control.helpers import configdocs_helper
from shipyard_airflow.control.helpers.configdocs_helper import (
BufferMode, ConfigdocsHelper)
from shipyard_airflow.control.configdocs.deckhand_client import (
from shipyard_airflow.control.helpers.deckhand_client import (
DeckhandClient, DeckhandResponseError,
NoRevisionsExistError)
from shipyard_airflow.errors import ApiError, AppError
@ -563,12 +563,12 @@ dh_render_val_list = [{"error": True, "message": "broken!"}]
@mock.patch.object(DeckhandClient, 'get_render_errors',
return_value=dh_render_val_list)
def test_get_validations_for_revision_dh_render(get_endpoint):
def test_get_validations_for_revision_dh_render(dh_client):
"""
Tests the functionality of the get_validations_for_revision method
"""
helper = ConfigdocsHelper(CTX)
hold_ve = helper.__class__._get_validation_endpoints
hold_ve = configdocs_helper._get_validation_endpoints
helper._get_deckhand_validation_errors = lambda revision_id: []
val_status = helper.get_validations_for_revision(3)
err_count = val_status['details']['errorCount']
@ -582,16 +582,21 @@ def test_get_validations_for_revision_dh_render(get_endpoint):
return_value=[])
@mock.patch.object(DeckhandClient, 'get_path',
return_value='path{}')
@mock.patch.object(ConfigdocsHelper, '_get_validation_endpoints',
return_value=val_endpoints)
@mock.patch.object(ConfigdocsHelper, '_get_validations_for_component',
new=_fake_get_validations_for_component)
def test_get_validations_for_revision(p1, p2, p3):
@mock.patch('shipyard_airflow.control.helpers.configdocs_helper'
'._get_validation_endpoints',
return_value=val_endpoints)
@mock.patch('shipyard_airflow.control.helpers.configdocs_helper'
'._get_validations_for_component',
new=_fake_get_validations_for_component)
@mock.patch.object(ConfigdocsHelper, '_get_deckhand_validation_errors',
return_value=[])
@mock.patch.object(ConfigdocsHelper, '_get_shipyard_validations',
return_value=[])
def test_get_validations_for_revision(*args):
"""
Tests the functionality of the get_validations_for_revision method
"""
helper = ConfigdocsHelper(CTX)
helper._get_deckhand_validation_errors = lambda revision_id: []
val_status = helper.get_validations_for_revision(3)
err_count = val_status['details']['errorCount']
err_list_count = len(val_status['details']['messageList'])
@ -619,7 +624,7 @@ def test_generate_validation_message():
'source': None
}
generated = ConfigdocsHelper._generate_validation_message(message)
generated = configdocs_helper._generate_validation_message(message)
assert generated == expected
@ -651,8 +656,8 @@ def test_generate_validation_message_args():
'diagnostic': None
}
generated = ConfigdocsHelper._generate_validation_message(message,
**kwargs)
generated = configdocs_helper._generate_validation_message(message,
**kwargs)
assert generated == expected
@ -685,8 +690,8 @@ def test_generate_validation_message_args_full():
'diagnostic': None
}
generated = ConfigdocsHelper._generate_validation_message(message,
**kwargs)
generated = configdocs_helper._generate_validation_message(message,
**kwargs)
assert generated == expected
@ -715,7 +720,7 @@ def test_generate_dh_val_message():
'diagnostic': 'Section: es at p (schema vs at sp)',
}
generated = ConfigdocsHelper._generate_dh_val_msg(
generated = configdocs_helper._generate_dh_val_msg(
message,
dh_result_name='x'
)

View File

@ -18,7 +18,7 @@ import pytest
from shipyard_airflow.control.base import ShipyardRequestContext
from shipyard_airflow.control.configdocs.rendered_configdocs_api import \
RenderedConfigDocsResource
from shipyard_airflow.control.configdocs.configdocs_helper import \
from shipyard_airflow.control.helpers.configdocs_helper import \
ConfigdocsHelper
from shipyard_airflow.errors import ApiError

View File

@ -69,7 +69,7 @@ def test_execute_exception():
@mock.patch.object(DeploymentConfigurationOperator, 'get_revision_id',
return_value=99)
def test_execute_no_client(p1):
def test_execute_no_client(*args):
# no keystone authtoken present in configuration
dco = DeploymentConfigurationOperator(main_dag_name="main",
shipyard_conf="shipyard.conf",
@ -127,7 +127,7 @@ def get_m_client(data):
@mock.patch.object(DeckhandClientFactory, 'get_client',
return_value=get_m_client('abcdefg'))
def test_get_doc_mock_deckhand(p1):
def test_get_doc_mock_deckhand(*args):
"""Get doc should return a document"""
dco = DeploymentConfigurationOperator(main_dag_name="main",
shipyard_conf="shipyard.conf",
@ -139,7 +139,7 @@ def test_get_doc_mock_deckhand(p1):
@mock.patch.object(DeckhandClientFactory, 'get_client',
return_value=get_m_client(None))
def test_get_doc_mock_deckhand_invalid(p1):
def test_get_doc_mock_deckhand_invalid(*args):
"""Get doc should return a document"""
dco = DeploymentConfigurationOperator(main_dag_name="main",
shipyard_conf="shipyard.conf",