Merge "refactor: Use yaml.add_representer to reduce complexity"

This commit is contained in:
Zuul 2018-07-19 18:39:49 +00:00 committed by Gerrit Code Review
commit 1583b78902
12 changed files with 144 additions and 189 deletions

View File

@@ -13,16 +13,12 @@
# limitations under the License.
import collections
import functools
import inspect
import re
from oslo_serialization import jsonutils as json
from oslo_utils import uuidutils
import six
from deckhand.common import utils
import yaml
_URL_RE = re.compile(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|'
'(?:%[0-9a-fA-F][0-9a-fA-F]))+')
@@ -44,49 +40,17 @@ class DocumentDict(dict):
"""
def __init__(self, *args, **kwargs):
super(DocumentDict, self).__init__(*args, **kwargs)
self._replaced_by = None
@classmethod
def from_dict(self, documents):
"""Convert a list of documents or single document into an instance of
this class.
:param documents: Documents to wrap in this class.
:type documents: list or dict
"""
if isinstance(documents, collections.Iterable):
return [DocumentDict(d) for d in documents]
return DocumentDict(documents)
@property
def meta(self):
return (self.schema, self.layer, self.name)
@property
def is_abstract(self):
return utils.jsonpath_parse(
self, 'metadata.layeringDefinition.abstract') is True
@property
def is_control(self):
return self.metadata.get('schema', '').startswith('metadata/Control')
@property
def schema(self):
schema = self.get('schema')
return schema if schema is not None else ''
@property
def metadata(self):
metadata = self.get('metadata')
return metadata if metadata is not None else {}
return self.get('metadata') or DocumentDict({})
@property
def data(self):
data = self.get('data')
return data if data is not None else {}
return self.get('data')
@data.setter
def data(self, value):
@@ -94,50 +58,69 @@ class DocumentDict(dict):
@property
def name(self):
return utils.jsonpath_parse(self, 'metadata.name')
return self.metadata.get('name') or ''
@property
def layering_definition(self):
return utils.jsonpath_parse(self, 'metadata.layeringDefinition')
def schema(self):
return self.get('schema') or ''
@property
def layer(self):
return utils.jsonpath_parse(
self, 'metadata.layeringDefinition.layer')
return self.layering_definition.get('layer') or ''
@property
def is_abstract(self):
return self.layering_definition.get('abstract') is True
@property
def is_control(self):
return self.schema.startswith('deckhand/Control')
@property
def layering_definition(self):
metadata = self.metadata or {}
return metadata.get('layeringDefinition') or DocumentDict({})
@property
def layeringDefinition(self):
metadata = self.metadata or {}
return metadata.get('layeringDefinition') or DocumentDict({})
@property
def layer_order(self):
return utils.jsonpath_parse(self, 'data.layerOrder')
if not self.schema.startswith('deckhand/LayeringPolicy'):
raise TypeError(
'layer_order only exists for LayeringPolicy documents')
return self.data.get('layerOrder', [])
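# Illustrative sketch, not part of the diff: the schema guard above makes
# layer_order fail fast on anything but the LayeringPolicy document, where
# the old jsonpath lookup quietly returned None. Documents below are
# hypothetical.
#
#   lp = DocumentDict({'schema': 'deckhand/LayeringPolicy/v1',
#                      'data': {'layerOrder': ['global', 'site']}})
#   lp.layer_order   # ['global', 'site']
#   DocumentDict({'schema': 'deckhand/Certificate/v1'}).layer_order
#                    # raises TypeError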
@property
def parent_selector(self):
return utils.jsonpath_parse(
self, 'metadata.layeringDefinition.parentSelector') or {}
return self.layering_definition.get(
'parentSelector') or DocumentDict({})
@property
def labels(self):
return utils.jsonpath_parse(self, 'metadata.labels') or {}
return self.metadata.get('labels') or DocumentDict({})
@property
def substitutions(self):
return utils.jsonpath_parse(self, 'metadata.substitutions') or []
return self.metadata.get('substitutions', [])
@substitutions.setter
def substitutions(self, value):
return utils.jsonpath_replace(self, value, '.metadata.substitutions')
self.metadata['substitutions'] = value
@property
def actions(self):
return utils.jsonpath_parse(
self, 'metadata.layeringDefinition.actions') or []
return self.layering_definition.get('actions', [])
@property
def storage_policy(self):
return utils.jsonpath_parse(self, 'metadata.storagePolicy') or ''
return self.metadata.get('storagePolicy') or ''
@storage_policy.setter
def storage_policy(self, value):
return utils.jsonpath_replace(self, value, '.metadata.storagePolicy')
self.metadata['storagePolicy'] = value
@property
def is_encrypted(self):
@@ -159,36 +142,42 @@ class DocumentDict(dict):
@property
def is_replacement(self):
return utils.jsonpath_parse(self, 'metadata.replacement') is True
return self.metadata.get('replacement') is True
@property
def has_replacement(self):
return isinstance(self._replaced_by, DocumentDict)
return isinstance(self.replaced_by, DocumentDict)
@property
def replaced_by(self):
return self._replaced_by
return getattr(self, '_replaced_by', None)
@replaced_by.setter
def replaced_by(self, other):
self._replaced_by = other
setattr(self, '_replaced_by', other)
def __hash__(self):
return hash(json.dumps(self, sort_keys=True))
@classmethod
def from_list(cls, documents):
"""Convert an iterable of documents into instances of this class.
def wrap_documents(f):
"""Decorator to wrap dictionary-formatted documents in instances of
``DocumentDict``.
"""
@functools.wraps(f)
def wrapper(*args, **kwargs):
fargs = inspect.getargspec(f)
if 'documents' in fargs[0]:
pos = fargs[0].index('documents')
new_args = list(args)
if new_args[pos] and not isinstance(
new_args[pos][0], DocumentDict):
new_args[pos] = DocumentDict.from_dict(args[pos])
return f(*tuple(new_args), **kwargs)
return wrapper
:param documents: Documents to wrap in this class.
:type documents: iterable
"""
if not isinstance(documents, collections.Iterable):
documents = [documents]
return [DocumentDict(d) for d in documents]
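# Usage sketch, illustrative only (payload is hypothetical): the
# controllers further down call from_list to wrap raw request bodies.
#
#   data = [{'schema': 'deckhand/Certificate/v1',
#            'metadata': {'name': 'example'}, 'data': {}}]
#   documents = DocumentDict.from_list(data)
#   assert all(isinstance(d, DocumentDict) for d in documents)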
def document_dict_representer(dumper, data):
return dumper.represent_mapping('tag:yaml.org,2002:map', dict(data))
yaml.add_representer(DocumentDict, document_dict_representer)
# Required for py27 compatibility: yaml.safe_dump/safe_dump_all don't
# work unless the SafeRepresenter add_representer method is called.
safe_representer = yaml.representer.SafeRepresenter
safe_representer.add_representer(DocumentDict, document_dict_representer)
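# A minimal sketch of why both registrations above matter: yaml.dump uses
# Representer while yaml.safe_dump uses SafeRepresenter, and safe_dump
# rejects dict subclasses it has no representer for. The subclass name is
# hypothetical; DocumentDict behaves the same way.
#
#   class Subclassed(dict):
#       pass
#
#   yaml.safe_dump(Subclassed(a=1))   # raises RepresenterError
#
#   yaml.representer.SafeRepresenter.add_representer(
#       Subclassed,
#       lambda dumper, data: dumper.represent_mapping(
#           'tag:yaml.org,2002:map', dict(data)))
#   yaml.safe_dump(Subclassed(a=1))   # 'a: 1\n'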

View File

@@ -13,7 +13,6 @@
# limitations under the License.
import ast
import copy
import re
import string
@@ -171,8 +170,6 @@ def jsonpath_replace(data, value, jsonpath, pattern=None):
# http://admin:super-duper-secret@svc-name:8080/v1
doc['data'].update(replaced_data)
"""
data = copy.copy(data)
value = copy.copy(value)
jsonpath = _normalize_jsonpath(jsonpath)
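# Caller-side sketch, illustrative only: with the defensive copies gone,
# jsonpath_replace may modify the object it is handed, so callers that
# need the original intact copy it themselves (document shown is
# hypothetical).
#
#   import copy
#   original = {'data': {'a': 1}}
#   replaced = jsonpath_replace(copy.deepcopy(original), 2, '.data.a')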

View File

@@ -12,10 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import yaml
import falcon
from oslo_log import log as logging
import yaml
from deckhand import context

View File

@@ -16,6 +16,7 @@ import falcon
from oslo_log import log as logging
from oslo_utils import excutils
from deckhand.common import document as document_wrapper
from deckhand.control import base as api_base
from deckhand.control.views import document as document_view
from deckhand.db.sqlalchemy import api as db_api
@@ -35,7 +36,8 @@ class BucketsResource(api_base.BaseResource):
@policy.authorize('deckhand:create_cleartext_documents')
def on_put(self, req, resp, bucket_name=None):
documents = self.from_yaml(req, expect_list=True, allow_empty=True)
data = self.from_yaml(req, expect_list=True, allow_empty=True)
documents = document_wrapper.DocumentDict.from_list(data)
# NOTE: Must validate documents before doing policy enforcement,
# because we expect certain formatting of the documents while doing

View File

@@ -17,6 +17,7 @@ from oslo_log import log as logging
from oslo_utils import excutils
import six
from deckhand.common import document as document_wrapper
from deckhand.common import utils
from deckhand.common import validation_message as vm
from deckhand.control import base as api_base
@@ -110,10 +111,9 @@ class RenderedDocumentsResource(api_base.BaseResource):
if include_encrypted:
filters['metadata.storagePolicy'].append('encrypted')
documents = self._retrieve_documents_for_rendering(revision_id,
**filters)
data = self._retrieve_documents_for_rendering(revision_id, **filters)
documents = document_wrapper.DocumentDict.from_list(data)
encryption_sources = self._retrieve_encrypted_documents(documents)
try:
# NOTE(fmontei): `validate` is False because documents have already
# been pre-validated during ingestion. Documents are post-validated

View File

@@ -28,7 +28,6 @@ from oslo_serialization import jsonutils as json
import sqlalchemy.orm as sa_orm
from sqlalchemy import text
from deckhand.common import document as document_wrapper
from deckhand.common import utils
from deckhand.db.sqlalchemy import models
from deckhand import errors
@@ -92,6 +91,14 @@ def raw_query(query, **kwargs):
return get_engine().execute(stmt)
def _meta(document):
return (
document['schema'],
document['metadata'].get('layeringDefinition', {}).get('layer'),
document['metadata'].get('name')
)
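# Illustrative usage: _meta rebuilds the (schema, layer, name) identity
# tuple from a plain dict, removing this module's dependency on the
# DocumentDict.meta property (document below is hypothetical).
#
#   doc = {'schema': 'deckhand/Certificate/v1',
#          'metadata': {'name': 'example-cert',
#                       'layeringDefinition': {'layer': 'site'}}}
#   _meta(doc)   # ('deckhand/Certificate/v1', 'site', 'example-cert')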
def require_unique_document_schema(schema=None):
"""Decorator to enforce only one singleton document exists in the system.
@@ -122,12 +129,12 @@ def require_unique_document_schema(schema=None):
existing_documents = revision_documents_get(
schema=schema, deleted=False, include_history=False)
existing_document_names = [
x.meta for x in existing_documents
_meta(x) for x in existing_documents
]
conflicting_names = [
x.meta for x in documents
if x.meta not in existing_document_names and
x.schema.startswith(schema)
_meta(x) for x in documents
if _meta(x) not in existing_document_names and
x['schema'].startswith(schema)
]
if existing_document_names and conflicting_names:
raise errors.SingletonDocumentConflict(
@@ -141,7 +148,6 @@
return decorator
@document_wrapper.wrap_documents
@require_unique_document_schema(types.LAYERING_POLICY_SCHEMA)
def documents_create(bucket_name, documents, validations=None,
session=None):
@@ -172,8 +178,8 @@ def documents_create(bucket_name, documents, validations=None,
d for d in revision_documents_get(bucket_name=bucket_name)
]
documents_to_delete = [
h for h in document_history if h.meta not in [
d.meta for d in documents]
h for h in document_history if _meta(h) not in [
_meta(d) for d in documents]
]
# Only create a revision if any docs have been created, changed or deleted.
@@ -187,18 +193,18 @@ def documents_create(bucket_name, documents, validations=None,
if documents_to_delete:
LOG.debug('Deleting documents: %s.',
[d.meta for d in documents_to_delete])
[_meta(d) for d in documents_to_delete])
deleted_documents = []
for d in documents_to_delete:
doc = models.Document()
with session.begin():
# Store bare minimum information about the document.
doc['schema'] = d.schema
doc['name'] = d.name
doc['layer'] = d.layer
doc['schema'] = d['schema']
doc['name'] = d['name']
doc['layer'] = d['layer']
doc['data'] = {}
doc['meta'] = d.metadata
doc['meta'] = d['metadata']
doc['data_hash'] = _make_hash({})
doc['metadata_hash'] = _make_hash({})
doc['bucket_id'] = bucket['id']
@@ -374,7 +380,7 @@ def document_get(session=None, raw_dict=False, revision_id=None, **filters):
for doc in documents:
d = doc.to_dict(raw_dict=raw_dict)
if utils.deepfilter(d, **nested_filters):
return document_wrapper.DocumentDict(d)
return d
filters.update(nested_filters)
raise errors.DocumentNotFound(filters=filters)
@@ -426,7 +432,7 @@ def document_get_all(session=None, raw_dict=False, revision_id=None,
if utils.deepfilter(d, **nested_filters):
final_documents.append(d)
return document_wrapper.DocumentDict.from_dict(final_documents)
return final_documents
####################
@@ -592,7 +598,6 @@ def revision_delete_all():
raw_query("DELETE FROM revisions;")
@document_wrapper.wrap_documents
def _exclude_deleted_documents(documents):
"""Excludes all documents that have been deleted including all documents
earlier in the revision history with the same ``metadata.name`` and
@@ -602,12 +607,12 @@ def _exclude_deleted_documents(documents):
for doc in sorted(documents, key=lambda x: x['created_at']):
if doc['deleted'] is True:
previous_doc = documents_map.get(doc.meta)
previous_doc = documents_map.get(_meta(doc))
if previous_doc:
if doc['deleted_at'] >= previous_doc['created_at']:
documents_map[doc.meta] = None
documents_map[_meta(doc)] = None
else:
documents_map[doc.meta] = doc
documents_map[_meta(doc)] = doc
return [d for d in documents_map.values() if d is not None]
@@ -694,7 +699,7 @@ def revision_documents_get(revision_id=None, include_history=True,
filtered_documents = _filter_revision_documents(
revision_documents, unique_only, **filters)
return document_wrapper.DocumentDict.from_dict(filtered_documents)
return filtered_documents
# NOTE(fmontei): No need to include `@require_revision_exists` decorator as
@@ -1099,7 +1104,7 @@ def validation_get_all(revision_id, session=None):
expected_validations = set()
for vp in validation_policies:
expected_validations = expected_validations.union(
list(v['name'] for v in vp.data.get('validations', [])))
list(v['name'] for v in vp['data'].get('validations', [])))
missing_validations = expected_validations - actual_validations
extra_validations = actual_validations - expected_validations
@@ -1138,7 +1143,7 @@ def _check_validation_entries_against_validation_policies(
expected_validations = set()
for vp in validation_policies:
expected_validations |= set(
v['name'] for v in vp.data.get('validations', []))
v['name'] for v in vp['data'].get('validations', []))
missing_validations = expected_validations - actual_validations
extra_validations = actual_validations - expected_validations
@@ -1165,19 +1170,19 @@ def _check_validation_entries_against_validation_policies(
for entry in result_map[extra_name]:
original_status = entry['status']
entry['status'] = 'ignored [%s]' % original_status
entry.setdefault('errors', [])
msg_args = _meta(vp) + (
', '.join(v['name'] for v in vp['data'].get(
'validations', [])),
)
for vp in validation_policies:
entry['errors'].append({
'message': (
'The result for this validation was externally '
'registered but has been ignored because it is not '
'found in the validations for ValidationPolicy '
'[%s, %s] %s: %s.' % (
vp.schema, vp.layer, vp.name,
', '.join(v['name'] for v in vp['data'].get(
'validations', []))
)
'[%s, %s] %s: %s.' % msg_args
)
})

View File

@@ -199,11 +199,11 @@ class DataSchemaValidator(GenericValidator):
for data_schema in data_schemas:
# Ensure that each `DataSchema` document has required properties
# before they themselves can be used to validate other documents.
if 'name' not in data_schema.metadata:
if not data_schema.name:
continue
if self._schema_re.match(data_schema.name) is None:
continue
if 'data' not in data_schema:
if not data_schema.data:
continue
schema_prefix, schema_version = _get_schema_parts(
data_schema, 'metadata.name')

View File

@@ -22,7 +22,7 @@ from oslo_log import log as logging
from oslo_log import versionutils
from oslo_utils import excutils
from deckhand.common import document as document_wrapper
from deckhand.common.document import DocumentDict as dd
from deckhand.common import utils
from deckhand.common.validation_message import ValidationMessage
from deckhand.engine import document_validation
@@ -127,7 +127,7 @@ class DocumentLayering(object):
substitution_source_map = {}
for src in substitution_sources:
src_ref = document_wrapper.DocumentDict(src)
src_ref = dd(src)
if src_ref.meta in self._documents_by_index:
src_ref = self._documents_by_index[src_ref.meta]
if src_ref.has_replacement:
@@ -432,8 +432,7 @@ class DocumentLayering(object):
filter(lambda x: x.get('schema').startswith(
types.LAYERING_POLICY_SCHEMA), documents))
if layering_policies:
self._layering_policy = document_wrapper.DocumentDict(
layering_policies[0])
self._layering_policy = dd(layering_policies[0])
if len(layering_policies) > 1:
LOG.warning('More than one layering policy document was '
'passed in. Using the first one found: [%s] %s.',
@@ -448,7 +447,7 @@ class DocumentLayering(object):
raise errors.LayeringPolicyNotFound()
for document in documents:
document = document_wrapper.DocumentDict(document)
document = dd(document)
self._documents_by_index.setdefault(document.meta, document)
@@ -542,6 +541,7 @@ class DocumentLayering(object):
:raises MissingDocumentKey: If a layering action path isn't found
in the child document.
"""
method = action['method']
if method not in self._SUPPORTED_METHODS:
raise errors.UnsupportedActionMethod(

View File

@@ -23,7 +23,6 @@ from deckhand.barbican.driver import BarbicanDriver
from deckhand.common import document as document_wrapper
from deckhand.common import utils
from deckhand import errors
from deckhand import types
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
@@ -68,11 +67,10 @@ class SecretsManager(object):
if not isinstance(secret_doc, document_wrapper.DocumentDict):
secret_doc = document_wrapper.DocumentDict(secret_doc)
if secret_doc.storage_policy == types.ENCRYPTED:
if secret_doc.is_encrypted:
payload = cls.barbican_driver.create_secret(secret_doc)
else:
payload = secret_doc.data
return payload
@classmethod

View File

@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import falcon
from oslo_log import log as logging
import six
@@ -30,34 +28,6 @@ def get_version_from_request(req):
return 'N/A'
def _safe_yaml_dump(error_response):
"""Cast every instance of ``DocumentDict`` into a dictionary for
compatibility with ``yaml.safe_dump``.
This should only be called for error formatting.
"""
is_dict_subclass = (
lambda v: type(v) is not dict and issubclass(v.__class__, dict)
)
def _to_dict(value, parent):
if isinstance(value, (list, tuple, set)):
for v in value:
_to_dict(v, value)
elif isinstance(value, collections.Mapping):
for v in value.values():
_to_dict(v, value)
else:
if isinstance(parent, (list, tuple, set)):
parent[parent.index(value)] = (
dict(value) if is_dict_subclass(value) else value)
elif isinstance(parent, dict):
for k, v in parent.items():
parent[k] = dict(v) if is_dict_subclass(v) else v
_to_dict(error_response, None)
return yaml.safe_dump(error_response)
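# For context: the helper above became unnecessary once the DocumentDict
# representer was registered in deckhand/common/document.py. A simpler
# recursive equivalent of what it did, shown only for comparison:
#
#   def to_plain(value):
#       if isinstance(value, dict):
#           return {k: to_plain(v) for k, v in value.items()}
#       if isinstance(value, (list, tuple, set)):
#           return [to_plain(v) for v in value]
#       return value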
def format_error_resp(req,
resp,
status_code=falcon.HTTP_500,
@@ -131,8 +101,8 @@ def format_error_resp(req,
'retry': True if status_code is falcon.HTTP_500 else False
}
resp.body = _safe_yaml_dump(error_response)
resp.status = status_code
resp.body = yaml.safe_dump(error_response)
def default_exception_handler(ex, req, resp, params):

View File

@@ -116,7 +116,7 @@ class TestValidationMessageFormatting(test_base.BaseControllerTest):
resp = self.app.simulate_put(
'/api/v1.0/buckets/test/documents',
headers={'Content-Type': 'application/x-yaml'},
body='name: test')
body='foo: bar')
expected = {
'status': 'Failure',
@@ -132,8 +132,8 @@ class TestValidationMessageFormatting(test_base.BaseControllerTest):
{
'diagnostic': mock.ANY,
'documents': [{
'layer': None,
'name': None,
'layer': '',
'name': '',
'schema': ''
}],
'error': True,
@@ -146,8 +146,8 @@ class TestValidationMessageFormatting(test_base.BaseControllerTest):
{
'diagnostic': mock.ANY,
'documents': [{
'layer': None,
'name': None,
'layer': '',
'name': '',
'schema': ''
}],
'error': True,
@@ -211,8 +211,8 @@ class TestValidationMessageFormatting(test_base.BaseControllerTest):
{
'diagnostic': mock.ANY,
'documents': [{
'layer': None,
'name': None,
'layer': '',
'name': '',
'schema': invalid_document['schema']
}],
'error': True,

View File

@@ -221,6 +221,32 @@ class TestDocumentLayeringNegative(
self.assertRaises(
errors.InvalidDocumentParent, self._test_layering, documents)
def test_layering_invalid_layer_order_raises_exc(self):
"""Validate that an invalid layerOrder (which means that the document
layer won't be found in the layerOrder) raises an exception.
"""
doc_factory = factories.DocumentFactory(1, [1])
lp_template, document = doc_factory.gen_test({
"_GLOBAL_SUBSTITUTIONS_1_": [{
"dest": {
"path": ".c"
},
"src": {
"schema": "deckhand/Certificate/v1",
"name": "global-cert",
"path": "."
}
}],
}, global_abstract=False)
layering_policy = copy.deepcopy(lp_template)
del layering_policy['data']['layerOrder']
error_re = "layer \'global\' .* was not found in layerOrder.*"
self.assertRaisesRegexp(
errors.InvalidDocumentLayer, error_re, self._test_layering,
[layering_policy, document], validate=True)
class TestDocumentLayeringValidationNegative(
test_document_layering.TestDocumentLayering):
@@ -261,34 +287,3 @@ class TestDocumentLayeringValidationNegative(
self.assertRaises(errors.InvalidDocumentFormat,
self._test_layering, [layering_policy, document],
validate=True)
def test_layering_invalid_document_format_generates_error_messages(self):
doc_factory = factories.DocumentFactory(1, [1])
lp_template, document = doc_factory.gen_test({
"_GLOBAL_SUBSTITUTIONS_1_": [{
"dest": {
"path": ".c"
},
"src": {
"schema": "deckhand/Certificate/v1",
"name": "global-cert",
"path": "."
}
}],
}, global_abstract=False)
layering_policy = copy.deepcopy(lp_template)
del layering_policy['data']['layerOrder']
error_re = r"^'layerOrder' is a required property$"
e = self.assertRaises(
errors.InvalidDocumentFormat, self._test_layering,
[layering_policy, document], validate=True)
self.assertRegex(e.error_list[0]['message'], error_re)
self.assertEqual(layering_policy['schema'],
e.error_list[0]['documents'][0]['schema'])
self.assertEqual(layering_policy['metadata']['name'],
e.error_list[0]['documents'][0]['name'])
self.assertEqual(layering_policy['metadata']['layeringDefinition'][
'layer'],
e.error_list[0]['documents'][0]['layer'])