Validate attributes restrictions

Since Nailgun contains an attribute-restriction
mechanism, it is possible to verify attribute
restrictions. This commit adds restriction
checks to validation for both node attributes
and cluster attributes.

Change-Id: I269da9a7a7df5fea336c07784b37d6ced1641993
Closes-Bug: #1567394
This commit is contained in:
Artur Svechnikov 2016-04-12 14:23:24 +03:00
parent 11f55603d7
commit 7e3fe2c308
9 changed files with 241 additions and 51 deletions

View File

@ -384,7 +384,11 @@ class NodeAttributesHandler(BaseHandler):
"""
node = self.get_object_or_404(objects.Node, node_id)
data = self.checked_data(node=node)
if not node.cluster:
raise self.http(400, "Node '{}' doesn't belong to any cluster"
.format(node.id))
data = self.checked_data(node=node, cluster=node.cluster)
objects.Node.update_attributes(node, data)
return objects.Node.get_attributes(node)

View File

@ -141,14 +141,31 @@ class BasicAttributesValidator(BasicValidator):
return attrs
@classmethod
def validate_attributes(cls, data, models=None, force=False):
    """Validate attributes.

    Runs the per-attribute validation first and then, when models
    are supplied and ``force`` is not set, verifies the attribute
    restrictions against those models.

    :param data: attributes to validate
    :type data: dict
    :param models: models which are used in restrictions conditions
    :type models: dict
    :param force: if True, skip the restrictions checks
    :type force: bool
    :returns: the validated data
    :raises errors.InvalidData: when a restriction check fails
    """
    for attrs in six.itervalues(data):
        # Non-dict top-level entries carry no attributes to check
        if not isinstance(attrs, dict):
            continue
        for attr_name, attr in six.iteritems(attrs):
            cls.validate_attribute(attr_name, attr)

    # If models are present, restrictions can be checked
    if models and not force:
        restrict_err = restrictions.AttributesRestriction.check_data(
            models, data)
        if restrict_err:
            raise errors.InvalidData(
                "Some restrictions didn't pass verification: {}"
                .format(restrict_err))
    return data
@classmethod

View File

@ -28,6 +28,7 @@ from nailgun.db.sqlalchemy.models import Node
from nailgun import errors
from nailgun import objects
from nailgun.plugins.manager import PluginManager
from nailgun.settings import settings
class ClusterValidator(base.BasicValidator):
@ -232,11 +233,21 @@ class ClusterAttributesValidator(base.BasicAttributesValidator):
)
attrs = d
models = None
if cluster is not None:
attrs = objects.Cluster.get_updated_editable_attributes(cluster, d)
cls.validate_provision(cluster, attrs)
cls.validate_allowed_attributes(cluster, d, force)
cls.validate_attributes(attrs.get('editable', {}))
models = {
'settings': attrs.get('editable', {}),
'cluster': cluster,
'version': settings.VERSION,
'networking_parameters': cluster.network_config,
}
cls.validate_attributes(attrs.get('editable', {}), models, force=force)
return d

View File

@ -26,6 +26,7 @@ from nailgun.db.sqlalchemy.models import Node
from nailgun.db.sqlalchemy.models import NodeNICInterface
from nailgun import errors
from nailgun import objects
from nailgun.settings import settings
from nailgun import utils
@ -442,10 +443,17 @@ class NodeDeploymentValidator(TaskDeploymentValidator,
class NodeAttributesValidator(base.BasicAttributesValidator):
@classmethod
def validate(cls, data, node):
def validate(cls, data, node, cluster):
data = cls.validate_json(data)
full_data = utils.dict_merge(objects.Node.get_attributes(node), data)
attrs = cls.validate_attributes(full_data)
models = {
'settings': objects.Cluster.get_editable_attributes(cluster),
'cluster': cluster,
'version': settings.VERSION,
'networking_parameters': cluster.network_config,
}
attrs = cls.validate_attributes(full_data, models=models)
cls._validate_cpu_pinning(node, attrs)
cls._validate_hugepages(node, attrs)

View File

@ -180,6 +180,60 @@ class TestClusterAttributes(BaseIntegrationTest):
)
self.assertEqual(400, resp.status_code)
def test_failing_attributes_with_restrictions(self):
    """Patching attributes that violate a restriction must fail.

    'comp2' carries a restriction that fires when comp1.value is
    true, so enabling both checkboxes in one PATCH must be rejected
    with HTTP 400 and an explanatory message.
    """
    cluster = self.env.create_cluster(api=False)
    # Install two checkbox attributes; comp2 is disallowed whenever
    # comp1 is enabled.
    objects.Cluster.patch_attributes(cluster, {
        'editable': {
            'test': {
                'comp1': {
                    'description': 'desc',
                    'label': 'Comp 1',
                    'type': 'checkbox',
                    'value': False,
                    'weight': 10,
                },
                'comp2': {
                    'description': 'desc',
                    'label': 'Comp 2',
                    'type': 'checkbox',
                    'value': False,
                    'weight': 20,
                    'restrictions': ["settings:test.comp1.value == true"],
                },
            },
        },
    })
    # Try to enable both attributes at once — comp2's restriction
    # should reject the request.
    resp = self.app.patch(
        reverse(
            'ClusterAttributesHandler',
            kwargs={'cluster_id': cluster.id}),
        params=jsonutils.dumps({
            'editable': {
                'test': {
                    'comp1': {
                        'value': True
                    },
                    'comp2': {
                        'value': True
                    },
                },
            },
        }),
        headers=self.default_headers,
        expect_errors=True
    )
    self.assertEqual(400, resp.status_code)
    # The error message must name the failed restriction's action
    # and condition (default 'disable' action, no custom message).
    extended_restr = {
        'condition': "settings:test.comp1.value == true",
        'action': 'disable',
    }
    self.assertIn(
        "Validation failed for attribute 'Comp 2': restriction with action"
        "='{}' and condition='{}' failed due to attribute value='True'"
        .format(extended_restr['action'], extended_restr['condition']),
        resp.json_body['message'])
def test_get_default_attributes(self):
cluster = self.env.create_cluster(api=True)
release = self.db.query(Release).get(
@ -781,7 +835,9 @@ class TestAttributesWithPlugins(BaseIntegrationTest):
'label': 'label',
'value': '1',
'weight': 25,
'restrictions': [{'action': 'hide'}]
'restrictions': [{
'condition': 'true',
'action': 'hide'}]
}
}]
},

View File

@ -1639,8 +1639,13 @@ class TestHandlers(BaseIntegrationTest):
kwargs={'cluster_id': cluster['id']}),
params=jsonutils.dumps({
'editable': {
'storage': {'volumes_ceph': {'value': True},
'osd_pool_size': {'value': '3'}}}}),
'storage': {
'volumes_ceph': {'value': True},
'osd_pool_size': {'value': '3'},
'volumes_lvm': {'value': False},
}
}
}),
headers=self.default_headers)
task = self.env.launch_deployment()
@ -1701,8 +1706,13 @@ class TestHandlers(BaseIntegrationTest):
kwargs={'cluster_id': cluster['id']}),
params=jsonutils.dumps({
'editable': {
'storage': {'volumes_ceph': {'value': True},
'osd_pool_size': {'value': '1'}}}}),
'storage': {
'volumes_ceph': {'value': True},
'osd_pool_size': {'value': '1'},
'volumes_lvm': {'value': False},
}
}
}),
headers=self.default_headers)
task = self.env.launch_deployment()

View File

@ -426,7 +426,8 @@ class TestHandlers(BaseIntegrationTest):
self.assertEqual(fake_attributes, resp.json_body)
def test_put_node_attributes(self):
node = self.env.create_node(api=False)
self.env.create(nodes_kwargs=[{}])
node = self.env.nodes[-1]
fake_attributes = {
'group1': {
'metadata': {},

View File

@ -17,13 +17,34 @@
import json
import mock
from nailgun.api.v1.validators import node as node_validator
from nailgun import consts
from nailgun import errors
from nailgun import objects
from nailgun.test import base
validator = node_validator.NodeAttributesValidator.validate
def mock_cluster_attributes(func):
    """Decorator stubbing out ``Cluster.get_editable_attributes``.

    The patched attributes provide only the 'common.libvirt_type'
    setting, which is what the node attributes validator consults
    for its restriction checks.
    """
    def wrapper(*args, **kwargs):
        attr_mock = mock.patch.object(
            objects.Cluster,
            'get_editable_attributes',
            return_value={
                'common': {
                    'libvirt_type': {
                        'value': consts.HYPERVISORS.kvm,
                    }
                }
            }
        )
        with attr_mock:
            # Propagate the wrapped function's return value; the
            # original wrapper silently discarded it.
            return func(*args, **kwargs)
    return wrapper
class BaseNodeAttributeValidatorTest(base.BaseTestCase):
def setUp(self):
super(BaseNodeAttributeValidatorTest, self).setUp()
@ -61,16 +82,19 @@ class BaseNodeAttributeValidatorTest(base.BaseTestCase):
}
}
self.node = mock.Mock(meta=meta, attributes=attributes)
self.cluster = mock.Mock()
class TestNodeAttributesValidatorHugepages(BaseNodeAttributeValidatorTest):
@mock_cluster_attributes
def test_defaults(self):
data = {}
self.assertNotRaises(errors.InvalidData, validator,
json.dumps(data), self.node)
json.dumps(data), self.node, self.cluster)
@mock_cluster_attributes
def test_valid_hugepages(self):
data = {
'hugepages': {
@ -87,8 +111,9 @@ class TestNodeAttributesValidatorHugepages(BaseNodeAttributeValidatorTest):
}
self.assertNotRaises(errors.InvalidData, validator,
json.dumps(data), self.node)
json.dumps(data), self.node, self.cluster)
@mock_cluster_attributes
def test_too_much_hugepages(self):
data = {
'hugepages': {
@ -103,8 +128,9 @@ class TestNodeAttributesValidatorHugepages(BaseNodeAttributeValidatorTest):
self.assertRaisesWithMessageIn(
errors.InvalidData, 'Not enough memory for components',
validator, json.dumps(data), self.node)
validator, json.dumps(data), self.node, self.cluster)
@mock_cluster_attributes
def test_dpdk_requires_too_much(self):
data = {
'hugepages': {
@ -116,10 +142,11 @@ class TestNodeAttributesValidatorHugepages(BaseNodeAttributeValidatorTest):
self.assertRaisesWithMessageIn(
errors.InvalidData, 'could not require more memory than node has',
validator, json.dumps(data), self.node)
validator, json.dumps(data), self.node, self.cluster)
class TestNodeAttributesValidatorCpuPinning(BaseNodeAttributeValidatorTest):
@mock_cluster_attributes
def test_valid_data(self):
data = {
'cpu_pinning': {
@ -128,8 +155,9 @@ class TestNodeAttributesValidatorCpuPinning(BaseNodeAttributeValidatorTest):
}
self.assertNotRaises(errors.InvalidData, validator,
json.dumps(data), self.node)
json.dumps(data), self.node, self.cluster)
@mock_cluster_attributes
def test_no_cpu_for_os(self):
pinned_count = self.node.meta['cpu']['total']
@ -141,8 +169,9 @@ class TestNodeAttributesValidatorCpuPinning(BaseNodeAttributeValidatorTest):
self.assertRaisesWithMessageIn(
errors.InvalidData, 'at least one cpu',
validator, json.dumps(data), self.node)
validator, json.dumps(data), self.node, self.cluster)
@mock_cluster_attributes
def test_one_cpu_for_os(self):
pinned_count = self.node.meta['cpu']['total'] - 1
@ -153,4 +182,4 @@ class TestNodeAttributesValidatorCpuPinning(BaseNodeAttributeValidatorTest):
}
self.assertNotRaises(errors.InvalidData, validator,
json.dumps(data), self.node)
json.dumps(data), self.node, self.cluster)

View File

@ -176,7 +176,8 @@ class RestrictionBase(object):
:type action: string
:param strict: disallow undefined variables in condition
:type strict: bool
:returns: dict -- object with 'result' as bool and 'message' as dict
:returns: dict -- object with 'result' as list of satisfied
restrictions and 'message' as dict
"""
satisfied = []
@ -197,9 +198,9 @@ class RestrictionBase(object):
filterd_by_action_restrictions)
return {
'result': bool(satisfied),
'message': '. '.join([item.get('message') for item in
satisfied if item.get('message')])
'result': satisfied,
'message': '. '.join(item.get('message') for item in
satisfied if item.get('message'))
}
@staticmethod
@ -235,6 +236,28 @@ class RestrictionBase(object):
class AttributesRestriction(RestrictionBase):
@staticmethod
def _group_attribute(data):
    """Tell whether the attribute dict describes a group attribute.

    Group attributes are distinguished by carrying a 'metadata' key.
    """
    has_metadata = 'metadata' in data
    return has_metadata
@classmethod
def _get_label(cls, data):
    """Return the attribute's label.

    For group attributes the label lives under 'metadata'; for plain
    attributes it is a top-level key.
    """
    container = data['metadata'] if cls._group_attribute(data) else data
    return container.get('label')
@classmethod
def _get_value(cls, data):
    """Return the attribute's effective value.

    A group attribute's value is its metadata 'enabled' flag,
    defaulting to True when absent; a plain attribute's value comes
    from its 'value' field.
    """
    if not cls._group_attribute(data):
        return data.get('value')
    return data['metadata'].get('enabled', True)
@classmethod
def _get_restrictions(cls, data):
    """Return the restrictions declared on the attribute.

    Yields an empty list when none are declared; group attributes
    keep their restrictions under 'metadata'.
    """
    container = data['metadata'] if cls._group_attribute(data) else data
    return container.get('restrictions', [])
@classmethod
def check_data(cls, models, data):
"""Check cluster attributes data
@ -243,42 +266,73 @@ class AttributesRestriction(RestrictionBase):
:type models: dict
:param data: cluster attributes object
:type data: list|dict
:returns: func -- generator which produces errors
:returns: list of errors
"""
def find_errors(data=data):
"""Generator which traverses through cluster attributes tree
Also checks restrictions for attributes and values for correctness
with regex
Checks the regex for each attribute. Checks restrictions for each
enabled attribute with type 'checkbox'. All hidden attributes
are skipped, as well as disabled group attributes. Similar
logic is used on the UI
"""
if isinstance(data, dict):
restr = cls.check_restrictions(
models, data.get('restrictions', []))
if restr.get('result'):
# TODO(apopovych): handle restriction message
return
else:
attr_type = data.get('type')
if (
attr_type == 'text_list' or
attr_type == 'textarea_list'
):
err = cls.check_fields_length(data)
if err is not None:
yield err
regex_error = cls.validate_regex(data)
if regex_error is not None:
yield regex_error
for key, value in six.iteritems(data):
if key not in ['restrictions', 'regex']:
for err in find_errors(value):
yield err
elif isinstance(data, list):
if isinstance(data, list):
for item in data:
for err in find_errors(item):
yield err
return
if not isinstance(data, dict):
return
group_attribute = cls._group_attribute(data)
value = cls._get_value(data)
label = cls._get_label(data)
restrictions = cls._get_restrictions(data)
# hidden attributes should be skipped
if cls.check_restrictions(
models, restrictions, action='hide')['result']:
return
if group_attribute and not value:
# disabled group attribute should be skipped as well
# as all sub-attributes
return
attr_type = data.get('type')
if attr_type in ['text_list', 'textarea_list']:
err = cls.check_fields_length(data)
if err is not None:
yield err
regex_error = cls.validate_regex(data)
if regex_error is not None:
yield regex_error
# restrictions with 'disable' action should be checked only for
# enabled attributes which type is 'checkbox' or group attributes
if (attr_type == 'checkbox' or group_attribute) and value:
restr = cls.check_restrictions(
models, restrictions, action='disable')
for item in restr['result']:
error = (
"restriction with action='{}' and condition="
"'{}' failed due to attribute value='{}'"
.format(item['action'], item['condition'], value)
)
if item.get('message'):
error = item['message']
yield ("Validation failed for attribute '{}':"
" {}".format(label, error))
for key, value in six.iteritems(data):
if key == 'metadata':
continue
for err in find_errors(value):
yield err
return list(find_errors())