Revert "Limit the minimal RAM amount for OVS+DPDK to 1024MB"

This reverts commit 6e4ef67269.

Change-Id: Ib9144e2becebfa9b4817e8389419892c0daadd35
Closes-Bug: #1656804
Author: Anastasia Balobashina, 2017-01-17 18:35:08 +00:00 (committed by Anastasiya)
Parent: 75db9b9d2b
Commit: 092acaa562
9 changed files with 17 additions and 213 deletions

==== File 1 of 9 ====

@@ -502,14 +502,6 @@ class NodeAttributesValidator(base.BasicAttributesValidator):
                     ", ".join(supported_hugepages)
                 )
             )
-        dpdk_hugepages = utils.get_in(attrs, 'hugepages', 'dpdk', 'value')
-        min_dpdk_hugepages = utils.get_in(attrs, 'hugepages', 'dpdk', 'min')
-        if dpdk_hugepages < min_dpdk_hugepages:
-            raise errors.InvalidData(
-                "Node {0} does not have enough hugepages for dpdk."
-                "Need to allocate at least {1} MB.".format(node.id,
-                                                           min_dpdk_hugepages)
-            )
 
         try:
             objects.NodeAttributes.distribute_hugepages(node, attrs)
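Note: the hunk above removes the only guard that compared a node's requested DPDK hugepage memory against its declared minimum; after the revert, any value (including 0) flows straight into distribute_hugepages(). A minimal sketch of the removed check, with a hypothetical stand-in for utils.get_in and a plain ValueError instead of errors.InvalidData:

    def get_in(mapping, *keys):
        # Stand-in for utils.get_in: walk nested dicts, None if a key is absent.
        for key in keys:
            if not isinstance(mapping, dict) or key not in mapping:
                return None
            mapping = mapping[key]
        return mapping

    def check_dpdk_hugepages(node_id, attrs):
        value = get_in(attrs, 'hugepages', 'dpdk', 'value') or 0
        minimum = get_in(attrs, 'hugepages', 'dpdk', 'min') or 0
        if value < minimum:
            raise ValueError(
                "Node {0} does not have enough hugepages for dpdk."
                "Need to allocate at least {1} MB.".format(node_id, minimum))

    # With no 'min' key (the post-revert state), even tiny values pass:
    check_dpdk_hugepages(1, {'hugepages': {'dpdk': {'value': 2}}})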

==== File 2 of 9 ====

@@ -42,7 +42,6 @@ down_revision = 'f2314e5d63c9'
 def upgrade():
     upgrade_vmware_attributes_metadata()
     upgrade_attributes_metadata()
-    upgrade_attributes_node()
     upgrade_cluster_roles()
     upgrade_tags_meta()
     upgrade_primary_unit()
@@ -64,7 +63,6 @@ def downgrade():
     downgrade_primary_unit()
     downgrade_tags_meta()
     downgrade_cluster_roles()
-    downgrade_attributes_node()
     downgrade_attributes_metadata()
     downgrade_vmware_attributes_metadata()
@@ -308,8 +306,6 @@ DEFAULT_RELEASE_BOND_NFV_ATTRIBUTES = {
     }
 }
 
-MIN_DPDK_HUGEPAGES_MEMORY = 1024
-
 NEW_BONDING_AVAILABILITY = [
     {'dpdkovs': "'experimental' in version:feature_groups and "
@@ -498,12 +494,6 @@ def upgrade_attributes_metadata():
     upgrade_cluster_attributes(connection)
 
 
-def upgrade_attributes_node():
-    connection = op.get_bind()
-    upgrade_release_node_attributes(connection)
-    upgrade_node_attributes(connection)
-
-
 def upgrade_release_attributes_metadata(connection):
     select_query = sa.sql.text(
         'SELECT id, attributes_metadata, version FROM releases '
@@ -551,57 +541,12 @@ def upgrade_cluster_attributes(connection):
             editable=jsonutils.dumps(editable))
 
 
-def upgrade_release_node_attributes(connection):
-    select_query = sa.sql.text(
-        'SELECT id, node_attributes FROM releases '
-        'WHERE node_attributes IS NOT NULL')
-    update_query = sa.sql.text(
-        'UPDATE releases SET node_attributes = :node_attributes '
-        'WHERE id = :release_id')
-
-    for release_id, node_attrs in connection.execute(select_query):
-        node_attrs = jsonutils.loads(node_attrs)
-        dpdk = node_attrs.setdefault('hugepages', {}).setdefault('dpdk', {})
-        dpdk['min'] = MIN_DPDK_HUGEPAGES_MEMORY
-        dpdk['value'] = MIN_DPDK_HUGEPAGES_MEMORY
-
-        connection.execute(
-            update_query,
-            release_id=release_id,
-            node_attributes=jsonutils.dumps(node_attrs))
-
-
-def upgrade_node_attributes(connection):
-    select_query = sa.sql.text(
-        'SELECT id, attributes FROM nodes '
-        'WHERE attributes IS NOT NULL')
-    update_query = sa.sql.text(
-        'UPDATE nodes SET attributes = :attributes '
-        'WHERE id = :node_id')
-
-    for node_id, attrs in connection.execute(select_query):
-        attrs = jsonutils.loads(attrs)
-        dpdk = attrs.setdefault('hugepages', {}).setdefault('dpdk', {})
-        dpdk['min'] = MIN_DPDK_HUGEPAGES_MEMORY
-
-        connection.execute(
-            update_query,
-            node_id=node_id,
-            attributes=jsonutils.dumps(attrs))
-
-
 def downgrade_attributes_metadata():
     connection = op.get_bind()
     downgrade_cluster_attributes(connection)
     downgrade_release_attributes_metadata(connection)
 
 
-def downgrade_attributes_node():
-    connection = op.get_bind()
-    downgrade_release_node_attributes(connection)
-    downgrade_node_attributes(connection)
-
-
 def downgrade_release_attributes_metadata(connection):
     select_query = sa.sql.text(
         'SELECT id, attributes_metadata FROM releases '
@@ -639,44 +584,6 @@ def downgrade_cluster_attributes(connection):
             editable=jsonutils.dumps(editable))
 
 
-def downgrade_release_node_attributes(connection):
-    select_query = sa.sql.text(
-        'SELECT id, node_attributes FROM releases '
-        'WHERE node_attributes IS NOT NULL')
-    update_query = sa.sql.text(
-        'UPDATE releases SET node_attributes = :node_attributes '
-        'WHERE id = :release_id')
-
-    for release_id, node_attrs in connection.execute(select_query):
-        node_attrs = jsonutils.loads(node_attrs)
-        dpdk = node_attrs.setdefault('hugepages', {}).setdefault('dpdk', {})
-        dpdk['min'] = 0
-
-        connection.execute(
-            update_query,
-            release_id=release_id,
-            node_attributes=jsonutils.dumps(node_attrs))
-
-
-def downgrade_node_attributes(connection):
-    select_query = sa.sql.text(
-        'SELECT id, attributes FROM nodes '
-        'WHERE attributes IS NOT NULL')
-    update_query = sa.sql.text(
-        'UPDATE nodes SET attributes = :attributes '
-        'WHERE id = :node_id')
-
-    for node_id, attrs in connection.execute(select_query):
-        attrs = jsonutils.loads(attrs)
-        dpdk = attrs.setdefault('hugepages', {}).setdefault('dpdk', {})
-        dpdk['min'] = 0
-
-        connection.execute(
-            update_query,
-            node_id=node_id,
-            attributes=jsonutils.dumps(attrs))
-
-
 def upgrade_cluster_roles():
     op.add_column(
         'clusters',
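All four deleted migration helpers follow the same read-modify-write pattern over a JSON column: load the document, force a hugepages.dpdk section into existence with chained setdefault(), set 'min', and write the row back. A standalone sketch of that pattern (stdlib json in place of oslo's jsonutils, no database round-trip):

    import json

    MIN_DPDK_HUGEPAGES_MEMORY = 1024

    def set_dpdk_min(raw_attrs, minimum=MIN_DPDK_HUGEPAGES_MEMORY):
        attrs = json.loads(raw_attrs)
        # setdefault() only creates the nested dicts when they are missing,
        # so any existing hugepage settings survive the migration.
        dpdk = attrs.setdefault('hugepages', {}).setdefault('dpdk', {})
        dpdk['min'] = minimum
        return json.dumps(attrs)

    # A row with no dpdk section gains one; the downgrade path used minimum=0.
    assert json.loads(set_dpdk_min('{}'))['hugepages']['dpdk']['min'] == 1024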

==== File 3 of 9 ====

@@ -2527,8 +2527,8 @@
           description: "DPDK Huge Pages per NUMA node in MB"
           label: "DPDK Huge Pages"
           type: "number"
-          value: 1024
-          min: 1024
+          value: 0
+          min: 0
           restrictions:
             - condition: "not ('experimental' in version:feature_groups)"
               action: "hide"

==== File 4 of 9 ====

@@ -1563,8 +1563,7 @@ class NodeAttributes(object):
     @classmethod
     def is_dpdk_hugepages_enabled(cls, node, attributes=None):
         hugepages = cls._safe_get_hugepages(node, attributes=attributes)
-        return ('dpdk' in hugepages and bool(hugepages['dpdk']['value']) and
-                Node.dpdk_enabled(node))
+        return 'dpdk' in hugepages and bool(hugepages['dpdk']['value'])
 
     @classmethod
     def dpdk_hugepages_attrs(cls, node):
@@ -1578,8 +1577,7 @@ class NodeAttributes(object):
         """
         hugepages = cls._safe_get_hugepages(node)
 
-        if (not Node.dpdk_enabled(node) and 'dpdk' not in hugepages or
-                not hugepages['dpdk']['value']):
+        if 'dpdk' not in hugepages or not hugepages['dpdk']['value']:
             return {}
 
         dpdk_memory = hugepages['dpdk']['value']
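The two hunks above decouple hugepage accounting from interface-level DPDK state: is_dpdk_hugepages_enabled() now looks only at the configured value, so hugepages can be reserved before (or without) any NIC having DPDK enabled. A table-top sketch of the post-revert predicate over plain dicts:

    def is_dpdk_hugepages_enabled(hugepages):
        # Post-revert behaviour: only the configured value matters;
        # Node.dpdk_enabled() is no longer consulted.
        return 'dpdk' in hugepages and bool(hugepages['dpdk']['value'])

    assert is_dpdk_hugepages_enabled({'dpdk': {'value': 128}})
    assert not is_dpdk_hugepages_enabled({'dpdk': {'value': 0}})
    assert not is_dpdk_hugepages_enabled({})  # no dpdk section at all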

==== File 5 of 9 ====

@@ -488,8 +488,9 @@ class TestDeploymentAttributesSerialization90(
             numa_nodes.append({
                 'id': i,
                 'cpus': [i],
-                'memory': 2 * 1024 ** 3
+                'memory': 1024 ** 3
             })
+
         meta = {
             'numa_topology': {
                 'supported_hugepages': [2048],
@@ -500,21 +501,17 @@ class TestDeploymentAttributesSerialization90(
             cluster_id=self.cluster_db.id,
             roles=['compute'],
             meta=meta)
-        node.interfaces[0].attributes.get('dpdk', {}).get(
-            'enabled', {})['value'] = True
         node.attributes.update({
             'hugepages': {
                 'dpdk': {
-                    'value': 1024},
+                    'value': 128},
                 'nova': {
                     'value': {'2048': 1}}}}
         )
         serialized_for_astute = self.serialize()
         serialized_node = serialized_for_astute['nodes'][0]
         self.assertEquals(
-            [1024, 1024, 1024],
+            [128, 128, 128],
             serialized_node['dpdk']['ovs_socket_mem'])
         self.assertTrue(serialized_node['nova']['enable_hugepages'])
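The reworked test also drops the explicit enabling of DPDK on the node's first interface, consistent with the object-layer change above, and expects ovs_socket_mem to repeat the per-NUMA 'dpdk' value once per NUMA node. A sketch of that expected shape, assuming the serializer simply broadcasts the configured value (the real code may additionally weigh it against available memory):

    def ovs_socket_mem(dpdk_value_mb, numa_nodes):
        # One socket-memory entry per NUMA node, each equal to the
        # "DPDK Huge Pages per NUMA node in MB" setting.
        return [dpdk_value_mb for _ in numa_nodes]

    numa = [{'id': i, 'cpus': [i], 'memory': 1024 ** 3} for i in range(3)]
    assert ovs_socket_mem(128, numa) == [128, 128, 128]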

==== File 6 of 9 ====

@@ -496,7 +496,6 @@ class TestProvisioningSerializer90(BaseIntegrationTest):
         node = self.env.nodes[0]
         node.attributes['hugepages']['nova']['value'] = {'2048': 5}
-        node.attributes['hugepages']['dpdk']['value'] = 0
 
         serialized_info = self.serializer.serialize(node.cluster, [node])

==== File 7 of 9 ====

@@ -98,17 +98,6 @@ TAGS_META = {
     }
 }
 
-NODE_ATTRIBUTES = {
-    'hugepages':
-    {
-        'dpdk':
-        {
-            'value': 1024,
-            'min': 1024
-        }
-    }
-}
-
 
 def setup_module():
     dropdb()
@@ -148,8 +137,7 @@ def prepare():
         'tags_matadata': jsonutils.dumps(TAGS_META),
         'is_deployable': True,
         'networks_metadata': '{}',
-        'attributes_metadata': jsonutils.dumps(attrs),
-        'node_attributes': jsonutils.dumps(NODE_ATTRIBUTES),
+        'attributes_metadata': jsonutils.dumps(attrs)
     }
     result = db.execute(meta.tables['releases'].insert(), [release])
     release_id = result.inserted_primary_key[0]
@@ -188,7 +176,6 @@ def prepare():
            'status': 'ready',
            'roles': ['role_x', 'role_y'],
            'primary_tags': ['role_y', 'test'],
-           'attributes': jsonutils.dumps(NODE_ATTRIBUTES),
            'meta': '{}',
            'mac': 'bb:aa:aa:aa:aa:aa',
            'timestamp': datetime.datetime.utcnow(),
@@ -203,8 +190,7 @@ def prepare():
            'group_id': None,
            'status': 'discover',
            'mac': 'aa:aa:aa:aa:aa:aa',
-           'timestamp': datetime.datetime.utcnow(),
-           'attributes': jsonutils.dumps(NODE_ATTRIBUTES),
+           'timestamp': datetime.datetime.utcnow()
         }]
     )
     node_id = new_node.inserted_primary_key[0]
@@ -324,16 +310,6 @@ class TestAttributesDowngrade(base.BaseAlembicMigrationTest):
         common = attrs.setdefault('editable', {}).setdefault('common', {})
         self.assertEqual(common.get('security_groups'), None)
 
-    def test_release_node_attributes_downgrade(self):
-        releases = self.meta.tables['releases']
-        results = db.execute(
-            sa.select([releases.c.node_attributes]))
-        for node_attrs in results:
-            node_attrs = jsonutils.loads(node_attrs[0])
-            dpdk = node_attrs.setdefault('hugepages', {}).setdefault('dpdk',
-                                                                     {})
-            self.assertEqual(dpdk.get('min'), 0)
-
 
 class TestTags(base.BaseAlembicMigrationTest):
     def test_primary_tags_downgrade(self):

==== File 8 of 9 ====

@@ -255,20 +255,6 @@ NODE_OFFLOADING_MODES = [
         'sub': []
     }
 ]
 
-NODE_ATTRIBUTES = {
-    'hugepages':
-    {
-        'dpdk':
-        {
-            'value': 0,
-            'min': 0
-        }
-    }
-}
-
-MIN_DPDK_HUGEPAGES_VALUE = 1024
-
 # version of Fuel when security group switch was added
 RELEASE_VERSION = '9.0'
 # version of Fuel when tags was introduced
@@ -318,7 +304,6 @@ def prepare():
         'state': 'available',
         'networks_metadata': '{}',
         'attributes_metadata': jsonutils.dumps(ATTRIBUTES_METADATA),
-        'node_attributes': jsonutils.dumps(NODE_ATTRIBUTES),
         'deployment_tasks': '{}',
         'roles': jsonutils.dumps([
             'controller',
@@ -406,7 +391,6 @@ def prepare():
            'status': 'ready',
            'roles': ['controller', 'ceph-osd'],
            'primary_roles': ['controller'],
-           'attributes': jsonutils.dumps(NODE_ATTRIBUTES),
            'meta': jsonutils.dumps({
                'interfaces': [{
                    'mac': '00:00:00:00:00:01'
@@ -427,7 +411,6 @@ def prepare():
            'group_id': None,
            'status': 'ready',
            'roles': ['controller', 'ceph-osd'],
-           'attributes': jsonutils.dumps(NODE_ATTRIBUTES),
            'meta': jsonutils.dumps({
                'interfaces': [
                    {
@@ -673,25 +656,6 @@ class TestAttributesUpdate(base.BaseAlembicMigrationTest):
         common = editable.setdefault('common', {})
         self.assertEqual(common.get('security_groups'), None)
 
-    def test_release_node_attributes_update(self):
-        releases = self.meta.tables['releases']
-        results = db.execute(
-            sa.select([releases.c.node_attributes]))
-        for node_attrs in results:
-            node_attrs = jsonutils.loads(node_attrs[0])
-            dpdk = node_attrs.setdefault('hugepages', {}).setdefault('dpdk',
-                                                                     {})
-            self.assertEqual(dpdk.get('min'), MIN_DPDK_HUGEPAGES_VALUE)
-
-    def test_node_attributes_update(self):
-        nodes = self.meta.tables['nodes']
-        results = db.execute(
-            sa.select([nodes.c.attributes]))
-        for attrs in results:
-            attrs = jsonutils.loads(attrs[0])
-            dpdk = attrs.setdefault('hugepages', {}).setdefault('dpdk', {})
-            self.assertEqual(dpdk.get('min'), MIN_DPDK_HUGEPAGES_VALUE)
-
     def test_upgrade_release_with_nic_attributes(self):
         releases_table = self.meta.tables['releases']
         result = db.execute(

==== File 9 of 9 ====

@@ -28,7 +28,7 @@ validator = node_validator.NodeAttributesValidator.validate
 
 def mock_cluster_attributes(func):
     def wrapper(*args, **kwargs):
-        cluster_attr_mock = mock.patch.object(
+        attr_mock = mock.patch.object(
             objects.Cluster,
             'get_editable_attributes',
             return_value={
@@ -39,12 +39,7 @@ def mock_cluster_attributes(func):
                 }
             }
         )
-        node_dpdk_mock = mock.patch.object(
-            objects.Node,
-            'dpdk_enabled',
-            return_value=True
-        )
-        with cluster_attr_mock, node_dpdk_mock:
+        with attr_mock:
             func(*args, **kwargs)
 
     return wrapper
@@ -59,8 +54,8 @@ class BaseNodeAttributeValidatorTest(base.BaseTestCase):
         meta['numa_topology'] = {
             "supported_hugepages": [2048, 1048576],
             "numa_nodes": [
-                {"id": 0, "cpus": [0, 1], 'memory': 3 * 1024 ** 3},
-                {"id": 1, "cpus": [2, 3], 'memory': 3 * 1024 ** 3},
+                {"id": 0, "cpus": [0, 1], 'memory': 2 * 1024 ** 3},
+                {"id": 1, "cpus": [2, 3], 'memory': 2 * 1024 ** 3},
             ]
         }
         meta['cpu']['total'] = 4
@@ -73,8 +68,7 @@ class BaseNodeAttributeValidatorTest(base.BaseTestCase):
             },
             'dpdk': {
                 'type': 'number',
-                'value': 1024,
-                'min': 1024,
+                'value': 0,
             },
         },
         'cpu_pinning': {
@@ -113,11 +107,11 @@ class TestNodeAttributesValidatorHugepages(BaseNodeAttributeValidatorTest):
                 },
             },
             'dpdk': {
-                'value': 1024,
-                'min': 1024
+                'value': 2,
             },
         }
     }
 
         self.assertNotRaises(errors.InvalidData, validator,
                              json.dumps(data), self.node, self.cluster)
@@ -138,29 +132,6 @@ class TestNodeAttributesValidatorHugepages(BaseNodeAttributeValidatorTest):
             errors.InvalidData, 'Not enough memory for components',
             validator, json.dumps(data), self.node, self.cluster)
 
-    @mock_cluster_attributes
-    def test_not_enough_dpdk_hugepages(self, m_dpdk_nics):
-        data = {
-            'hugepages': {
-                'nova': {
-                    'value': {
-                        '2048': 1,
-                        '1048576': 0,
-                    },
-                },
-                'dpdk': {
-                    'value': 1023,
-                    'min': 1024
-                },
-            }
-        }
-        message = ("Node {0} does not have enough hugepages for dpdk."
-                   "Need to allocate at least {1} MB.").format(self.node.id,
-                                                               1024)
-        self.assertRaisesWithMessageIn(
-            errors.InvalidData, message,
-            validator, json.dumps(data), self.node, self.cluster)
-
     @mock_cluster_attributes
     def test_dpdk_requires_too_much(self, m_dpdk_nics):
         data = {