Pass hugepages to kernel parameters

Change-Id: Id5898f0b3e193fbd6267975d0ec70ef72cbce789
Implements: blueprint support-hugepages
Artur Svechnikov 2016-02-19 17:08:51 +03:00
parent de9fe4fe40
commit 1155cc83cb
8 changed files with 178 additions and 15 deletions
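An illustration of the intended result (the page counts below are made up, not taken from the commit): for a compute node that requests huge pages, the provisioning kernel parameters gain options of the form

    hugepagesz=2M hugepages=1024 hugepagesz=1G hugepages=4

where the counts are derived from the node's hugepages attributes and its NUMA topology, as implemented in the Node/NodeAttributes changes below.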


@@ -501,3 +501,7 @@ DEPLOYMENT_TASK_VERSION_MAX_LEN = 255
NODE_ROLE_NAME_MAX_LEN = 255
DEPLOYMENT_TASK_DEFAULT_VERSION = '1.0.0'
DEFAULT_DEPLOYMENT_GRAPH_TYPE = 'default'
# default size for hugepages is 2048 KiB
DEFAULT_HUGEPAGE_SIZE = '2048'
HUGE_PAGES_SIZE_MAP = [('2048', '2M'), ('1048576', '1G')]
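A minimal sketch of how these constants are meant to be read (the variable name below is illustrative, not from the diff): page sizes are stored as KiB strings and mapped to the suffix used on the kernel command line.

    # sizes are KiB strings; the second element is the kernel-parameter suffix
    size_map = dict([('2048', '2M'), ('1048576', '1G')])
    assert size_map['2048'] == '2M'        # 2048 KiB pages    -> "hugepagesz=2M"
    assert size_map['1048576'] == '1G'     # 1048576 KiB pages -> "hugepagesz=1G"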


@@ -1918,13 +1918,18 @@
uri: "http://{settings.MASTER_IP}:8080/targetimages/centos_65_x86_64-boot.img.gz"
format: "ext2"
container: "gzip"
# these values are needed only for tests
node_attributes:
cpu_pinning:
nova:
value: 0
dpdk:
value: 0
hugepages: {}
hugepages:
dpdk:
value: 0
nova:
value: {}
- pk: 2
extend: *base_release
fields:
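For context, a minimal sketch (an assumption drawn from _set_default_hugepages and its unit test later in this commit, not from the fixture itself) of what the empty value: {} becomes once a node reports its supported huge page sizes:

    # assuming the attribute has type 'custom_hugepages' and the node
    # reports supported_hugepages == [2048, 1048576]
    value = dict.fromkeys([2048, 1048576], 0)
    assert value == {2048: 0, 1048576: 0}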


@@ -347,7 +347,10 @@
}
],
"supported_hugepages": [2048, 1048576],
"distances": []
"distances": [
["1.0", "2.1"],
["2.1", "1.0"]
]
}
},
"timestamp": "",
@@ -576,7 +579,9 @@
}
],
"supported_hugepages": [2048, 1048576],
"distances": []
"distances": [
["1.0"]
]
}
},
"timestamp": "",
@@ -784,7 +789,9 @@
}
],
"supported_hugepages": [2048, 1048576],
"distances": []
"distances": [
["1.0"]
]
}
},
"timestamp": "",
@@ -926,7 +933,9 @@
}
],
"supported_hugepages": [2048, 1048576],
"distances": []
"distances": [
["1.0"]
]
}
},
"timestamp": "",
@@ -1069,7 +1078,9 @@
}
],
"supported_hugepages": [2048, 1048576],
"distances": []
"distances": [
["1.0"]
]
}
},
"timestamp": "",
@@ -1169,7 +1180,9 @@
}
],
"supported_hugepages": [2048],
"distances": []
"distances": [
["1.0"]
]
}
},
"timestamp": "",
@@ -1292,7 +1305,9 @@
}
],
"supported_hugepages": [2048],
"distances": []
"distances": [
["1.0"]
]
}
},
"timestamp": "",
@@ -1592,7 +1607,10 @@
}
],
"supported_hugepages": [2048, 1048576],
"distances": []
"distances": [
["1.0", "2.1"],
["2.1", "1.0"]
]
}
},
"timestamp": "",


@@ -18,9 +18,11 @@
Node-related objects and collections
"""
import collections
import copy
from datetime import datetime
import itertools
import math
import operator
import traceback
@@ -1082,6 +1084,10 @@ class Node(NailgunObject):
if 'amd_iommu=' not in kernel_params:
kernel_params += ' amd_iommu=on'
break
if 'hugepages' not in kernel_params:
kernel_params += NodeAttributes.hugepages_kernel_opts(instance)
return kernel_params
@classmethod
@@ -1186,6 +1192,17 @@
return
instance.attributes = instance.cluster.release.node_attributes
cls._set_default_hugepages(instance)
@classmethod
def _set_default_hugepages(cls, instance):
supported_hugepages = \
instance.meta['numa_topology']['supported_hugepages']
hugepages_attributes = instance.attributes['hugepages']
for name, attrs in six.iteritems(hugepages_attributes):
if attrs.get('type') == 'custom_hugepages':
attrs['value'] = dict.fromkeys(supported_hugepages, 0)
@classmethod
def get_attributes(cls, instance):
@@ -1290,3 +1307,53 @@ class NodeAttributes(object):
@classmethod
def is_nova_cpu_pinning_enabled(cls, node):
return bool(Node.get_attributes(node)['cpu_pinning']['nova']['value'])
@classmethod
def total_hugepages(cls, node):
"""Return total hugepages for the instance
Iterate over hugepages attributes and sum them
according their type: custom_hugepages - contains
items (size: count), text - this is the number of
memory in MB which must be allocated as hugepages
on each NUMA node (default hugepages size 2M will
be used and count will be calculated according to
number of specified memory).
:return: Dictionary with (size: count) items
"""
hugepages = collections.defaultdict(int)
numa_count = len(node.meta['numa_topology']['numa_nodes'])
hugepages_attributes = Node.get_attributes(node)['hugepages']
for name, attrs in six.iteritems(hugepages_attributes):
if attrs.get('type') == 'custom_hugepages':
value = attrs['value']
for size, count in six.iteritems(value):
hugepages[size] += int(count)
elif attrs.get('type') == 'text':
# type text means that the value is the amount of memory in MB
# per NUMA node which should be covered by 2M hugepages
# for python3 compatibility we have to use int() and float()
count_per_numa_node = int(math.ceil(float(attrs['value']) / 2))
hugepages[consts.DEFAULT_HUGEPAGE_SIZE] += (
count_per_numa_node * numa_count)
return hugepages
@classmethod
def hugepages_kernel_opts(cls, node):
hugepages = cls.total_hugepages(node)
kernel_opts = ""
for size, human_size in consts.HUGE_PAGES_SIZE_MAP:
hugepage_size = hugepages.get(size, 0)
if hugepage_size:
# extend kernel params with options for huge pages:
# hugepagesz is the size (2M, 1G, etc.),
# hugepages is the number of pages for that size
kernel_opts += " hugepagesz={0} hugepages={1}".format(
human_size, hugepage_size)
return kernel_opts
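A worked example of the 'text' branch above (the numbers are illustrative, not from the commit): with two NUMA nodes and a text-type value of 20 MB per NUMA node, each NUMA node needs ceil(20 / 2) = 10 pages of the default 2M size, so total_hugepages adds 10 * 2 = 20 to the '2048' bucket and hugepages_kernel_opts appends " hugepagesz=2M hugepages=20" to the kernel parameters:

    import math

    numa_count = 2                                  # len(numa_topology['numa_nodes'])
    value_mb = 20                                   # 'text' attribute: MB per NUMA node
    per_node = int(math.ceil(float(value_mb) / 2))  # 10 pages of 2M per NUMA node
    total = per_node * numa_count                   # 20 pages in the '2048' bucket
    assert " hugepagesz={0} hugepages={1}".format('2M', total) == \
        " hugepagesz=2M hugepages=20"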


@@ -58,13 +58,12 @@ class TestDeploymentAttributesSerialization90(
TestDeploymentAttributesSerialization80
):
def test_attributes_cpu_pinning(self):
meta = {'numa_topology': {
'numa_nodes': [{'id': 1, 'cpus': [1, 2, 3, 4]},
{'id': 2, 'cpus': [5, 6, 7, 8]}]
}}
numa_nodes = [{'id': 1, 'cpus': [1, 2, 3, 4]},
{'id': 2, 'cpus': [5, 6, 7, 8]}]
node = self.env.create_node(cluster_id=self.cluster_db.id,
roles=['compute'],
meta=meta)
roles=['compute'])
node.meta['numa_topology']['numa_nodes'] = numa_nodes
node.attributes.update({
'cpu_pinning': {
'nova': {'value': 2},


@@ -518,3 +518,19 @@ class TestProvisioningSerializer90(BaseIntegrationTest):
kernel_opts = serialized_node['ks_meta']['pm_data']['kernel_params']
self.assertIn("intel_iommu=on", kernel_opts)
self.assertIn("amd_iommu=on", kernel_opts)
def test_serialize_node_attributes(self):
self.env.create(
api=False,
release_kwargs={'operating_system': consts.RELEASE_OS.ubuntu},
nodes_kwargs=[
{'roles': ['compute']}])
node = self.env.nodes[0]
node.attributes['hugepages']['nova']['value'] = {'2048': 5}
serialized_info = self.serializer.serialize(node.cluster, [node])
serialized_node = serialized_info['nodes'][0]
kernel_opts = serialized_node['ks_meta']['pm_data']['kernel_params']
self.assertIn(" hugepagesz=2M hugepages=5", kernel_opts)


@@ -51,3 +51,40 @@ class TestNodeAttributes(base.BaseUnitTest):
'comp2': {'name': 'comp2',
'required_cpus': 3}}},
objects.NodeAttributes.node_cpu_pinning_info(node))
def test_total_hugepages(self):
node = mock.Mock(
attributes={
'hugepages': {
'comp1': {
'type': 'custom_hugepages',
'value': {
'2048': 14,
'1048576': '2'}},
'comp2': {
'type': 'text',
'value': 20}}},
meta={'numa_topology': {'numa_nodes': [{'id': 0}]}})
expected = {
'2048': 24,
'1048576': 2}
self.assertDictEqual(
expected,
objects.NodeAttributes.total_hugepages(node))
def test_hugepages_kernel_opts(self):
node = mock.Mock(
attributes={
'hugepages': {
'comp1': {
'type': 'custom_hugepages',
'value': {
'1048576': 2}},
'comp2': {
'type': 'text',
'value': '10'}}},
meta={'numa_topology': {'numa_nodes': [{'id': '0'}]}})
expected = " hugepagesz=2M hugepages=5 hugepagesz=1G hugepages=2"
self.assertEqual(
expected,
objects.NodeAttributes.hugepages_kernel_opts(node))


@@ -677,6 +677,23 @@ class TestNodeObject(BaseIntegrationTest):
}
self.assertEqual(expected_attributes, node.attributes)
def test_set_default_hugepages(self):
fake_hugepages = ['0', '1', '2', '3']
node = mock.Mock(
attributes={
'hugepages': {
'nova': {'type': 'custom_hugepages'}}},
meta={
'numa_topology': {
'supported_hugepages': fake_hugepages,
'numa_nodes': []}}
)
objects.Node._set_default_hugepages(node)
expected = dict.fromkeys(fake_hugepages, 0)
self.assertDictEqual(
expected,
node.attributes['hugepages']['nova']['value'])
class TestTaskObject(BaseIntegrationTest):