Modify left filters for RequestSpec
Also, remove the compat_legacy_props decorator, as all the in-tree filters now use the RequestSpec object.

Change-Id: Id2cf1879b0ce1aa722c213ccabfaf5bacc8c3198
Partially-Implements: blueprint request-spec-object-mitaka
This commit is contained in:
parent
42caebda49
commit
ffcbc50e03
|
@ -16,10 +16,7 @@
|
|||
"""
|
||||
Scheduler host filters
|
||||
"""
|
||||
import functools
|
||||
|
||||
from nova import filters
|
||||
from nova import objects
|
||||
|
||||
|
||||
class BaseHostFilter(filters.BaseFilter):
|
||||
|
@ -47,31 +44,3 @@ def all_filters():
|
|||
and should return a list of all filter classes available.
|
||||
"""
|
||||
return HostFilterHandler().get_all_classes()
|
||||
|
||||
|
||||
# TODO(sbauza): Remove that decorator once all filters are using RequestSpec
# object directly.
def compat_legacy_props(function):
    """Decorator for returning a legacy filter_properties dictionary.

    This is used for keeping unchanged the existing filters without yet using
    the RequestSpec fields by returning a legacy dictionary.

    :param function: a ``host_passes``-style method taking
        ``(self, host_state, filter_properties)``; it is wrapped so it always
        receives a plain dict even when the caller passes a RequestSpec.
    :returns: the wrapped method.
    """

    @functools.wraps(function)
    def decorated_host_passes(self, host_state, filter_properties):
        # Only convert when given a RequestSpec object; a plain legacy dict
        # is passed through to the wrapped filter unchanged.
        if isinstance(filter_properties, objects.RequestSpec):
            legacy_props = filter_properties.to_legacy_filter_properties_dict()
            legacy_props.update({'request_spec': (
                filter_properties.to_legacy_request_spec_dict()),
                'instance_type': filter_properties.flavor})
            # TODO(sbauza): Adding two keys not used in-tree but which will be
            # provided as non-fields for the RequestSpec once we provide it to
            # the filters
            legacy_props.update(
                {'context': filter_properties._context,
                 'config_options': filter_properties.config_options})
            filter_properties = legacy_props
        return function(self, host_state, filter_properties)

    return decorated_host_passes
|
||||
|
|
|
@ -23,6 +23,5 @@ class AllHostsFilter(filters.BaseHostFilter):
|
|||
# list of hosts doesn't change within a request
|
||||
run_filter_once_per_request = True
|
||||
|
||||
@filters.compat_legacy_props
|
||||
def host_passes(self, host_state, filter_properties):
|
||||
def host_passes(self, host_state, spec_obj):
|
||||
return True
|
||||
|
|
|
@ -25,8 +25,7 @@ class IsolatedHostsFilter(filters.BaseHostFilter):
|
|||
# The configuration values do not change within a request
|
||||
run_filter_once_per_request = True
|
||||
|
||||
@filters.compat_legacy_props
|
||||
def host_passes(self, host_state, filter_properties):
|
||||
def host_passes(self, host_state, spec_obj):
|
||||
"""Result Matrix with 'restrict_isolated_hosts_to_isolated_images' set
|
||||
to True::
|
||||
|
||||
|
@ -58,9 +57,7 @@ class IsolatedHostsFilter(filters.BaseHostFilter):
|
|||
return ((not restrict_isolated_hosts_to_isolated_images) or
|
||||
(host_state.host not in isolated_hosts))
|
||||
|
||||
spec = filter_properties.get('request_spec', {})
|
||||
props = spec.get('instance_properties', {})
|
||||
image_ref = props.get('image_ref')
|
||||
image_ref = spec_obj.image.id if spec_obj.image else None
|
||||
image_isolated = image_ref in isolated_images
|
||||
host_isolated = host_state.host in isolated_hosts
|
||||
|
||||
|
|
|
@ -126,15 +126,11 @@ class JsonFilter(filters.BaseHostFilter):
|
|||
result = method(self, cooked_args)
|
||||
return result
|
||||
|
||||
@filters.compat_legacy_props
|
||||
def host_passes(self, host_state, filter_properties):
|
||||
def host_passes(self, host_state, spec_obj):
|
||||
"""Return a list of hosts that can fulfill the requirements
|
||||
specified in the query.
|
||||
"""
|
||||
try:
|
||||
query = filter_properties['scheduler_hints']['query']
|
||||
except KeyError:
|
||||
query = None
|
||||
query = spec_obj.get_scheduler_hint('query')
|
||||
if not query:
|
||||
return True
|
||||
|
||||
|
|
|
@ -241,10 +241,10 @@ class TrustedFilter(filters.BaseHostFilter):
|
|||
# The hosts the instances are running on doesn't change within a request
|
||||
run_filter_once_per_request = True
|
||||
|
||||
@filters.compat_legacy_props
|
||||
def host_passes(self, host_state, filter_properties):
|
||||
instance_type = filter_properties.get('instance_type', {})
|
||||
extra = instance_type.get('extra_specs', {})
|
||||
def host_passes(self, host_state, spec_obj):
|
||||
instance_type = spec_obj.flavor
|
||||
extra = (instance_type.extra_specs
|
||||
if 'extra_specs' in instance_type else {})
|
||||
trust = extra.get('trust:trusted_host')
|
||||
host = host_state.nodename
|
||||
if trust:
|
||||
|
|
|
@ -25,15 +25,14 @@ class TypeAffinityFilter(filters.BaseHostFilter):
|
|||
(spread) set to 1 (default).
|
||||
"""
|
||||
|
||||
@filters.compat_legacy_props
|
||||
def host_passes(self, host_state, filter_properties):
|
||||
def host_passes(self, host_state, spec_obj):
|
||||
"""Dynamically limits hosts to one instance type
|
||||
|
||||
Return False if host has any instance types other than the requested
|
||||
type. Return True if all instance types match or if host is empty.
|
||||
"""
|
||||
instance_type = filter_properties.get('instance_type')
|
||||
instance_type_id = instance_type['id']
|
||||
instance_type = spec_obj.flavor
|
||||
instance_type_id = instance_type.id
|
||||
other_types_on_host = utils.other_types_on_host(host_state,
|
||||
instance_type_id)
|
||||
return not other_types_on_host
|
||||
|
@ -49,15 +48,14 @@ class AggregateTypeAffinityFilter(filters.BaseHostFilter):
|
|||
# Aggregate data does not change within a request
|
||||
run_filter_once_per_request = True
|
||||
|
||||
@filters.compat_legacy_props
|
||||
def host_passes(self, host_state, filter_properties):
|
||||
instance_type = filter_properties.get('instance_type')
|
||||
def host_passes(self, host_state, spec_obj):
|
||||
instance_type = spec_obj.flavor
|
||||
|
||||
aggregate_vals = utils.aggregate_values_from_key(
|
||||
host_state, 'instance_type')
|
||||
|
||||
for val in aggregate_vals:
|
||||
if (instance_type['name'] in
|
||||
if (instance_type.name in
|
||||
[x.strip() for x in val.split(',')]):
|
||||
return True
|
||||
return not aggregate_vals
|
||||
|
|
|
@ -10,6 +10,7 @@
|
|||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from nova import objects
|
||||
from nova.scheduler.filters import isolated_hosts_filter
|
||||
from nova import test
|
||||
from nova.tests.unit.scheduler import fakes
|
||||
|
@ -31,13 +32,9 @@ class TestIsolatedHostsFilter(test.NoDBTestCase):
|
|||
restrict_isolated_hosts_to_isolated_images)
|
||||
host_name = 'isolated_host' if host_in_list else 'free_host'
|
||||
image_ref = 'isolated_image' if image_in_list else 'free_image'
|
||||
filter_properties = {
|
||||
'request_spec': {
|
||||
'instance_properties': {'image_ref': image_ref}
|
||||
}
|
||||
}
|
||||
spec_obj = objects.RequestSpec(image=objects.ImageMeta(id=image_ref))
|
||||
host = fakes.FakeHostState(host_name, 'node', {})
|
||||
return self.filt_cls.host_passes(host, filter_properties)
|
||||
return self.filt_cls.host_passes(host, spec_obj)
|
||||
|
||||
def test_isolated_hosts_fails_isolated_on_non_isolated(self):
|
||||
self.assertFalse(self._do_test_isolated_hosts(False, True))
|
||||
|
|
|
@ -12,6 +12,7 @@
|
|||
|
||||
from oslo_serialization import jsonutils
|
||||
|
||||
from nova import objects
|
||||
from nova.scheduler.filters import json_filter
|
||||
from nova import test
|
||||
from nova.tests.unit.scheduler import fakes
|
||||
|
@ -27,56 +28,61 @@ class TestJsonFilter(test.NoDBTestCase):
|
|||
['>=', '$free_disk_mb', 200 * 1024]])
|
||||
|
||||
def test_json_filter_passes(self):
|
||||
filter_properties = {'instance_type': {'memory_mb': 1024,
|
||||
'root_gb': 200,
|
||||
'ephemeral_gb': 0},
|
||||
'scheduler_hints': {'query': self.json_query}}
|
||||
spec_obj = objects.RequestSpec(
|
||||
flavor=objects.Flavor(memory_mb=1024,
|
||||
root_gb=200,
|
||||
ephemeral_gb=0),
|
||||
scheduler_hints=dict(query=[self.json_query]))
|
||||
host = fakes.FakeHostState('host1', 'node1',
|
||||
{'free_ram_mb': 1024,
|
||||
'free_disk_mb': 200 * 1024})
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
def test_json_filter_passes_with_no_query(self):
|
||||
filter_properties = {'instance_type': {'memory_mb': 1024,
|
||||
'root_gb': 200,
|
||||
'ephemeral_gb': 0}}
|
||||
spec_obj = objects.RequestSpec(
|
||||
flavor=objects.Flavor(memory_mb=1024,
|
||||
root_gb=200,
|
||||
ephemeral_gb=0),
|
||||
scheduler_hints=None)
|
||||
host = fakes.FakeHostState('host1', 'node1',
|
||||
{'free_ram_mb': 0,
|
||||
'free_disk_mb': 0})
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
def test_json_filter_fails_on_memory(self):
|
||||
filter_properties = {'instance_type': {'memory_mb': 1024,
|
||||
'root_gb': 200,
|
||||
'ephemeral_gb': 0},
|
||||
'scheduler_hints': {'query': self.json_query}}
|
||||
spec_obj = objects.RequestSpec(
|
||||
flavor=objects.Flavor(memory_mb=1024,
|
||||
root_gb=200,
|
||||
ephemeral_gb=0),
|
||||
scheduler_hints=dict(query=[self.json_query]))
|
||||
host = fakes.FakeHostState('host1', 'node1',
|
||||
{'free_ram_mb': 1023,
|
||||
'free_disk_mb': 200 * 1024})
|
||||
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertFalse(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
def test_json_filter_fails_on_disk(self):
|
||||
filter_properties = {'instance_type': {'memory_mb': 1024,
|
||||
'root_gb': 200,
|
||||
'ephemeral_gb': 0},
|
||||
'scheduler_hints': {'query': self.json_query}}
|
||||
spec_obj = objects.RequestSpec(
|
||||
flavor=objects.Flavor(memory_mb=1024,
|
||||
root_gb=200,
|
||||
ephemeral_gb=0),
|
||||
scheduler_hints=dict(query=[self.json_query]))
|
||||
host = fakes.FakeHostState('host1', 'node1',
|
||||
{'free_ram_mb': 1024,
|
||||
'free_disk_mb': (200 * 1024) - 1})
|
||||
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertFalse(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
def test_json_filter_fails_on_service_disabled(self):
|
||||
json_query = jsonutils.dumps(
|
||||
['and', ['>=', '$free_ram_mb', 1024],
|
||||
['>=', '$free_disk_mb', 200 * 1024],
|
||||
['not', '$service.disabled']])
|
||||
filter_properties = {'instance_type': {'memory_mb': 1024,
|
||||
'local_gb': 200},
|
||||
'scheduler_hints': {'query': json_query}}
|
||||
spec_obj = objects.RequestSpec(
|
||||
flavor=objects.Flavor(memory_mb=1024, local_gb=200),
|
||||
scheduler_hints=dict(query=[json_query]))
|
||||
host = fakes.FakeHostState('host1', 'node1',
|
||||
{'free_ram_mb': 1024,
|
||||
'free_disk_mb': 200 * 1024})
|
||||
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertFalse(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
def test_json_filter_happy_day(self):
|
||||
# Test json filter more thoroughly.
|
||||
|
@ -90,11 +96,8 @@ class TestJsonFilter(test.NoDBTestCase):
|
|||
['and',
|
||||
['>', '$free_ram_mb', 30],
|
||||
['>', '$free_disk_mb', 300]]]]
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
spec_obj = objects.RequestSpec(
|
||||
scheduler_hints=dict(query=[jsonutils.dumps(raw)]))
|
||||
|
||||
# Passes
|
||||
capabilities = {'opt1': 'match'}
|
||||
|
@ -104,7 +107,7 @@ class TestJsonFilter(test.NoDBTestCase):
|
|||
'free_disk_mb': 200,
|
||||
'capabilities': capabilities,
|
||||
'service': service})
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
# Passes
|
||||
capabilities = {'opt1': 'match'}
|
||||
|
@ -114,7 +117,7 @@ class TestJsonFilter(test.NoDBTestCase):
|
|||
'free_disk_mb': 400,
|
||||
'capabilities': capabilities,
|
||||
'service': service})
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
# Fails due to capabilities being disabled
|
||||
capabilities = {'enabled': False, 'opt1': 'match'}
|
||||
|
@ -124,7 +127,7 @@ class TestJsonFilter(test.NoDBTestCase):
|
|||
'free_disk_mb': 400,
|
||||
'capabilities': capabilities,
|
||||
'service': service})
|
||||
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertFalse(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
# Fails due to being exact memory/disk we don't want
|
||||
capabilities = {'enabled': True, 'opt1': 'match'}
|
||||
|
@ -134,7 +137,7 @@ class TestJsonFilter(test.NoDBTestCase):
|
|||
'free_disk_mb': 300,
|
||||
'capabilities': capabilities,
|
||||
'service': service})
|
||||
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertFalse(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
# Fails due to memory lower but disk higher
|
||||
capabilities = {'enabled': True, 'opt1': 'match'}
|
||||
|
@ -144,7 +147,7 @@ class TestJsonFilter(test.NoDBTestCase):
|
|||
'free_disk_mb': 400,
|
||||
'capabilities': capabilities,
|
||||
'service': service})
|
||||
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertFalse(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
# Fails due to capabilities 'opt1' not equal
|
||||
capabilities = {'enabled': True, 'opt1': 'no-match'}
|
||||
|
@ -154,7 +157,7 @@ class TestJsonFilter(test.NoDBTestCase):
|
|||
'free_disk_mb': 400,
|
||||
'capabilities': capabilities,
|
||||
'service': service})
|
||||
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertFalse(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
def test_json_filter_basic_operators(self):
|
||||
host = fakes.FakeHostState('host1', 'node1', {})
|
||||
|
@ -189,101 +192,81 @@ class TestJsonFilter(test.NoDBTestCase):
|
|||
|
||||
for (op, args, expected) in ops_to_test:
|
||||
raw = [op] + args
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
spec_obj = objects.RequestSpec(
|
||||
scheduler_hints=dict(
|
||||
query=[jsonutils.dumps(raw)]))
|
||||
self.assertEqual(expected,
|
||||
self.filt_cls.host_passes(host, filter_properties))
|
||||
self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
# This results in [False, True, False, True] and if any are True
|
||||
# then it passes...
|
||||
raw = ['not', True, False, True, False]
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
|
||||
spec_obj = objects.RequestSpec(
|
||||
scheduler_hints=dict(
|
||||
query=[jsonutils.dumps(raw)]))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
# This results in [False, False, False] and if any are True
|
||||
# then it passes...which this doesn't
|
||||
raw = ['not', True, True, True]
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
|
||||
spec_obj = objects.RequestSpec(
|
||||
scheduler_hints=dict(
|
||||
query=[jsonutils.dumps(raw)]))
|
||||
self.assertFalse(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
def test_json_filter_unknown_operator_raises(self):
|
||||
raw = ['!=', 1, 2]
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
spec_obj = objects.RequestSpec(
|
||||
scheduler_hints=dict(
|
||||
query=[jsonutils.dumps(raw)]))
|
||||
host = fakes.FakeHostState('host1', 'node1',
|
||||
{})
|
||||
self.assertRaises(KeyError,
|
||||
self.filt_cls.host_passes, host, filter_properties)
|
||||
self.filt_cls.host_passes, host, spec_obj)
|
||||
|
||||
def test_json_filter_empty_filters_pass(self):
|
||||
host = fakes.FakeHostState('host1', 'node1',
|
||||
{})
|
||||
|
||||
raw = []
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
|
||||
spec_obj = objects.RequestSpec(
|
||||
scheduler_hints=dict(
|
||||
query=[jsonutils.dumps(raw)]))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
|
||||
raw = {}
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
|
||||
spec_obj = objects.RequestSpec(
|
||||
scheduler_hints=dict(
|
||||
query=[jsonutils.dumps(raw)]))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
def test_json_filter_invalid_num_arguments_fails(self):
|
||||
host = fakes.FakeHostState('host1', 'node1',
|
||||
{})
|
||||
|
||||
raw = ['>', ['and', ['or', ['not', ['<', ['>=', ['<=', ['in', ]]]]]]]]
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
|
||||
spec_obj = objects.RequestSpec(
|
||||
scheduler_hints=dict(
|
||||
query=[jsonutils.dumps(raw)]))
|
||||
self.assertFalse(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
raw = ['>', 1]
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
|
||||
spec_obj = objects.RequestSpec(
|
||||
scheduler_hints=dict(
|
||||
query=[jsonutils.dumps(raw)]))
|
||||
self.assertFalse(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
def test_json_filter_unknown_variable_ignored(self):
|
||||
host = fakes.FakeHostState('host1', 'node1',
|
||||
{})
|
||||
|
||||
raw = ['=', '$........', 1, 1]
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
|
||||
spec_obj = objects.RequestSpec(
|
||||
scheduler_hints=dict(
|
||||
query=[jsonutils.dumps(raw)]))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
raw = ['=', '$foo', 2, 2]
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
|
||||
spec_obj = objects.RequestSpec(
|
||||
scheduler_hints=dict(
|
||||
query=[jsonutils.dumps(raw)]))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
|
|
@ -105,10 +105,11 @@ class TestTrustedFilter(test.NoDBTestCase):
|
|||
self.filt_cls = trusted_filter.TrustedFilter()
|
||||
|
||||
def test_trusted_filter_default_passes(self, req_mock):
|
||||
filter_properties = {'context': mock.sentinel.ctx,
|
||||
'instance_type': {'memory_mb': 1024}}
|
||||
spec_obj = objects.RequestSpec(
|
||||
context=mock.sentinel.ctx,
|
||||
flavor=objects.Flavor(memory_mb=1024))
|
||||
host = fakes.FakeHostState('host1', 'node1', {})
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
|
||||
self.assertFalse(req_mock.called)
|
||||
|
||||
def test_trusted_filter_trusted_and_trusted_passes(self, req_mock):
|
||||
|
@ -118,11 +119,12 @@ class TestTrustedFilter(test.NoDBTestCase):
|
|||
req_mock.return_value = requests.codes.OK, oat_data
|
||||
|
||||
extra_specs = {'trust:trusted_host': 'trusted'}
|
||||
filter_properties = {'context': mock.sentinel.ctx,
|
||||
'instance_type': {'memory_mb': 1024,
|
||||
'extra_specs': extra_specs}}
|
||||
spec_obj = objects.RequestSpec(
|
||||
context=mock.sentinel.ctx,
|
||||
flavor=objects.Flavor(memory_mb=1024,
|
||||
extra_specs=extra_specs))
|
||||
host = fakes.FakeHostState('host1', 'node1', {})
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
|
||||
req_mock.assert_called_once_with("POST", "PollHosts", ["node1"])
|
||||
|
||||
def test_trusted_filter_trusted_and_untrusted_fails(self, req_mock):
|
||||
|
@ -131,11 +133,12 @@ class TestTrustedFilter(test.NoDBTestCase):
|
|||
"vtime": utils.isotime()}]}
|
||||
req_mock.return_value = requests.codes.OK, oat_data
|
||||
extra_specs = {'trust:trusted_host': 'trusted'}
|
||||
filter_properties = {'context': mock.sentinel.ctx,
|
||||
'instance_type': {'memory_mb': 1024,
|
||||
'extra_specs': extra_specs}}
|
||||
spec_obj = objects.RequestSpec(
|
||||
context=mock.sentinel.ctx,
|
||||
flavor=objects.Flavor(memory_mb=1024,
|
||||
extra_specs=extra_specs))
|
||||
host = fakes.FakeHostState('host1', 'node1', {})
|
||||
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertFalse(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
def test_trusted_filter_untrusted_and_trusted_fails(self, req_mock):
|
||||
oat_data = {"hosts": [{"host_name": "node",
|
||||
|
@ -143,11 +146,12 @@ class TestTrustedFilter(test.NoDBTestCase):
|
|||
"vtime": utils.isotime()}]}
|
||||
req_mock.return_value = requests.codes.OK, oat_data
|
||||
extra_specs = {'trust:trusted_host': 'untrusted'}
|
||||
filter_properties = {'context': mock.sentinel.ctx,
|
||||
'instance_type': {'memory_mb': 1024,
|
||||
'extra_specs': extra_specs}}
|
||||
spec_obj = objects.RequestSpec(
|
||||
context=mock.sentinel.ctx,
|
||||
flavor=objects.Flavor(memory_mb=1024,
|
||||
extra_specs=extra_specs))
|
||||
host = fakes.FakeHostState('host1', 'node1', {})
|
||||
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertFalse(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
def test_trusted_filter_untrusted_and_untrusted_passes(self, req_mock):
|
||||
oat_data = {"hosts": [{"host_name": "node1",
|
||||
|
@ -155,11 +159,12 @@ class TestTrustedFilter(test.NoDBTestCase):
|
|||
"vtime": utils.isotime()}]}
|
||||
req_mock.return_value = requests.codes.OK, oat_data
|
||||
extra_specs = {'trust:trusted_host': 'untrusted'}
|
||||
filter_properties = {'context': mock.sentinel.ctx,
|
||||
'instance_type': {'memory_mb': 1024,
|
||||
'extra_specs': extra_specs}}
|
||||
spec_obj = objects.RequestSpec(
|
||||
context=mock.sentinel.ctx,
|
||||
flavor=objects.Flavor(memory_mb=1024,
|
||||
extra_specs=extra_specs))
|
||||
host = fakes.FakeHostState('host1', 'node1', {})
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
def test_trusted_filter_update_cache(self, req_mock):
|
||||
oat_data = {"hosts": [{"host_name": "node1",
|
||||
|
@ -168,15 +173,16 @@ class TestTrustedFilter(test.NoDBTestCase):
|
|||
|
||||
req_mock.return_value = requests.codes.OK, oat_data
|
||||
extra_specs = {'trust:trusted_host': 'untrusted'}
|
||||
filter_properties = {'context': mock.sentinel.ctx,
|
||||
'instance_type': {'memory_mb': 1024,
|
||||
'extra_specs': extra_specs}}
|
||||
spec_obj = objects.RequestSpec(
|
||||
context=mock.sentinel.ctx,
|
||||
flavor=objects.Flavor(memory_mb=1024,
|
||||
extra_specs=extra_specs))
|
||||
host = fakes.FakeHostState('host1', 'node1', {})
|
||||
|
||||
self.filt_cls.host_passes(host, filter_properties) # Fill the caches
|
||||
self.filt_cls.host_passes(host, spec_obj) # Fill the caches
|
||||
|
||||
req_mock.reset_mock()
|
||||
self.filt_cls.host_passes(host, filter_properties)
|
||||
self.filt_cls.host_passes(host, spec_obj)
|
||||
self.assertFalse(req_mock.called)
|
||||
|
||||
req_mock.reset_mock()
|
||||
|
@ -184,7 +190,7 @@ class TestTrustedFilter(test.NoDBTestCase):
|
|||
timeutils.set_time_override(timeutils.utcnow())
|
||||
timeutils.advance_time_seconds(
|
||||
CONF.trusted_computing.attestation_auth_timeout + 80)
|
||||
self.filt_cls.host_passes(host, filter_properties)
|
||||
self.filt_cls.host_passes(host, spec_obj)
|
||||
self.assertTrue(req_mock.called)
|
||||
|
||||
timeutils.clear_time_override()
|
||||
|
@ -195,25 +201,26 @@ class TestTrustedFilter(test.NoDBTestCase):
|
|||
"vtime": "2012-09-09T05:10:40-04:00"}]}
|
||||
req_mock.return_value = requests.codes.OK, oat_data
|
||||
extra_specs = {'trust:trusted_host': 'untrusted'}
|
||||
filter_properties = {'context': mock.sentinel.ctx,
|
||||
'instance_type': {'memory_mb': 1024,
|
||||
'extra_specs': extra_specs}}
|
||||
spec_obj = objects.RequestSpec(
|
||||
context=mock.sentinel.ctx,
|
||||
flavor=objects.Flavor(memory_mb=1024,
|
||||
extra_specs=extra_specs))
|
||||
host = fakes.FakeHostState('host1', 'node1', {})
|
||||
|
||||
timeutils.set_time_override(
|
||||
timeutils.normalize_time(
|
||||
timeutils.parse_isotime("2012-09-09T09:10:40Z")))
|
||||
|
||||
self.filt_cls.host_passes(host, filter_properties) # Fill the caches
|
||||
self.filt_cls.host_passes(host, spec_obj) # Fill the caches
|
||||
|
||||
req_mock.reset_mock()
|
||||
self.filt_cls.host_passes(host, filter_properties)
|
||||
self.filt_cls.host_passes(host, spec_obj)
|
||||
self.assertFalse(req_mock.called)
|
||||
|
||||
req_mock.reset_mock()
|
||||
timeutils.advance_time_seconds(
|
||||
CONF.trusted_computing.attestation_auth_timeout - 10)
|
||||
self.filt_cls.host_passes(host, filter_properties)
|
||||
self.filt_cls.host_passes(host, spec_obj)
|
||||
self.assertFalse(req_mock.called)
|
||||
|
||||
timeutils.clear_time_override()
|
||||
|
@ -231,12 +238,13 @@ class TestTrustedFilter(test.NoDBTestCase):
|
|||
"vtime": "2012-09-09T05:10:40-04:00"}]}
|
||||
req_mock.return_value = requests.codes.OK, oat_data
|
||||
extra_specs = {'trust:trusted_host': 'trusted'}
|
||||
filter_properties = {'context': mock.sentinel.ctx,
|
||||
'instance_type': {'memory_mb': 1024,
|
||||
'extra_specs': extra_specs}}
|
||||
spec_obj = objects.RequestSpec(
|
||||
context=mock.sentinel.ctx,
|
||||
flavor=objects.Flavor(memory_mb=1024,
|
||||
extra_specs=extra_specs))
|
||||
host = fakes.FakeHostState('host1', 'node1', {})
|
||||
|
||||
self.filt_cls.host_passes(host, filter_properties) # Fill the caches
|
||||
self.filt_cls.host_passes(host, spec_obj) # Fill the caches
|
||||
self.assertTrue(req_mock.called)
|
||||
self.assertEqual(1, req_mock.call_count)
|
||||
call_args = list(req_mock.call_args[0])
|
||||
|
@ -260,12 +268,13 @@ class TestTrustedFilter(test.NoDBTestCase):
|
|||
]}
|
||||
req_mock.return_value = requests.codes.OK, oat_data
|
||||
extra_specs = {'trust:trusted_host': 'trusted'}
|
||||
filter_properties = {'context': mock.sentinel.ctx,
|
||||
'instance_type': {'memory_mb': 1024,
|
||||
'extra_specs': extra_specs}}
|
||||
spec_obj = objects.RequestSpec(
|
||||
context=mock.sentinel.ctx,
|
||||
flavor=objects.Flavor(memory_mb=1024,
|
||||
extra_specs=extra_specs))
|
||||
host = fakes.FakeHostState('host1', 'host1', {})
|
||||
bad_host = fakes.FakeHostState('host2', 'host2', {})
|
||||
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
|
||||
self.assertFalse(self.filt_cls.host_passes(bad_host,
|
||||
filter_properties))
|
||||
spec_obj))
|
||||
|
|
|
@ -25,44 +25,48 @@ class TestTypeFilter(test.NoDBTestCase):
|
|||
host = fakes.FakeHostState('fake_host', 'fake_node', {})
|
||||
host.instances = {}
|
||||
target_id = 1
|
||||
filter_properties = {'context': mock.MagicMock(),
|
||||
'instance_type': {'id': target_id}}
|
||||
spec_obj = objects.RequestSpec(
|
||||
context=mock.MagicMock(),
|
||||
flavor=objects.Flavor(id=target_id))
|
||||
# True since no instances on host
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
|
||||
# Add an instance with the same instance_type_id
|
||||
inst1 = objects.Instance(uuid='aa', instance_type_id=target_id)
|
||||
host.instances = {inst1.uuid: inst1}
|
||||
# True since only same instance_type_id on host
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
|
||||
# Add an instance with a different instance_type_id
|
||||
diff_type = target_id + 1
|
||||
inst2 = objects.Instance(uuid='bb', instance_type_id=diff_type)
|
||||
host.instances.update({inst2.uuid: inst2})
|
||||
# False since host now has an instance of a different type
|
||||
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertFalse(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
@mock.patch('nova.scheduler.filters.utils.aggregate_values_from_key')
|
||||
def test_aggregate_type_filter_no_metadata(self, agg_mock):
|
||||
self.filt_cls = type_filter.AggregateTypeAffinityFilter()
|
||||
|
||||
filter_properties = {'context': mock.sentinel.ctx,
|
||||
'instance_type': {'name': 'fake1'}}
|
||||
spec_obj = objects.RequestSpec(
|
||||
context=mock.sentinel.ctx,
|
||||
flavor=objects.Flavor(name='fake1'))
|
||||
host = fakes.FakeHostState('fake_host', 'fake_node', {})
|
||||
|
||||
# tests when no instance_type is defined for aggregate
|
||||
agg_mock.return_value = set([])
|
||||
# True as no instance_type set for aggregate
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
|
||||
agg_mock.assert_called_once_with(host, 'instance_type')
|
||||
|
||||
@mock.patch('nova.scheduler.filters.utils.aggregate_values_from_key')
|
||||
def test_aggregate_type_filter_single_instance_type(self, agg_mock):
|
||||
self.filt_cls = type_filter.AggregateTypeAffinityFilter()
|
||||
|
||||
filter_properties = {'context': mock.sentinel.ctx,
|
||||
'instance_type': {'name': 'fake1'}}
|
||||
filter2_properties = {'context': mock.sentinel.ctx,
|
||||
'instance_type': {'name': 'fake2'}}
|
||||
spec_obj = objects.RequestSpec(
|
||||
context=mock.sentinel.ctx,
|
||||
flavor=objects.Flavor(name='fake1'))
|
||||
spec_obj2 = objects.RequestSpec(
|
||||
context=mock.sentinel.ctx,
|
||||
flavor=objects.Flavor(name='fake2'))
|
||||
host = fakes.FakeHostState('fake_host', 'fake_node', {})
|
||||
|
||||
# tests when a single instance_type is defined for an aggregate
|
||||
|
@ -70,21 +74,24 @@ class TestTypeFilter(test.NoDBTestCase):
|
|||
agg_mock.return_value = set(['fake1'])
|
||||
|
||||
# True as instance_type is allowed for aggregate
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
|
||||
|
||||
# False as instance_type is not allowed for aggregate
|
||||
self.assertFalse(self.filt_cls.host_passes(host, filter2_properties))
|
||||
self.assertFalse(self.filt_cls.host_passes(host, spec_obj2))
|
||||
|
||||
@mock.patch('nova.scheduler.filters.utils.aggregate_values_from_key')
|
||||
def test_aggregate_type_filter_multi_aggregate(self, agg_mock):
|
||||
self.filt_cls = type_filter.AggregateTypeAffinityFilter()
|
||||
|
||||
filter_properties = {'context': mock.sentinel.ctx,
|
||||
'instance_type': {'name': 'fake1'}}
|
||||
filter2_properties = {'context': mock.sentinel.ctx,
|
||||
'instance_type': {'name': 'fake2'}}
|
||||
filter3_properties = {'context': mock.sentinel.ctx,
|
||||
'instance_type': {'name': 'fake3'}}
|
||||
spec_obj = objects.RequestSpec(
|
||||
context=mock.sentinel.ctx,
|
||||
flavor=objects.Flavor(name='fake1'))
|
||||
spec_obj2 = objects.RequestSpec(
|
||||
context=mock.sentinel.ctx,
|
||||
flavor=objects.Flavor(name='fake2'))
|
||||
spec_obj3 = objects.RequestSpec(
|
||||
context=mock.sentinel.ctx,
|
||||
flavor=objects.Flavor(name='fake3'))
|
||||
host = fakes.FakeHostState('fake_host', 'fake_node', {})
|
||||
|
||||
# tests when a single instance_type is defined for multiple aggregates
|
||||
|
@ -92,30 +99,33 @@ class TestTypeFilter(test.NoDBTestCase):
|
|||
agg_mock.return_value = set(['fake1', 'fake2'])
|
||||
|
||||
# True as instance_type is allowed for first aggregate
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
|
||||
# True as instance_type is allowed for second aggregate
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter2_properties))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj2))
|
||||
# False as instance_type is not allowed for aggregates
|
||||
self.assertFalse(self.filt_cls.host_passes(host, filter3_properties))
|
||||
self.assertFalse(self.filt_cls.host_passes(host, spec_obj3))
|
||||
|
||||
@mock.patch('nova.scheduler.filters.utils.aggregate_values_from_key')
|
||||
def test_aggregate_type_filter_multi_instance_type(self, agg_mock):
|
||||
self.filt_cls = type_filter.AggregateTypeAffinityFilter()
|
||||
|
||||
filter_properties = {'context': mock.sentinel.ctx,
|
||||
'instance_type': {'name': 'fake1'}}
|
||||
filter2_properties = {'context': mock.sentinel.ctx,
|
||||
'instance_type': {'name': 'fake2'}}
|
||||
filter3_properties = {'context': mock.sentinel.ctx,
|
||||
'instance_type': {'name': 'fake3'}}
|
||||
spec_obj = objects.RequestSpec(
|
||||
context=mock.sentinel.ctx,
|
||||
flavor=objects.Flavor(name='fake1'))
|
||||
spec_obj2 = objects.RequestSpec(
|
||||
context=mock.sentinel.ctx,
|
||||
flavor=objects.Flavor(name='fake2'))
|
||||
spec_obj3 = objects.RequestSpec(
|
||||
context=mock.sentinel.ctx,
|
||||
flavor=objects.Flavor(name='fake3'))
|
||||
host = fakes.FakeHostState('fake_host', 'fake_node', {})
|
||||
|
||||
# tests when multiple instance_types are defined for aggregate
|
||||
agg_mock.return_value = set(['fake1,fake2'])
|
||||
|
||||
# True as instance_type is allowed for aggregate
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj))
|
||||
# True as instance_type is allowed for aggregate
|
||||
self.assertTrue(self.filt_cls.host_passes(host, filter2_properties))
|
||||
self.assertTrue(self.filt_cls.host_passes(host, spec_obj2))
|
||||
# False as instance_type is not allowed for aggregate
|
||||
self.assertFalse(self.filt_cls.host_passes(host, filter3_properties))
|
||||
self.assertFalse(self.filt_cls.host_passes(host, spec_obj3))
|
||||
|
|
|
@ -14,9 +14,6 @@
|
|||
"""
|
||||
Tests For Scheduler Host Filters.
|
||||
"""
|
||||
import mock
|
||||
|
||||
from nova import objects
|
||||
from nova.scheduler import filters
|
||||
from nova.scheduler.filters import all_hosts_filter
|
||||
from nova.scheduler.filters import compute_filter
|
||||
|
@ -38,27 +35,3 @@ class HostFiltersTestCase(test.NoDBTestCase):
|
|||
filt_cls = all_hosts_filter.AllHostsFilter()
|
||||
host = fakes.FakeHostState('host1', 'node1', {})
|
||||
self.assertTrue(filt_cls.host_passes(host, {}))
|
||||
|
||||
@mock.patch.object(objects.RequestSpec, 'to_legacy_request_spec_dict')
|
||||
@mock.patch.object(objects.RequestSpec, 'to_legacy_filter_properties_dict')
|
||||
def test_compat_legacy_props(self, to_props, to_spec):
|
||||
fake_flavor = objects.Flavor()
|
||||
fake_context = mock.Mock()
|
||||
fake_spec = objects.RequestSpec(context=fake_context,
|
||||
flavor=fake_flavor)
|
||||
fake_spec.config_options = None
|
||||
to_props.return_value = {'prop1': 'val1'}
|
||||
to_spec.return_value = {'spec1': 'val2'}
|
||||
|
||||
@filters.compat_legacy_props
|
||||
def fake_host_passes(self, host_state, filter_properties):
|
||||
# NOTE(sbauza): Convenient way to verify the passed properties
|
||||
return filter_properties
|
||||
|
||||
expected = {'prop1': 'val1',
|
||||
'request_spec': {'spec1': 'val2'},
|
||||
'instance_type': fake_flavor,
|
||||
'context': fake_context,
|
||||
'config_options': None}
|
||||
self.assertEqual(expected,
|
||||
fake_host_passes('self', 'host_state', fake_spec))
|
||||
|
|
Loading…
Reference in New Issue