Fix issues with get_pools scheduler API

The scheduler API 'get_pools' was copied from Cinder and has several
problems.  The REST endpoint, /scheduler-stats/get_pools, is not a
RESTful resource name.  The API accepts filter parameters, but filters
are not actually supported by the Manila scheduler.  The API was added
as an extension rather than as part of the v1 API, where it belongs.
Finally, after the scheduler service is restarted, the API returns no
data until a share has been created (the fix for this follows Cinder's
approach).  This patch addresses all of these issues.

Closes-Bug: #1422042
Change-Id: I7e32efb5390fd4b0a62f644ca5a12dcad14d8a9b
Clinton Knight 2015-02-14 19:51:34 -05:00
parent 4db796339a
commit 72f5f0485a
16 changed files with 950 additions and 278 deletions
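
For orientation before the diff, here is a rough usage sketch of the reworked endpoints. It is only an illustration: the base URL, project ID, and token below are placeholders, not part of this patch; the routes themselves (GET /scheduler-stats/pools and /scheduler-stats/pools/detail, admin-only per policy, with optional host/backend/pool regex filters) come from the router and policy changes shown below.

```python
# Illustrative only -- endpoint host/port, project ID, and token are placeholders.
import requests

MANILA_URL = 'http://controller:8786/v1/my_project'   # assumed Manila endpoint
HEADERS = {'X-Auth-Token': 'ADMIN_TOKEN'}              # assumed admin token

# Summary listing of pools known to the scheduler.
pools = requests.get(MANILA_URL + '/scheduler-stats/pools',
                     headers=HEADERS).json()['pools']

# Detailed listing, filtered by regex on host and pool name.
detail = requests.get(MANILA_URL + '/scheduler-stats/pools/detail',
                      headers=HEADERS,
                      params={'host': 'host1', 'pool': 'pool.*'}).json()['pools']
```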

View File

@@ -0,0 +1,134 @@
# Copyright (c) 2015 Clinton Knight. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib import exceptions as lib_exc # noqa
from tempest.api.share import base
from tempest import config_share as config
from tempest import test
CONF = config.CONF
class SchedulerStatsAdminTest(base.BaseSharesAdminTest):
@test.attr(type=["gate", "smoke", ])
def test_pool_list(self):
# List pools
resp, pool_response = self.shares_client.list_pools()
pool_list = pool_response.get('pools')
self.assertIsNotNone(pool_list, 'No pools returned from pools API')
self.assertNotEmpty(pool_list)
pool = pool_list[0]
required_keys = {'name', 'host', 'backend', 'pool'}
actual_keys = set(pool.keys())
self.assertTrue(actual_keys.issuperset(required_keys))
@test.attr(type=["gate", "smoke", ])
def test_pool_list_with_filters(self):
# List pools
resp, pool_response = self.shares_client.list_pools()
pool_list = pool_response.get('pools')
# Ensure we got at least one pool
self.assertIsNotNone(pool_list, 'No pools returned from pools API')
self.assertNotEmpty(pool_list)
pool = pool_list[0]
# Build search opts from data and get pools again with filter
search_opts = {
'host': pool.get('host'),
'backend': pool.get('backend'),
'pool': pool.get('pool'),
}
resp, pool_response = self.shares_client.list_pools(
search_opts=search_opts)
filtered_pool_list = pool_response.get('pools')
# Ensure we got exactly one pool matching the first one from above
self.assertEqual(1, len(filtered_pool_list))
self.assertDictEqual(pool, filtered_pool_list[0])
@test.attr(type=["gate", "smoke", ])
def test_pool_list_with_filters_negative(self):
# Build search opts for a non-existent pool
search_opts = {
'host': 'foo',
'backend': 'bar',
'pool': 'shark',
}
resp, pool_response = self.shares_client.list_pools(
search_opts=search_opts)
pool_list = pool_response.get('pools')
# Ensure we got no pools
self.assertEmpty(pool_list)
@test.attr(type=["gate", "smoke", ])
def test_pool_list_detail(self):
# List pools
resp, pool_response = self.shares_client.list_pools(detail=True)
pool_list = pool_response.get('pools')
self.assertIsNotNone(pool_list, 'No pools returned from pools API')
self.assertNotEmpty(pool_list)
pool = pool_list[0]
required_keys = {'name', 'host', 'backend', 'pool', 'capabilities'}
actual_keys = set(pool.keys())
self.assertTrue(actual_keys.issuperset(required_keys))
@test.attr(type=["gate", "smoke", ])
def test_pool_list_detail_with_filters(self):
# List pools
resp, pool_response = self.shares_client.list_pools(detail=True)
pool_list = pool_response.get('pools')
# Ensure we got at least one pool
self.assertIsNotNone(pool_list, 'No pools returned from pools API')
self.assertNotEmpty(pool_list)
pool = pool_list[0]
# Build search opts from data and get pools again with filter
search_opts = {
'host': pool.get('host'),
'backend': pool.get('backend'),
'pool': pool.get('pool'),
}
resp, pool_response = self.shares_client.list_pools(
detail=True, search_opts=search_opts)
filtered_pool_list = pool_response.get('pools')
# Ensure we got exactly one pool matching the first one from above
self.assertEqual(1, len(filtered_pool_list))
self.assertDictEqual(pool, filtered_pool_list[0])
@test.attr(type=["gate", "smoke", ])
def test_pool_list_detail_with_filters_negative(self):
# Build search opts for a non-existent pool
search_opts = {
'host': 'foo',
'backend': 'bar',
'pool': 'shark',
}
resp, pool_response = self.shares_client.list_pools(
detail=True, search_opts=search_opts)
pool_list = pool_response.get('pools')
# Ensure we got no pools
self.assertEmpty(pool_list)

View File

@@ -600,3 +600,16 @@ class SharesClient(service_client.ServiceClient):
         uri = "share-servers/%s/details" % share_server_id
         resp, body = self.get(uri)
         return resp, self._parse_resp(body)
+
+###############
+
+    def list_pools(self, detail=False, search_opts=None):
+        """Get list of scheduler pools."""
+        uri = 'scheduler-stats/pools'
+        if detail:
+            uri += '/detail'
+        if search_opts:
+            uri += "?%s" % urllib.urlencode(search_opts)
+        resp, body = self.get(uri)
+        self.expected_success(200, resp.status)
+        return resp, json.loads(body)

View File

@@ -69,5 +69,6 @@
     "share_network:remove_security_service": [["rule:default"]],
     "share_network:get_all_share_networks": [["rule:admin_api"]],
-    "scheduler_extension:scheduler_stats:get_pools" : "rule:admin_api"
+    "scheduler_stats:pools:index": [["rule:admin_api"]],
+    "scheduler_stats:pools:detail": [["rule:admin_api"]]
 }

View File

@@ -1,63 +0,0 @@
# Copyright (c) 2014 eBay Inc.
# Copyright (c) 2015 Rushil Chugh
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The Scheduler Stats extension"""
from manila.api import extensions
from manila.api.openstack import wsgi
from manila.api.views import scheduler_stats as scheduler_stats_view
from manila.scheduler import rpcapi
def authorize(context, action_name):
action = 'scheduler_stats:%s' % action_name
extensions.extension_authorizer('scheduler', action)(context)
class SchedulerStatsController(wsgi.Controller):
"""The Scheduler Stats controller for the OpenStack API."""
_view_builder_class = scheduler_stats_view.ViewBuilder
def __init__(self):
self.scheduler_api = rpcapi.SchedulerAPI()
super(SchedulerStatsController, self).__init__()
def get_pools(self, req):
"""List all active pools in scheduler."""
context = req.environ['manila.context']
authorize(context, 'get_pools')
detail = req.params.get('detail', False)
pools = self.scheduler_api.get_pools(context, filters=None)
return self._view_builder.pools(req, pools, detail)
class Scheduler_stats(extensions.ExtensionDescriptor):
"""Scheduler stats support."""
name = "Scheduler_stats"
alias = "scheduler-stats"
updated = "2015-08-01T00:00:00+00:00"
def get_resources(self):
res = extensions.ResourceExtension(
Scheduler_stats.alias,
SchedulerStatsController(),
collection_actions={"get_pools": "GET"})
return [res]

View File

@@ -24,6 +24,7 @@ from oslo_log import log
 from manila.api import extensions
 import manila.api.openstack
 from manila.api.v1 import limits
+from manila.api.v1 import scheduler_stats
 from manila.api.v1 import security_service
 from manila.api.v1 import share_metadata
 from manila.api.v1 import share_networks
@@ -110,3 +111,13 @@ class APIRouter(manila.api.openstack.APIRouter):
                         controller=self.resources['types'],
                         collection={'detail': 'GET', 'default': 'GET'},
                         member={'action': 'POST'})
+
+        self.resources['scheduler_stats'] = scheduler_stats.create_resource()
+        mapper.connect('pools', '/{project_id}/scheduler-stats/pools',
+                       controller=self.resources['scheduler_stats'],
+                       action='pools_index',
+                       conditions={'method': ['GET']})
+        mapper.connect('pools', '/{project_id}/scheduler-stats/pools/detail',
+                       controller=self.resources['scheduler_stats'],
+                       action='pools_detail',
+                       conditions={'method': ['GET']})

View File

@@ -0,0 +1,53 @@
# Copyright (c) 2015 Clinton Knight. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log
from manila.api.openstack import wsgi
from manila.api.views import scheduler_stats as scheduler_stats_views
from manila import policy
from manila.scheduler import rpcapi
POOLS_RESOURCES_NAME = 'scheduler_stats:pools'
LOG = log.getLogger(__name__)
class SchedulerStatsController(wsgi.Controller):
"""The Scheduler Stats API controller for the OpenStack API."""
def __init__(self):
self.scheduler_api = rpcapi.SchedulerAPI()
self._view_builder_class = scheduler_stats_views.ViewBuilder
super(SchedulerStatsController, self).__init__()
def pools_index(self, req):
"""Returns a list of storage pools known to the scheduler."""
return self._pools(req, action='index')
def pools_detail(self, req):
"""Returns a detailed list of storage pools known to the scheduler."""
return self._pools(req, action='detail')
def _pools(self, req, action='index'):
context = req.environ['manila.context']
policy.check_policy(context, POOLS_RESOURCES_NAME, action)
search_opts = {}
search_opts.update(req.GET)
pools = self.scheduler_api.get_pools(context, filters=search_opts)
detail = (action == 'detail')
return self._view_builder.pools(pools, detail=detail)
def create_resource():
return wsgi.Resource(SchedulerStatsController())

View File

@@ -1,5 +1,6 @@
 # Copyright (c) 2014 eBay Inc.
 # Copyright (c) 2015 Rushil Chugh
+# Copyright (c) 2015 Clinton Knight
 # All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -22,25 +23,30 @@ class ViewBuilder(common.ViewBuilder):
     _collection_name = "scheduler-stats"

-    def summary(self, request, pool):
+    def pool_summary(self, pool):
         """Summary view of a single pool."""
         return {
             'pool': {
                 'name': pool.get('name'),
+                'host': pool.get('host'),
+                'backend': pool.get('backend'),
+                'pool': pool.get('pool'),
             }
         }

-    def detail(self, request, pool):
+    def pool_detail(self, pool):
         """Detailed view of a single pool."""
         return {
             'pool': {
                 'name': pool.get('name'),
+                'host': pool.get('host'),
+                'backend': pool.get('backend'),
+                'pool': pool.get('pool'),
                 'capabilities': pool.get('capabilities'),
             }
         }

-    def pools(self, request, pools, detail):
-        """Summary view of a list of pools seen by scheduler."""
-        pdict = self.detail if detail else self.summary
-        return {"pools": [pdict(request, pool)['pool'] for pool in pools]}
+    def pools(self, pools, detail=False):
+        """View of a list of pools seen by scheduler."""
+        view_method = self.pool_detail if detail else self.pool_summary
+        return {"pools": [view_method(pool)['pool'] for pool in pools]}

View File

@@ -91,5 +91,4 @@ class Scheduler(object):
     def get_pools(self, context, filters):
         """Must override schedule method for scheduler to work."""
-        raise NotImplementedError(_(
-            "Must implement get_pools"))
+        raise NotImplementedError(_("Must implement get_pools"))

View File

@@ -51,8 +51,7 @@ class FilterScheduler(driver.Scheduler):
         return self.options.get_configuration()

     def get_pools(self, context, filters):
-        # TODO(zhiteng) Add filters support
-        return self.host_manager.get_pools(context)
+        return self.host_manager.get_pools(context, filters)

     def _post_select_populate_filter_properties(self, filter_properties,
                                                 host_state):

View File

@@ -1,5 +1,6 @@
 # Copyright (c) 2011 OpenStack, LLC.
 # Copyright (c) 2015 Rushil Chugh
+# Copyright (c) 2015 Clinton Knight
 # All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -18,6 +19,7 @@
 Manage hosts in the current zone.
 """

+import re
 import UserDict

 from oslo_config import cfg
@@ -27,7 +29,7 @@ import six

 from manila import db
 from manila import exception
-from manila.i18n import _LI
+from manila.i18n import _LI, _LW
 from manila.openstack.common.scheduler import filters
 from manila.openstack.common.scheduler import weights
 from manila.share import utils as share_utils
@@ -407,44 +409,40 @@ class HostManager(object):
     def update_service_capabilities(self, service_name, host, capabilities):
         """Update the per-service capabilities based on this notification."""
-        if service_name not in ('share'):
+        if service_name not in ('share',):
             LOG.debug('Ignoring %(service_name)s service update '
                       'from %(host)s',
                       {'service_name': service_name, 'host': host})
             return

         # Copy the capabilities, so we don't modify the original dict
-        capab_copy = dict(capabilities)
-        capab_copy["timestamp"] = timeutils.utcnow()  # Reported time
-        self.service_states[host] = capab_copy
+        capability_copy = dict(capabilities)
+        capability_copy["timestamp"] = timeutils.utcnow()  # Reported time
+        self.service_states[host] = capability_copy

         LOG.debug("Received %(service_name)s service update from "
                   "%(host)s: %(cap)s" %
                   {'service_name': service_name, 'host': host,
                    'cap': capabilities})

-    def get_all_host_states_share(self, context):
-        """Get all hosts and their states.
-
-        Returns a dict of all the hosts the HostManager knows
-        about. Also, each of the consumable resources in HostState are
-        pre-populated and adjusted based on data in the db.
-
-        For example:
-          {'192.168.1.100': HostState(), ...}
-        """
+    def _update_host_state_map(self, context):

         # Get resource usage across the available share nodes:
-        all_pools = {}
         topic = CONF.share_topic
         share_services = db.service_get_all_by_topic(context, topic)
         for service in share_services:
             host = service['host']

+            # Warn about down services and remove them from host_state_map
             if not utils.service_is_up(service) or service['disabled']:
-                LOG.info(_LI("Removing non-active host: %(host)s from "
-                             "scheduler cache.") % {'host': host})
-                self.host_state_map.pop(host, None)
+                LOG.warn(_LW("Share service is down. (host: %s)") % host)
+                if self.host_state_map.pop(host, None):
+                    LOG.info(_LI("Removing non-active host: %s from "
+                                 "scheduler cache.") % host)
                 continue

+            # Create and register host_state if not in host_state_map
             capabilities = self.service_states.get(host, None)
             host_state = self.host_state_map.get(host)
             if not host_state:
@@ -453,11 +451,26 @@ class HostManager(object):
                     capabilities=capabilities,
                     service=dict(six.iteritems(service)))
                 self.host_state_map[host] = host_state

-            # Update host_state
+            # Update capabilities and attributes in host_state
             host_state.update_from_share_capability(
                 capabilities, service=dict(six.iteritems(service)))

-            # Build a pool_state map and return that instead of host_state_map
-            state = self.host_state_map[host]
+    def get_all_host_states_share(self, context):
+        """Returns a dict of all the hosts the HostManager knows about.
+
+        Each of the consumable resources in HostState are
+        populated with capabilities scheduler received from RPC.
+
+        For example:
+          {'192.168.1.100': HostState(), ...}
+        """
+        self._update_host_state_map(context)
+
+        # Build a pool_state map and return that map instead of host_state_map
+        all_pools = {}
+        for host, state in self.host_state_map.items():
             for key in state.pools:
                 pool = state.pools[key]
                 # Use host.pool_name to make sure key is unique
@@ -466,17 +479,54 @@ class HostManager(object):
         return six.itervalues(all_pools)

-    def get_pools(self, context):
+    def get_pools(self, context, filters=None):
         """Returns a dict of all pools on all hosts HostManager knows about."""
-        all_pools = []
-        for host, state in self.host_state_map.items():
-            for key in state.pools:
-                pool = state.pools[key]
-                # Use host.pool_name to make sure key is unique
-                pool_key = share_utils.append_host(host, pool.pool_name)
-                new_pool = dict(name=pool_key)
-                new_pool.update(dict(capabilities=pool.capabilities))
-                all_pools.append(new_pool)
+        self._update_host_state_map(context)
+
+        all_pools = []
+        for host, host_state in self.host_state_map.items():
+            for pool in host_state.pools.values():
+
+                fully_qualified_pool_name = share_utils.append_host(
+                    host, pool.pool_name)
+                host_name = share_utils.extract_host(
+                    fully_qualified_pool_name, level='host')
+                backend_name = share_utils.extract_host(
+                    fully_qualified_pool_name, level='backend').split('@')[1] \
+                    if '@' in fully_qualified_pool_name else None
+                pool_name = share_utils.extract_host(
+                    fully_qualified_pool_name, level='pool')
+
+                new_pool = {
+                    'name': fully_qualified_pool_name,
+                    'host': host_name,
+                    'backend': backend_name,
+                    'pool': pool_name,
+                    'capabilities': pool.capabilities,
+                }
+
+                if self._passes_filters(new_pool, filters):
+                    all_pools.append(new_pool)

         return all_pools

+    def _passes_filters(self, dict_to_check, filter_dict):
+        """Applies a set of regex filters to a dictionary.
+
+        If no filter keys are supplied, the data passes unfiltered and
+        the method returns True.  Otherwise, each key in the filter
+        (filter_dict) must be present in the data (dict_to_check)
+        and the filter values are applied as regex expressions to
+        the data values.  If any of the filter values fail to match
+        their corresponding data values, the method returns False.
+        But if all filters match, the method returns True.
+        """
+        if not filter_dict:
+            return True
+
+        for filter_key, filter_value in six.iteritems(filter_dict):
+            if filter_key not in dict_to_check:
+                return False
+            if not re.match(filter_value, dict_to_check.get(filter_key)):
+                return False
+
+        return True
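
The filter semantics in the _passes_filters docstring are easy to miss: each filter value is treated as a regular expression and applied with re.match, so it is anchored at the start of the data value. A minimal standalone sketch of the same behavior (simplified, not the Manila code itself):

```python
# Standalone illustration of the regex-filter behavior used by get_pools.
import re


def passes_filters(data, filters):
    """Return True if every filter key exists in data and its regex matches."""
    if not filters:
        return True
    for key, pattern in filters.items():
        if key not in data or not re.match(pattern, data[key]):
            return False
    return True


pool = {'name': 'host2@BBB#pool2', 'host': 'host2', 'backend': 'BBB', 'pool': 'pool2'}
print(passes_filters(pool, {'host': 'host2', 'pool': 'pool.*'}))  # True
print(passes_filters(pool, {'backend': 'AAA'}))                   # False
```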

View File

@@ -1,111 +0,0 @@
# Copyright 2014 eBay Inc.
# Copyright 2013 OpenStack Foundation
# Copyright (c) 2015 Rushil Chugh
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from manila.api.contrib import scheduler_stats
from manila import context
from manila import test
from manila.tests.api import fakes
def schedule_rpcapi_get_pools(self, context, filters=None):
all_pools = []
pool1 = dict(name='pool1',
capabilities=dict(
total_capacity=1024, free_capacity=100,
share_backend_name='pool1', reserved_percentage=0,
driver_version='1.0.0', storage_protocol='iSCSI',
QoS_support='False', updated=None))
all_pools.append(pool1)
pool2 = dict(name='pool2',
capabilities=dict(
total_capacity=512, free_capacity=200,
share_backend_name='pool2', reserved_percentage=0,
driver_version='1.0.1', storage_protocol='iSER',
QoS_support='True', updated=None))
all_pools.append(pool2)
return all_pools
@mock.patch('manila.scheduler.rpcapi.SchedulerAPI.get_pools',
schedule_rpcapi_get_pools)
class SchedulerStatsAPITest(test.TestCase):
def setUp(self):
super(SchedulerStatsAPITest, self).setUp()
self.flags(host='fake')
self.controller = scheduler_stats.SchedulerStatsController()
self.ctxt = context.RequestContext('admin', 'fake', True)
def test_get_pools_summery(self):
req = fakes.HTTPRequest.blank('/v2/fake/scheduler_stats')
req.environ['manila.context'] = self.ctxt
res = self.controller.get_pools(req)
self.assertEqual(2, len(res['pools']))
expected = {
'pools': [
{
'name': 'pool1',
},
{
'name': 'pool2',
}
]
}
self.assertDictMatch(res, expected)
def test_get_pools_detail(self):
req = fakes.HTTPRequest.blank('/v2/fake/scheduler_stats?detail=True')
req.environ['manila.context'] = self.ctxt
res = self.controller.get_pools(req)
self.assertEqual(2, len(res['pools']))
expected = {
'pools': [
{
'name': 'pool1',
'capabilities': {
'updated': None,
'total_capacity': 1024,
'free_capacity': 100,
'share_backend_name': 'pool1',
'reserved_percentage': 0,
'driver_version': '1.0.0',
'storage_protocol': 'iSCSI',
'QoS_support': 'False', }
},
{
'name': 'pool2',
'capabilities': {
'updated': None,
'total_capacity': 512,
'free_capacity': 200,
'share_backend_name': 'pool2',
'reserved_percentage': 0,
'driver_version': '1.0.1',
'storage_protocol': 'iSER',
'QoS_support': 'True', }
}
]
}
self.assertDictMatch(res, expected)

View File

@@ -0,0 +1,187 @@
# Copyright (c) 2015 Clinton Knight. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from manila.api.v1 import scheduler_stats
from manila import context
from manila.scheduler import rpcapi
from manila import test
from manila.tests.api import fakes
FAKE_POOLS = [
{
'name': 'host1@backend1#pool1',
'host': 'host1',
'backend': 'backend1',
'pool': 'pool1',
'capabilities': {
'updated': None,
'total_capacity': 1024,
'free_capacity': 100,
'share_backend_name': 'pool1',
'reserved_percentage': 0,
'driver_version': '1.0.0',
'storage_protocol': 'iSCSI',
'QoS_support': 'False',
},
},
{
'name': 'host1@backend1#pool2',
'host': 'host1',
'backend': 'backend1',
'pool': 'pool2',
'capabilities': {
'updated': None,
'total_capacity': 512,
'free_capacity': 200,
'share_backend_name': 'pool2',
'reserved_percentage': 0,
'driver_version': '1.0.1',
'storage_protocol': 'iSER',
'QoS_support': 'True',
},
},
]
class SchedulerStatsControllerTestCase(test.TestCase):
def setUp(self):
super(SchedulerStatsControllerTestCase, self).setUp()
self.flags(host='fake')
self.controller = scheduler_stats.SchedulerStatsController()
self.ctxt = context.RequestContext('admin', 'fake', True)
def test_pools_index(self):
mock_get_pools = self.mock_object(rpcapi.SchedulerAPI,
'get_pools',
mock.Mock(return_value=FAKE_POOLS))
req = fakes.HTTPRequest.blank('/v1/fake_project/scheduler_stats/pools')
req.environ['manila.context'] = self.ctxt
result = self.controller.pools_index(req)
expected = {
'pools': [
{
'name': 'host1@backend1#pool1',
'host': 'host1',
'backend': 'backend1',
'pool': 'pool1',
},
{
'name': 'host1@backend1#pool2',
'host': 'host1',
'backend': 'backend1',
'pool': 'pool2',
}
]
}
self.assertDictMatch(result, expected)
mock_get_pools.assert_called_once_with(self.ctxt, filters={})
def test_pools_index_with_filters(self):
mock_get_pools = self.mock_object(rpcapi.SchedulerAPI,
'get_pools',
mock.Mock(return_value=FAKE_POOLS))
url = '/v1/fake_project/scheduler-stats/pools/detail'
url += '?backend=.%2A&host=host1&pool=pool%2A'
req = fakes.HTTPRequest.blank(url)
req.environ['manila.context'] = self.ctxt
result = self.controller.pools_index(req)
expected = {
'pools': [
{
'name': 'host1@backend1#pool1',
'host': 'host1',
'backend': 'backend1',
'pool': 'pool1',
},
{
'name': 'host1@backend1#pool2',
'host': 'host1',
'backend': 'backend1',
'pool': 'pool2',
}
]
}
expected_filters = {'host': 'host1', 'pool': 'pool*', 'backend': '.*'}
self.assertDictMatch(result, expected)
mock_get_pools.assert_called_once_with(self.ctxt,
filters=expected_filters)
def test_get_pools_detail(self):
mock_get_pools = self.mock_object(rpcapi.SchedulerAPI,
'get_pools',
mock.Mock(return_value=FAKE_POOLS))
req = fakes.HTTPRequest.blank(
'/v1/fake_project/scheduler_stats/pools/detail')
req.environ['manila.context'] = self.ctxt
result = self.controller.pools_detail(req)
expected = {
'pools': [
{
'name': 'host1@backend1#pool1',
'host': 'host1',
'backend': 'backend1',
'pool': 'pool1',
'capabilities': {
'updated': None,
'total_capacity': 1024,
'free_capacity': 100,
'share_backend_name': 'pool1',
'reserved_percentage': 0,
'driver_version': '1.0.0',
'storage_protocol': 'iSCSI',
'QoS_support': 'False',
},
},
{
'name': 'host1@backend1#pool2',
'host': 'host1',
'backend': 'backend1',
'pool': 'pool2',
'capabilities': {
'updated': None,
'total_capacity': 512,
'free_capacity': 200,
'share_backend_name': 'pool2',
'reserved_percentage': 0,
'driver_version': '1.0.1',
'storage_protocol': 'iSER',
'QoS_support': 'True',
},
},
],
}
self.assertDictMatch(expected, result)
mock_get_pools.assert_called_once_with(self.ctxt, filters={})
class SchedulerStatsTestCase(test.TestCase):
def test_create_resource(self):
result = scheduler_stats.create_resource()
self.assertTrue(isinstance(result.controller,
scheduler_stats.SchedulerStatsController))

View File

@@ -0,0 +1,107 @@
# Copyright (c) 2015 Clinton Knight. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from manila.api.views import scheduler_stats
from manila import test
POOL1 = {
'name': 'host1@backend1#pool1',
'host': 'host1',
'backend': 'backend1',
'pool': 'pool1',
'other': 'junk',
'capabilities': {
'pool_name': 'pool1',
'driver_handles_share_servers': False,
'QoS_support': 'False',
'timestamp': '2015-03-15T19:15:42.611690',
'allocated_capacity_gb': 5,
'total_capacity_gb': 10,
},
}
POOL2 = {
'name': 'host1@backend1#pool2',
'host': 'host1',
'backend': 'backend1',
'pool': 'pool2',
'capabilities': {
'pool_name': 'pool2',
'driver_handles_share_servers': False,
'QoS_support': 'False',
'timestamp': '2015-03-15T19:15:42.611690',
'allocated_capacity_gb': 15,
'total_capacity_gb': 20,
},
}
POOLS = [POOL1, POOL2]
POOLS_DETAIL_VIEW = {
'pools': [
{
'name': 'host1@backend1#pool1',
'host': 'host1',
'backend': 'backend1',
'pool': 'pool1',
'capabilities': {
'pool_name': 'pool1',
'driver_handles_share_servers': False,
'QoS_support': 'False',
'timestamp': '2015-03-15T19:15:42.611690',
'allocated_capacity_gb': 5,
'total_capacity_gb': 10,
},
}, {
'name': 'host1@backend1#pool2',
'host': 'host1',
'backend': 'backend1',
'pool': 'pool2',
'capabilities': {
'pool_name': 'pool2',
'driver_handles_share_servers': False,
'QoS_support': 'False',
'timestamp': '2015-03-15T19:15:42.611690',
'allocated_capacity_gb': 15,
'total_capacity_gb': 20,
}
}
]
}
class ViewBuilderTestCase(test.TestCase):
def setUp(self):
super(ViewBuilderTestCase, self).setUp()
self.builder = scheduler_stats.ViewBuilder()
def test_pools(self):
result = self.builder.pools(POOLS)
# Remove capabilities for summary view
expected = copy.deepcopy(POOLS_DETAIL_VIEW)
for pool in expected['pools']:
del pool['capabilities']
self.assertDictEqual(expected, result)
def test_pools_with_details(self):
result = self.builder.pools(POOLS, detail=True)
expected = POOLS_DETAIL_VIEW
self.assertDictEqual(expected, result)

View File

@@ -48,5 +48,7 @@
     "security_service:get_all_security_services": [["rule:admin_api"]],
     "limits_extension:used_limits": [],

-    "scheduler_extension:scheduler_stats:get_pools" : "rule:admin_api"
+    "scheduler_stats:pools:index": [["rule:admin_api"]],
+    "scheduler_stats:pools:detail": [["rule:admin_api"]]
 }

View File

@@ -22,21 +22,90 @@ import six
 from manila.scheduler import filter_scheduler
 from manila.scheduler import host_manager

-SHARE_SERVICES = [
+SHARE_SERVICES_NO_POOLS = [
     dict(id=1, host='host1', topic='share', disabled=False,
          availability_zone='zone1', updated_at=timeutils.utcnow()),
-    dict(id=2, host='host2', topic='share', disabled=False,
+    dict(id=2, host='host2@back1', topic='share', disabled=False,
          availability_zone='zone1', updated_at=timeutils.utcnow()),
-    dict(id=3, host='host3', topic='share', disabled=False,
+    dict(id=3, host='host2@back2', topic='share', disabled=False,
          availability_zone='zone2', updated_at=timeutils.utcnow()),
-    dict(id=4, host='host4', topic='share', disabled=False,
+]
+
+SERVICE_STATES_NO_POOLS = {
+    'host1': dict(share_backend_name='AAA',
+                  total_capacity_gb=512, free_capacity_gb=200,
+                  timestamp=None, reserved_percentage=0,
+                  driver_handles_share_servers=False),
+    'host2@back1': dict(share_backend_name='BBB',
+                        total_capacity_gb=256, free_capacity_gb=100,
+                        timestamp=None, reserved_percentage=0,
+                        driver_handles_share_servers=False),
+    'host2@back2': dict(share_backend_name='CCC',
+                        total_capacity_gb=10000, free_capacity_gb=700,
+                        timestamp=None, reserved_percentage=0,
+                        driver_handles_share_servers=False),
+}
+
+SHARE_SERVICES_WITH_POOLS = [
+    dict(id=1, host='host1@AAA', topic='share', disabled=False,
+         availability_zone='zone1', updated_at=timeutils.utcnow()),
+    dict(id=2, host='host2@BBB', topic='share', disabled=False,
+         availability_zone='zone1', updated_at=timeutils.utcnow()),
+    dict(id=3, host='host3@CCC', topic='share', disabled=False,
+         availability_zone='zone2', updated_at=timeutils.utcnow()),
+    dict(id=4, host='host4@DDD', topic='share', disabled=False,
          availability_zone='zone3', updated_at=timeutils.utcnow()),
     # service on host5 is disabled
-    dict(id=5, host='host5', topic='share', disabled=True,
+    dict(id=5, host='host5@EEE', topic='share', disabled=True,
          availability_zone='zone4', updated_at=timeutils.utcnow()),
 ]
SHARE_SERVICE_STATES_WITH_POOLS = {
'host1@AAA': dict(share_backend_name='AAA',
timestamp=None, reserved_percentage=0,
driver_handles_share_servers=False,
pools=[dict(pool_name='pool1',
total_capacity_gb=51,
free_capacity_gb=41,
reserved_percentage=0)]),
'host2@BBB': dict(share_backend_name='BBB',
timestamp=None, reserved_percentage=0,
driver_handles_share_servers=False,
pools=[dict(pool_name='pool2',
total_capacity_gb=52,
free_capacity_gb=42,
reserved_percentage=0)]),
'host3@CCC': dict(share_backend_name='CCC',
timestamp=None, reserved_percentage=0,
driver_handles_share_servers=False,
pools=[dict(pool_name='pool3',
total_capacity_gb=53,
free_capacity_gb=43,
reserved_percentage=0)]),
'host4@DDD': dict(share_backend_name='DDD',
timestamp=None, reserved_percentage=0,
driver_handles_share_servers=False,
pools=[dict(pool_name='pool4a',
total_capacity_gb=541,
free_capacity_gb=441,
reserved_percentage=0),
dict(pool_name='pool4b',
total_capacity_gb=542,
free_capacity_gb=442,
reserved_percentage=0)]),
'host5@EEE': dict(share_backend_name='EEE',
timestamp=None, reserved_percentage=0,
driver_handles_share_servers=False,
pools=[dict(pool_name='pool5a',
total_capacity_gb=551,
free_capacity_gb=451,
reserved_percentage=0),
dict(pool_name='pool5b',
total_capacity_gb=552,
free_capacity_gb=452,
reserved_percentage=0)]),
}
class FakeFilterScheduler(filter_scheduler.FilterScheduler): class FakeFilterScheduler(filter_scheduler.FilterScheduler):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):

View File

@@ -1,5 +1,6 @@
 # Copyright (c) 2011 OpenStack, LLC
 # Copyright (c) 2015 Rushil Chugh
+# Copyright (c) 2015 Clinton Knight
 # All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -16,6 +17,8 @@
 """
 Tests For HostManager
 """

+import ddt
 import mock
 from oslo_config import cfg
 from oslo_utils import timeutils
@@ -27,6 +30,7 @@ from manila.openstack.common.scheduler import filters
 from manila.scheduler import host_manager
 from manila import test
 from manila.tests.scheduler import fakes
+from manila import utils

 CONF = cfg.CONF
@@ -42,6 +46,7 @@ class FakeFilterClass2(filters.BaseHostFilter):
     pass


+@ddt.ddt
 class HostManagerTestCase(test.TestCase):
     """Test case for HostManager class."""
@@ -139,9 +144,12 @@ class HostManagerTestCase(test.TestCase):
     def test_get_all_host_states_share(self):
         context = 'fake_context'
         topic = CONF.share_topic
-        ret_services = fakes.SHARE_SERVICES
-        with mock.patch.object(db, 'service_get_all_by_topic',
-                               mock.Mock(return_value=ret_services)):
+        self.mock_object(
+            db, 'service_get_all_by_topic',
+            mock.Mock(return_value=fakes.SHARE_SERVICES_WITH_POOLS))
+        with mock.patch.dict(self.host_manager.service_states,
+                             fakes.SHARE_SERVICE_STATES_WITH_POOLS):
             # Disabled service
             self.host_manager.get_all_host_states_share(context)
             host_state_map = self.host_manager.host_state_map
@@ -149,59 +157,214 @@ class HostManagerTestCase(test.TestCase):
             self.assertEqual(4, len(host_state_map))
             # Check that service is up
             for i in xrange(4):
-                share_node = fakes.SHARE_SERVICES[i]
+                share_node = fakes.SHARE_SERVICES_WITH_POOLS[i]
                 host = share_node['host']
                 self.assertEqual(share_node, host_state_map[host].service)
         db.service_get_all_by_topic.assert_called_once_with(context, topic)

-    @mock.patch('manila.db.service_get_all_by_topic')
-    @mock.patch('manila.utils.service_is_up')
-    def test_get_pools(self, _mock_service_is_up,
-                       _mock_service_get_all_by_topic):
+    def test_get_pools_no_pools(self):
         context = 'fake_context'
-
-        services = [
-            dict(id=1, host='host1', topic='share', disabled=False,
-                 availability_zone='zone1', updated_at=timeutils.utcnow()),
-            dict(id=2, host='host2@back1', topic='share', disabled=False,
-                 availability_zone='zone1', updated_at=timeutils.utcnow()),
-            dict(id=3, host='host2@back2', topic='share', disabled=False,
-                 availability_zone='zone2', updated_at=timeutils.utcnow()),
-        ]
-
-        mocked_service_states = {
-            'host1': dict(share_backend_name='AAA',
-                          total_capacity_gb=512, free_capacity_gb=200,
-                          timestamp=None, reserved_percentage=0,
-                          driver_handles_share_servers=False),
-            'host2@back1': dict(share_backend_name='BBB',
-                                total_capacity_gb=256, free_capacity_gb=100,
-                                timestamp=None, reserved_percentage=0,
-                                driver_handles_share_servers=False),
-            'host2@back2': dict(share_backend_name='CCC',
-                                total_capacity_gb=10000, free_capacity_gb=700,
-                                timestamp=None, reserved_percentage=0,
-                                driver_handles_share_servers=False),
-        }
-
-        _mock_service_get_all_by_topic.return_value = services
-        _mock_service_is_up.return_value = True
-        _mock_warning = mock.Mock()
-        host_manager.LOG.warn = _mock_warning
+        self.mock_object(utils, 'service_is_up', mock.Mock(return_value=True))
+        self.mock_object(
+            db, 'service_get_all_by_topic',
+            mock.Mock(return_value=fakes.SHARE_SERVICES_NO_POOLS))
+        host_manager.LOG.warn = mock.Mock()

         with mock.patch.dict(self.host_manager.service_states,
-                             mocked_service_states):
-            # Call get_all_host_states to populate host_state_map
-            self.host_manager.get_all_host_states_share(context)
+                             fakes.SERVICE_STATES_NO_POOLS):

             res = self.host_manager.get_pools(context)

-            # Check if get_pools returns all 3 pools
-            self.assertEqual(3, len(res))
-
             expected = [
                 {
                     'name': 'host1#AAA',
'host': 'host1',
'backend': None,
'pool': 'AAA',
'capabilities': {
'timestamp': None,
'share_backend_name': 'AAA',
'free_capacity_gb': 200,
'driver_version': None,
'total_capacity_gb': 512,
'reserved_percentage': 0,
'vendor_name': None,
'storage_protocol': None,
'driver_handles_share_servers': False,
},
}, {
'name': 'host2@back1#BBB',
'host': 'host2',
'backend': 'back1',
'pool': 'BBB',
'capabilities': {
'timestamp': None,
'share_backend_name': 'BBB',
'free_capacity_gb': 100,
'driver_version': None,
'total_capacity_gb': 256,
'reserved_percentage': 0,
'vendor_name': None,
'storage_protocol': None,
'driver_handles_share_servers': False,
},
}, {
'name': 'host2@back2#CCC',
'host': 'host2',
'backend': 'back2',
'pool': 'CCC',
'capabilities': {
'timestamp': None,
'share_backend_name': 'CCC',
'free_capacity_gb': 700,
'driver_version': None,
'total_capacity_gb': 10000,
'reserved_percentage': 0,
'vendor_name': None,
'storage_protocol': None,
'driver_handles_share_servers': False,
},
},
]
self.assertEqual(len(expected), len(res))
self.assertEqual(sorted(expected), sorted(res))
def test_get_pools(self):
context = 'fake_context'
self.mock_object(utils, 'service_is_up', mock.Mock(return_value=True))
self.mock_object(
db, 'service_get_all_by_topic',
mock.Mock(return_value=fakes.SHARE_SERVICES_WITH_POOLS))
host_manager.LOG.warn = mock.Mock()
with mock.patch.dict(self.host_manager.service_states,
fakes.SHARE_SERVICE_STATES_WITH_POOLS):
res = self.host_manager.get_pools(context)
expected = [
{
'name': 'host1@AAA#pool1',
'host': 'host1',
'backend': 'AAA',
'pool': 'pool1',
'capabilities': {
'pool_name': 'pool1',
'timestamp': None,
'share_backend_name': 'AAA',
'free_capacity_gb': 41,
'driver_version': None,
'total_capacity_gb': 51,
'reserved_percentage': 0,
'vendor_name': None,
'storage_protocol': None,
'driver_handles_share_servers': False,
},
}, {
'name': 'host2@BBB#pool2',
'host': 'host2',
'backend': 'BBB',
'pool': 'pool2',
'capabilities': {
'pool_name': 'pool2',
'timestamp': None,
'share_backend_name': 'BBB',
'free_capacity_gb': 42,
'driver_version': None,
'total_capacity_gb': 52,
'reserved_percentage': 0,
'vendor_name': None,
'storage_protocol': None,
'driver_handles_share_servers': False,
},
}, {
'name': 'host3@CCC#pool3',
'host': 'host3',
'backend': 'CCC',
'pool': 'pool3',
'capabilities': {
'pool_name': 'pool3',
'timestamp': None,
'share_backend_name': 'CCC',
'free_capacity_gb': 43,
'driver_version': None,
'total_capacity_gb': 53,
'reserved_percentage': 0,
'vendor_name': None,
'storage_protocol': None,
'driver_handles_share_servers': False,
},
}, {
'name': 'host4@DDD#pool4a',
'host': 'host4',
'backend': 'DDD',
'pool': 'pool4a',
'capabilities': {
'pool_name': 'pool4a',
'timestamp': None,
'share_backend_name': 'DDD',
'free_capacity_gb': 441,
'driver_version': None,
'total_capacity_gb': 541,
'reserved_percentage': 0,
'vendor_name': None,
'storage_protocol': None,
'driver_handles_share_servers': False,
},
}, {
'name': 'host4@DDD#pool4b',
'host': 'host4',
'backend': 'DDD',
'pool': 'pool4b',
'capabilities': {
'pool_name': 'pool4b',
'timestamp': None,
'share_backend_name': 'DDD',
'free_capacity_gb': 442,
'driver_version': None,
'total_capacity_gb': 542,
'reserved_percentage': 0,
'vendor_name': None,
'storage_protocol': None,
'driver_handles_share_servers': False,
},
},
]
self.assertEqual(len(expected), len(res))
self.assertEqual(sorted(expected), sorted(res))
def test_get_pools_host_down(self):
context = 'fake_context'
mock_service_is_up = self.mock_object(utils, 'service_is_up')
self.mock_object(
db, 'service_get_all_by_topic',
mock.Mock(return_value=fakes.SHARE_SERVICES_NO_POOLS))
host_manager.LOG.warn = mock.Mock()
with mock.patch.dict(self.host_manager.service_states,
fakes.SERVICE_STATES_NO_POOLS):
# Initialize host data with all services present
mock_service_is_up.side_effect = [True, True, True]
# Call once to update the host state map
self.host_manager.get_pools(context)
self.assertEqual(len(fakes.SHARE_SERVICES_NO_POOLS),
len(self.host_manager.host_state_map))
# Then mock one host as down
mock_service_is_up.side_effect = [True, True, False]
res = self.host_manager.get_pools(context)
expected = [
{
'name': 'host1#AAA',
'host': 'host1',
'backend': None,
'pool': 'AAA',
                    'capabilities': {
                        'timestamp': None,
                        'driver_handles_share_servers': False,
@@ -211,10 +374,13 @@ class HostManagerTestCase(test.TestCase):
                        'total_capacity_gb': 512,
                        'reserved_percentage': 0,
                        'vendor_name': None,
-                        'storage_protocol': None},
-                },
-                {
+                        'storage_protocol': None
+                    },
+                }, {
                    'name': 'host2@back1#BBB',
+                    'host': 'host2',
+                    'backend': 'back1',
+                    'pool': 'BBB',
                    'capabilities': {
                        'timestamp': None,
                        'driver_handles_share_servers': False,
@@ -224,25 +390,74 @@ class HostManagerTestCase(test.TestCase):
                        'total_capacity_gb': 256,
                        'reserved_percentage': 0,
                        'vendor_name': None,
-                        'storage_protocol': None},
+                        'storage_protocol': None
+                    },
                },
+            ]
+            self.assertEqual(len(expected),
+                             len(self.host_manager.host_state_map))
+            self.assertEqual(len(expected), len(res))
+            self.assertEqual(sorted(expected), sorted(res))
+
+    def test_get_pools_with_filters(self):
+        context = 'fake_context'
+        self.mock_object(utils, 'service_is_up', mock.Mock(return_value=True))
+        self.mock_object(
+            db, 'service_get_all_by_topic',
+            mock.Mock(return_value=fakes.SHARE_SERVICES_WITH_POOLS))
+        host_manager.LOG.warn = mock.Mock()
+
+        with mock.patch.dict(self.host_manager.service_states,
+                             fakes.SHARE_SERVICE_STATES_WITH_POOLS):
+            res = self.host_manager.get_pools(
+                context, filters={'host': 'host2', 'pool': 'pool*'})
+
+            expected = [
                {
-                    'name': 'host2@back2#CCC',
+                    'name': 'host2@BBB#pool2',
+                    'host': 'host2',
+                    'backend': 'BBB',
+                    'pool': 'pool2',
                    'capabilities': {
+                        'pool_name': 'pool2',
                        'timestamp': None,
                        'driver_handles_share_servers': False,
-                        'share_backend_name': 'CCC',
-                        'free_capacity_gb': 700,
+                        'share_backend_name': 'BBB',
+                        'free_capacity_gb': 42,
                        'driver_version': None,
-                        'total_capacity_gb': 10000,
+                        'total_capacity_gb': 52,
                        'reserved_percentage': 0,
                        'vendor_name': None,
-                        'storage_protocol': None},
-                }
+                        'storage_protocol': None
+                    },
+                },
            ]
            self.assertEqual(len(expected), len(res))
            self.assertEqual(sorted(expected), sorted(res))
@ddt.data(
None,
{},
{'key1': 'value1'},
{'key1': 'value1', 'key2': 'value*'},
{'key1': '.*', 'key2': '.*'},
)
def test_passes_filters_true(self, filter):
data = {'key1': 'value1', 'key2': 'value2', 'key3': 'value3'}
self.assertTrue(self.host_manager._passes_filters(data, filter))
@ddt.data(
{'key1': 'value$'},
{'key4': 'value'},
{'key1': 'value1.+', 'key2': 'value*'},
)
def test_passes_filters_false(self, filter):
data = {'key1': 'value1', 'key2': 'value2', 'key3': 'value3'}
self.assertFalse(self.host_manager._passes_filters(data, filter))
class HostStateTestCase(test.TestCase):
    """Test case for HostState class."""