Merge "Pool-aware Scheduler Support"

This commit is contained in:
Jenkins 2015-02-05 01:37:54 +00:00 committed by Gerrit Code Review
commit 79857b61c5
27 changed files with 1130 additions and 103 deletions

View File

@ -60,5 +60,7 @@
"share_network:show": [["rule:default"]],
"share_network:add_security_service": [["rule:default"]],
"share_network:remove_security_service": [["rule:default"]],
"share_network:get_all_share_networks": [["rule:admin_api"]]
"share_network:get_all_share_networks": [["rule:admin_api"]],
"scheduler_extension:scheduler_stats:get_pools": "rule:admin_api"
}

View File

@ -0,0 +1,63 @@
# Copyright (c) 2014 eBay Inc.
# Copyright (c) 2015 Rushil Chugh
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The Scheduler Stats extension"""
from manila.api import extensions
from manila.api.openstack import wsgi
from manila.api.views import scheduler_stats as scheduler_stats_view
from manila.scheduler import rpcapi
def authorize(context, action_name):
    """Enforce the 'scheduler_stats:<action_name>' policy for this request.

    Raises the policy layer's not-authorized error when the context is
    not permitted to perform the action.
    """
    authorizer = extensions.extension_authorizer(
        'scheduler', 'scheduler_stats:%s' % action_name)
    authorizer(context)
class SchedulerStatsController(wsgi.Controller):
    """The Scheduler Stats controller for the OpenStack API."""

    _view_builder_class = scheduler_stats_view.ViewBuilder

    def __init__(self):
        self.scheduler_api = rpcapi.SchedulerAPI()
        super(SchedulerStatsController, self).__init__()

    def get_pools(self, req):
        """List all active pools in scheduler.

        :param req: the wsgi request; honors an optional 'detail' query
                    parameter selecting the detailed view.
        :returns: dict rendered by the view builder ({'pools': [...]}).
        """
        context = req.environ['manila.context']
        authorize(context, 'get_pools')
        # 'detail' arrives as a query-string *string*; bare truthiness
        # would treat 'detail=False' or 'detail=0' as True, so parse it
        # explicitly.  Absent parameter still means the summary view.
        raw_detail = req.params.get('detail', False)
        detail = str(raw_detail).strip().lower() in ('1', 'true', 'yes', 'on')
        pools = self.scheduler_api.get_pools(context, filters=None)
        return self._view_builder.pools(req, pools, detail)
class Scheduler_stats(extensions.ExtensionDescriptor):
    """Scheduler stats support."""

    name = "Scheduler_stats"
    alias = "scheduler-stats"
    updated = "2015-08-01T00:00:00+00:00"

    def get_resources(self):
        """Expose the scheduler-stats resource with its GET collection action."""
        resource = extensions.ResourceExtension(
            Scheduler_stats.alias,
            SchedulerStatsController(),
            collection_actions={"get_pools": "GET"})
        return [resource]

View File

@ -0,0 +1,46 @@
# Copyright (c) 2014 eBay Inc.
# Copyright (c) 2015 Rushil Chugh
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from manila.api import common
class ViewBuilder(common.ViewBuilder):
    """Model scheduler-stats API responses as a python dictionary."""

    _collection_name = "scheduler-stats"

    def summary(self, request, pool):
        """Summary view of a single pool: name only."""
        return {'pool': {'name': pool.get('name')}}

    def detail(self, request, pool):
        """Detailed view of a single pool: name plus reported capabilities."""
        return {
            'pool': {
                'name': pool.get('name'),
                'capabilities': pool.get('capabilities'),
            }
        }

    def pools(self, request, pools, detail):
        """Render a list of pools, detailed or summarized per 'detail'."""
        render = self.detail if detail else self.summary
        return {"pools": [render(request, p)['pool'] for p in pools]}

View File

@ -1193,8 +1193,13 @@ def _share_get_all_with_filters(context, project_id=None, share_server_id=None,
query = query.filter_by(project_id=project_id)
if share_server_id:
query = query.filter_by(share_server_id=share_server_id)
if host:
query = query.filter_by(host=host)
if host and isinstance(host, six.string_types):
session = get_session()
with session.begin():
host_attr = getattr(models.Share, 'host')
conditions = [host_attr == host,
host_attr.op('LIKE')(host + '#%')]
query = query.filter(or_(*conditions))
# Apply filters
if not filters:
@ -1244,6 +1249,7 @@ def share_get_all(context, filters=None, sort_key=None, sort_dir=None):
@require_admin_context
def share_get_all_by_host(context, host, filters=None,
sort_key=None, sort_dir=None):
"""Retrieves all shares hosted on a host."""
query = _share_get_all_with_filters(
context, host=host, filters=filters,
sort_key=sort_key, sort_dir=sort_dir,

View File

@ -88,3 +88,8 @@ class Scheduler(object):
def schedule_create_share(self, context, request_spec, filter_properties):
"""Must override schedule method for scheduler to work."""
raise NotImplementedError(_("Must implement schedule_create_share"))
def get_pools(self, context, filters):
    """Return pool information; concrete schedulers must override this."""
    raise NotImplementedError(_(
        "Must implement get_pools"))

View File

@ -49,6 +49,10 @@ class FilterScheduler(driver.Scheduler):
"""Fetch options dictionary. Broken out for testing."""
return self.options.get_configuration()
def get_pools(self, context, filters):
    """Return the pools tracked by the host manager.

    The 'filters' argument is currently ignored.
    """
    # TODO(zhiteng) Add filters support
    pools = self.host_manager.get_pools(context)
    return pools
def _post_select_populate_filter_properties(self, filter_properties,
host_state):
"""Add additional information to filter properties.

View File

@ -1,4 +1,5 @@
# Copyright (c) 2011 OpenStack, LLC.
# Copyright (c) 2015 Rushil Chugh
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
@ -29,6 +30,7 @@ from manila.i18n import _LI
from manila.openstack.common import log as logging
from manila.openstack.common.scheduler import filters
from manila.openstack.common.scheduler import weights
from manila.share import utils as share_utils
from manila import utils
host_manager_opts = [
@ -89,6 +91,8 @@ class HostState(object):
"""Mutable and immutable information tracked for a host."""
def __init__(self, host, capabilities=None, service=None):
self.capabilities = None
self.service = None
self.host = host
self.update_capabilities(capabilities, service)
@ -103,6 +107,8 @@ class HostState(object):
self.free_capacity_gb = None
self.reserved_percentage = 0
# PoolState for all pools
self.pools = {}
self.updated = None
def update_capabilities(self, capabilities=None, service=None):
@ -115,23 +121,142 @@ class HostState(object):
service = {}
self.service = ReadOnlyDict(service)
def update_from_share_capability(self, capability):
"""Update information about a host from its volume_node info."""
def update_from_share_capability(self, capability, service=None):
    """Update information about a host from its share_node info.

    'capability' is the status info reported by share backend, a typical
    capability looks like this:

    capability = {
        'share_backend_name': 'Local NFS',    #\
        'vendor_name': 'OpenStack',           #  backend level
        'driver_version': '1.0',              #  mandatory/fixed
        'storage_protocol': 'NFS',            #/ stats&capabilities

        'active_shares': 10,                  #\
        'IOPS_provisioned': 30000,            #  optional custom
        'fancy_capability_1': 'eat',          #  stats & capabilities
        'fancy_capability_2': 'drink',        #/

        'pools': [
            {'pool_name': '1st pool',         #\
             'total_capacity_gb': 500,        #  mandatory stats for
             'free_capacity_gb': 230,         #  pools
             'allocated_capacity_gb': 270,    # |
             'QoS_support': 'False',          # |
             'reserved_percentage': 0,        #/

             'dying_disks': 100,              #\
             'super_hero_1': 'spider-man',    #  optional custom
             'super_hero_2': 'flash',         #  stats & capabilities
             'super_hero_3': 'neoncat'        #/
             },
            {'pool_name': '2nd pool',
             'total_capacity_gb': 1024,
             'free_capacity_gb': 1024,
             'allocated_capacity_gb': 0,
             'QoS_support': 'False',
             'reserved_percentage': 0,

             'dying_disks': 200,
             'super_hero_1': 'superman',
             'super_hero_2': ' ',
             'super_hero_3': 'Hulk',
             }
        ]
    }
    """
    self.update_capabilities(capability, service)

    if capability:
        # Ignore reports older than the data we already hold.
        if self.updated and self.updated > capability['timestamp']:
            return

        # NOTE(review): 'share_backend' looks vestigial — update_backend()
        # below sets 'share_backend_name', which is what update_pools()
        # reads; confirm nothing depends on this attribute.
        self.share_backend = capability.get('share_backend_name', None)
        self.vendor_name = capability.get('vendor_name', None)
        self.driver_version = capability.get('driver_version', None)
        self.storage_protocol = capability.get('storage_protocol', None)
        self.QoS_support = capability.get('QoS_support', False)

        # Update backend level info
        self.update_backend(capability)

        # Mandatory stats: a missing key here fails loudly (KeyError).
        self.total_capacity_gb = capability['total_capacity_gb']
        self.free_capacity_gb = capability['free_capacity_gb']
        self.reserved_percentage = capability['reserved_percentage']

        # Update pool level info
        self.update_pools(capability, service)
        self.updated = capability['timestamp']
def update_pools(self, capability, service):
    """Update storage pools information from backend reported info.

    Creates/refreshes a PoolState per reported pool; for legacy drivers
    that report no 'pools' list, synthesizes a single pool from the
    backend-level stats. Pools absent from the current report are
    dropped from the cache.
    """
    if not capability:
        return

    pools = capability.get('pools', None)
    active_pools = set()
    if pools and isinstance(pools, list):
        # Update all pools stats according to information from list
        # of pools in share capacity
        for pool_cap in pools:
            pool_name = pool_cap['pool_name']
            self._append_backend_info(pool_cap)
            cur_pool = self.pools.get(pool_name, None)
            if not cur_pool:
                # Add new pool
                cur_pool = PoolState(self.host, pool_cap, pool_name)
                self.pools[pool_name] = cur_pool
            cur_pool.update_from_share_capability(pool_cap, service)
            active_pools.add(pool_name)
    elif pools is None:
        # To handle legacy driver that doesn't report pool
        # information in the capability, we have to prepare
        # a pool from backend level info, or to update the one
        # we created in self.pools.
        pool_name = self.share_backend_name
        if pool_name is None:
            # To get DEFAULT_POOL_NAME
            pool_name = share_utils.extract_host(self.host, 'pool', True)

        if len(self.pools) == 0:
            # No pool was there
            single_pool = PoolState(self.host, capability, pool_name)
            self._append_backend_info(capability)
            self.pools[pool_name] = single_pool
        else:
            # This is a update from legacy driver
            try:
                single_pool = self.pools[pool_name]
            except KeyError:
                single_pool = PoolState(self.host, capability, pool_name)
                self._append_backend_info(capability)
                self.pools[pool_name] = single_pool
        single_pool.update_from_share_capability(capability, service)
        active_pools.add(pool_name)

    # Remove non-active pools from self.pools
    nonactive_pools = set(self.pools.keys()) - active_pools
    for pool in nonactive_pools:
        LOG.debug("Removing non-active pool %(pool)s @ %(host)s "
                  "from scheduler cache.",
                  {'pool': pool, 'host': self.host})
        del self.pools[pool]
def _append_backend_info(self, pool_cap):
    """Backfill backend-level fields into a pool capability dict.

    Mutates pool_cap in place; only keys that are missing or falsy
    are filled from this host's backend-level state.
    """
    # Fill backend level info to pool if needed.
    if not pool_cap.get('share_backend_name'):
        pool_cap['share_backend_name'] = self.share_backend_name
    if not pool_cap.get('storage_protocol'):
        pool_cap['storage_protocol'] = self.storage_protocol
    if not pool_cap.get('vendor_name'):
        pool_cap['vendor_name'] = self.vendor_name
    if not pool_cap.get('driver_version'):
        pool_cap['driver_version'] = self.driver_version
    if not pool_cap.get('timestamp'):
        pool_cap['timestamp'] = self.updated
def update_backend(self, capability):
    """Copy backend-level identity stats from a capability report."""
    for attr in ('share_backend_name', 'vendor_name',
                 'driver_version', 'storage_protocol'):
        setattr(self, attr, capability.get(attr))
    # 'timestamp' is mandatory in a report; let a missing key fail loudly.
    self.updated = capability['timestamp']
def consume_from_share(self, share):
"""Incrementally update host state from an share."""
@ -146,6 +271,41 @@ class HostState(object):
self.free_capacity_gb -= share_gb
self.updated = timeutils.utcnow()
def __repr__(self):
    # Debug-friendly summary: host id, remaining capacity, and pool map.
    return ("host: '%(host)s', free_capacity_gb: %(free)s, "
            "pools: %(pools)s" % {'host': self.host,
                                  'free': self.free_capacity_gb,
                                  'pools': self.pools}
            )
class PoolState(HostState):
    """State tracked for a single storage pool within a backend host."""

    def __init__(self, host, capabilities, pool_name):
        # Encode the pool name into the host string (host@backend#pool)
        # so the pool gets a unique host identity.
        new_host = share_utils.append_host(host, pool_name)
        super(PoolState, self).__init__(new_host, capabilities)
        self.pool_name = pool_name
        # No pools in pool
        self.pools = None

    def update_from_share_capability(self, capability, service=None):
        """Update information about a pool from its share_node info."""
        self.update_capabilities(capability, service)
        if capability:
            # Ignore reports older than the data we already hold.
            if self.updated and self.updated > capability['timestamp']:
                return
            self.update_backend(capability)
            # Mandatory pool stats; missing keys fail loudly (KeyError).
            self.total_capacity_gb = capability['total_capacity_gb']
            self.free_capacity_gb = capability['free_capacity_gb']
            self.allocated_capacity_gb = capability.get(
                'allocated_capacity_gb', 0)
            self.QoS_support = capability.get('QoS_support', False)
            self.reserved_percentage = capability['reserved_percentage']

    def update_pools(self, capability, service=None):
        """No-op: pools do not nest.

        Accepts the same arguments as HostState.update_pools (the
        original override dropped 'service', breaking substitutability
        for callers holding a HostState reference).
        """
        pass
class HostManager(object):
"""Base HostManager class."""
@ -243,14 +403,16 @@ class HostManager(object):
{'service_name': service_name, 'host': host})
return
LOG.debug("Received %(service_name)s service update from "
"%(host)s.", {"service_name": service_name, "host": host})
# Copy the capabilities, so we don't modify the original dict
capab_copy = dict(capabilities)
capab_copy["timestamp"] = timeutils.utcnow() # Reported time
self.service_states[host] = capab_copy
LOG.debug("Received %(service_name)s service update from "
"%(host)s: %(cap)s" %
{'service_name': service_name, 'host': host,
'cap': capabilities})
def get_all_host_states_share(self, context):
"""Get all hosts and their states.
@ -263,6 +425,7 @@ class HostManager(object):
"""
# Get resource usage across the available share nodes:
all_pools = {}
topic = CONF.share_topic
share_services = db.service_get_all_by_topic(context, topic)
for service in share_services:
@ -274,17 +437,36 @@ class HostManager(object):
continue
capabilities = self.service_states.get(host, None)
host_state = self.host_state_map.get(host)
if host_state:
# copy capabilities to host_state.capabilities
host_state.update_capabilities(capabilities,
dict(six.iteritems(service)))
else:
if not host_state:
host_state = self.host_state_cls(
host,
capabilities=capabilities,
service=dict(six.iteritems(service)))
self.host_state_map[host] = host_state
# update host_state
host_state.update_from_share_capability(capabilities)
# Update host_state
host_state.update_from_share_capability(
capabilities, service=dict(six.iteritems(service)))
# Build a pool_state map and return that instead of host_state_map
state = self.host_state_map[host]
for key in state.pools:
pool = state.pools[key]
# Use host.pool_name to make sure key is unique
pool_key = '.'.join([host, pool.pool_name])
all_pools[pool_key] = pool
return self.host_state_map.itervalues()
return six.itervalues(all_pools)
def get_pools(self, context):
    """Return a list describing every pool the HostManager knows about.

    Each entry is a dict carrying a unique 'name' (host string with the
    pool appended) and the pool's reported 'capabilities'.
    """
    pools = []
    for host, state in self.host_state_map.items():
        for pool in state.pools.values():
            # Encode pool name into the host string to keep names unique.
            entry = {
                'name': share_utils.append_host(host, pool.pool_name),
                'capabilities': pool.capabilities,
            }
            pools.append(entry)
    return pools

View File

@ -19,6 +19,7 @@
Scheduler Service
"""
from oslo import messaging
from oslo_config import cfg
from oslo_utils import excutils
from oslo_utils import importutils
@ -46,6 +47,10 @@ CONF.register_opt(scheduler_driver_opt)
class SchedulerManager(manager.Manager):
"""Chooses a host to create shares."""
RPC_API_VERSION = '1.1'
target = messaging.Target(version=RPC_API_VERSION)
def __init__(self, scheduler_driver=None, service_name=None,
*args, **kwargs):
if not scheduler_driver:
@ -88,6 +93,10 @@ class SchedulerManager(manager.Manager):
context, ex,
request_spec)
def get_pools(self, context, filters=None):
    """Get active pools from the scheduler driver's cache.

    :param context: request context, passed through to the driver.
    :param filters: optional pool filters, passed through unchanged.
    """
    driver = self.driver
    return driver.get_pools(context, filters)
def _set_share_error_state_and_notify(self, method, context, ex,
request_spec):
LOG.warning(_LW("Failed to schedule_%(method)s: %(ex)s"),

View File

@ -31,15 +31,16 @@ class SchedulerAPI(object):
API version history:
1.0 - Initial version.
1.1 - Add get_pools method
'''
RPC_API_VERSION = '1.0'
RPC_API_VERSION = '1.1'
def __init__(self):
super(SchedulerAPI, self).__init__()
target = messaging.Target(topic=CONF.scheduler_topic,
version=self.RPC_API_VERSION)
self.client = rpc.get_client(target, version_cap='1.0')
self.client = rpc.get_client(target, version_cap='1.1')
def create_share(self, ctxt, topic, share_id, snapshot_id=None,
request_spec=None, filter_properties=None):
@ -66,3 +67,8 @@ class SchedulerAPI(object):
host=host,
capabilities=capabilities,
)
def get_pools(self, ctxt, filters=None):
    """Synchronously ask the scheduler service for its active pools.

    Requires RPC API version 1.1 (the version that added get_pools).
    """
    prepared = self.client.prepare(version='1.1')
    return prepared.call(ctxt, 'get_pools', filters=filters)

View File

@ -174,6 +174,9 @@ class ShareDriver(object):
"""
super(ShareDriver, self).__init__()
self.configuration = kwargs.get('configuration', None)
self._stats = {}
self.pools = {}
if self.configuration:
self.configuration.append_config_values(share_opts)
network_config_group = (self.configuration.network_config_group or
@ -243,6 +246,12 @@ class ShareDriver(object):
"""Is called to remove snapshot."""
raise NotImplementedError()
def get_pool(self, share):
    """Return pool name where the share resides on.

    :param share: The share hosted by the driver.
    """
    # Base driver has no pool knowledge; subclasses may override.
    return None
def ensure_share(self, context, share, share_server=None):
"""Invoked to sure that share is exported."""
raise NotImplementedError()

View File

@ -35,6 +35,7 @@ from manila import manager
from manila.openstack.common import log as logging
from manila import quota
import manila.share.configuration
from manila.share import utils as share_utils
from manila import utils
LOG = logging.getLogger(__name__)
@ -82,6 +83,27 @@ class ShareManager(manager.SchedulerDependentManager):
self.driver = importutils.import_object(
share_driver, self.db, configuration=self.configuration)
def _ensure_share_has_pool(self, ctxt, share):
    """Make sure share['host'] carries pool info; backfill if missing.

    :returns: the pool name, or None when the driver raised while
              reporting the pool (the failure is logged, not re-raised).
    """
    pool = share_utils.extract_host(share['host'], 'pool')
    if pool is None:
        # No pool name encoded in host, so this is a legacy
        # share created before pool is introduced, ask
        # driver to provide pool info if it has such
        # knowledge and update the DB.
        try:
            pool = self.driver.get_pool(share)
        except Exception as err:
            LOG.error(_LE("Failed to fetch pool name for share: "
                          "%(share)s. Error: %(error)s."),
                      {'share': share['id'], 'error': err})
            return

        if pool:
            # Persist the pool into the host string (host@backend#pool).
            new_host = share_utils.append_host(share['host'], pool)
            self.db.share_update(ctxt, share['id'], {'host': new_host})

    return pool
def init_host(self):
"""Initialization for a standalone service."""
@ -93,6 +115,7 @@ class ShareManager(manager.SchedulerDependentManager):
LOG.debug("Re-exporting %s shares", len(shares))
for share in shares:
if share['status'] == 'available':
self._ensure_share_has_pool(ctxt, share)
share_server = self._get_share_server(ctxt, share)
try:
self.driver.ensure_share(

View File

@ -21,6 +21,7 @@ from oslo_config import cfg
from oslo_serialization import jsonutils
from manila import rpc
from manila.share import utils
CONF = cfg.CONF
@ -44,7 +45,8 @@ class ShareAPI(object):
def create_share(self, ctxt, share, host,
request_spec, filter_properties,
snapshot_id=None):
cctxt = self.client.prepare(server=host, version='1.0')
new_host = utils.extract_host(host)
cctxt = self.client.prepare(server=new_host, version='1.0')
request_spec_p = jsonutils.to_primitive(request_spec)
cctxt.cast(
ctxt,
@ -56,15 +58,18 @@ class ShareAPI(object):
)
def delete_share(self, ctxt, share):
cctxt = self.client.prepare(server=share['host'], version='1.0')
host = utils.extract_host(share['host'])
cctxt = self.client.prepare(server=host, version='1.0')
cctxt.cast(ctxt, 'delete_share', share_id=share['id'])
def delete_share_server(self, ctxt, share_server):
cctxt = self.client.prepare(server=share_server['host'], version='1.0')
host = utils.extract_host(share_server['host'])
cctxt = self.client.prepare(server=host, version='1.0')
cctxt.cast(ctxt, 'delete_share_server', share_server=share_server)
def create_snapshot(self, ctxt, share, snapshot):
cctxt = self.client.prepare(server=share['host'])
host = utils.extract_host(share['host'])
cctxt = self.client.prepare(server=host)
cctxt.cast(
ctxt,
'create_snapshot',
@ -73,15 +78,18 @@ class ShareAPI(object):
)
def delete_snapshot(self, ctxt, snapshot, host):
cctxt = self.client.prepare(server=host)
new_host = utils.extract_host(host)
cctxt = self.client.prepare(server=new_host)
cctxt.cast(ctxt, 'delete_snapshot', snapshot_id=snapshot['id'])
def allow_access(self, ctxt, share, access):
cctxt = self.client.prepare(server=share['host'], version='1.0')
host = utils.extract_host(share['host'])
cctxt = self.client.prepare(server=host, version='1.0')
cctxt.cast(ctxt, 'allow_access', access_id=access['id'])
def deny_access(self, ctxt, share, access):
cctxt = self.client.prepare(server=share['host'], version='1.0')
host = utils.extract_host(share['host'])
cctxt = self.client.prepare(server=host, version='1.0')
cctxt.cast(ctxt, 'deny_access', access_id=access['id'])
def publish_service_capabilities(self, ctxt):

75
manila/share/utils.py Normal file
View File

@ -0,0 +1,75 @@
# Copyright (c) 2012 OpenStack Foundation
# Copyright (c) 2015 Rushil Chugh
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Share-related Utilities and helpers."""
DEFAULT_POOL_NAME = '_pool0'


def extract_host(host, level='backend', use_default_pool_name=False):
    """Extract Host, Backend or Pool information from host string.

    :param host: String for host, which could include host@backend#pool info
    :param level: Indicate which level of information should be extracted
                  from host string. Level can be 'host', 'backend' or 'pool',
                  default value is 'backend'
    :param use_default_pool_name: This flag specifies what to do
                  if level == 'pool' and there is no 'pool' info
                  encoded in host string. default_pool_name=True
                  will return DEFAULT_POOL_NAME, otherwise it will
                  return None. Default value of this parameter
                  is False.
    :return: expected level of information

    For example:
        host = 'HostA@BackendB#PoolC'
        extract_host(host, 'host')          # 'HostA'
        extract_host(host, 'backend')       # 'HostA@BackendB'
        extract_host(host, 'pool')          # 'PoolC'

        host = 'HostX@BackendY'
        extract_host(host, 'pool')          # None
        extract_host(host, 'pool', True)    # '_pool0'
    """
    # Everything before '#' is the host@backend portion.
    backend = host.split('#')[0]
    if level == 'host':
        # Strip the backend suffix as well.
        return backend.split('@')[0]
    if level == 'backend':
        return backend
    if level == 'pool':
        parts = host.split('#')
        if len(parts) == 2:
            return parts[1]
        if use_default_pool_name is True:
            return DEFAULT_POOL_NAME
        return None
def append_host(host, pool):
    """Encode pool into host info.

    Returns host unchanged when either piece is empty/None.
    """
    if host and pool:
        return '#'.join((host, pool))
    return host

View File

@ -324,3 +324,16 @@ class TestCase(base_test.BaseTestCase):
self.assertTrue(a is None)
else:
f(a, *args, **kwargs)
def _dict_from_object(self, obj, ignored_keys):
    # Convert a model-like object into a plain dict, dropping ignored_keys.
    # NOTE(review): relies on obj.iteritems() — presumably provided by the
    # DB model base class (Python-2-era API); confirm before porting.
    if ignored_keys is None:
        ignored_keys = []
    return dict([(k, v) for k, v in obj.iteritems()
                 if k not in ignored_keys])
def _assertEqualListsOfObjects(self, objs1, objs2, ignored_keys=None):
    # Assert two lists of objects are equal regardless of order, comparing
    # each object as a dict with ignored_keys removed.
    obj_to_dict = lambda o: self._dict_from_object(o, ignored_keys)
    # Deterministic ordering: sort each dict by its sorted key list.
    sort_key = lambda d: [d[k] for k in sorted(d)]
    conv_and_sort = lambda obj: sorted(map(obj_to_dict, obj), key=sort_key)
    self.assertEqual(conv_and_sort(objs1), conv_and_sort(objs2))

View File

@ -0,0 +1,111 @@
# Copyright 2014 eBay Inc.
# Copyright 2013 OpenStack Foundation
# Copyright (c) 2015 Rushil Chugh
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from manila.api.contrib import scheduler_stats
from manila import context
from manila import test
from manila.tests.api import fakes
def schedule_rpcapi_get_pools(self, context, filters=None):
    """Fake SchedulerAPI.get_pools returning two canned pools."""
    pool1 = {
        'name': 'pool1',
        'capabilities': {
            'total_capacity': 1024,
            'free_capacity': 100,
            'share_backend_name': 'pool1',
            'reserved_percentage': 0,
            'driver_version': '1.0.0',
            'storage_protocol': 'iSCSI',
            'QoS_support': 'False',
            'updated': None,
        },
    }
    pool2 = {
        'name': 'pool2',
        'capabilities': {
            'total_capacity': 512,
            'free_capacity': 200,
            'share_backend_name': 'pool2',
            'reserved_percentage': 0,
            'driver_version': '1.0.1',
            'storage_protocol': 'iSER',
            'QoS_support': 'True',
            'updated': None,
        },
    }
    return [pool1, pool2]
@mock.patch('manila.scheduler.rpcapi.SchedulerAPI.get_pools',
            schedule_rpcapi_get_pools)
class SchedulerStatsAPITest(test.TestCase):
    """Tests for the scheduler-stats get_pools controller action."""

    def setUp(self):
        super(SchedulerStatsAPITest, self).setUp()
        self.flags(host='fake')
        self.controller = scheduler_stats.SchedulerStatsController()
        self.ctxt = context.RequestContext('admin', 'fake', True)

    def test_get_pools_summary(self):
        # Renamed from 'test_get_pools_summery' (spelling typo).
        req = fakes.HTTPRequest.blank('/v2/fake/scheduler_stats')
        req.environ['manila.context'] = self.ctxt
        res = self.controller.get_pools(req)

        self.assertEqual(2, len(res['pools']))

        expected = {
            'pools': [
                {
                    'name': 'pool1',
                },
                {
                    'name': 'pool2',
                }
            ]
        }

        self.assertDictMatch(res, expected)

    def test_get_pools_detail(self):
        req = fakes.HTTPRequest.blank('/v2/fake/scheduler_stats?detail=True')
        req.environ['manila.context'] = self.ctxt
        res = self.controller.get_pools(req)

        self.assertEqual(2, len(res['pools']))

        expected = {
            'pools': [
                {
                    'name': 'pool1',
                    'capabilities': {
                        'updated': None,
                        'total_capacity': 1024,
                        'free_capacity': 100,
                        'share_backend_name': 'pool1',
                        'reserved_percentage': 0,
                        'driver_version': '1.0.0',
                        'storage_protocol': 'iSCSI',
                        'QoS_support': 'False', }
                },
                {
                    'name': 'pool2',
                    'capabilities': {
                        'updated': None,
                        'total_capacity': 512,
                        'free_capacity': 200,
                        'share_backend_name': 'pool2',
                        'reserved_percentage': 0,
                        'driver_version': '1.0.1',
                        'storage_protocol': 'iSER',
                        'QoS_support': 'True', }
                }
            ]
        }

        self.assertDictMatch(res, expected)

View File

View File

@ -0,0 +1,50 @@
# Copyright (c) 2015 Rushil Chugh
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Testing of SQLAlchemy backend."""
from manila import context
from manila.db.sqlalchemy import api
from manila import test
class SQLAlchemyAPIShareTestCase(test.TestCase):
    """Tests of share host filtering with pool-annotated host strings."""

    def setUp(self):
        """Run before each test."""
        super(SQLAlchemyAPIShareTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def test_share_filter_by_host_with_pools(self):
        expected = [api.share_create(self.ctxt, {'host': h})
                    for h in ('foo', 'foo#pool0')]
        # A share on an unrelated host must not match the 'foo' filter.
        api.share_create(self.ctxt, {'host': 'foobar'})
        actual = api.share_get_all_by_host(self.ctxt, 'foo')
        self._assertEqualListsOfObjects(
            expected, actual,
            ignored_keys=['volume_type', 'volume_type_id'])

    def test_share_filter_all_by_host_with_pools_multiple_hosts(self):
        expected = [api.share_create(self.ctxt, {'host': h})
                    for h in ('foo', 'foo#pool0', 'foo', 'foo#pool1')]
        # A share on an unrelated host must not match the 'foo' filter.
        api.share_create(self.ctxt, {'host': 'foobar'})
        actual = api.share_get_all_by_host(self.ctxt, 'foo')
        self._assertEqualListsOfObjects(
            expected, actual,
            ignored_keys=['volume_type', 'volume_type_id'])

View File

@ -40,5 +40,6 @@
"security_service:index": [],
"security_service:get_all_security_services": [["rule:admin_api"]],
"limits_extension:used_limits": []
"limits_extension:used_limits": [],
"scheduler_extension:scheduler_stats:get_pools": "rule:admin_api"
}

View File

@ -65,6 +65,10 @@ class FakeHostManager(host_manager.HostManager):
'free_capacity_gb': 200,
'reserved_percentage': 5,
'timestamp': None},
'host5': {'total_capacity_gb': 2048,
'free_capacity_gb': 500,
'reserved_percentage': 5,
'timestamp': None},
}
@ -85,9 +89,8 @@ def mock_host_manager_db_calls(mock_obj, disabled=None):
availability_zone='zone2', updated_at=timeutils.utcnow()),
dict(id=4, host='host4', topic='share', disabled=False,
availability_zone='zone3', updated_at=timeutils.utcnow()),
# service on host5 is disabled
dict(id=5, host='host5', topic='share', disabled=True,
availability_zone='zone4', updated_at=timeutils.utcnow()),
dict(id=5, host='host5', topic='share', disabled=False,
availability_zone='zone3', updated_at=timeutils.utcnow()),
]
if disabled is None:
mock_obj.return_value = services

View File

@ -22,6 +22,7 @@ from oslo_config import cfg
from manila import context
from manila.openstack.common.scheduler import weights
from manila.scheduler.weights import capacity
from manila.share import utils
from manila import test
from manila.tests.scheduler import fakes
@ -64,7 +65,8 @@ class CapacityWeigherTestCase(test.TestCase):
# so, host1 should win:
weighed_host = self._get_weighed_host(hostinfo_list)
self.assertEqual(weighed_host.weight, 1.0)
self.assertEqual(weighed_host.obj.host, 'host1')
self.assertEqual(
'host1', utils.extract_host(weighed_host.obj.host))
def test_capacity_weight_multiplier1(self):
self.flags(capacity_weight_multiplier=-1.0)
@ -78,7 +80,8 @@ class CapacityWeigherTestCase(test.TestCase):
# so, host4 should win:
weighed_host = self._get_weighed_host(hostinfo_list)
self.assertEqual(weighed_host.weight, 0.0)
self.assertEqual(weighed_host.obj.host, 'host4')
self.assertEqual(
'host4', utils.extract_host(weighed_host.obj.host))
def test_capacity_weight_multiplier2(self):
self.flags(capacity_weight_multiplier=2.0)
@ -92,4 +95,5 @@ class CapacityWeigherTestCase(test.TestCase):
# so, host1 should win:
weighed_host = self._get_weighed_host(hostinfo_list)
self.assertEqual(weighed_host.weight, 2.0)
self.assertEqual(weighed_host.obj.host, 'host1')
self.assertEqual(
'host1', utils.extract_host(weighed_host.obj.host))

View File

@ -57,6 +57,17 @@ class HostFiltersTestCase(test.TestCase):
'service': service})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_capacity_filter_current_host_passes(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['CapacityFilter']()
filter_properties = {'size': 100, 'share_exists_on': 'host1#pool1'}
service = {'disabled': False}
host = fakes.FakeHostState('host1#pools1',
{'free_capacity_gb': 200,
'updated_at': None,
'service': service})
self.assertTrue(filt_cls.host_passes(host, filter_properties))
def test_capacity_filter_fails(self):
self._stub_service_is_up(True)
filt_cls = self.class_map['CapacityFilter']()

View File

@ -1,4 +1,5 @@
# Copyright (c) 2011 OpenStack, LLC
# Copyright (c) 2015 Rushil Chugh
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
@ -15,8 +16,6 @@
"""
Tests For HostManager
"""
import datetime
import mock
from oslo_config import cfg
from oslo_utils import timeutils
@ -29,6 +28,7 @@ from manila.scheduler import host_manager
from manila import test
from manila.tests.scheduler import fakes
CONF = cfg.CONF
@ -63,7 +63,7 @@ class HostManagerTestCase(test.TestCase):
self.host_manager.filter_classes = [FakeFilterClass1,
FakeFilterClass2]
# Test 'volume' returns 1 correct function
# Test 'share' returns 1 correct function
filter_classes = self.host_manager._choose_host_filters(None)
self.assertEqual(1, len(filter_classes))
self.assertEqual('FakeFilterClass2', filter_classes[0].__name__)
@ -140,7 +140,6 @@ class HostManagerTestCase(test.TestCase):
context = 'fake_context'
topic = CONF.share_topic
ret_services = fakes.SHARE_SERVICES
with mock.patch.object(db, 'service_get_all_by_topic',
mock.Mock(return_value=ret_services)):
# Disabled service
@ -149,133 +148,341 @@ class HostManagerTestCase(test.TestCase):
self.assertEqual(4, len(host_state_map))
# Check that service is up
for i in moves.range(4):
for i in xrange(4):
share_node = fakes.SHARE_SERVICES[i]
host = share_node['host']
self.assertEqual(share_node, host_state_map[host].service)
db.service_get_all_by_topic.assert_called_once_with(context, topic)
def test_get_all_host_states_share_after_host_status_change(self):
@mock.patch('manila.db.service_get_all_by_topic')
@mock.patch('manila.utils.service_is_up')
def test_get_pools(self, _mock_service_is_up,
_mock_service_get_all_by_topic):
context = 'fake_context'
ret_services = fakes.SHARE_SERVICES
with mock.patch.object(db, 'service_get_all_by_topic',
mock.Mock(return_value=ret_services)):
services = [
dict(id=1, host='host1', topic='share', disabled=False,
availability_zone='zone1', updated_at=timeutils.utcnow()),
dict(id=2, host='host2@back1', topic='share', disabled=False,
availability_zone='zone1', updated_at=timeutils.utcnow()),
dict(id=3, host='host2@back2', topic='share', disabled=False,
availability_zone='zone2', updated_at=timeutils.utcnow()),
]
mocked_service_states = {
'host1': dict(share_backend_name='AAA',
total_capacity_gb=512, free_capacity_gb=200,
timestamp=None, reserved_percentage=0),
'host2@back1': dict(share_backend_name='BBB',
total_capacity_gb=256, free_capacity_gb=100,
timestamp=None, reserved_percentage=0),
'host2@back2': dict(share_backend_name='CCC',
total_capacity_gb=10000, free_capacity_gb=700,
timestamp=None, reserved_percentage=0),
}
_mock_service_get_all_by_topic.return_value = services
_mock_service_is_up.return_value = True
_mock_warning = mock.Mock()
host_manager.LOG.warn = _mock_warning
with mock.patch.dict(self.host_manager.service_states,
mocked_service_states):
# Call get_all_host_states to populate host_state_map
self.host_manager.get_all_host_states_share(context)
host_state_map = self.host_manager.host_state_map
delta_time = datetime.timedelta(0, CONF.service_down_time + 10)
# disable host4
ret_services[3]['disabled'] = True
# down host3
ret_services[2]['updated_at'] -= delta_time
# disabled and down host2
ret_services[1]['disabled'] = True
ret_services[1]['updated_at'] -= delta_time
res = self.host_manager.get_pools(context)
self.host_manager.get_all_host_states_share(context)
host_state_map = self.host_manager.host_state_map
# Check if get_pools returns all 3 pools
self.assertEqual(3, len(res))
# only 1 host is up and active.
self.assertEqual(1, len(host_state_map))
# The up and active host is host1
share_node = fakes.SHARE_SERVICES[0]
host = share_node['host']
self.assertEqual(share_node, host_state_map[host].service)
expected = [
{
'name': 'host1#AAA',
'capabilities': {
'timestamp': None,
'share_backend_name': 'AAA',
'free_capacity_gb': 200,
'driver_version': None,
'total_capacity_gb': 512,
'reserved_percentage': 0,
'vendor_name': None,
'storage_protocol': None},
},
{
'name': 'host2@back1#BBB',
'capabilities': {
'timestamp': None,
'share_backend_name': 'BBB',
'free_capacity_gb': 100,
'driver_version': None,
'total_capacity_gb': 256,
'reserved_percentage': 0,
'vendor_name': None,
'storage_protocol': None},
},
{
'name': 'host2@back2#CCC',
'capabilities': {
'timestamp': None,
'share_backend_name': 'CCC',
'free_capacity_gb': 700,
'driver_version': None,
'total_capacity_gb': 10000,
'reserved_percentage': 0,
'vendor_name': None,
'storage_protocol': None},
}
]
self.assertEqual(len(expected), len(res))
self.assertEqual(sorted(expected), sorted(res))
class HostStateTestCase(test.TestCase):
"""Test case for HostState class."""
def test_update_from_share_capability(self):
fake_host = host_manager.HostState('host1')
self.assertEqual(None, fake_host.free_capacity_gb)
share_capability = {'total_capacity_gb': 1024,
'free_capacity_gb': 512,
def test_update_from_share_capability_nopool(self):
share_capability = {'total_capacity_gb': 0,
'free_capacity_gb': 100,
'reserved_percentage': 0,
'timestamp': None}
fake_host = host_manager.HostState('host1', share_capability)
self.assertIsNone(fake_host.free_capacity_gb)
fake_host.update_from_share_capability(share_capability)
self.assertEqual(512, fake_host.free_capacity_gb)
# Backend level stats remain uninitialized
self.assertEqual(0, fake_host.total_capacity_gb)
self.assertIsNone(fake_host.free_capacity_gb)
# Pool stats has been updated
self.assertEqual(0, fake_host.pools['_pool0'].total_capacity_gb)
self.assertEqual(100, fake_host.pools['_pool0'].free_capacity_gb)
# Test update for existing host state
share_capability.update(dict(total_capacity_gb=1000))
fake_host.update_from_share_capability(share_capability)
self.assertEqual(1000, fake_host.pools['_pool0'].total_capacity_gb)
# Test update for existing host state with different backend name
share_capability.update(dict(share_backend_name='magic'))
fake_host.update_from_share_capability(share_capability)
self.assertEqual(1000, fake_host.pools['magic'].total_capacity_gb)
self.assertEqual(100, fake_host.pools['magic'].free_capacity_gb)
# 'pool0' becomes nonactive pool, and is deleted
self.assertRaises(KeyError, lambda: fake_host.pools['pool0'])
def test_update_from_share_capability_with_pools(self):
fake_host = host_manager.HostState('host1#pool1')
self.assertIsNone(fake_host.free_capacity_gb)
capability = {
'share_backend_name': 'Backend1',
'vendor_name': 'OpenStack',
'driver_version': '1.1',
'storage_protocol': 'NFS_CIFS',
'pools': [
{'pool_name': 'pool1',
'total_capacity_gb': 500,
'free_capacity_gb': 230,
'allocated_capacity_gb': 270,
'QoS_support': 'False',
'reserved_percentage': 0,
'dying_disks': 100,
'super_hero_1': 'spider-man',
'super_hero_2': 'flash',
'super_hero_3': 'neoncat',
},
{'pool_name': 'pool2',
'total_capacity_gb': 1024,
'free_capacity_gb': 1024,
'allocated_capacity_gb': 0,
'QoS_support': 'False',
'reserved_percentage': 0,
'dying_disks': 200,
'super_hero_1': 'superman',
'super_hero_2': ' ',
'super_hero_2': 'Hulk',
}
],
'timestamp': None,
}
fake_host.update_from_share_capability(capability)
self.assertEqual('Backend1', fake_host.share_backend_name)
self.assertEqual('NFS_CIFS', fake_host.storage_protocol)
self.assertEqual('OpenStack', fake_host.vendor_name)
self.assertEqual('1.1', fake_host.driver_version)
# Backend level stats remain uninitialized
self.assertEqual(0, fake_host.total_capacity_gb)
self.assertIsNone(fake_host.free_capacity_gb)
# Pool stats has been updated
self.assertEqual(2, len(fake_host.pools))
self.assertEqual(500, fake_host.pools['pool1'].total_capacity_gb)
self.assertEqual(230, fake_host.pools['pool1'].free_capacity_gb)
self.assertEqual(1024, fake_host.pools['pool2'].total_capacity_gb)
self.assertEqual(1024, fake_host.pools['pool2'].free_capacity_gb)
capability = {
'share_backend_name': 'Backend1',
'vendor_name': 'OpenStack',
'driver_version': '1.0',
'storage_protocol': 'NFS_CIFS',
'pools': [
{'pool_name': 'pool3',
'total_capacity_gb': 10000,
'free_capacity_gb': 10000,
'allocated_capacity_gb': 0,
'QoS_support': 'False',
'reserved_percentage': 0,
},
],
'timestamp': None,
}
# test update HostState Record
fake_host.update_from_share_capability(capability)
self.assertEqual('1.0', fake_host.driver_version)
# Non-active pool stats has been removed
self.assertEqual(1, len(fake_host.pools))
self.assertRaises(KeyError, lambda: fake_host.pools['pool1'])
self.assertRaises(KeyError, lambda: fake_host.pools['pool2'])
self.assertEqual(10000, fake_host.pools['pool3'].total_capacity_gb)
self.assertEqual(10000, fake_host.pools['pool3'].free_capacity_gb)
def test_update_from_share_infinite_capability(self):
fake_host = host_manager.HostState('host1')
self.assertEqual(None, fake_host.free_capacity_gb)
share_capability = {'total_capacity_gb': 'infinite',
'free_capacity_gb': 'infinite',
'reserved_percentage': 0,
'timestamp': None}
fake_host = host_manager.HostState('host1#_pool0')
self.assertIsNone(fake_host.free_capacity_gb)
fake_host.update_from_share_capability(share_capability)
self.assertEqual('infinite', fake_host.total_capacity_gb)
self.assertEqual('infinite', fake_host.free_capacity_gb)
# Backend level stats remain uninitialized
self.assertEqual(fake_host.total_capacity_gb, 0)
self.assertIsNone(fake_host.free_capacity_gb)
# Pool stats has been updated
self.assertEqual(fake_host.pools['_pool0'].total_capacity_gb,
'infinite')
self.assertEqual(fake_host.pools['_pool0'].free_capacity_gb,
'infinite')
def test_update_from_share_unknown_capability(self):
fake_host = host_manager.HostState('host1')
self.assertEqual(None, fake_host.free_capacity_gb)
share_capability = {
'total_capacity_gb': 'infinite',
'free_capacity_gb': 'unknown',
'reserved_percentage': 0,
'timestamp': None
}
fake_host = host_manager.HostState('host1#_pool0')
self.assertIsNone(fake_host.free_capacity_gb)
fake_host.update_from_share_capability(share_capability)
self.assertEqual('infinite', fake_host.total_capacity_gb)
self.assertEqual('unknown', fake_host.free_capacity_gb)
# Backend level stats remain uninitialized
self.assertEqual(fake_host.total_capacity_gb, 0)
self.assertIsNone(fake_host.free_capacity_gb)
# Pool stats has been updated
self.assertEqual(fake_host.pools['_pool0'].total_capacity_gb,
'infinite')
self.assertEqual(fake_host.pools['_pool0'].free_capacity_gb,
'unknown')
def test_consume_from_share_capability(self):
fake_host = host_manager.HostState('host1')
share_size = 10
free_capacity = 100
fake_share = {'id': 'foo', 'size': share_size}
share_capability = {
'total_capacity_gb': free_capacity * 2,
'free_capacity_gb': free_capacity,
'reserved_percentage': 0,
'timestamp': None
}
fake_host = host_manager.PoolState('host1', share_capability, '_pool0')
fake_host.update_from_share_capability(share_capability)
fake_host.consume_from_share(fake_share)
self.assertEqual(free_capacity - share_size,
fake_host.free_capacity_gb)
self.assertEqual(fake_host.free_capacity_gb,
free_capacity - share_size)
def test_consume_from_share_infinite_capability(self):
fake_host = host_manager.HostState('host1')
share_size = 1000
fake_share = {'id': 'foo', 'size': share_size}
share_capability = {
'total_capacity_gb': 'infinite',
'free_capacity_gb': 'infinite',
'reserved_percentage': 0,
'timestamp': None
}
fake_host.update_from_share_capability(share_capability)
fake_host.consume_from_share(fake_share)
self.assertEqual('infinite', fake_host.total_capacity_gb)
self.assertEqual('infinite', fake_host.free_capacity_gb)
def test_consume_from_share_unknown_capability(self):
fake_host = host_manager.HostState('host1')
fake_host = host_manager.PoolState('host1', share_capability, '_pool0')
share_size = 1000
fake_share = {'id': 'foo', 'size': share_size}
fake_host.update_from_share_capability(share_capability)
fake_host.consume_from_share(fake_share)
self.assertEqual(fake_host.total_capacity_gb, 'infinite')
self.assertEqual(fake_host.free_capacity_gb, 'infinite')
def test_consume_from_share_unknown_capability(self):
share_capability = {
'total_capacity_gb': 'infinite',
'free_capacity_gb': 'unknown',
'reserved_percentage': 0,
'timestamp': None
}
fake_host = host_manager.PoolState('host1', share_capability, '_pool0')
share_size = 1000
fake_share = {'id': 'foo', 'size': share_size}
fake_host.update_from_share_capability(share_capability)
fake_host.consume_from_share(fake_share)
self.assertEqual('infinite', fake_host.total_capacity_gb)
self.assertEqual('unknown', fake_host.free_capacity_gb)
self.assertEqual(fake_host.total_capacity_gb, 'infinite')
self.assertEqual(fake_host.free_capacity_gb, 'unknown')
def test_repr(self):
capability = {
'share_backend_name': 'Backend1',
'vendor_name': 'OpenStack',
'driver_version': '1.0',
'storage_protocol': 'NFS_CIFS',
'total_capacity_gb': 20000,
'free_capacity_gb': 15000,
'allocated_capacity_gb': 5000,
'timestamp': None,
'reserved_percentage': 0,
}
fake_host = host_manager.HostState('host1')
fake_host.update_from_share_capability(capability)
result = fake_host.__repr__()
expected = "host: 'host1', free_capacity_gb: None, " \
"pools: {'Backend1': host: 'host1#Backend1', " \
"free_capacity_gb: 15000, pools: None}"
self.assertEqual(expected, result)
class PoolStateTestCase(test.TestCase):
"""Test case for HostState class."""
def test_update_from_share_capability(self):
share_capability = {
'total_capacity_gb': 1024,
'free_capacity_gb': 512,
'reserved_percentage': 0,
'timestamp': None,
'cap1': 'val1',
'cap2': 'val2'
}
fake_pool = host_manager.PoolState('host1', None, 'pool0')
self.assertIsNone(fake_pool.free_capacity_gb)
fake_pool.update_from_share_capability(share_capability)
self.assertEqual(fake_pool.host, 'host1#pool0')
self.assertEqual(fake_pool.pool_name, 'pool0')
self.assertEqual(fake_pool.total_capacity_gb, 1024)
self.assertEqual(fake_pool.free_capacity_gb, 512)
self.assertDictMatch(fake_pool.capabilities, share_capability)

View File

@ -43,7 +43,7 @@ class SchedulerRpcAPITestCase(test.TestCase):
target = {
"fanout": fanout,
"version": kwargs.pop('version', rpcapi.RPC_API_VERSION),
"version": kwargs.pop('version', '1.0'),
}
expected_msg = copy.deepcopy(kwargs)
@ -89,3 +89,9 @@ class SchedulerRpcAPITestCase(test.TestCase):
request_spec='fake_request_spec',
filter_properties='filter_properties',
version='1.0')
def test_get_pools(self):
self._test_scheduler_api('get_pools',
rpc_method='call',
filters=None,
version='1.1')

View File

@ -97,6 +97,16 @@ class SchedulerManagerTestCase(test.TestCase):
self.manager.driver.schedule_create_share.assert_called_once_with(
self.context, request_spec, {})
def test_get_pools(self):
"""Ensure get_pools exists and calls driver.get_pools."""
mock_get_pools = self.mock_object(self.manager.driver, 'get_pools',
mock.Mock(return_value='fake_pools'))
result = self.manager.get_pools(self.context, filters='fake_filters')
mock_get_pools.assert_called_once_with(self.context, 'fake_filters')
self.assertEqual('fake_pools', result)
class SchedulerTestCase(test.TestCase):
"""Test case for base scheduler driver class."""

View File

@ -168,6 +168,7 @@ class ShareManagerTestCase(test.TestCase):
'share_get_all_by_host',
mock.Mock(return_value=shares))
self.mock_object(self.share_manager.driver, 'ensure_share')
self.mock_object(self.share_manager, '_ensure_share_has_pool')
self.mock_object(self.share_manager, '_get_share_server',
mock.Mock(return_value=share_server))
self.mock_object(self.share_manager, 'publish_service_capabilities',
@ -188,6 +189,9 @@ class ShareManagerTestCase(test.TestCase):
utils.IsAMatcher(context.RequestContext))
self.share_manager.driver.check_for_setup_error.\
assert_called_once_with()
self.share_manager._ensure_share_has_pool.\
assert_called_once_with(utils.IsAMatcher(context.RequestContext),
shares[0])
self.share_manager._get_share_server.assert_called_once_with(
utils.IsAMatcher(context.RequestContext), shares[0])
self.share_manager.driver.ensure_share.assert_called_once_with(
@ -218,6 +222,7 @@ class ShareManagerTestCase(test.TestCase):
mock.Mock(return_value=shares))
self.mock_object(self.share_manager.driver, 'ensure_share',
mock.Mock(side_effect=raise_exception))
self.mock_object(self.share_manager, '_ensure_share_has_pool')
self.mock_object(self.share_manager, '_get_share_server',
mock.Mock(return_value=share_server))
self.mock_object(self.share_manager, 'publish_service_capabilities')
@ -233,6 +238,10 @@ class ShareManagerTestCase(test.TestCase):
self.share_manager.driver.do_setup.assert_called_once_with(
utils.IsAMatcher(context.RequestContext))
self.share_manager.driver.check_for_setup_error.assert_called_with()
self.share_manager._ensure_share_has_pool.assert_has_calls([
mock.call(utils.IsAMatcher(context.RequestContext), shares[0]),
mock.call(utils.IsAMatcher(context.RequestContext), shares[2]),
])
self.share_manager._get_share_server.assert_has_calls([
mock.call(utils.IsAMatcher(context.RequestContext), shares[0]),
mock.call(utils.IsAMatcher(context.RequestContext), shares[2]),
@ -269,6 +278,7 @@ class ShareManagerTestCase(test.TestCase):
'share_get_all_by_host',
mock.Mock(return_value=shares))
self.mock_object(self.share_manager.driver, 'ensure_share')
self.mock_object(self.share_manager, '_ensure_share_has_pool')
self.mock_object(self.share_manager, '_get_share_server',
mock.Mock(return_value=share_server))
self.mock_object(self.share_manager, 'publish_service_capabilities')
@ -289,6 +299,10 @@ class ShareManagerTestCase(test.TestCase):
self.share_manager.driver.do_setup.assert_called_once_with(
utils.IsAMatcher(context.RequestContext))
self.share_manager.driver.check_for_setup_error.assert_called_with()
self.share_manager._ensure_share_has_pool.assert_has_calls([
mock.call(utils.IsAMatcher(context.RequestContext), shares[0]),
mock.call(utils.IsAMatcher(context.RequestContext), shares[2]),
])
self.share_manager._get_share_server.assert_has_calls([
mock.call(utils.IsAMatcher(context.RequestContext), shares[0]),
mock.call(utils.IsAMatcher(context.RequestContext), shares[2]),
@ -960,3 +974,30 @@ class ShareManagerTestCase(test.TestCase):
def test_setup_server_exception_in_driver(self):
self.setup_server_raise_exception(detail_data_proper=True)
def test_ensure_share_has_pool_with_only_host(self):
fake_share = {'status': 'available', 'host': 'host1', 'id': 1}
host = self.share_manager._ensure_share_has_pool(context.
get_admin_context(),
fake_share)
self.assertIsNone(host)
def test_ensure_share_has_pool_with_full_pool_name(self):
fake_share = {'host': 'host1#pool0', 'id': 1,
'status': 'available'}
fake_share_expected_value = 'pool0'
host = self.share_manager._ensure_share_has_pool(context.
get_admin_context(),
fake_share)
self.assertEqual(fake_share_expected_value, host)
def test_ensure_share_has_pool_unable_to_fetch_share(self):
fake_share = {'host': 'host@backend', 'id': 1,
'status': 'available'}
with mock.patch.object(self.share_manager.driver, 'get_pool',
side_effect=Exception):
with mock.patch.object(manager, 'LOG') as mock_LOG:
self.share_manager._ensure_share_has_pool(context.
get_admin_context(),
fake_share)
self.assertEqual(1, mock_LOG.error.call_count)

View File

@ -0,0 +1,130 @@
# Copyright 2011 OpenStack Foundation
# Copyright (c) 2015 Rushil Chugh
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests For miscellaneous util methods used with share."""
from manila.share import utils as share_utils
from manila import test
class ShareUtilsTestCase(test.TestCase):
def test_extract_host_without_pool(self):
host = 'Host@Backend'
self.assertEqual(
'Host@Backend', share_utils.extract_host(host))
def test_extract_host_only_return_host(self):
host = 'Host@Backend'
self.assertEqual(
'Host', share_utils.extract_host(host, 'host'))
def test_extract_host_only_return_pool(self):
host = 'Host@Backend'
self.assertEqual(
None, share_utils.extract_host(host, 'pool'))
def test_extract_host_only_return_backend(self):
host = 'Host@Backend'
self.assertEqual(
'Host@Backend', share_utils.extract_host(host, 'backend'))
def test_extract_host_missing_backend_and_pool(self):
host = 'Host'
# Default level is 'backend'
self.assertEqual(
'Host', share_utils.extract_host(host))
def test_extract_host_missing_backend(self):
host = 'Host#Pool'
self.assertEqual(
'Host', share_utils.extract_host(host))
self.assertEqual(
'Host', share_utils.extract_host(host, 'host'))
def test_extract_host_missing_backend_only_return_backend(self):
host = 'Host#Pool'
self.assertEqual(
'Host', share_utils.extract_host(host, 'backend'))
def test_extract_host_missing_backend_only_return_pool(self):
host = 'Host#Pool'
self.assertEqual(
'Pool', share_utils.extract_host(host, 'pool'))
self.assertEqual(
'Pool', share_utils.extract_host(host, 'pool', True))
def test_extract_host_missing_pool(self):
host = 'Host@Backend'
self.assertEqual(
None, share_utils.extract_host(host, 'pool'))
def test_extract_host_missing_pool_use_default_pool(self):
host = 'Host@Backend'
self.assertEqual(
'_pool0', share_utils.extract_host(host, 'pool', True))
def test_extract_host_with_default_pool(self):
host = 'Host'
# Default_pool_name doesn't work for level other than 'pool'
self.assertEqual(
'Host', share_utils.extract_host(host, 'host', True))
self.assertEqual(
'Host', share_utils.extract_host(host, 'host', False))
self.assertEqual(
'Host', share_utils.extract_host(host, 'backend', True))
self.assertEqual(
'Host', share_utils.extract_host(host, 'backend', False))
def test_extract_host_with_pool(self):
host = 'Host@Backend#Pool'
self.assertEqual(
'Host@Backend', share_utils.extract_host(host))
self.assertEqual(
'Host', share_utils.extract_host(host, 'host'))
self.assertEqual(
'Host@Backend', share_utils.extract_host(host, 'backend'),)
self.assertEqual(
'Pool', share_utils.extract_host(host, 'pool'))
self.assertEqual(
'Pool', share_utils.extract_host(host, 'pool', True))
def test_append_host_with_host_and_pool(self):
host = 'Host'
pool = 'Pool'
expected = 'Host#Pool'
self.assertEqual(expected,
share_utils.append_host(host, pool))
def test_append_host_with_host(self):
host = 'Host'
pool = None
expected = 'Host'
self.assertEqual(expected,
share_utils.append_host(host, pool))
def test_append_host_with_pool(self):
host = None
pool = 'pool'
expected = None
self.assertEqual(expected,
share_utils.append_host(host, pool))
def test_append_host_with_no_values(self):
host = None
pool = None
expected = None
self.assertEqual(expected,
share_utils.append_host(host, pool))

View File

@ -52,6 +52,7 @@ class QuotaIntegrationTestCase(test.TestCase):
share['project_id'] = self.project_id
share['size'] = size
share['status'] = 'available'
share['host'] = 'fake_host'
return db.share_create(self.context, share)
def _create_snapshot(self, share):
@ -60,6 +61,7 @@ class QuotaIntegrationTestCase(test.TestCase):
snapshot['project_id'] = self.project_id
snapshot['share_id'] = share['id']
snapshot['share_size'] = share['size']
snapshot['host'] = share['host']
snapshot['status'] = 'available'
return db.share_snapshot_create(self.context, snapshot)