ganesha: NFS-Ganesha instrumentation

Introduce the ganesha share driver helper module
which provides the GaneshaNASHelper class from which
share drivers can derive NFS-Ganesha backed protocol
helpers.

Some utility functions are also added to ease
integration.

Partially implements blueprint gateway-mediated-with-ganesha

Change-Id: I8683ea5eb43d7a8eaf0dfa6af3791782d32b944a
Csaba Henk 2014-08-30 14:50:13 +02:00
parent 28f311c9ad
commit 559b478e85
13 changed files with 1533 additions and 18 deletions

@@ -3,6 +3,7 @@
[Filters]
# manila/share/drivers/glusterfs.py: 'mkdir', '%s'
# manila/share/drivers/ganesha/manager.py: 'mkdir', '-p', '%s'
mkdir: CommandFilter, /usr/bin/mkdir, root
# manila/share/drivers/glusterfs.py: 'rm', '-rf', '%s'
@@ -49,6 +50,7 @@ rsync: CommandFilter, /usr/bin/rsync, root
exportfs: CommandFilter, /usr/sbin/exportfs, root
# Ganesha commands
# manila/share/drivers/ibm/ganesha_utils.py: 'mv', '%s', '%s'
# manila/share/drivers/ganesha/manager.py: 'mv', '%s', '%s'
mv: CommandFilter, /bin/mv, root
# manila/share/drivers/ibm/ganesha_utils.py: 'cp', '%s', '%s'
cp: CommandFilter, /bin/cp, root
@@ -60,3 +62,18 @@ ssh: CommandFilter, /usr/bin/ssh, root
chmod: CommandFilter, /bin/chmod, root
# manila/share/drivers/ibm/ganesha_utils.py: 'service', '%s', 'restart'
service: CommandFilter, /sbin/service, root
# manila/share/drivers/ganesha/manager.py: 'mktemp', '-p', '%s', '-t', '%s'
mktemp: CommandFilter, /bin/mktemp, root
# manila/share/drivers/ganesha/manager.py:
shcat: RegExpFilter, /bin/sh, root, sh, -c, cat > /.*
# manila/share/drivers/ganesha/manager.py:
dbus-addexport: RegExpFilter, /usr/bin/dbus-send, root, dbus-send, --print-reply, --system, --dest=org\.ganesha\.nfsd, /org/ganesha/nfsd/ExportMgr, org\.ganesha\.nfsd\.exportmgr\.(Add|Remove)Export, .*, .*
# manila/share/drivers/ganesha/manager.py:
dbus-removeexport: RegExpFilter, /usr/bin/dbus-send, root, dbus-send, --print-reply, --system, --dest=org\.ganesha\.nfsd, /org/ganesha/nfsd/ExportMgr, org\.ganesha\.nfsd\.exportmgr\.(Add|Remove)Export, .*
# manila/share/drivers/ganesha/manager.py:
rmconf: RegExpFilter, /bin/sh, root, sh, -c, rm /.*/\*\.conf$

@@ -464,3 +464,16 @@ class GPFSException(ManilaException):
class GPFSGaneshaException(ManilaException):
message = _("GPFS Ganesha exception occurred.")
class GaneshaCommandFailure(ProcessExecutionError):
_description = _("Ganesha management command failed.")
def __init__(self, **kw):
if 'description' not in kw:
kw['description'] = self._description
super(GaneshaCommandFailure, self).__init__(**kw)
class InvalidSqliteDB(Invalid):
message = _("Invalid Sqlite database.")

@@ -95,6 +95,7 @@ _global_opt_lists = [
manila.scheduler.weights.capacity.capacity_weight_opts,
manila.service.service_opts,
manila.share.api.share_api_opts,
manila.share.driver.ganesha_opts,
manila.share.driver.share_opts,
manila.share.driver.ssh_opts,
manila.share.drivers.emc.driver.EMC_NAS_OPTS,

@@ -77,9 +77,40 @@ ssh_opts = [
help='Maximum number of connections in the SSH pool.'),
]
ganesha_opts = [
cfg.StrOpt('ganesha_config_dir',
default='/etc/ganesha',
help='Directory where Ganesha config files are stored.'),
cfg.StrOpt('ganesha_config_path',
default='$ganesha_config_dir/ganesha.conf',
help='Path to main Ganesha config file.'),
cfg.StrOpt('ganesha_nfs_export_options',
default='maxread = 65536, prefread = 65536',
help='Options to use when exporting a share using ganesha '
'NFS server. Note that these defaults can be overridden '
'when a share is created by passing metadata with key '
'name export_options. Also note the complete set of '
'default ganesha export options is specified in '
'ganesha_utils. (GPFS only.)'),
cfg.StrOpt('ganesha_service_name',
default='ganesha.nfsd',
help='Name of the ganesha nfs service.'),
cfg.StrOpt('ganesha_db_path',
default='$state_path/manila-ganesha.db',
help='Location of Ganesha database file. '
'(Ganesha module only.)'),
cfg.StrOpt('ganesha_export_dir',
default='$ganesha_config_dir/export.d',
help='Path to directory containing Ganesha export configuration. (Ganesha module only.)'),
cfg.StrOpt('ganesha_export_template_dir',
default='/etc/manila/ganesha-export-templ.d',
help='Path to directory containing Ganesha export block templates. (Ganesha module only.)'),
]
CONF = cfg.CONF
CONF.register_opts(share_opts)
CONF.register_opts(ssh_opts)
CONF.register_opts(ganesha_opts)
class ExecuteMixin(object):
@@ -111,6 +142,14 @@ class ExecuteMixin(object):
time.sleep(tries ** 2)
class GaneshaMixin(object):
"""Augment derived classes with Ganesha configuration."""
def init_ganesha_mixin(self, *args, **kwargs):
if self.configuration:
self.configuration.append_config_values(ganesha_opts)
class ShareDriver(object):
"""Class defines interface of NAS driver."""
@@ -129,6 +168,9 @@ class ShareDriver(object):
if hasattr(self, 'init_execute_mixin'):
# Instance with 'ExecuteMixin'
self.init_execute_mixin(*args, **kwargs) # pylint: disable=E1101
if hasattr(self, 'init_ganesha_mixin'):
# Instance with 'GaneshaMixin'
self.init_ganesha_mixin(*args, **kwargs) # pylint: disable=E1101
self.network_api = network.API(config_group_name=network_config_group)
def _validate_driver_mode(self, mode):

@@ -0,0 +1,141 @@
# Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import errno
import os
import re
from oslo.config import cfg
import six
from manila import exception
from manila.i18n import _LI
from manila.openstack.common import log as logging
from manila.share.drivers.ganesha import manager as ganesha_manager
from manila.share.drivers.ganesha import utils as ganesha_utils
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class NASHelperBase(object):
"""Interface to work with share."""
def __init__(self, execute, config, **kwargs):
self.configuration = config
self._execute = execute
def init_helper(self):
"""Initializes protocol-specific NAS drivers."""
@abc.abstractmethod
def allow_access(self, base_path, share, access):
"""Allow access to the host."""
@abc.abstractmethod
def deny_access(self, base_path, share, access):
"""Deny access to the host."""
class GaneshaNASHelper(NASHelperBase):
"""Execute commands relating to Shares."""
def __init__(self, execute, config, tag='<no name>', **kwargs):
super(GaneshaNASHelper, self).__init__(execute, config, **kwargs)
self.tag = tag
confrx = re.compile('\.(conf|json)\Z')
def _load_conf_dir(self, dirpath, must_exist=True):
"""Load Ganesha config files in dirpath in alphabetic order."""
try:
dirlist = os.listdir(dirpath)
except OSError as e:
if e.errno != errno.ENOENT or must_exist:
raise
dirlist = []
LOG.info(_LI('Loading Ganesha config from %s.'), dirpath)
conf_files = filter(self.confrx.search, dirlist)
conf_files.sort()
export_template = {}
for conf_file in conf_files:
with open(os.path.join(dirpath, conf_file)) as f:
ganesha_utils.patch(
export_template,
ganesha_manager.parseconf(f.read()))
return export_template
def init_helper(self):
"""Initializes protocol-specific NAS drivers."""
self.ganesha = ganesha_manager.GaneshaManager(
self._execute,
self.tag,
ganesha_config_path=self.configuration.ganesha_config_path,
ganesha_export_dir=self.configuration.ganesha_export_dir,
ganesha_db_path=self.configuration.ganesha_db_path,
ganesha_service_name=self.configuration.ganesha_service_name)
system_export_template = self._load_conf_dir(
self.configuration.ganesha_export_template_dir,
must_exist=False)
if system_export_template:
self.export_template = system_export_template
else:
self.export_template = self._default_config_hook()
def _default_config_hook(self):
"""The default export block.
Subclass this to add FSAL-specific defaults.
Suggested approach: take the return value of the superclass'
method, patch it with a dict containing your defaults, and
return the result. However, you can also provide your
defaults from scratch with no regard to the superclass.
"""
return self._load_conf_dir(ganesha_utils.path_from(__file__, "conf"))
def _fsal_hook(self, base_path, share, access):
"""Subclass this to create FSAL block."""
return {}
def allow_access(self, base_path, share, access):
"""Allow access to the share."""
if access['access_type'] != 'ip':
raise exception.InvalidShareAccess('Only IP access type allowed')
cf = {}
accid = access['id']
name = share['name']
export_name = "%s--%s" % (name, accid)
ganesha_utils.patch(cf, self.export_template, {
'EXPORT': {
'Export_Id': self.ganesha.get_export_id(),
'Path': os.path.join(base_path, name),
'Pseudo': os.path.join(base_path, export_name),
'Tag': accid,
'CLIENT': {
'Clients': access['access_to']
},
'FSAL': self._fsal_hook(base_path, share, access)
}
})
self.ganesha.add_export(export_name, cf)
def deny_access(self, base_path, share, access):
"""Deny access to the share."""
self.ganesha.remove_export("%s--%s" % (share['name'], access['id']))
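The docstrings above spell out the intended extension points: a share driver derives its protocol helper from GaneshaNASHelper and overrides _default_config_hook() and _fsal_hook(). Below is a minimal sketch of such a subclass; the FSAL name "FAKEFS", its Volume parameter and the class name are hypothetical, for illustration only.

# Hypothetical derived helper; "FAKEFS" and its Volume parameter are made up.
from manila.share.drivers import ganesha
from manila.share.drivers.ganesha import utils as ganesha_utils


class FakeFSGaneshaNASHelper(ganesha.GaneshaNASHelper):
    """NFS-Ganesha helper for a fictional FAKEFS backend."""

    def _default_config_hook(self):
        # Take the superclass defaults and patch in FSAL-specific ones,
        # as the _default_config_hook docstring suggests.
        addl_conf = self._load_conf_dir(
            ganesha_utils.path_from(__file__, 'conf'), must_exist=False)
        return ganesha_utils.patch(
            super(FakeFSGaneshaNASHelper, self)._default_config_hook(),
            addl_conf)

    def _fsal_hook(self, base_path, share, access):
        # The FSAL block that allow_access() splices into the export.
        return {'Name': 'FAKEFS', 'Volume': share['name']}

A driver would instantiate such a helper with an execute callable and its configuration object, call init_helper() once the backend is reachable, and delegate allow_access()/deny_access() to it.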

@@ -0,0 +1,48 @@
# This is a Ganesha config template.
# Syntactically, a valid Ganesha config
# file, but some values in it are stubs.
# Fields that have stub values are managed
# by Manila; the stubs are of two kinds:
# - @config:
# value will be taken from Manila config
# - @runtime:
# value will be determined at runtime
# The user is free to set any Ganesha parameters
# that are not reserved for Manila via
# stubbing.
EXPORT {
# Each EXPORT must have a unique Export_Id.
Export_Id = @runtime;
# The directory in the exported file system this export
# is rooted on.
Path = @runtime;
# FSAL, Ganesha's module component
FSAL {
# FSAL name
Name = @config;
}
# Path of export in the NFSv4 pseudo filesystem
Pseudo = @runtime;
# RPC security flavor, one of none, sys, krb5{,i,p}
SecType = sys;
# Alternative export identifier for NFSv3
Tag = @runtime;
# Client specification
CLIENT {
# Comma separated list of clients
Clients = @runtime;
# Access type, one of RW, RO, MDONLY, MDONLY_RO, NONE
Access_Type = RW;
}
# User id squashing, one of None, Root, All
Squash = None;
}

@@ -0,0 +1,344 @@
# Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import pipes
import re
import sys
from oslo.serialization import jsonutils
import six
from manila import exception
from manila.i18n import _
from manila.i18n import _LE
from manila.openstack.common import log as logging
from manila.share.drivers.ganesha import utils as ganesha_utils
from manila import utils
LOG = logging.getLogger(__name__)
IWIDTH = 4
def _conf2json(conf):
"""Convert Ganesha config to JSON."""
# tokenize config string
token_list = [six.StringIO()]
state = {
'in_quote': False,
'in_comment': False,
'escape': False,
}
cbk = []
for char in conf:
if state['in_quote']:
if not state['escape']:
if char == '"':
state['in_quote'] = False
cbk.append(lambda: token_list.append(six.StringIO()))
elif char == '\\':
cbk.append(lambda: state.update({'escape': True}))
else:
if char == "#":
state['in_comment'] = True
if state['in_comment']:
if char == "\n":
state['in_comment'] = False
else:
if char == '"':
token_list.append(six.StringIO())
state['in_quote'] = True
state['escape'] = False
if not state['in_comment']:
token_list[-1].write(char)
while cbk:
cbk.pop(0)()
if state['in_quote']:
raise RuntimeError("Unterminated quoted string")
# jsonify tokens
js_token_list = ["{"]
for tok in token_list:
tok = tok.getvalue()
if tok[0] == '"':
js_token_list.append(tok)
continue
for pat, s in [
# add omitted "=" signs to block openings
('([^=\s])\s*{', '\\1={'),
# delete trailing semicolons in blocks
(';\s*}', '}'),
# add omitted semicolons after blocks
('}\s*([^}\s])', '};\\1'),
# separate syntactically significant characters
('([;{}=])', ' \\1 ')]:
tok = re.sub(pat, s, tok)
# map tokens to JSON equivalents
for word in tok.split():
if word == "=":
word = ":"
elif word == ";":
word = ','
elif (word in ['{', '}'] or
re.search('\A-?[1-9]\d*(\.\d+)?\Z', word)):
pass
else:
word = jsonutils.dumps(word)
js_token_list.append(word)
js_token_list.append("}")
# group quoted strings
token_grp_list = []
for tok in js_token_list:
if tok[0] == '"':
if not (token_grp_list and isinstance(token_grp_list[-1], list)):
token_grp_list.append([])
token_grp_list[-1].append(tok)
else:
token_grp_list.append(tok)
# process quoted string groups by joining them
js_token_list2 = []
for x in token_grp_list:
if isinstance(x, list):
x = ''.join(['"'] + [tok[1:-1] for tok in x] + ['"'])
js_token_list2.append(x)
return ''.join(js_token_list2)
def _dump_to_conf(confdict, out=sys.stdout, indent=0):
"""Output confdict in Ganesha config format."""
if isinstance(confdict, dict):
for k, v in six.iteritems(confdict):
if v is None:
continue
out.write(' ' * (indent * IWIDTH) + k + ' ')
if isinstance(v, dict):
out.write("{\n")
_dump_to_conf(v, out, indent + 1)
out.write(' ' * (indent * IWIDTH) + '}')
else:
out.write('= ')
_dump_to_conf(v, out, indent)
out.write(';')
out.write('\n')
else:
dj = jsonutils.dumps(confdict)
if confdict == dj[1:-1]:
out.write(confdict)
else:
out.write(dj)
def parseconf(conf):
"""Parse Ganesha config.
Both native format and JSON are supported.
"""
try:
# allow config to be specified in JSON --
# for sake of people who might feel Ganesha config foreign.
d = jsonutils.loads(conf)
except ValueError:
d = jsonutils.loads(_conf2json(conf))
return d
def mkconf(confdict):
"""Create Ganesha config string from confdict."""
s = six.StringIO()
_dump_to_conf(confdict, s)
return s.getvalue()
class GaneshaManager(object):
"""Ganesha instrumentation class."""
def __init__(self, execute, tag, **kwargs):
self.confrx = re.compile('\.conf\Z')
self.ganesha_config_path = kwargs['ganesha_config_path']
self.tag = tag
def _execute(*args, **kwargs):
msg = kwargs.pop('message', args[0])
makelog = kwargs.pop('makelog', True)
try:
return execute(*args, **kwargs)
except exception.ProcessExecutionError as e:
if makelog:
LOG.error(
_LE("Error while executing management command on "
"Ganesha node %(tag)s: %(msg)s."),
{'tag': tag, 'msg': msg})
raise exception.GaneshaCommandFailure(
stdout=e.stdout, stderr=e.stderr, exit_code=e.exit_code,
cmd=e.cmd)
self.execute = _execute
self.ganesha_export_dir = kwargs['ganesha_export_dir']
self.execute('mkdir', '-p', self.ganesha_export_dir)
self.ganesha_db_path = kwargs['ganesha_db_path']
self.execute('mkdir', '-p', os.path.dirname(self.ganesha_db_path))
self.ganesha_service = kwargs['ganesha_service_name']
# Here we are to make sure that an SQLite database of the
# required scheme exists at self.ganesha_db_path.
# The following command gets us there -- provided the file
# does not yet exist (otherwise it just fails). However,
# we don't care about this condition, we just execute the
# command unconditionally (ignoring failure). Instead we
# directly query the db right after, to check its validity.
self.execute("sqlite3", self.ganesha_db_path,
'create table ganesha(key varchar(20) primary key, '
'value int); insert into ganesha values("exportid", '
'100);', run_as_root=False, check_exit_code=False)
self.get_export_id(bump=False)
# Starting from an empty state. State will be rebuilt in a later
# stage of service initialization.
self.reset_exports()
self.restart_service()
def _getpath(self, name):
"""Get the path of config file for name."""
return os.path.join(self.ganesha_export_dir, name + ".conf")
def _write_file(self, path, data):
"""Write data to path atomically."""
dirpath, fname = (getattr(os.path, q + "name")(path) for q in
("dir", "base"))
tmpf = self.execute('mktemp', '-p', dirpath, "-t",
fname + ".XXXXXX")[0][:-1]
self.execute('sh', '-c', 'cat > ' + pipes.quote(tmpf),
process_input=data, message='writing ' + tmpf)
self.execute('mv', tmpf, path)
def _write_conf_file(self, name, data):
"""Write data to config file for name atomically."""
path = self._getpath(name)
self._write_file(path, data)
return path
def _mkindex(self):
"""Generate the index file for current exports."""
@utils.synchronized("ganesha-index-" + self.tag, external=True)
def _mkindex():
files = filter(lambda f: self.confrx.search(f) and
f != "INDEX.conf",
self.execute('ls', self.ganesha_export_dir,
run_as_root=False)[0].split("\n"))
index = "".join(map(lambda f: "%include " + os.path.join(
self.ganesha_export_dir, f) + "\n", files))
self._write_conf_file("INDEX", index)
_mkindex()
def _read_export_file(self, name):
"""Return the dict of the export identified by name."""
return parseconf(self.execute("cat", self._getpath(name),
message='reading export ' + name)[0])
def _write_export_file(self, name, confdict):
"""Write confdict to the export file of name."""
for k, v in ganesha_utils.walk(confdict):
# values in the export block template that need to be
# filled in by Manila are prefixed with '@'
if isinstance(v, six.string_types) and v[0] == '@':
msg = _("Incomplete export block: value %(val)s of attribute "
"%(key)s is a stub.") % {'key': k, 'val': v}
raise exception.InvalidParameterValue(err=msg)
return self._write_conf_file(name, mkconf(confdict))
def _rm_export_file(self, name):
"""Remove export file of name."""
self.execute("rm", self._getpath(name))
def _dbus_send_ganesha(self, method, *args, **kwargs):
"""Send a message to Ganesha via dbus."""
service = kwargs.pop("service", "exportmgr")
self.execute("dbus-send", "--print-reply", "--system",
"--dest=org.ganesha.nfsd", "/org/ganesha/nfsd/ExportMgr",
"org.ganesha.nfsd.%s.%s" % (service, method), *args,
message='dbus call %s.%s' % (service, method), **kwargs)
def _remove_export_dbus(self, xid):
"""Remove an export from Ganesha runtime with given export id."""
self._dbus_send_ganesha("RemoveExport", "uint16:%d" % xid)
def add_export(self, name, confdict):
"""Add an export to Ganesha specified by confdict."""
xid = confdict["EXPORT"]["Export_Id"]
undos = []
_mkindex_called = False
try:
path = self._write_export_file(name, confdict)
undos.append(lambda: self._rm_export_file(name))
self._dbus_send_ganesha("AddExport", "string:" + path,
"string:EXPORT(Export_Id=%d)" % xid)
undos.append(lambda: self._remove_export_dbus(xid))
_mkindex_called = True
self._mkindex()
except Exception:
for u in undos:
u()
if not _mkindex_called:
self._mkindex()
raise
def remove_export(self, name):
"""Remove an export from Ganesha."""
try:
confdict = self._read_export_file(name)
self._remove_export_dbus(confdict["EXPORT"]["Export_Id"])
finally:
self._rm_export_file(name)
self._mkindex()
def get_export_id(self, bump=True):
"""Get a new export id."""
# XXX overflowing the export id (16 bit unsigned integer)
# is not handled
if bump:
bumpcode = 'update ganesha set value = value + 1;'
else:
bumpcode = ''
out = self.execute(
"sqlite3", self.ganesha_db_path,
bumpcode + 'select * from ganesha where key = "exportid";',
run_as_root=False)[0]
match = re.search('\Aexportid\|(\d+)$', out)
if not match:
LOG.error(_LE("Invalid export database on "
"Ganesha node %(tag)s: %(db)s."),
{'tag': self.tag, 'db': self.ganesha_db_path})
raise exception.InvalidSqliteDB()
return int(match.groups()[0])
def restart_service(self):
"""Restart the Ganesha service."""
self.execute("service", self.ganesha_service, "restart")
def reset_exports(self):
"""Delete all export files."""
self.execute('sh', '-c',
'rm %s/*.conf' % pipes.quote(self.ganesha_export_dir))
self._mkindex()
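The parseconf()/mkconf() pair above forms the boundary between Ganesha's native configuration syntax and the dict representation used by GaneshaManager and the helper classes: parseconf() accepts either native syntax or JSON, and mkconf() renders a dict back to native syntax. An illustrative round trip follows, with made-up export values mirroring the test fixtures further below.

from manila.share.drivers.ganesha import manager

conf_text = """EXPORT {
    Export_Id = 101;
    CLIENT {
        Clients = 10.0.0.1;
    }
}"""

# Native syntax parses into a nested dict...
confdict = manager.parseconf(conf_text)
assert confdict['EXPORT']['CLIENT']['Clients'] == '10.0.0.1'

# ...and the same data given as JSON parses to the same dict.
assert manager.parseconf(
    '{"EXPORT": {"Export_Id": 101, "CLIENT": {"Clients": "10.0.0.1"}}}'
) == confdict

# mkconf() renders the dict back to a semantically equivalent config block.
print(manager.mkconf(confdict))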

@@ -0,0 +1,76 @@
# Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import pipes
from oslo_concurrency import processutils
import six
from manila import utils
def patch(base, *overlays):
"""Recursive dictionary patching."""
for ovl in overlays:
for k, v in six.iteritems(ovl):
if isinstance(v, dict) and isinstance(base.get(k), dict):
patch(base[k], v)
else:
base[k] = v
return base
def walk(dct):
"""Recursive iteration over dictionary."""
for k, v in six.iteritems(dct):
if isinstance(v, dict):
for w in walk(v):
yield w
else:
yield k, v
class RootExecutor(object):
"""Execute wrapper defaulting to root exection."""
def __init__(self, execute=utils.execute):
self.execute = execute
def __call__(self, *args, **kwargs):
exkwargs = {"run_as_root": True}
exkwargs.update(kwargs)
return self.execute(*args, **exkwargs)
class SSHExecutor(object):
"""Callable encapsulating exec through ssh."""
def __init__(self, *args, **kwargs):
self.pool = utils.SSHPool(*args, **kwargs)
def __call__(self, *args, **kwargs):
cmd = ' '.join(pipes.quote(a) for a in args)
ssh = self.pool.get()
try:
ret = processutils.ssh_execute(ssh, cmd, **kwargs)
finally:
self.pool.put(ssh)
return ret
def path_from(fpath, *rpath):
"""Return the join of the dir of fpath and rpath in absolute form."""
return os.path.join(os.path.abspath(os.path.dirname(fpath)), *rpath)
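These small utilities carry much of the module's plumbing: patch() merges nested export templates (system templates over defaults, runtime values over both), walk() flattens a nested dict so that leftover '@' stubs can be detected before an export is written, and RootExecutor/SSHExecutor supply the execute callable for a local or a remote Ganesha node. A brief illustration of the dict helpers, with made-up values:

from manila.share.drivers.ganesha import utils as ganesha_utils

base = {'EXPORT': {'Squash': 'None', 'CLIENT': {'Clients': '@runtime'}}}
overlay = {'EXPORT': {'CLIENT': {'Clients': '10.0.0.1'}}}

# patch() updates nested dicts in place and returns the patched base.
ganesha_utils.patch(base, overlay)
assert base['EXPORT'] == {'Squash': 'None', 'CLIENT': {'Clients': '10.0.0.1'}}

# walk() yields the leaf (key, value) pairs of a nested dict; this is how
# _write_export_file() spots unfilled '@...' stub values.
assert sorted(ganesha_utils.walk(base)) == [
    ('Clients', '10.0.0.1'), ('Squash', 'None')]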

@@ -101,22 +101,6 @@ gpfs_share_opts = [
'NFS server. Note that these defaults can be overridden '
'when a share is created by passing metadata with key '
'name export_options.')),
cfg.StrOpt('gnfs_export_options',
default=('maxread = 65536, prefread = 65536'),
help=('Options to use when exporting a share using ganesha '
'NFS server. Note that these defaults can be overridden '
'when a share is created by passing metadata with key '
'name export_options. Also note the complete set of '
'default ganesha export options is specified in '
'ganesha_utils.')),
cfg.StrOpt('ganesha_config_path',
default='/etc/ganesha/ganesha_exports.conf',
help=('Path to ganesha export config file. The config file '
'may also contain non-export configuration data but it '
'must be placed before the EXPORT clauses.')),
cfg.StrOpt('ganesha_service_name',
default='ganesha.nfsd',
help=('Name of the ganesha nfs service.')),
]
@@ -124,7 +108,9 @@ CONF = cfg.CONF
CONF.register_opts(gpfs_share_opts)
class GPFSShareDriver(driver.ExecuteMixin, driver.ShareDriver):
class GPFSShareDriver(driver.ExecuteMixin, driver.GaneshaMixin,
driver.ShareDriver):
"""GPFS Share Driver.
Executes commands relating to Shares.
@@ -696,7 +682,9 @@ class GNFSHelper(NASHelperBase):
def __init__(self, execute, config_object):
super(GNFSHelper, self).__init__(execute, config_object)
self.default_export_options = dict()
for m in AVPATTERN.finditer(self.configuration.gnfs_export_options):
for m in AVPATTERN.finditer(
self.configuration.ganesha_nfs_export_options
):
self.default_export_options[m.group('attr')] = m.group('val')
def _get_export_options(self, share):

@@ -0,0 +1,518 @@
# Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import re
import mock
from oslo.serialization import jsonutils
import six
from manila import exception
from manila.share.drivers.ganesha import manager
from manila import test
from manila import utils
test_export_id = 101
test_name = 'fakefile'
test_path = '/fakedir0/export.d/fakefile.conf'
test_ganesha_cnf = """EXPORT {
Export_Id = 101;
CLIENT {
Clients = ip1;
}
}"""
test_dict_unicode = {
u'EXPORT': {
u'Export_Id': 101,
u'CLIENT': {u'Clients': u"ip1"}
}
}
test_dict_str = {
'EXPORT': {
'Export_Id': 101,
'CLIENT': {'Clients': "ip1"}
}
}
manager_fake_kwargs = {
'ganesha_config_path': '/fakedir0/fakeconfig',
'ganesha_db_path': '/fakedir1/fake.db',
'ganesha_export_dir': '/fakedir0/export.d',
'ganesha_service_name': 'ganesha.fakeservice'
}
class GaneshaConfigTests(test.TestCase):
"""Tests Ganesha config file format convertor functions."""
ref_ganesha_cnf = """EXPORT {
CLIENT {
Clients = ip1;
}
Export_Id = 101;
}"""
@staticmethod
def conf_mangle(*confs):
"""A "mangler" for the conf format.
Its purpose is to transform conf data so that semantically
equivalent confs yield identical results. Besides this objective
criterion, we seek a good trade-off between the following
requirements:
- low lossiness;
- low code complexity.
"""
def _conf_mangle(conf):
# split to expressions by the delimiter ";"
# (braces are forced to be treated as expressions
# by sandwiching them in ";"-s)
conf = re.sub('[{}]', ';\g<0>;', conf).split(';')
# whitespace-split expressions into tokens
# (equality is forced to be treated as a token by
# sandwiching it in space)
conf = map(lambda l: l.replace("=", " = ").split(), conf)
# get rid of by-product empty lists (derived from superfluous
# ";"-s that might have crept in due to "sandwiching")
conf = filter(None, conf)
# handle the non-deterministic order of confs
conf.sort()
return conf
return (_conf_mangle(conf) for conf in confs)
def test_conf2json(self):
test_ganesha_cnf_with_comment = """EXPORT {
# fake_export_block
Export_Id = 101;
CLIENT {
Clients = ip1;
}
}"""
ret = manager._conf2json(test_ganesha_cnf_with_comment)
self.assertEqual(test_dict_unicode, jsonutils.loads(ret))
def test_parseconf_ganesha_cnf_input(self):
ret = manager.parseconf(test_ganesha_cnf)
self.assertEqual(test_dict_unicode, ret)
def test_parseconf_json_input(self):
ret = manager.parseconf(jsonutils.dumps(test_dict_str))
self.assertEqual(test_dict_unicode, ret)
def test_dump_to_conf(self):
ganesha_cnf = six.StringIO()
manager._dump_to_conf(test_dict_str, ganesha_cnf)
self.assertEqual(*self.conf_mangle(self.ref_ganesha_cnf,
ganesha_cnf.getvalue()))
def test_mkconf(self):
ganesha_cnf = manager.mkconf(test_dict_str)
self.assertEqual(*self.conf_mangle(self.ref_ganesha_cnf,
ganesha_cnf))
class GaneshaManagerTestCase(test.TestCase):
"""Tests GaneshaManager."""
def setUp(self):
super(GaneshaManagerTestCase, self).setUp()
self._execute = mock.Mock(return_value=('', ''))
with contextlib.nested(
mock.patch.object(manager.GaneshaManager, 'get_export_id',
return_value=100),
mock.patch.object(manager.GaneshaManager, 'reset_exports'),
mock.patch.object(manager.GaneshaManager, 'restart_service')
) as (self.mock_get_export_id, self.mock_reset_exports,
self.mock_restart_service):
self._manager = manager.GaneshaManager(
self._execute, 'faketag', **manager_fake_kwargs)
self.stubs.Set(utils, 'synchronized',
mock.Mock(return_value=lambda f: f))
def test_init(self):
self.stubs.Set(self._manager, 'reset_exports', mock.Mock())
self.stubs.Set(self._manager, 'restart_service', mock.Mock())
self.assertEqual('/fakedir0/fakeconfig',
self._manager.ganesha_config_path)
self.assertEqual('faketag', self._manager.tag)
self.assertEqual('/fakedir0/export.d',
self._manager.ganesha_export_dir)
self.assertEqual('/fakedir1/fake.db', self._manager.ganesha_db_path)
self.assertEqual('ganesha.fakeservice', self._manager.ganesha_service)
self.assertEqual(
[mock.call('mkdir', '-p', self._manager.ganesha_export_dir),
mock.call('mkdir', '-p', '/fakedir1'),
mock.call('sqlite3', self._manager.ganesha_db_path,
'create table ganesha(key varchar(20) primary key, '
'value int); insert into ganesha values("exportid", '
'100);', run_as_root=False, check_exit_code=False)],
self._execute.call_args_list)
self.mock_get_export_id.assert_called_once_with(bump=False)
self.mock_reset_exports.assert_called_once_with()
self.mock_restart_service.assert_called_once_with()
def test_init_execute_error_log_message(self):
fake_args = ('foo', 'bar')
def raise_exception(*args, **kwargs):
if args == fake_args:
raise exception.GaneshaCommandFailure()
test_execute = mock.Mock(side_effect=raise_exception)
self.stubs.Set(manager.LOG, 'error', mock.Mock())
with contextlib.nested(
mock.patch.object(manager.GaneshaManager, 'get_export_id',
return_value=100),
mock.patch.object(manager.GaneshaManager, 'reset_exports'),
mock.patch.object(manager.GaneshaManager, 'restart_service')
) as (self.mock_get_export_id, self.mock_reset_exports,
self.mock_restart_service):
test_manager = manager.GaneshaManager(
test_execute, 'faketag', **manager_fake_kwargs)
self.assertRaises(
exception.GaneshaCommandFailure,
test_manager.execute,
*fake_args, message='fakemsg')
manager.LOG.error.assert_called_once_with(
mock.ANY, {'tag': 'faketag', 'msg': 'fakemsg'})
def test_init_execute_error_no_log_message(self):
fake_args = ('foo', 'bar')
def raise_exception(*args, **kwargs):
if args == fake_args:
raise exception.GaneshaCommandFailure()
test_execute = mock.Mock(side_effect=raise_exception)
self.stubs.Set(manager.LOG, 'error', mock.Mock())
with contextlib.nested(
mock.patch.object(manager.GaneshaManager, 'get_export_id',
return_value=100),
mock.patch.object(manager.GaneshaManager, 'reset_exports'),
mock.patch.object(manager.GaneshaManager, 'restart_service')
) as (self.mock_get_export_id, self.mock_reset_exports,
self.mock_restart_service):
test_manager = manager.GaneshaManager(
test_execute, 'faketag', **manager_fake_kwargs)
self.assertRaises(
exception.GaneshaCommandFailure,
test_manager.execute,
*fake_args, message='fakemsg', makelog=False)
self.assertFalse(manager.LOG.error.called)
def test_ganesha_export_dir(self):
self.assertEqual(
'/fakedir0/export.d', self._manager.ganesha_export_dir)
def test_getpath(self):
self.assertEqual(
'/fakedir0/export.d/fakefile.conf',
self._manager._getpath('fakefile'))
def test_write_file(self):
test_data = 'fakedata'
self.stubs.Set(manager.pipes, 'quote',
mock.Mock(return_value='fakefile.conf.RANDOM'))
test_args = [
('mktemp', '-p', '/fakedir0/export.d', '-t',
'fakefile.conf.XXXXXX'),
('sh', '-c', 'cat > fakefile.conf.RANDOM'),
('mv', 'fakefile.conf.RANDOM', test_path)]
test_kwargs = {
'process_input': test_data,
'message': 'writing fakefile.conf.RANDOM'
}
def return_tmpfile(*args, **kwargs):
if args == test_args[0]:
return ('fakefile.conf.RANDOM\n', '')
self.stubs.Set(self._manager, 'execute',
mock.Mock(side_effect=return_tmpfile))
self._manager._write_file(test_path, test_data)
self._manager.execute.assert_has_calls([
mock.call(*test_args[0]),
mock.call(*test_args[1], **test_kwargs),
mock.call(*test_args[2])])
manager.pipes.quote.assert_called_once_with('fakefile.conf.RANDOM')
def test_write_conf_file(self):
test_data = 'fakedata'
self.stubs.Set(self._manager, '_getpath',
mock.Mock(return_value=test_path))
self.stubs.Set(self._manager, '_write_file', mock.Mock())
ret = self._manager._write_conf_file(test_name, test_data)
self.assertEqual(test_path, ret)
self._manager._getpath.assert_called_once_with(test_name)
self._manager._write_file.assert_called_once_with(
test_path, test_data)
def test_mkindex(self):
test_ls_output = 'INDEX.conf\nfakefile.conf\nfakefile.txt'
test_index = '%include /fakedir0/export.d/fakefile.conf\n'
self.stubs.Set(self._manager, 'execute',
mock.Mock(return_value=(test_ls_output, '')))
self.stubs.Set(self._manager, '_write_conf_file', mock.Mock())
ret = self._manager._mkindex()
self._manager.execute.assert_called_once_with(
'ls', '/fakedir0/export.d', run_as_root=False)
self._manager._write_conf_file.assert_called_once_with(
'INDEX', test_index)
self.assertEqual(None, ret)
def test_read_export_file(self):
test_args = ('cat', test_path)
test_kwargs = {'message': 'reading export fakefile'}
self.stubs.Set(self._manager, '_getpath',
mock.Mock(return_value=test_path))
self.stubs.Set(self._manager, 'execute',
mock.Mock(return_value=(test_ganesha_cnf,)))
self.stubs.Set(manager, 'parseconf',
mock.Mock(return_value=test_dict_unicode))
ret = self._manager._read_export_file(test_name)
self._manager._getpath.assert_called_once_with(test_name)
self._manager.execute.assert_called_once_with(
*test_args, **test_kwargs)
manager.parseconf.assert_called_once_with(test_ganesha_cnf)
self.assertEqual(test_dict_unicode, ret)
def test_write_export_file(self):
self.stubs.Set(manager, 'mkconf',
mock.Mock(return_value=test_ganesha_cnf))
self.stubs.Set(self._manager, '_write_conf_file',
mock.Mock(return_value=test_path))
ret = self._manager._write_export_file(test_name, test_dict_str)
manager.mkconf.assert_called_once_with(test_dict_str)
self._manager._write_conf_file.assert_called_once_with(
test_name, test_ganesha_cnf)
self.assertEqual(test_path, ret)
def test_write_export_file_error_incomplete_export_block(self):
test_errordict = {
u'EXPORT': {
u'Export_Id': '@config',
u'CLIENT': {u'Clients': u"'ip1','ip2'"}
}
}
self.stubs.Set(manager, 'mkconf',
mock.Mock(return_value=test_ganesha_cnf))
self.stubs.Set(self._manager, '_write_conf_file',
mock.Mock(return_value=test_path))
self.assertRaises(exception.InvalidParameterValue,
self._manager._write_export_file,
test_name, test_errordict)
self.assertFalse(manager.mkconf.called)
self.assertFalse(self._manager._write_conf_file.called)
def test_rm_export_file(self):
self.stubs.Set(self._manager, 'execute',
mock.Mock(return_value=('', '')))
self.stubs.Set(self._manager, '_getpath',
mock.Mock(return_value=test_path))
ret = self._manager._rm_export_file(test_name)
self._manager._getpath.assert_called_once_with(test_name)
self._manager.execute.assert_called_once_with('rm', test_path)
self.assertEqual(None, ret)
def test_dbus_send_ganesha(self):
test_args = ('arg1', 'arg2')
test_kwargs = {'key': 'value'}
self.stubs.Set(self._manager, 'execute',
mock.Mock(return_value=('', '')))
ret = self._manager._dbus_send_ganesha('fakemethod', *test_args,
**test_kwargs)
self._manager.execute.assert_called_once_with(
'dbus-send', '--print-reply', '--system',
'--dest=org.ganesha.nfsd', '/org/ganesha/nfsd/ExportMgr',
'org.ganesha.nfsd.exportmgr.fakemethod',
*test_args, message='dbus call exportmgr.fakemethod',
**test_kwargs)
self.assertEqual(None, ret)
def test_remove_export_dbus(self):
self.stubs.Set(self._manager, '_dbus_send_ganesha',
mock.Mock())
ret = self._manager._remove_export_dbus(test_export_id)
self._manager._dbus_send_ganesha.assert_called_once_with(
'RemoveExport', 'uint16:101')
self.assertEqual(None, ret)
def test_add_export(self):
self.stubs.Set(self._manager, '_write_export_file',
mock.Mock(return_value=test_path))
self.stubs.Set(self._manager, '_dbus_send_ganesha', mock.Mock())
self.stubs.Set(self._manager, '_mkindex', mock.Mock())
ret = self._manager.add_export(test_name, test_dict_str)
self._manager._write_export_file.assert_called_once_with(
test_name, test_dict_str)
self._manager._dbus_send_ganesha.assert_called_once_with(
'AddExport', 'string:' + test_path,
'string:EXPORT(Export_Id=101)')
self._manager._mkindex.assert_called_once_with()
self.assertEqual(None, ret)
def test_add_export_error_during_mkindex(self):
self.stubs.Set(self._manager, '_write_export_file',
mock.Mock(return_value=test_path))
self.stubs.Set(self._manager, '_dbus_send_ganesha', mock.Mock())
self.stubs.Set(self._manager, '_mkindex',
mock.Mock(side_effect=exception.GaneshaCommandFailure))
self.stubs.Set(self._manager, '_rm_export_file', mock.Mock())
self.stubs.Set(self._manager, '_remove_export_dbus', mock.Mock())
self.assertRaises(exception.GaneshaCommandFailure,
self._manager.add_export, test_name, test_dict_str)
self._manager._write_export_file.assert_called_once_with(
test_name, test_dict_str)
self._manager._dbus_send_ganesha.assert_called_once_with(
'AddExport', 'string:' + test_path,
'string:EXPORT(Export_Id=101)')
self._manager._mkindex.assert_called_once_with()
self._manager._rm_export_file.assert_called_once_with(test_name)
self._manager._remove_export_dbus.assert_called_once_with(
test_export_id)
def test_add_export_error_during_write_export_file(self):
self.stubs.Set(self._manager, '_write_export_file',
mock.Mock(side_effect=exception.GaneshaCommandFailure))
self.stubs.Set(self._manager, '_dbus_send_ganesha', mock.Mock())
self.stubs.Set(self._manager, '_mkindex', mock.Mock())
self.stubs.Set(self._manager, '_rm_export_file', mock.Mock())
self.stubs.Set(self._manager, '_remove_export_dbus', mock.Mock())
self.assertRaises(exception.GaneshaCommandFailure,
self._manager.add_export, test_name, test_dict_str)
self._manager._write_export_file.assert_called_once_with(
test_name, test_dict_str)
self.assertFalse(self._manager._dbus_send_ganesha.called)
self._manager._mkindex.assert_called_once_with()
self.assertFalse(self._manager._rm_export_file.called)
self.assertFalse(self._manager._remove_export_dbus.called)
def test_add_export_error_during_dbus_send_ganesha(self):
self.stubs.Set(self._manager, '_write_export_file',
mock.Mock(return_value=test_path))
self.stubs.Set(self._manager, '_dbus_send_ganesha',
mock.Mock(side_effect=exception.GaneshaCommandFailure))
self.stubs.Set(self._manager, '_mkindex',
mock.Mock())
self.stubs.Set(self._manager, '_rm_export_file', mock.Mock())
self.stubs.Set(self._manager, '_remove_export_dbus', mock.Mock())
self.assertRaises(exception.GaneshaCommandFailure,
self._manager.add_export, test_name, test_dict_str)
self._manager._write_export_file.assert_called_once_with(
test_name, test_dict_str)
self._manager._dbus_send_ganesha.assert_called_once_with(
'AddExport', 'string:' + test_path,
'string:EXPORT(Export_Id=101)')
self._manager._rm_export_file.assert_called_once_with(test_name)
self._manager._mkindex.assert_called_once_with()
self.assertFalse(self._manager._remove_export_dbus.called)
def test_remove_export(self):
self.stubs.Set(self._manager, '_read_export_file',
mock.Mock(return_value=test_dict_unicode))
methods = ('_remove_export_dbus', '_rm_export_file', '_mkindex')
for method in methods:
self.stubs.Set(self._manager, method, mock.Mock())
ret = self._manager.remove_export(test_name)
self._manager._read_export_file.assert_called_once_with(test_name)
self._manager._remove_export_dbus.assert_called_once_with(
test_dict_unicode['EXPORT']['Export_Id'])
self._manager._rm_export_file.assert_called_once_with(test_name)
self._manager._mkindex.assert_called_once_with()
self.assertEqual(None, ret)
def test_remove_export_error_during_read_export_file(self):
self.stubs.Set(self._manager, '_read_export_file',
mock.Mock(side_effect=exception.GaneshaCommandFailure))
methods = ('_remove_export_dbus', '_rm_export_file', '_mkindex')
for method in methods:
self.stubs.Set(self._manager, method, mock.Mock())
self.assertRaises(exception.GaneshaCommandFailure,
self._manager.remove_export, test_name)
self._manager._read_export_file.assert_called_once_with(test_name)
self.assertFalse(self._manager._remove_export_dbus.called)
self._manager._rm_export_file.assert_called_once_with(test_name)
self._manager._mkindex.assert_called_once_with()
def test_remove_export_error_during_remove_export_dbus(self):
self.stubs.Set(self._manager, '_read_export_file',
mock.Mock(return_value=test_dict_unicode))
self.stubs.Set(self._manager, '_remove_export_dbus',
mock.Mock(side_effect=exception.GaneshaCommandFailure))
methods = ('_rm_export_file', '_mkindex')
for method in methods:
self.stubs.Set(self._manager, method, mock.Mock())
self.assertRaises(exception.GaneshaCommandFailure,
self._manager.remove_export, test_name)
self._manager._read_export_file.assert_called_once_with(test_name)
self._manager._remove_export_dbus.assert_called_once_with(
test_dict_unicode['EXPORT']['Export_Id'])
self._manager._rm_export_file.assert_called_once_with(test_name)
self._manager._mkindex.assert_called_once_with()
def test_get_export_id(self):
self.stubs.Set(self._manager, 'execute',
mock.Mock(return_value=('exportid|101', '')))
ret = self._manager.get_export_id()
self._manager.execute.assert_called_once_with(
'sqlite3', self._manager.ganesha_db_path,
'update ganesha set value = value + 1;'
'select * from ganesha where key = "exportid";',
run_as_root=False)
self.assertEqual(101, ret)
def test_get_export_id_nobump(self):
self.stubs.Set(self._manager, 'execute',
mock.Mock(return_value=('exportid|101', '')))
ret = self._manager.get_export_id(bump=False)
self._manager.execute.assert_called_once_with(
'sqlite3', self._manager.ganesha_db_path,
'select * from ganesha where key = "exportid";',
run_as_root=False)
self.assertEqual(101, ret)
def test_get_export_id_error_invalid_export_db(self):
self.stubs.Set(self._manager, 'execute',
mock.Mock(return_value=('invalid', '')))
self.stubs.Set(manager.LOG, 'error', mock.Mock())
self.assertRaises(exception.InvalidSqliteDB,
self._manager.get_export_id)
manager.LOG.error.assert_called_once_with(
mock.ANY, mock.ANY)
self._manager.execute.assert_called_once_with(
'sqlite3', self._manager.ganesha_db_path,
'update ganesha set value = value + 1;'
'select * from ganesha where key = "exportid";',
run_as_root=False)
def test_restart_service(self):
self.stubs.Set(self._manager, 'execute', mock.Mock())
ret = self._manager.restart_service()
self._manager.execute.assert_called_once_with(
'service', 'ganesha.fakeservice', 'restart')
self.assertEqual(None, ret)
def test_reset_exports(self):
self.stubs.Set(self._manager, 'execute', mock.Mock())
self.stubs.Set(self._manager, '_mkindex', mock.Mock())
ret = self._manager.reset_exports()
self._manager.execute.assert_called_once_with(
'sh', '-c', 'rm /fakedir0/export.d/*.conf')
self._manager._mkindex.assert_called_once_with()
self.assertEqual(None, ret)

@@ -0,0 +1,51 @@
# Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from manila.share.drivers.ganesha import utils
from manila import test
patch_test_dict1 = {'a': 1, 'b': {'c': 2}, 'd': 3, 'e': 4}
patch_test_dict2 = {'a': 11, 'b': {'f': 5}, 'd': {'g': 6}}
patch_test_dict3 = {'b': {'c': 22, 'h': {'i': 7}}, 'e': None}
patch_test_dict_result = {
'a': 11,
'b': {'c': 22, 'f': 5, 'h': {'i': 7}},
'd': {'g': 6},
'e': None,
}
walk_test_dict = {'a': {'b': {'c': {'d': {'e': 'f'}}}}}
walk_test_list = [('e', 'f')]
class GaneshaUtilsTests(test.TestCase):
"""Tests Ganesha utility functions."""
def test_patch(self):
ret = utils.patch(patch_test_dict1, patch_test_dict2, patch_test_dict3)
self.assertEqual(patch_test_dict_result, ret)
def test_walk(self):
ret = [elem for elem in utils.walk(walk_test_dict)]
self.assertEqual(walk_test_list, ret)
def test_path_from(self):
self.stubs.Set(os.path, 'abspath',
lambda path: os.path.join('/foo/bar', path))
ret = utils.path_from('baz.py', '../quux', 'tic/tac/toe')
self.assertEqual('/foo/quux/tic/tac/toe', os.path.normpath(ret))

@@ -0,0 +1,276 @@
# Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import errno
import os
import mock
from oslo.config import cfg
from manila import exception
from manila.share import configuration as config
from manila.share.drivers import ganesha
from manila import test
from manila.tests.db import fakes as db_fakes
CONF = cfg.CONF
def fake_access(**kwargs):
access = {
'id': 'fakeaccessid',
'access_type': 'ip',
'access_to': '10.0.0.1'
}
access.update(kwargs)
return db_fakes.FakeModel(access)
def fake_share(**kwargs):
share = {
'id': 'fakeid',
'name': 'fakename',
'size': 1,
'share_proto': 'NFS',
'export_location': '127.0.0.1:/mnt/nfs/testvol',
}
share.update(kwargs)
return db_fakes.FakeModel(share)
fake_basepath = '/fakepath'
fake_export_name = 'fakename--fakeaccessid'
fake_output_template = {
'EXPORT': {
'Export_Id': 101,
'Path': '/fakepath/fakename',
'Pseudo': '/fakepath/fakename--fakeaccessid',
'Tag': 'fakeaccessid',
'CLIENT': {
'Clients': '10.0.0.1'
},
'FSAL': 'fakefsal'
}
}
class GaneshaNASHelperTestCase(test.TestCase):
"""Tests GaneshaNASHElper."""
def setUp(self):
super(GaneshaNASHelperTestCase, self).setUp()
CONF.set_default('ganesha_config_path', '/fakedir0/fakeconfig')
CONF.set_default('ganesha_db_path', '/fakedir1/fake.db')
CONF.set_default('ganesha_export_dir', '/fakedir0/export.d')
CONF.set_default('ganesha_export_template_dir',
'/fakedir2/faketempl.d')
CONF.set_default('ganesha_service_name', 'ganesha.fakeservice')
self._execute = mock.Mock(return_value=('', ''))
self.fake_conf = config.Configuration(None)
self.fake_conf_dir_path = '/fakedir0/exports.d'
self._helper = ganesha.GaneshaNASHelper(
self._execute, self.fake_conf, tag='faketag')
self._helper.ganesha = mock.Mock()
self._helper.export_template = {'key': 'value'}
self.share = fake_share()
self.access = fake_access()
def test_load_conf_dir(self):
fake_template1 = {'key': 'value1'}
fake_template2 = {'key': 'value2'}
fake_ls_dir = ['fakefile0.conf', 'fakefile1.json', 'fakefile2.txt']
mock_ganesha_utils_patch = mock.Mock()
def fake_patch_run(tmpl1, tmpl2):
mock_ganesha_utils_patch(
copy.deepcopy(tmpl1), copy.deepcopy(tmpl2))
tmpl1.update(tmpl2)
self.stubs.Set(ganesha.os, 'listdir',
mock.Mock(return_value=fake_ls_dir))
self.stubs.Set(ganesha.LOG, 'info', mock.Mock())
self.stubs.Set(ganesha.ganesha_manager, 'parseconf',
mock.Mock(side_effect=[fake_template1,
fake_template2]))
self.stubs.Set(ganesha.ganesha_utils, 'patch',
mock.Mock(side_effect=fake_patch_run))
with mock.patch('six.moves.builtins.open',
mock.mock_open()) as mockopen:
mockopen().read.side_effect = ['fakeconf0', 'fakeconf1']
ret = self._helper._load_conf_dir(self.fake_conf_dir_path)
ganesha.os.listdir.assert_called_once_with(
self.fake_conf_dir_path)
ganesha.LOG.info.assert_called_once_with(
mock.ANY, self.fake_conf_dir_path)
mockopen.assert_has_calls([
mock.call('/fakedir0/exports.d/fakefile0.conf'),
mock.call('/fakedir0/exports.d/fakefile1.json')],
any_order=True)
ganesha.ganesha_manager.parseconf.assert_has_calls([
mock.call('fakeconf0'), mock.call('fakeconf1')])
mock_ganesha_utils_patch.assert_has_calls([
mock.call({}, fake_template1),
mock.call(fake_template1, fake_template2)])
self.assertEqual(fake_template2, ret)
def test_load_conf_dir_no_conf_dir_must_exist_false(self):
self.stubs.Set(
ganesha.os, 'listdir',
mock.Mock(side_effect=OSError(errno.ENOENT,
os.strerror(errno.ENOENT))))
self.stubs.Set(ganesha.LOG, 'info', mock.Mock())
self.stubs.Set(ganesha.ganesha_manager, 'parseconf', mock.Mock())
self.stubs.Set(ganesha.ganesha_utils, 'patch', mock.Mock())
with mock.patch('six.moves.builtins.open',
mock.mock_open(read_data='fakeconf')) as mockopen:
ret = self._helper._load_conf_dir(self.fake_conf_dir_path,
must_exist=False)
ganesha.os.listdir.assert_called_once_with(
self.fake_conf_dir_path)
ganesha.LOG.info.assert_called_once_with(
mock.ANY, self.fake_conf_dir_path)
self.assertFalse(mockopen.called)
self.assertFalse(ganesha.ganesha_manager.parseconf.called)
self.assertFalse(ganesha.ganesha_utils.patch.called)
self.assertEqual({}, ret)
def test_load_conf_dir_error_no_conf_dir_must_exist_true(self):
self.stubs.Set(
ganesha.os, 'listdir',
mock.Mock(side_effect=OSError(errno.ENOENT,
os.strerror(errno.ENOENT))))
self.assertRaises(OSError, self._helper._load_conf_dir,
self.fake_conf_dir_path)
ganesha.os.listdir.assert_called_once_with(self.fake_conf_dir_path)
def test_load_conf_dir_error_conf_dir_present_must_exist_false(self):
self.stubs.Set(
ganesha.os, 'listdir',
mock.Mock(side_effect=OSError(errno.EACCES,
os.strerror(errno.EACCES))))
self.assertRaises(OSError, self._helper._load_conf_dir,
self.fake_conf_dir_path, must_exist=False)
ganesha.os.listdir.assert_called_once_with(self.fake_conf_dir_path)
def test_load_conf_dir_error(self):
self.stubs.Set(
ganesha.os, 'listdir',
mock.Mock(side_effect=RuntimeError('fake error')))
self.assertRaises(RuntimeError, self._helper._load_conf_dir,
self.fake_conf_dir_path)
ganesha.os.listdir.assert_called_once_with(self.fake_conf_dir_path)
def test_init_helper(self):
mock_template = mock.Mock()
mock_ganesha_manager = mock.Mock()
self.stubs.Set(ganesha.ganesha_manager, 'GaneshaManager',
mock.Mock(return_value=mock_ganesha_manager))
self.stubs.Set(self._helper, '_load_conf_dir',
mock.Mock(return_value=mock_template))
self.stubs.Set(self._helper, '_default_config_hook', mock.Mock())
ret = self._helper.init_helper()
ganesha.ganesha_manager.GaneshaManager.assert_called_once_with(
self._execute, 'faketag',
ganesha_config_path='/fakedir0/fakeconfig',
ganesha_export_dir='/fakedir0/export.d',
ganesha_db_path='/fakedir1/fake.db',
ganesha_service_name='ganesha.fakeservice')
self._helper._load_conf_dir.assert_called_once_with(
'/fakedir2/faketempl.d', must_exist=False)
self.assertFalse(self._helper._default_config_hook.called)
self.assertEqual(mock_ganesha_manager, self._helper.ganesha)
self.assertEqual(mock_template, self._helper.export_template)
self.assertEqual(None, ret)
def test_init_helper_conf_dir_empty(self):
mock_template = mock.Mock()
mock_ganesha_manager = mock.Mock()
self.stubs.Set(ganesha.ganesha_manager, 'GaneshaManager',
mock.Mock(return_value=mock_ganesha_manager))
self.stubs.Set(self._helper, '_load_conf_dir',
mock.Mock(return_value={}))
self.stubs.Set(self._helper, '_default_config_hook',
mock.Mock(return_value=mock_template))
ret = self._helper.init_helper()
ganesha.ganesha_manager.GaneshaManager.assert_called_once_with(
self._execute, 'faketag',
ganesha_config_path='/fakedir0/fakeconfig',
ganesha_export_dir='/fakedir0/export.d',
ganesha_db_path='/fakedir1/fake.db',
ganesha_service_name='ganesha.fakeservice')
self._helper._load_conf_dir.assert_called_once_with(
'/fakedir2/faketempl.d', must_exist=False)
self._helper._default_config_hook.assert_called_once_with()
self.assertEqual(mock_ganesha_manager, self._helper.ganesha)
self.assertEqual(mock_template, self._helper.export_template)
self.assertEqual(None, ret)
def test_default_config_hook(self):
fake_template = {'key': 'value'}
self.stubs.Set(ganesha.ganesha_utils, 'path_from',
mock.Mock(return_value='/fakedir3/fakeconfdir'))
self.stubs.Set(self._helper, '_load_conf_dir',
mock.Mock(return_value=fake_template))
ret = self._helper._default_config_hook()
ganesha.ganesha_utils.path_from.assert_called_once_with(
ganesha.__file__, 'conf')
self._helper._load_conf_dir.assert_called_once_with(
'/fakedir3/fakeconfdir')
self.assertEqual(fake_template, ret)
def test_fsal_hook(self):
ret = self._helper._fsal_hook('/fakepath', self.share, self.access)
self.assertEqual({}, ret)
def test_allow_access(self):
mock_ganesha_utils_patch = mock.Mock()
def fake_patch_run(tmpl1, tmpl2, tmpl3):
mock_ganesha_utils_patch(copy.deepcopy(tmpl1), tmpl2, tmpl3)
tmpl1.update(tmpl3)
self.stubs.Set(self._helper.ganesha, 'get_export_id',
mock.Mock(return_value=101))
self.stubs.Set(self._helper, '_fsal_hook',
mock.Mock(return_value='fakefsal'))
self.stubs.Set(ganesha.ganesha_utils, 'patch',
mock.Mock(side_effect=fake_patch_run))
ret = self._helper.allow_access(fake_basepath, self.share,
self.access)
self._helper.ganesha.get_export_id.assert_called_once_with()
self._helper._fsal_hook.assert_called_once_with(
fake_basepath, self.share, self.access)
mock_ganesha_utils_patch.assert_called_once_with(
{}, self._helper.export_template, fake_output_template)
self._helper._fsal_hook.assert_called_once_with(
fake_basepath, self.share, self.access)
self._helper.ganesha.add_export.assert_called_once_with(
fake_export_name, fake_output_template)
self.assertEqual(None, ret)
def test_allow_access_error_invalid_share(self):
access = fake_access(access_type='notip')
self.assertRaises(exception.InvalidShareAccess,
self._helper.allow_access, '/fakepath',
self.share, access)
def test_deny_access(self):
ret = self._helper.deny_access('/fakepath', self.share, self.access)
self._helper.ganesha.remove_export.assert_called_once_with(
'fakename--fakeaccessid')
self.assertEqual(None, ret)