Merge "Cleaning up code in oslo-incubator"
This commit is contained in:
commit
c870213685
|
@ -1,22 +0,0 @@
|
|||
# Copyright 2011 OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# This ensures the openstack namespace is defined
try:
    # Preferred: setuptools-style namespace package declaration.
    import pkg_resources
    pkg_resources.declare_namespace(__name__)
except ImportError:
    # Fallback when setuptools is unavailable: extend __path__ so other
    # distributions can also contribute modules to this package.
    import pkgutil
    __path__ = pkgutil.extend_path(__path__, __name__)
|
|
@ -1,45 +0,0 @@
|
|||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""oslo.i18n integration module.
|
||||
|
||||
See http://docs.openstack.org/developer/oslo.i18n/usage.html
|
||||
|
||||
"""
|
||||
|
||||
try:
|
||||
import oslo_i18n
|
||||
|
||||
# NOTE(dhellmann): This reference to o-s-l-o will be replaced by the
|
||||
# application name when this module is synced into the separate
|
||||
# repository. It is OK to have more than one translation function
|
||||
# using the same domain, since there will still only be one message
|
||||
# catalog.
|
||||
_translators = oslo_i18n.TranslatorFactory(domain='oslo')
|
||||
|
||||
# The primary translation function using the well-known name "_"
|
||||
_ = _translators.primary
|
||||
|
||||
# Translators for log levels.
|
||||
#
|
||||
# The abbreviated names are meant to reflect the usual use of a short
|
||||
# name like '_'. The "L" is for "log" and the other letter comes from
|
||||
# the level.
|
||||
_LI = _translators.log_info
|
||||
_LW = _translators.log_warning
|
||||
_LE = _translators.log_error
|
||||
_LC = _translators.log_critical
|
||||
except ImportError:
|
||||
# NOTE(dims): Support for cases where a project wants to use
|
||||
# code from oslo-incubator, but is not ready to be internationalized
|
||||
# (like tempest)
|
||||
_ = _LI = _LW = _LE = _LC = lambda x: x
|
|
@ -1,166 +0,0 @@
|
|||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import collections
|
||||
|
||||
from oslo_concurrency import lockutils
|
||||
from oslo_utils import timeutils
|
||||
|
||||
from openstack.common.cache import backends
|
||||
|
||||
|
||||
class MemoryBackend(backends.BaseCache):
    """In-process cache backend storing entries in a plain dict.

    Entries are stored as ``key -> (expires_at, value)`` in ``self._cache``;
    ``self._keys_expires`` maps an expiration timestamp to the set of keys
    expiring at that timestamp (keys with ttl == 0 are never recorded there).
    Per-key thread-safety is provided via ``lockutils.lock(key)``.
    """

    def __init__(self, parsed_url, options=None):
        super(MemoryBackend, self).__init__(parsed_url, options)
        # _clear() initializes both the value store and the expiry index.
        self._clear()

    def _set_unlocked(self, key, value, ttl=0):
        # Store (expires_at, value); expires_at == 0 means "never expires".
        expires_at = 0
        if ttl != 0:
            expires_at = timeutils.utcnow_ts() + ttl

        self._cache[key] = (expires_at, value)

        if expires_at:
            self._keys_expires[expires_at].add(key)

    def _set(self, key, value, ttl=0, not_exists=False):
        with lockutils.lock(key):

            # NOTE(flaper87): This is needed just in `set`
            # calls, hence it's not in `_set_unlocked`
            if not_exists and self._exists_unlocked(key):
                return False

            self._set_unlocked(key, value, ttl)
            return True

    def _get_unlocked(self, key, default=None):
        """Return (expires_at, value), or (0, default) on miss/expiry."""
        now = timeutils.utcnow_ts()

        try:
            timeout, value = self._cache[key]
        except KeyError:
            return (0, default)

        if timeout and now >= timeout:

            # NOTE(flaper87): Record expired,
            # remove it from the cache but catch
            # KeyError and ValueError in case
            # _purge_expired removed this key already.
            try:
                del self._cache[key]
            except KeyError:
                pass

            try:
                # NOTE(flaper87): Keys with ttl == 0
                # don't exist in the _keys_expires dict
                self._keys_expires[timeout].remove(key)
            except (KeyError, ValueError):
                pass

            return (0, default)

        return (timeout, value)

    def _get(self, key, default=None):
        with lockutils.lock(key):
            # [1] drops the expires_at half of the tuple.
            return self._get_unlocked(key, default)[1]

    def _exists_unlocked(self, key):
        now = timeutils.utcnow_ts()
        try:
            timeout = self._cache[key][0]
            # Alive if it never expires (timeout falsy) or hasn't expired yet.
            return not timeout or now <= timeout
        except KeyError:
            return False

    def __contains__(self, key):
        with lockutils.lock(key):
            return self._exists_unlocked(key)

    def _incr_append(self, key, other):
        """Shared implementation for increment and append: read, combine
        with `+`, write back.  Returns None when the key is absent/expired.
        """
        with lockutils.lock(key):
            timeout, value = self._get_unlocked(key)

            if value is None:
                return None

            # NOTE(review): `now - timeout` looks inverted — the remaining
            # ttl would normally be `timeout - now`, and for keys without a
            # timeout (timeout == 0) this yields the full current timestamp.
            # Confirm intended behavior before relying on ttl preservation
            # across incr/append.
            ttl = timeutils.utcnow_ts() - timeout
            new_value = value + other
            self._set_unlocked(key, new_value, ttl)
            return new_value

    def _incr(self, key, delta):
        if not isinstance(delta, int):
            raise TypeError('delta must be an int instance')

        return self._incr_append(key, delta)

    def _append_tail(self, key, tail):
        # List concatenation reuses the same read-combine-write path as incr.
        return self._incr_append(key, tail)

    def _purge_expired(self):
        """Removes expired keys from the cache."""

        now = timeutils.utcnow_ts()
        for timeout in sorted(self._keys_expires.keys()):

            # NOTE(flaper87): If timeout is greater
            # than `now`, stop the iteration, remaining
            # keys have not expired.
            if now < timeout:
                break

            # NOTE(flaper87): Unset every key in
            # this set from the cache if its timeout
            # is equal to `timeout`. (The key might
            # have been updated)
            for subkey in self._keys_expires.pop(timeout):
                try:
                    if self._cache[subkey][0] == timeout:
                        del self._cache[subkey]
                except KeyError:
                    continue

    def __delitem__(self, key):
        self._purge_expired()

        # NOTE(flaper87): Delete the key. Using pop
        # since it could have been deleted already
        value = self._cache.pop(key, None)

        if value:
            try:
                # NOTE(flaper87): Keys with ttl == 0
                # don't exist in the _keys_expires dict
                self._keys_expires[value[0]].remove(key)
            except (KeyError, ValueError):
                pass

    def _clear(self):
        # Fresh value store and expiry index (timestamp -> set of keys).
        self._cache = {}
        self._keys_expires = collections.defaultdict(set)

    def _get_many(self, keys, default):
        # Defer to the base class's one-get-per-key implementation.
        return super(MemoryBackend, self)._get_many(keys, default)

    def _set_many(self, data, ttl=0):
        return super(MemoryBackend, self)._set_many(data, ttl)

    def _unset_many(self, keys):
        return super(MemoryBackend, self)._unset_many(keys)
|
|
@ -1,250 +0,0 @@
|
|||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
|
||||
# Sentinel distinguishing "no default supplied" from an explicit None.
NOTSET = object()


@six.add_metaclass(abc.ABCMeta)
class BaseCache(object):
    """Base Cache Abstraction

    :params parsed_url: Parsed url object.
    :params options: A dictionary with configuration parameters
        for the cache. For example:

        - default_ttl: An integer defining the default ttl for keys.
    """

    def __init__(self, parsed_url, options=None):
        self._parsed_url = parsed_url
        self._options = options or {}
        # default_ttl of 0 means entries never expire by default.
        self._default_ttl = int(self._options.get('default_ttl', 0))

    @abc.abstractmethod
    def _set(self, key, value, ttl, not_exists=False):
        """Implementations of this class have to override this method."""

    def set(self, key, value, ttl, not_exists=False):
        """Sets or updates a cache entry

        .. note:: Thread-safety is required and has to be guaranteed by the
                  backend implementation.

        :params key: Item key as string.
        :type key: `unicode string`
        :params value: Value to assign to the key. This can be anything that
                       is handled by the current backend.
        :params ttl: Key's timeout in seconds. 0 means no timeout.
        :type ttl: int
        :params not_exists: If True, the key will be set if it doesn't exist.
                            Otherwise, it'll always be set.
        :type not_exists: bool

        :returns: True if the operation succeeds, False otherwise.
        """
        if ttl is None:
            ttl = self._default_ttl

        return self._set(key, value, ttl, not_exists)

    def __setitem__(self, key, value):
        self.set(key, value, self._default_ttl)

    def setdefault(self, key, value):
        """Sets the key value to `value` if it doesn't exist

        :params key: Item key as string.
        :type key: `unicode string`
        :params value: Value to assign to the key. This can be anything that
                       is handled by the current backend.
        """
        try:
            return self[key]
        except KeyError:
            self[key] = value
            return value

    @abc.abstractmethod
    def _get(self, key, default):
        """Implementations of this class have to override this method."""

    def get(self, key, default=None):
        """Gets one item from the cache

        .. note:: Thread-safety is required and it has to be guaranteed
                  by the backend implementation.

        :params key: Key for the item to retrieve from the cache.
        :params default: The default value to return.

        :returns: `key`'s value in the cache if it exists, otherwise
                  `default` should be returned.
        """
        return self._get(key, default)

    def __getitem__(self, key):
        # Use the NOTSET sentinel so a cached None is not mistaken for a miss.
        value = self.get(key, NOTSET)

        if value is NOTSET:
            raise KeyError

        return value

    @abc.abstractmethod
    def __delitem__(self, key):
        """Removes an item from cache.

        .. note:: Thread-safety is required and it has to be guaranteed by
                  the backend implementation.

        :params key: The key to remove.

        :returns: The key value if there's one
        """

    @abc.abstractmethod
    def _clear(self):
        """Implementations of this class have to override this method."""

    def clear(self):
        """Removes all items from the cache.

        .. note:: Thread-safety is required and it has to be guaranteed by
                  the backend implementation.
        """
        return self._clear()

    @abc.abstractmethod
    def _incr(self, key, delta):
        """Implementations of this class have to override this method."""

    def incr(self, key, delta=1):
        """Increments the value for a key

        :params key: The key for the value to be incremented
        :params delta: Number of units by which to increment the value.
                       Pass a negative number to decrement the value.

        :returns: The new value
        """
        return self._incr(key, delta)

    @abc.abstractmethod
    def _append_tail(self, key, tail):
        """Implementations of this class have to override this method."""

    def append_tail(self, key, tail):
        """Appends `tail` to `key`'s value.

        :params key: The key of the value to which `tail` should be appended.
        :params tail: The list of values to append to the original.

        :returns: The new value
        """

        if not hasattr(tail, "__iter__"):
            raise TypeError('Tail must be an iterable')

        if not isinstance(tail, list):
            # NOTE(flaper87): Make sure we pass a list
            # down to the implementation. Not all drivers
            # have support for generators, sets or other
            # iterables.
            tail = list(tail)

        return self._append_tail(key, tail)

    def append(self, key, value):
        """Appends `value` to `key`'s value.

        :params key: The key of the value to which `value` should be appended.
        :params value: The value to append to the original.

        :returns: The new value
        """
        return self.append_tail(key, [value])

    @abc.abstractmethod
    def __contains__(self, key):
        """Verifies that a key exists.

        :params key: The key to verify.

        :returns: True if the key exists, otherwise False.
        """

    @abc.abstractmethod
    def _get_many(self, keys, default):
        """Implementations of this class have to override this method."""
        # Default (overridable) behavior: one get() per key, lazily.
        return ((k, self.get(k, default=default)) for k in keys)

    def get_many(self, keys, default=NOTSET):
        """Gets keys' value from cache

        :params keys: List of keys to retrieve.
        :params default: The default value to return for each key that is not
                         in the cache.

        :returns: A generator of (key, value)
        """
        return self._get_many(keys, default)

    @abc.abstractmethod
    def _set_many(self, data, ttl):
        """Implementations of this class have to override this method."""

        # Default (overridable) behavior: one set() per pair.
        for key, value in data.items():
            self.set(key, value, ttl=ttl)

    def set_many(self, data, ttl=None):
        """Puts several items into the cache at once

        Depending on the backend, this operation may or may not be efficient.
        The default implementation calls set for each (key, value) pair
        passed, other backends support set_many operations as part of their
        protocols.

        :params data: A dictionary like {key: val} to store in the cache.
        :params ttl: Key's timeout in seconds.
        """

        if ttl is None:
            ttl = self._default_ttl

        self._set_many(data, ttl)

    def update(self, **kwargs):
        """Sets several (key, value) pairs.

        Refer to the `set_many` docstring.
        """
        self.set_many(kwargs, ttl=self._default_ttl)

    @abc.abstractmethod
    def _unset_many(self, keys):
        """Implementations of this class have to override this method."""
        # Default (overridable) behavior: one deletion per key.
        for key in keys:
            del self[key]

    def unset_many(self, keys):
        """Removes several keys from the cache at once

        :params keys: List of keys to unset.
        """
        self._unset_many(keys)
|
|
@ -1,92 +0,0 @@
|
|||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Cache library.
|
||||
|
||||
Supported configuration options:
|
||||
|
||||
`default_backend`: Name of the cache backend to use.
|
||||
`key_namespace`: Namespace under which keys will be created.
|
||||
"""
|
||||
|
||||
########################################################################
|
||||
#
|
||||
# THIS MODULE IS DEPRECATED
|
||||
#
|
||||
# Please refer to
|
||||
# https://etherpad.openstack.org/p/kilo-oslo-library-proposals for
|
||||
# the discussion leading to this deprecation.
|
||||
#
|
||||
# We recommend helping with the new oslo.cache library being created
|
||||
# as a wrapper for dogpile.
|
||||
#
|
||||
########################################################################
|
||||
|
||||
|
||||
from six.moves.urllib import parse
|
||||
from stevedore import driver
|
||||
|
||||
|
||||
def _get_oslo_configs():
    """Build and return the oslo.config options this module registers."""
    # NOTE(flaper87): Oslo config should be
    # optional. Instead of doing try / except
    # at the top of this file, lets import cfg
    # here and assume that the caller of this
    # function already took care of this dependency.
    from oslo_config import cfg

    opts = [
        cfg.StrOpt('cache_url', default='memory://',
                   help='URL to connect to the cache back end.'),
    ]
    return opts
|
||||
|
||||
|
||||
def register_oslo_configs(conf):
    """Register this module's cache configuration options on *conf*.

    :params conf: Config object.
    :type conf: `cfg.ConfigOptions`
    """
    opts = _get_oslo_configs()
    conf.register_opts(opts)
|
||||
|
||||
|
||||
def get_cache(url='memory://'):
    """Load and return the cache backend selected by *url*.

    The URL scheme names the stevedore driver to load; query-string
    parameters become the backend's options dictionary.

    :param url: cache backend URL (default ``memory://``)
    """
    parsed = parse.urlparse(url)
    scheme = parsed.scheme

    query = parsed.query
    # NOTE(flaper87): We need the following hack
    # for python versions < 2.7.5. Previous versions
    # of python parsed query params just for 'known'
    # schemes. This was changed in this patch:
    # http://hg.python.org/cpython/rev/79e6ff3d9afd
    if not query and '?' in parsed.path:
        query = parsed.path.split('?', 1)[-1]

    options = dict(parse.parse_qsl(query))

    mgr = driver.DriverManager('openstack.common.cache.backends', scheme,
                               invoke_on_load=True,
                               invoke_args=[parsed],
                               invoke_kwds={'options': options})
    return mgr.driver
|
|
@ -1,272 +0,0 @@
|
|||
# Copyright 2012 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# W0603: Using the global statement
|
||||
# W0621: Redefining name %s from outer scope
|
||||
# pylint: disable=W0603,W0621
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import getpass
|
||||
import inspect
|
||||
import os
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
from oslo_utils import encodeutils
|
||||
from oslo_utils import strutils
|
||||
import prettytable
|
||||
import six
|
||||
from six import moves
|
||||
|
||||
from openstack.common._i18n import _
|
||||
|
||||
|
||||
class MissingArgs(Exception):
    """Supplied arguments are not sufficient for calling a function."""

    def __init__(self, missing):
        # Keep the raw list available so callers can inspect which
        # arguments were absent.
        self.missing = missing
        super(MissingArgs, self).__init__(
            _("Missing arguments: %s") % ", ".join(missing))
|
||||
|
||||
|
||||
def validate_args(fn, *args, **kwargs):
    """Check that the supplied args are sufficient for calling a function.

    >>> validate_args(lambda a: None)
    Traceback (most recent call last):
        ...
    MissingArgs: Missing argument(s): a
    >>> validate_args(lambda a, b, c, d: None, 0, c=1)
    Traceback (most recent call last):
        ...
    MissingArgs: Missing argument(s): b, d

    :param fn: the function to check
    :param args: the positional arguments supplied
    :param kwargs: the keyword arguments supplied
    :raises MissingArgs: if any required argument is not supplied
    """
    # NOTE: inspect.getargspec() was removed in Python 3.11; use
    # getfullargspec() when available (its .args/.defaults fields are
    # compatible) and fall back for older interpreters.
    getargspec = getattr(inspect, 'getfullargspec', inspect.getargspec)
    argspec = getargspec(fn)

    # Arguments without a default value must be supplied by the caller.
    num_defaults = len(argspec.defaults or [])
    required_args = argspec.args[:len(argspec.args) - num_defaults]

    def isbound(method):
        # Bound methods receive their first positional (self) implicitly.
        return getattr(method, '__self__', None) is not None

    if isbound(fn):
        required_args.pop(0)

    # Whatever wasn't supplied by keyword must be covered positionally.
    missing = [arg for arg in required_args if arg not in kwargs]
    missing = missing[len(args):]
    if missing:
        raise MissingArgs(missing)
|
||||
|
||||
|
||||
def arg(*args, **kwargs):
    """Decorator for CLI args.

    Example:

    >>> @arg("name", help="Name of the new entity")
    ... def entity_create(args):
    ...     pass
    """
    def _wrap(func):
        # Record the argparse-style spec on the function itself.
        add_arg(func, *args, **kwargs)
        return func
    return _wrap
|
||||
|
||||
|
||||
def env(*args, **kwargs):
    """Returns the first environment variable set.

    If all are empty, defaults to '' or keyword arg `default`.
    """
    for name in args:
        val = os.environ.get(name)
        if val:
            return val
    return kwargs.get('default', '')
|
||||
|
||||
|
||||
def add_arg(func, *args, **kwargs):
    """Bind CLI arguments to a shell.py `do_foo` function."""
    arguments = getattr(func, 'arguments', None)
    if arguments is None:
        arguments = func.arguments = []

    entry = (args, kwargs)
    # Skip duplicates, which can occur when the module is shared across
    # tests.
    if entry not in arguments:
        # Decorators compose bottom-up, so prepend rather than append to
        # keep positional options in their declared order.
        arguments.insert(0, entry)
|
||||
|
||||
|
||||
def unauthenticated(func):
    """Adds 'unauthenticated' attribute to decorated function.

    Usage:

    >>> @unauthenticated
    ... def mymethod(f):
    ...     pass
    """
    setattr(func, 'unauthenticated', True)
    return func
|
||||
|
||||
|
||||
def isunauthenticated(func):
    """Checks if the function does not require authentication.

    Mark such functions with the `@unauthenticated` decorator.

    :returns: bool
    """
    try:
        return func.unauthenticated
    except AttributeError:
        return False
|
||||
|
||||
|
||||
def print_list(objs, fields, formatters=None, sortby_index=0,
               mixed_case_fields=None, field_labels=None):
    """Print a list of objects as a table, one row per object.

    :param objs: iterable of :class:`Resource`
    :param fields: attributes that correspond to columns, in order
    :param formatters: `dict` of callables for field formatting
    :param sortby_index: index of the field for sorting table rows
    :param mixed_case_fields: fields corresponding to object attributes that
        have mixed case names (e.g., 'serverId')
    :param field_labels: Labels to use in the heading of the table, default to
        fields.
    :raises ValueError: if field_labels and fields differ in length
    """
    formatters = formatters or {}
    mixed_case_fields = mixed_case_fields or []
    field_labels = field_labels or fields
    if len(field_labels) != len(fields):
        # NOTE: interpolate the parameters into the message. Previously the
        # mapping was passed as a second positional argument to ValueError,
        # so the %(labels)s / %(fields)s placeholders were never filled in.
        raise ValueError(_("Field labels list %(labels)s has different number "
                           "of elements than fields list %(fields)s")
                         % {'labels': field_labels, 'fields': fields})

    if sortby_index is None:
        kwargs = {}
    else:
        kwargs = {'sortby': field_labels[sortby_index]}
    pt = prettytable.PrettyTable(field_labels)
    pt.align = 'l'

    for o in objs:
        row = []
        for field in fields:
            if field in formatters:
                row.append(formatters[field](o))
            else:
                # Derive the attribute name from the column label; mixed-case
                # fields keep their case, others are lowered.
                if field in mixed_case_fields:
                    field_name = field.replace(' ', '_')
                else:
                    field_name = field.lower().replace(' ', '_')
                data = getattr(o, field_name, '')
                row.append(data)
        pt.add_row(row)

    if six.PY3:
        print(encodeutils.safe_encode(pt.get_string(**kwargs)).decode())
    else:
        print(encodeutils.safe_encode(pt.get_string(**kwargs)))
|
||||
|
||||
|
||||
def print_dict(dct, dict_property="Property", wrap=0, dict_value='Value'):
    """Print a `dict` as a table of two columns.

    :param dct: `dict` to print
    :param dict_property: name of the first column
    :param wrap: wrapping for the second column
    :param dict_value: header label for the value (second) column
    """
    table = prettytable.PrettyTable([dict_property, dict_value])
    table.align = 'l'
    for key, val in sorted(dct.items()):
        # Stringify nested dicts so their length can be checked/wrapped.
        if isinstance(val, dict):
            val = six.text_type(val)
        if wrap > 0:
            val = textwrap.fill(six.text_type(val), wrap)
        # A literal '\n' sequence in the value (e.g. a fault with a
        # stacktrace) becomes multiple rows; only the first carries the key.
        if val and isinstance(val, six.string_types) and r'\n' in val:
            label = key
            for segment in val.strip().split(r'\n'):
                table.add_row([label, segment])
                label = ''
        else:
            table.add_row([key, val])

    if six.PY3:
        print(encodeutils.safe_encode(table.get_string()).decode())
    else:
        print(encodeutils.safe_encode(table.get_string()))
|
||||
|
||||
|
||||
def get_password(max_password_prompts=3):
    """Read password from TTY."""
    verify = strutils.bool_from_string(env("OS_VERIFY_PASSWORD"))
    password = None
    if hasattr(sys.stdin, "isatty") and sys.stdin.isatty():
        try:
            for _attempt in moves.range(max_password_prompts):
                first = getpass.getpass("OS Password: ")
                if verify:
                    second = getpass.getpass("Please verify: ")
                else:
                    second = first
                # Accept only a non-empty, (optionally) verified password.
                if first == second and first:
                    password = first
                    break
        except EOFError:
            # Ctrl-D pressed: stop prompting and return None.
            pass
    return password
|
||||
|
||||
|
||||
def service_type(stype):
    """Decorator that tags a function with a 'service_type' attribute.

    Usage:

    .. code-block:: python

       @service_type('volume')
       def mymethod(f):
           ...
    """
    def _tag(func):
        func.service_type = stype
        return func
    return _tag
|
||||
|
||||
|
||||
def get_service_type(f):
    """Return the function's 'service_type' attribute, or None if unset."""
    try:
        return f.service_type
    except AttributeError:
        return None
|
||||
|
||||
|
||||
def pretty_choice_list(l):
    """Format an iterable as a comma-separated list of quoted items."""
    quoted = ["'%s'" % choice for choice in l]
    return ', '.join(quoted)
|
||||
|
||||
|
||||
def exit(msg=''):
    """Print *msg* to stderr when non-empty, then exit with status 1."""
    if msg:
        print(msg, file=sys.stderr)
    sys.exit(1)
|
|
@ -1,197 +0,0 @@
|
|||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
########################################################################
|
||||
#
|
||||
# THIS MODULE IS DEPRECATED
|
||||
#
|
||||
# Please refer to
|
||||
# https://etherpad.openstack.org/p/kilo-oslo-library-proposals for
|
||||
# the discussion leading to this deprecation.
|
||||
#
|
||||
# We recommend checking out Barbican or the cryptography.py project
|
||||
# (https://pypi.python.org/pypi/cryptography) instead of this module.
|
||||
#
|
||||
########################################################################
|
||||
|
||||
import base64
|
||||
|
||||
from Crypto.Hash import HMAC
|
||||
from Crypto import Random
|
||||
from oslo_utils import importutils
|
||||
import six
|
||||
|
||||
from openstack.common._i18n import _
|
||||
|
||||
bchr = six.int2byte
|
||||
|
||||
|
||||
class CryptoutilsException(Exception):
    """Generic Exception for Crypto utilities."""

    # Default translated message; subclasses build more specific ones.
    message = _("An unknown error occurred in crypto utils.")
|
||||
|
||||
|
||||
class CipherBlockLengthTooBig(CryptoutilsException):
    """The block size is too big.

    :param requested: the block size that was asked for
    :param permitted: the maximum block size allowed
    """

    def __init__(self, requested, permitted):
        msg = _("Block size of %(given)d is too big, max = %(maximum)d")
        message = msg % {'given': requested, 'maximum': permitted}
        # NOTE: pass this class (not the base class) to super(); naming
        # CryptoutilsException skipped a level in the MRO and would silently
        # bypass any __init__ later added to the base class.
        super(CipherBlockLengthTooBig, self).__init__(message)
|
||||
|
||||
|
||||
class HKDFOutputLengthTooLong(CryptoutilsException):
    """The amount of Key Material asked is too much.

    :param requested: the output length that was asked for
    :param permitted: the maximum output length allowed (255 * HashLen)
    """

    def __init__(self, requested, permitted):
        msg = _("Length of %(given)d is too long, max = %(maximum)d")
        message = msg % {'given': requested, 'maximum': permitted}
        # NOTE: pass this class (not the base class) to super(); naming
        # CryptoutilsException skipped a level in the MRO and would silently
        # bypass any __init__ later added to the base class.
        super(HKDFOutputLengthTooLong, self).__init__(message)
|
||||
|
||||
|
||||
class HKDF(object):
    """An HMAC-based Key Derivation Function implementation (RFC5869)

    This class creates an object that allows to use HKDF to derive keys.
    """

    def __init__(self, hashtype='SHA256'):
        # Hash module is loaded dynamically from PyCrypto by name,
        # e.g. 'SHA256' -> Crypto.Hash.SHA256.
        self.hashfn = importutils.import_module('Crypto.Hash.' + hashtype)
        # RFC 5869 limits the output keying material to 255 * HashLen.
        self.max_okm_length = 255 * self.hashfn.digest_size

    def extract(self, ikm, salt=None):
        """An extract function that can be used to derive a robust key given
        weak Input Key Material (IKM) which could be a password.
        Returns a pseudorandom key (of HashLen octets)

        :param ikm: input keying material (ex a password)
        :param salt: optional salt value (a non-secret random value)
        """
        # Per RFC 5869, a missing salt defaults to HashLen zero bytes.
        if salt is None:
            salt = b'\x00' * self.hashfn.digest_size

        return HMAC.new(salt, ikm, self.hashfn).digest()

    def expand(self, prk, info, length):
        """An expand function that will return arbitrary length output that can
        be used as keys.
        Returns a buffer usable as key material.

        :param prk: a pseudorandom key of at least HashLen octets
        :param info: optional string (can be a zero-length string)
        :param length: length of output keying material (<= 255 * HashLen)
        :raises HKDFOutputLengthTooLong: if length exceeds 255 * HashLen
        """
        if length > self.max_okm_length:
            raise HKDFOutputLengthTooLong(length, self.max_okm_length)

        # Number of hash blocks needed to cover `length` (ceiling division).
        N = (length + self.hashfn.digest_size - 1) // self.hashfn.digest_size

        # T(i) = HMAC(PRK, T(i-1) | info | i) per RFC 5869 section 2.3;
        # the concatenated T(1..N) is truncated to `length`.
        okm = b""
        tmp = b""
        for block in range(1, N + 1):
            tmp = HMAC.new(prk, tmp + info + bchr(block), self.hashfn).digest()
            okm += tmp

        return okm[:length]
|
||||
|
||||
|
||||
# Upper bound on the cipher block size (in bytes) that the CBC padding
# scheme below can express: the pad length is stored in a single byte.
MAX_CB_SIZE = 256
|
||||
|
||||
|
||||
class SymmetricCrypto(object):
    """Symmetric Key Crypto object.

    This class creates a Symmetric Key Crypto object that can be used
    to encrypt, decrypt, or sign arbitrary data.

    :param enctype: Encryption Cipher name (default: AES)
    :param hashtype: Hash/HMAC type name (default: SHA256)
    """

    def __init__(self, enctype='AES', hashtype='SHA256'):
        # Resolve the PyCrypto cipher/hash modules by name so callers can
        # choose algorithms with plain strings.
        self.cipher = importutils.import_module('Crypto.Cipher.' + enctype)
        self.hashfn = importutils.import_module('Crypto.Hash.' + hashtype)

    def new_key(self, size):
        """Return ``size`` bytes of random key material."""
        return Random.new().read(size)

    def encrypt(self, key, msg, b64encode=True):
        """Encrypt the provided msg and returns the cyphertext optionally
        base64 encoded.

        Uses AES-128-CBC with a Random IV by default.

        The plaintext is padded to reach blocksize length.
        The last byte of the block is the length of the padding.
        The length of the padding does not include the length byte itself.

        :param key: The Encryption key.
        :param msg: the plain text.

        :returns enc: a block of encrypted data.
        """
        iv = Random.new().read(self.cipher.block_size)
        cipher = self.cipher.new(key, self.cipher.MODE_CBC, iv)

        # CBC mode requires a fixed block size; the pad length must fit in
        # the single trailing length byte.
        if self.cipher.block_size > MAX_CB_SIZE:
            raise CipherBlockLengthTooBig(self.cipher.block_size, MAX_CB_SIZE)
        remainder = len(msg) % self.cipher.block_size
        pad_length = self.cipher.block_size - remainder - 1
        msg += b'\x00' * pad_length
        msg += bchr(pad_length)

        enc = iv + cipher.encrypt(msg)
        return base64.b64encode(enc) if b64encode else enc

    def decrypt(self, key, msg, b64decode=True):
        """Decrypts the provided ciphertext, optionally base64 encoded, and
        returns the plaintext message, after padding is removed.

        Uses AES-128-CBC with an IV by default.

        :param key: The Encryption key.
        :param msg: the ciphetext, the first block is the IV

        :returns plain: the plaintext message.
        """
        if b64decode:
            msg = base64.b64decode(msg)
        iv = msg[:self.cipher.block_size]
        cipher = self.cipher.new(key, self.cipher.MODE_CBC, iv)

        padded = cipher.decrypt(msg[self.cipher.block_size:])
        # The final byte stores the pad length (not counting itself), so
        # strip that many bytes plus the length byte.
        pad_total = ord(padded[-1:]) + 1
        return padded[:-pad_total]

    def sign(self, key, msg, b64encode=True):
        """Signs a message string and returns a base64 encoded signature.

        Uses HMAC-SHA-256 by default.

        :param key: The Signing key.
        :param msg: the message to sign.

        :returns out: a base64 encoded signature.
        """
        digest = HMAC.new(key, msg, self.hashfn).digest()
        return base64.b64encode(digest) if b64encode else digest
|
|
@ -1,152 +0,0 @@
|
|||
# Copyright 2010 United States Government as represented by the
|
||||
# Administrator of the National Aeronautics and Space Administration.
|
||||
# All Rights Reserved.
|
||||
# Copyright (c) 2010 Citrix Systems, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Helper methods to deal with images.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
from oslo_utils import strutils
|
||||
|
||||
from openstack.common._i18n import _
|
||||
|
||||
|
||||
class QemuImgInfo(object):
    """Parses the text output of ``qemu-img info`` into attributes.

    Fields that do not appear in the output simply end up as ``None``
    (or ``[]`` for the snapshot list).
    """

    # "backing file" values look like: <file> (actual path: <path>)
    BACKING_FILE_RE = re.compile((r"^(.*?)\s*\(actual\s+path\s*:"
                                  r"\s+(.*?)\)\s*$"), re.I)
    # A top-level "key: value" line of the qemu-img output.
    TOP_LEVEL_RE = re.compile(r"^([\w\d\s\_\-]+):(.*)$")
    # A size such as "1.5G (1610612736 bytes)"; group 4, when present, is
    # the exact byte count.
    SIZE_RE = re.compile(r"(\d*\.?\d+)(\w+)?(\s*\(\s*(\d+)\s+bytes\s*\))?",
                         re.I)

    def __init__(self, cmd_output=None):
        """Parse ``cmd_output`` (raw `qemu-img info` stdout) into fields."""
        details = self._parse(cmd_output or '')
        self.image = details.get('image')
        self.backing_file = details.get('backing_file')
        self.file_format = details.get('file_format')
        self.virtual_size = details.get('virtual_size')
        self.cluster_size = details.get('cluster_size')
        self.disk_size = details.get('disk_size')
        self.snapshots = details.get('snapshot_list', [])
        self.encrypted = details.get('encrypted')

    def __str__(self):
        """Render the parsed fields in a qemu-img-like "key: value" form."""
        lines = [
            'image: %s' % self.image,
            'file_format: %s' % self.file_format,
            'virtual_size: %s' % self.virtual_size,
            'disk_size: %s' % self.disk_size,
            'cluster_size: %s' % self.cluster_size,
            'backing_file: %s' % self.backing_file,
        ]
        # Optional fields are only shown when set, mirroring qemu-img.
        if self.snapshots:
            lines.append("snapshots: %s" % self.snapshots)
        if self.encrypted:
            lines.append("encrypted: %s" % self.encrypted)
        return "\n".join(lines)

    def _canonicalize(self, field):
        """Normalize a qemu-img key to a lowercase, underscore identifier."""
        # Standardize on underscores/lc/no dash and no spaces
        # since qemu seems to have mixed outputs here... and
        # this format allows for better integration with python
        # - i.e. for usage in kwargs and such...
        field = field.lower().strip()
        for c in (" ", "-"):
            field = field.replace(c, '_')
        return field

    def _extract_bytes(self, details):
        """Convert a size string (e.g. ``"1.5G (… bytes)"``) to bytes.

        :raises ValueError: if ``details`` does not look like a size.
        """
        # Replace it with the byte amount
        real_size = self.SIZE_RE.search(details)
        if not real_size:
            raise ValueError(_('Invalid input value "%s".') % details)
        magnitude = real_size.group(1)
        unit_of_measure = real_size.group(2)
        bytes_info = real_size.group(3)
        # Prefer the exact parenthesized byte count when qemu printed one.
        if bytes_info:
            return int(real_size.group(4))
        elif not unit_of_measure:
            return int(magnitude)
        return strutils.string_to_bytes('%s%sB' % (magnitude, unit_of_measure),
                                        return_int=True)

    def _extract_details(self, root_cmd, root_details, lines_after):
        """Post-process one top-level value according to its key.

        NOTE: for ``snapshot_list`` this consumes the snapshot table rows
        from ``lines_after`` in place, so _parse() does not see them again.
        """
        real_details = root_details
        if root_cmd == 'backing_file':
            # Replace it with the real backing file
            backing_match = self.BACKING_FILE_RE.match(root_details)
            if backing_match:
                real_details = backing_match.group(2).strip()
        elif root_cmd in ['virtual_size', 'cluster_size', 'disk_size']:
            # Replace it with the byte amount (if we can convert it)
            if root_details == 'None':
                real_details = 0
            else:
                real_details = self._extract_bytes(root_details)
        elif root_cmd == 'file_format':
            real_details = real_details.strip().lower()
        elif root_cmd == 'snapshot_list':
            # Next line should be a header, starting with 'ID'
            if not lines_after or not lines_after.pop(0).startswith("ID"):
                msg = _("Snapshot list encountered but no header found!")
                raise ValueError(msg)
            real_details = []
            # This is the sprintf pattern we will try to match
            # "%-10s%-20s%7s%20s%15s"
            # ID TAG VM SIZE DATE VM CLOCK (current header)
            while lines_after:
                line = lines_after[0]
                line_pieces = line.split()
                if len(line_pieces) != 6:
                    break
                # Check against this pattern in the final position
                # "%02d:%02d:%02d.%03d"
                date_pieces = line_pieces[5].split(":")
                if len(date_pieces) != 3:
                    break
                lines_after.pop(0)
                real_details.append({
                    'id': line_pieces[0],
                    'tag': line_pieces[1],
                    'vm_size': line_pieces[2],
                    'date': line_pieces[3],
                    'vm_clock': line_pieces[4] + " " + line_pieces[5],
                })
        return real_details

    def _parse(self, cmd_output):
        """Parse the full command output into a {key: value} dict."""
        # Analysis done of qemu-img.c to figure out what is going on here
        # Find all points start with some chars and then a ':' then a newline
        # and then handle the results of those 'top level' items in a separate
        # function.
        #
        # TODO(harlowja): newer versions might have a json output format
        # we should switch to that whenever possible.
        # see: http://bit.ly/XLJXDX
        contents = {}
        lines = [x for x in cmd_output.splitlines() if x.strip()]
        while lines:
            line = lines.pop(0)
            top_level = self.TOP_LEVEL_RE.match(line)
            if top_level:
                root = self._canonicalize(top_level.group(1))
                if not root:
                    continue
                root_details = top_level.group(2).strip()
                # _extract_details may consume follow-up lines (snapshots).
                details = self._extract_details(root, root_details, lines)
                contents[root] = details
        return contents
|
|
@ -1,103 +0,0 @@
|
|||
# Copyright 2010 United States Government as represented by the
|
||||
# Administrator of the National Aeronautics and Space Administration.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Super simple fake memcache client."""
|
||||
|
||||
import copy
|
||||
|
||||
from debtcollector import removals
|
||||
from oslo_config import cfg
|
||||
from oslo_utils import timeutils
|
||||
|
||||
# Configuration options exposed by this module: the memcached server list.
# When unset, get_client() falls back to the in-process fake Client below.
memcache_opts = [
    cfg.ListOpt('memcached_servers',
                help='Memcached servers or None for in process cache.'),
]

CONF = cfg.CONF
CONF.register_opts(memcache_opts)


# Indicate that this module is deprecated for removal and oslo.cache should
# be used instead.
removals.removed_module(__name__, 'oslo.cache')
|
||||
|
||||
|
||||
def list_opts():
    """Entry point for oslo-config-generator."""
    # Hand out a deep copy so the generator cannot mutate our option list.
    opts = copy.deepcopy(memcache_opts)
    return [(None, opts)]
|
||||
|
||||
|
||||
def get_client(memcached_servers=None):
    """Return a memcache client: real if servers are configured, fake otherwise.

    :param memcached_servers: optional server list; falls back to the
        ``memcached_servers`` config option when not given.
    """
    if not memcached_servers:
        memcached_servers = CONF.memcached_servers

    if memcached_servers:
        # Only import python-memcached when it is actually needed.
        import memcache
        return memcache.Client(memcached_servers, debug=0)

    return Client(memcached_servers, debug=0)
|
||||
|
||||
|
||||
class Client(object):
    """Replicates a tiny subset of memcached client interface."""

    def __init__(self, *args, **kwargs):
        """Ignores the passed in args."""
        # key -> (expiry timestamp or 0 for "never", value)
        self.cache = {}

    def get(self, key):
        """Retrieves the value for a key or None.

        This expunges expired keys during each get.
        """
        now = timeutils.utcnow_ts()
        expired = [k for k, (timeout, _value) in list(self.cache.items())
                   if timeout and now >= timeout]
        for stale_key in expired:
            del self.cache[stale_key]

        return self.cache.get(key, (0, None))[1]

    def set(self, key, value, time=0, min_compress_len=0):
        """Sets the value for a key."""
        expires_at = timeutils.utcnow_ts() + time if time != 0 else 0
        self.cache[key] = (expires_at, value)
        return True

    def add(self, key, value, time=0, min_compress_len=0):
        """Sets the value for a key if it doesn't exist."""
        already_present = self.get(key) is not None
        if already_present:
            return False
        return self.set(key, value, time, min_compress_len)

    def incr(self, key, delta=1):
        """Increments the value for a key."""
        current = self.get(key)
        if current is None:
            return None
        updated = int(current) + delta
        # Keep the original expiry; memcached stores counters as strings.
        self.cache[key] = (self.cache[key][0], str(updated))
        return updated

    def delete(self, key, time=0):
        """Deletes the value associated with a key."""
        self.cache.pop(key, None)
|
|
@ -1,95 +0,0 @@
|
|||
# Copyright (c) 2011-2012 OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Filter support
|
||||
"""
|
||||
import logging
|
||||
|
||||
from openstack.common._i18n import _LI
|
||||
from openstack.common.scheduler import base_handler
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BaseFilter(object):
    """Base class for all filter classes."""

    def _filter_one(self, obj, filter_properties):
        """Return True if it passes the filter, False otherwise.
        Override this in a subclass.
        """
        return True

    def filter_all(self, filter_obj_list, filter_properties):
        """Yield objects that pass the filter.

        Can be overridden in a subclass, if you need to base filtering
        decisions on all objects. Otherwise, one can just override
        _filter_one() to filter a single object.
        """
        return (candidate for candidate in filter_obj_list
                if self._filter_one(candidate, filter_properties))

    # Set to true in a subclass if a filter only needs to be run once
    # for each request rather than for each instance
    run_filter_once_per_request = False

    def run_filter_for_index(self, index):
        """Return True if the filter needs to be run for the "index-th"
        instance in a request. Only need to override this if a filter
        needs anything other than "first only" or "all" behaviour.
        """
        if self.run_filter_once_per_request and index > 0:
            return False
        return True
|
||||
|
||||
|
||||
class BaseFilterHandler(base_handler.BaseHandler):
    """Base class to handle loading filter classes.

    This class should be subclassed where one needs to use filters.
    """

    def get_filtered_objects(self, filter_classes, objs,
                             filter_properties, index=0):
        """Get objects after filter

        :param filter_classes: filters that will be used to filter the
                               objects
        :param objs: objects that will be filtered
        :param filter_properties: client filter properties
        :param index: This value needs to be increased in the caller
                      function of get_filtered_objects when handling
                      each resource.
        """
        remaining = list(objs)
        LOG.debug("Starting with %d host(s)", len(remaining))
        for filter_cls in filter_classes:
            cls_name = filter_cls.__name__
            filter_instance = filter_cls()

            if not filter_instance.run_filter_for_index(index):
                continue

            filtered = filter_instance.filter_all(remaining,
                                                  filter_properties)
            # A None result is a sentinel telling us to abort entirely.
            if filtered is None:
                LOG.debug("Filter %(cls_name)s says to stop filtering",
                          {'cls_name': cls_name})
                return
            remaining = list(filtered)
            msg = (_LI("Filter %(cls_name)s returned %(obj_len)d host(s)")
                   % {'cls_name': cls_name, 'obj_len': len(remaining)})
            if not remaining:
                LOG.info(msg)
                break
            LOG.debug(msg)
        return remaining
|
|
@ -1,46 +0,0 @@
|
|||
# Copyright (c) 2011-2013 OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
A common base for handling extension classes.
|
||||
|
||||
Used by BaseFilterHandler and BaseWeightHandler
|
||||
"""
|
||||
|
||||
import inspect
|
||||
|
||||
from stevedore import extension
|
||||
|
||||
|
||||
class BaseHandler(object):
    """Base class to handle loading filter and weight classes."""

    def __init__(self, modifier_class_type, modifier_namespace):
        self.namespace = modifier_namespace
        self.modifier_class_type = modifier_class_type
        self.extension_manager = extension.ExtensionManager(modifier_namespace)

    def _is_correct_class(self, cls):
        """Return whether an object is a class of the correct type and
        is not prefixed with an underscore.
        """
        if not inspect.isclass(cls):
            return False
        if cls.__name__.startswith('_'):
            return False
        return issubclass(cls, self.modifier_class_type)

    def get_all_classes(self):
        # We use a set, as some classes may have an entrypoint of their own,
        # and also be returned by a function such as 'all_filters' for example
        plugins = (ext.plugin for ext in self.extension_manager)
        return [plugin for plugin in plugins
                if self._is_correct_class(plugin)]
|
|
@ -1,147 +0,0 @@
|
|||
# Copyright (c) 2011-2012 OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Pluggable Weighing support
|
||||
"""
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
from openstack.common.scheduler import base_handler
|
||||
|
||||
|
||||
def normalize(weight_list, minval=None, maxval=None):
    """Normalize the values in a list between 0 and 1.0.

    The normalization is made regarding the lower and upper values present in
    weight_list. If the minval and/or maxval parameters are set, these values
    will be used instead of the minimum and maximum from the list.

    If all the values are equal, they are normalized to 0.
    """
    if not weight_list:
        return ()

    upper = max(weight_list) if maxval is None else maxval
    lower = min(weight_list) if minval is None else minval

    upper = float(upper)
    lower = float(lower)

    # A degenerate range would divide by zero; map everything to 0.
    if lower == upper:
        return [0] * len(weight_list)

    span = upper - lower
    return ((value - lower) / span for value in weight_list)
|
||||
|
||||
|
||||
class WeighedObject(object):
    """Object with weight information."""

    def __init__(self, obj, weight):
        self.obj = obj
        self.weight = weight

    def __repr__(self):
        return "<WeighedObject '{0}': {1}>".format(self.obj, self.weight)
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
class BaseWeigher(object):
    """Base class for pluggable weighers.

    The attributes maxval and minval can be specified to set up the maximum
    and minimum values for the weighed objects. These values will then be
    taken into account in the normalization step, instead of taking the values
    from the calculated weights.
    """

    minval = None
    maxval = None

    def weight_multiplier(self):
        """How weighted this weigher should be.

        Override this method in a subclass, so that the returned value is
        read from a configuration option to permit operators specify a
        multiplier for the weigher.
        """
        return 1.0

    @abc.abstractmethod
    def _weigh_object(self, obj, weight_properties):
        """Override in a subclass to specify a weight for a specific
        object.
        """

    def weigh_objects(self, weighed_obj_list, weight_properties):
        """Weigh multiple objects.

        Override in a subclass if you need access to all objects in order
        to calculate weights. Do not modify the weight of an object here,
        just return a list of weights.
        """
        scores = []
        for weighed_obj in weighed_obj_list:
            score = self._weigh_object(weighed_obj.obj, weight_properties)

            # Seed min/max from the first score when the subclass has not
            # pinned them; afterwards track the running extremes.
            if self.minval is None:
                self.minval = score
            if self.maxval is None:
                self.maxval = score

            if score < self.minval:
                self.minval = score
            elif score > self.maxval:
                self.maxval = score

            scores.append(score)

        return scores
|
||||
|
||||
|
||||
class BaseWeightHandler(base_handler.BaseHandler):
    # Container type pairing each object with its accumulated weight.
    object_class = WeighedObject

    def get_weighed_objects(self, weigher_classes, obj_list,
                            weighing_properties):
        """Return a sorted (descending), normalized list of WeighedObjects."""
        if not obj_list:
            return []

        weighed_objs = [self.object_class(obj, 0.0) for obj in obj_list]
        for weigher_cls in weigher_classes:
            weigher = weigher_cls()
            raw_weights = weigher.weigh_objects(weighed_objs,
                                                weighing_properties)

            # Scale every weigher's output to [0, 1] before mixing it in.
            scaled = normalize(raw_weights,
                               minval=weigher.minval,
                               maxval=weigher.maxval)

            for weighed_obj, weight in zip(weighed_objs, scaled):
                weighed_obj.weight += weigher.weight_multiplier() * weight

        return sorted(weighed_objs, key=lambda x: x.weight, reverse=True)
|
|
@ -1,38 +0,0 @@
|
|||
# Copyright (c) 2011 OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Scheduler host filters
|
||||
"""
|
||||
|
||||
from openstack.common.scheduler import base_filter
|
||||
|
||||
|
||||
class BaseHostFilter(base_filter.BaseFilter):
    """Base class for host filters."""

    def _filter_one(self, obj, filter_properties):
        """Return True if the object passes the filter, otherwise False."""
        # Adapts the generic BaseFilter hook to the host-specific API below.
        return self.host_passes(obj, filter_properties)

    def host_passes(self, host_state, filter_properties):
        """Return True if the HostState passes the filter, otherwise False.
        Override this in a subclass.
        """
        raise NotImplementedError()
|
||||
|
||||
|
||||
class HostFilterHandler(base_filter.BaseFilterHandler):
    """Filter handler specialized to load BaseHostFilter plugins."""

    def __init__(self, namespace):
        # :param namespace: stevedore entry-point namespace to load from.
        super(HostFilterHandler, self).__init__(BaseHostFilter, namespace)
|
|
@ -1,32 +0,0 @@
|
|||
# Copyright (c) 2011-2012 OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from openstack.common.scheduler import filters
|
||||
|
||||
|
||||
class AvailabilityZoneFilter(filters.BaseHostFilter):
    """Filters Hosts by availability zone."""

    # Availability zones do not change within a request
    run_filter_once_per_request = True

    def host_passes(self, host_state, filter_properties):
        """Pass hosts whose service zone matches the requested zone."""
        requested_zone = (filter_properties
                          .get('request_spec', {})
                          .get('resource_properties', {})
                          .get('availability_zone'))

        # No zone requested: every host passes.
        if not requested_zone:
            return True

        return requested_zone == host_state.service['availability_zone']
|
|
@ -1,74 +0,0 @@
|
|||
# Copyright (c) 2011 OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import logging
|
||||
|
||||
import six
|
||||
|
||||
from openstack.common.scheduler import filters
|
||||
from openstack.common.scheduler.filters import extra_specs_ops
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CapabilitiesFilter(filters.BaseHostFilter):
    """HostFilter to work with resource (instance & volume) type records."""

    def _satisfies_extra_specs(self, capabilities, resource_type):
        """Check that the capabilities provided by the services satisfy
        the extra specs associated with the resource type.
        """
        extra_specs = resource_type.get('extra_specs', [])
        if not extra_specs:
            return True

        for key, req in six.iteritems(extra_specs):
            # Either not scope format, or in capabilities scope
            scope = key.split(':')
            if scope[0] == "capabilities":
                del scope[0]
            elif len(scope) > 1:
                # Scoped to some other extension: not ours to evaluate.
                continue

            # Walk the (possibly nested) capability dicts along the scope.
            cap = capabilities
            for part in scope:
                try:
                    cap = cap.get(part)
                except AttributeError:
                    cap = None
                if cap is None:
                    LOG.debug("Host doesn't provide capability '%(cap)s' "
                              "listed in the extra specs",
                              {'cap': part})
                    return False
            if not extra_specs_ops.match(cap, req):
                LOG.debug("extra_spec requirement '%(req)s' "
                          "does not match '%(cap)s'",
                          {'req': req, 'cap': cap})
                return False
        return True

    def host_passes(self, host_state, filter_properties):
        """Return a list of hosts that can create resource_type."""
        # Note(zhiteng) Currently only Cinder and Nova are using
        # this filter, so the resource type is either instance or
        # volume.
        resource_type = filter_properties.get('resource_type')
        if self._satisfies_extra_specs(host_state.capabilities,
                                       resource_type):
            return True
        LOG.debug("%(host_state)s fails resource_type extra_specs "
                  "requirements", {'host_state': host_state})
        return False
|
|
@ -1,72 +0,0 @@
|
|||
# Copyright (c) 2011 OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import operator
|
||||
|
||||
from oslo_utils import strutils
|
||||
|
||||
# 1. The following operations are supported:
#    =, s==, s!=, s>=, s>, s<=, s<, <in>, <is>, <or>, ==, !=, >=, <=
# 2. Note that <or> is handled in a different way below.
# 3. If the first word in the extra_specs is not one of the operators,
#    it is ignored.
# NOTE: '=' is implemented as numeric >= (see the lambda below), not
# equality — presumably a historical extra-specs convention; '==' is the
# true numeric equality operator.  's*' operators compare as strings.
_op_methods = {'=': lambda x, y: float(x) >= float(y),
               '<in>': lambda x, y: y in x,
               '<is>': lambda x, y: (strutils.bool_from_string(x) is
                                     strutils.bool_from_string(y)),
               '==': lambda x, y: float(x) == float(y),
               '!=': lambda x, y: float(x) != float(y),
               '>=': lambda x, y: float(x) >= float(y),
               '<=': lambda x, y: float(x) <= float(y),
               's==': operator.eq,
               's!=': operator.ne,
               's<': operator.lt,
               's<=': operator.le,
               's>': operator.gt,
               's>=': operator.ge}
|
||||
|
||||
|
||||
def match(value, req):
    """Return True when ``value`` satisfies the extra-spec requirement ``req``.

    ``req`` is either a plain value (compared with ==) or an operator
    followed by operand(s), e.g. ``">= 1024"`` or ``"<or> a <or> b"``.
    """
    words = req.split()

    op = method = None
    if words:
        op = words.pop(0)
        method = _op_methods.get(op)

    # First token is not a known operator: fall back to plain equality.
    if op != '<or>' and not method:
        return value == req

    if value is None:
        return False

    if op == '<or>':  # Ex: <or> v1 <or> v2 <or> v3
        # Candidates sit at even positions; '<or>' keywords at odd ones.
        while True:
            if words.pop(0) == value:
                return True
            if not words:
                return False
            words.pop(0)  # discard the '<or>' keyword
            if not words:
                return False

    try:
        if words and method(value, words[0]):
            return True
    except ValueError:
        pass

    return False
|
|
@ -1,57 +0,0 @@
|
|||
# Copyright (c) 2011 OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import logging
|
||||
|
||||
from openstack.common.scheduler import filters
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class IgnoreAttemptedHostsFilter(filters.BaseHostFilter):
    """Filter out previously attempted hosts

    A host passes this filter if it has not already been attempted for
    scheduling. The scheduler needs to add previously attempted hosts
    to the 'retry' key of filter_properties in order for this to work
    correctly. For example::

        {
            'retry': {
                'hosts': ['host1', 'host2'],
                'num_attempts': 3,
            }
        }
    """

    def host_passes(self, host_state, filter_properties):
        """Skip nodes that have already been attempted."""
        attempted = filter_properties.get('retry')
        if not attempted:
            # Re-scheduling is disabled: every host passes.
            LOG.debug("Re-scheduling is disabled.")
            return True

        hosts = attempted.get('hosts', [])
        host = host_state.host

        passes = host not in hosts
        pass_msg = "passes" if passes else "fails"

        # Pass the args to the logger instead of pre-formatting with %,
        # so interpolation is skipped when DEBUG logging is disabled.
        LOG.debug("Host %(host)s %(pass_msg)s. Previously tried hosts: "
                  "%(hosts)s", {'host': host,
                                'pass_msg': pass_msg,
                                'hosts': hosts})
        return passes
|
|
@ -1,151 +0,0 @@
|
|||
# Copyright (c) 2011 OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import operator
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
import six
|
||||
|
||||
from openstack.common.scheduler import filters
|
||||
|
||||
|
||||
class JsonFilter(filters.BaseHostFilter):
    """Host Filter to allow simple JSON-based grammar for
    selecting hosts.
    """
    def _op_compare(self, args, op):
        """Returns True if the specified operator can successfully
        compare the first item in the args with all the rest. Will
        return False if only one item is in the list.
        """
        if len(args) < 2:
            return False
        if op is operator.contains:
            # Membership test: args[0] must appear among the rest.
            bad = args[0] not in args[1:]
        else:
            # 'bad' collects every trailing arg that fails the comparison
            # against args[0]; an empty (falsy) result means all passed.
            bad = [arg for arg in args[1:]
                   if not op(args[0], arg)]
        return not bool(bad)

    def _equals(self, args):
        """First term is == all the other terms."""
        return self._op_compare(args, operator.eq)

    def _less_than(self, args):
        """First term is < all the other terms."""
        return self._op_compare(args, operator.lt)

    def _greater_than(self, args):
        """First term is > all the other terms."""
        return self._op_compare(args, operator.gt)

    def _in(self, args):
        """First term is in set of remaining terms."""
        return self._op_compare(args, operator.contains)

    def _less_than_equal(self, args):
        """First term is <= all the other terms."""
        return self._op_compare(args, operator.le)

    def _greater_than_equal(self, args):
        """First term is >= all the other terms."""
        return self._op_compare(args, operator.ge)

    def _not(self, args):
        """Flip each of the arguments."""
        return [not arg for arg in args]

    def _or(self, args):
        """True if any arg is True."""
        return any(args)

    def _and(self, args):
        """True if all args are True."""
        return all(args)

    # Dispatch table mapping query operators to handlers.  The values
    # are plain (unbound) functions, which is why _process_filter
    # invokes them as method(self, cooked_args).
    commands = {
        '=': _equals,
        '<': _less_than,
        '>': _greater_than,
        'in': _in,
        '<=': _less_than_equal,
        '>=': _greater_than_equal,
        'not': _not,
        'or': _or,
        'and': _and,
    }

    def _parse_string(self, string, host_state):
        """Strings prefixed with $ are capability lookups in the
        form '$variable' where 'variable' is an attribute in the
        HostState class. If $variable is a dictionary, you may
        use: $variable.dictkey
        """
        if not string:
            return None
        if not string.startswith("$"):
            # Plain literal: return unchanged.
            return string

        path = string[1:].split(".")
        obj = getattr(host_state, path[0], None)
        if obj is None:
            return None
        for item in path[1:]:
            # Walk nested dictionaries; a missing key aborts the lookup.
            obj = obj.get(item)
            if obj is None:
                return None
        return obj

    def _process_filter(self, query, host_state):
        """Recursively parse the query structure."""
        if not query:
            return True
        cmd = query[0]
        # An unknown operator raises KeyError here.
        method = self.commands[cmd]
        cooked_args = []
        for arg in query[1:]:
            if isinstance(arg, list):
                # Nested sub-expression: evaluate it first.
                arg = self._process_filter(arg, host_state)
            elif isinstance(arg, six.string_types):
                arg = self._parse_string(arg, host_state)
            # Unresolvable lookups (None) are silently dropped.
            if arg is not None:
                cooked_args.append(arg)
        # commands values are unbound functions, so pass self explicitly.
        result = method(self, cooked_args)
        return result

    def host_passes(self, host_state, filter_properties):
        """Return a list of hosts that can fulfill the requirements
        specified in the query.
        """
        # TODO(zhiteng) Add description for filter_properties structure
        # and scheduler_hints.
        # NOTE(review): a 'scheduler_hints' value of None would raise
        # TypeError rather than KeyError here — confirm callers always
        # provide a dict.
        try:
            query = filter_properties['scheduler_hints']['query']
        except KeyError:
            query = None
        if not query:
            return True

        # NOTE(comstud): Not checking capabilities or service for
        # enabled/disabled so that a provided json filter can decide

        result = self._process_filter(jsonutils.loads(query), host_state)
        if isinstance(result, list):
            # If any succeeded, include the host
            result = any(result)
        if result:
            # Query satisfied: keep this host.
            return True
        return False
|
|
@ -1,45 +0,0 @@
|
|||
# Copyright (c) 2011 OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Scheduler host weights
|
||||
"""
|
||||
|
||||
|
||||
from openstack.common.scheduler import base_weight
|
||||
|
||||
|
||||
class WeighedHost(base_weight.WeighedObject):
    """A host paired with the weight that was assigned to it."""

    def to_dict(self):
        """Return a plain-dict view of this weighed host."""
        return {'weight': self.weight, 'host': self.obj.host}

    def __repr__(self):
        return ("WeighedHost [host: %s, weight: %s]" %
                (self.obj.host, self.weight))
|
||||
|
||||
|
||||
class BaseHostWeigher(base_weight.BaseWeigher):
    """Base class for host weights."""
|
||||
|
||||
|
||||
class HostWeightHandler(base_weight.BaseWeightHandler):
    """Weight handler that wraps hosts in WeighedHost objects."""

    # Objects produced by the handler for each weighed host.
    object_class = WeighedHost

    def __init__(self, namespace):
        # Load BaseHostWeigher plugins from the given namespace.
        super(HostWeightHandler, self).__init__(BaseHostWeigher, namespace)
|
|
@ -1,149 +0,0 @@
|
|||
# Copyright 2013 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import time
|
||||
|
||||
from oslotest import base
|
||||
|
||||
from openstack.common.cache import cache
|
||||
|
||||
|
||||
class CacheBaseTest(base.BaseTestCase):
    """Backend-agnostic behavioral tests for cache drivers.

    Subclasses set `cache_url` to select a concrete backend; every
    test here must pass for any conforming cache implementation.
    """

    # Concrete subclasses override this with a backend URL.
    cache_url = None

    def setUp(self):
        super(CacheBaseTest, self).setUp()
        self.client = cache.get_cache(self.cache_url)

    def tearDown(self):
        # Leave no cached state behind for the next test.
        self.client.clear()
        super(CacheBaseTest, self).tearDown()

    def test_set_get(self):
        self.client['foo'] = 'bar'
        self.assertEqual(self.client['foo'], 'bar')

    def test_get_keyerror(self):
        self.assertRaises(KeyError,
                          self.client.__getitem__,
                          "DoesNotExist")

    def test_set_not_exists_get(self):
        self.client.set('foo', 'bar', 10, not_exists=True)
        self.assertEqual(self.client.get('foo'), 'bar')

    def test_set_not_exists_false_get(self):
        # not_exists=True must not overwrite an existing key.
        self.client['foo'] = 'bar'
        ret = self.client.set('foo', 'baz',
                              0, not_exists=True)
        self.assertFalse(ret)
        self.assertEqual(self.client.get('foo'), 'bar')

    def test_set_unset(self):
        self.client['foo'] = 'bar'
        self.assertEqual(self.client['foo'], 'bar')

        del self.client['foo']
        self.assertIsNone(self.client.get('foo'))

    def test_incr(self):
        self.client['foo'] = 1
        self.assertEqual(self.client['foo'], 1)

        self.client.incr('foo', 2)
        self.assertEqual(self.client['foo'], 3)

        # Negative deltas must decrement, including through zero.
        self.client.incr('foo', -3)
        self.assertEqual(self.client['foo'], 0)

        self.client.incr('foo', -3)
        self.assertEqual(self.client['foo'], -3)

    def test_append(self):
        self.client['foo'] = [1, 2]
        self.assertEqual(self.client['foo'], [1, 2])

        self.client.append('foo', 3)
        self.assertEqual(self.client['foo'], [1, 2, 3])

        self.client.append('foo', 4)
        self.assertEqual(self.client['foo'], [1, 2, 3, 4])

    def test_append_tail(self):
        self.client['foo'] = [1, 2]
        self.assertEqual(self.client['foo'], [1, 2])

        self.client.append_tail('foo', [3, 4])
        self.assertEqual(self.client['foo'], [1, 2, 3, 4])

    def test_set_many(self):
        self.client.set_many(dict(foo=0, bar=1))
        self.assertEqual(self.client['foo'], 0)
        self.assertEqual(self.client['bar'], 1)

    def test_unset_many(self):
        self.client['foo'] = 0
        self.client['bar'] = 1
        self.assertEqual(self.client['foo'], 0)
        self.assertEqual(self.client['bar'], 1)

        self.client.unset_many(['foo', 'bar'])
        self.assertIsNone(self.client.get('foo'))
        self.assertIsNone(self.client.get('bar'))

    def test_get_many(self):
        self.client['foo'] = 0
        self.client['bar'] = 1
        values = self.client.get_many(["foo", "bar"])
        self.assertEqual(list(values), [('foo', 0), ('bar', 1)])

    def test_timeout(self):
        self.client.set('foo', 'bar', ttl=1)
        self.assertEqual(self.client.get('foo'), 'bar')

        # NOTE(flaper87): It's not funny
        # to sleep tests but this test is
        # supposed to work for all backends.
        time.sleep(1)
        self.assertIsNone(self.client['foo'])

    def test_clear(self):
        self.client['foo'] = 0
        self.client['bar'] = 1

        self.client.clear()

        self.assertIsNone(self.client.get('foo'))
        self.assertIsNone(self.client.get('bar'))

    def test_exists(self):
        self.client['foo'] = 'bar'
        # assertIn/assertNotIn give clearer failure messages than
        # assertTrue('foo' in ...).
        self.assertIn('foo', self.client)

        del self.client['foo']
        self.assertNotIn('foo', self.client)

    def test_update(self):
        self.client.update(foo='bar', bar='foo')
        self.assertEqual(self.client.get('foo'), 'bar')
        self.assertEqual(self.client.get('bar'), 'foo')

    def test_setdefault(self):
        ret = self.client.setdefault("foo", "bar")
        self.assertEqual(ret, "bar")

        # A second setdefault must keep the original value.
        ret = self.client.setdefault("foo", "baaaar")
        self.assertEqual(ret, "bar")
|
|
@ -1,38 +0,0 @@
|
|||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslotest import base
|
||||
|
||||
from openstack.common.cache._backends import memory
|
||||
from openstack.common.cache import cache
|
||||
|
||||
|
||||
class TestCacheModule(base.BaseTestCase):
    """Sanity checks for cache.get_cache() and its oslo.config hooks."""

    def test_oslo_config(self):
        """A registered cache_url option selects the memory backend."""
        conf = cfg.ConfigOpts()
        cache.register_oslo_configs(conf)
        backend = cache.get_cache(conf.cache_url)
        self.assertIsInstance(backend, memory.MemoryBackend)

    def test_get_cache(self):
        """With no URL, get_cache() defaults to the memory backend."""
        backend = cache.get_cache()
        self.assertIsInstance(backend, memory.MemoryBackend)

    def test_get_cache_options(self):
        """Query-string options are parsed and applied to the backend."""
        backend = cache.get_cache('memory://?default_ttl=4')
        self.assertIsInstance(backend, memory.MemoryBackend)
        self.assertEqual(backend._options, {'default_ttl': '4'})
        self.assertEqual(backend._default_ttl, 4)
|
|
@ -1,114 +0,0 @@
|
|||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import time
|
||||
|
||||
import mock
|
||||
|
||||
from tests.unit.cache import base
|
||||
|
||||
|
||||
class MemorycacheTest(base.CacheBaseTest):
    """Test memory backend

    Since it is the default driver, nothing
    has to be done here.
    """

    cache_url = 'memory://'

    def test_timeout(self):
        # Override the base test: mock time.time() instead of sleeping,
        # which is only possible for the in-process memory backend.
        now = time.time()
        with mock.patch('time.time') as time_mock:
            time_mock.return_value = now
            self.client.set('foo', 'bar', ttl=3)
            time_mock.return_value = now + 1
            self.assertEqual(self.client.get('foo'), 'bar')
            # At exactly ttl seconds the entry is treated as expired.
            time_mock.return_value = now + 3
            self.assertIsNone(self.client.get('foo'))

    def test_timeout_unset(self):
        now = time.time()
        with mock.patch('time.time') as time_mock:
            time_mock.return_value = now
            # Four keys with staggered TTLs of 3..6 seconds.
            self.client.set('foo', 'bar', ttl=3)
            self.client.set('fooo', 'bar', ttl=4)
            self.client.set('foooo', 'bar', ttl=5)
            self.client.set('fooooo', 'bar', ttl=6)
            time_mock.return_value = now + 1
            self.assertEqual(self.client.get('foo'), 'bar')
            self.assertEqual(self.client.get('fooo'), 'bar')
            self.assertEqual(self.client.get('foooo'), 'bar')
            self.assertEqual(self.client.get('fooooo'), 'bar')

            # Jump past the first three TTLs; deleting 'foo' must leave
            # every already-expired entry unreadable while the still-live
            # key survives.
            time_mock.return_value = now + 5
            del self.client['foo']
            self.assertIsNone(self.client.get('foo'))
            self.assertIsNone(self.client.get('fooo'))
            self.assertIsNone(self.client.get('foooo'))
            self.assertEqual(self.client.get('fooooo'), 'bar')

    def test_timeout_unset_pop(self):
        now = time.time()
        with mock.patch('time.time') as time_mock:
            time_mock.return_value = now
            self.client.set('foo', 'bar', ttl=3)
            self.client.set('fooo', 'bar', ttl=4)
            self.client.set('foooo', 'bar', ttl=5)
            self.client.set('fooooo', 'bar', ttl=6)
            time_mock.return_value = now + 1
            self.assertEqual(self.client.get('foo'), 'bar')
            self.assertEqual(self.client.get('fooo'), 'bar')
            self.assertEqual(self.client.get('foooo'), 'bar')
            self.assertEqual(self.client.get('fooooo'), 'bar')

            time_mock.return_value = now + 4

            # NOTE(flaper87): Let unset delete foooo and timeout
            # expire foo and fooo.
            del self.client['foooo']
            self.assertIsNone(self.client.get('foo'))
            self.assertIsNone(self.client.get('fooo'))
            self.assertIsNone(self.client.get('foooo'))
            self.assertEqual(self.client.get('fooooo'), 'bar')

    def test_unset_keys_expires(self):
        # NOTE(kgriffs): The only way to verify the
        # side-effects of deleting a cache entry is
        # to use a white-box test. This test was
        # added to verify a fix for a bug that was
        # preventing keys from being removed from
        # _keys_expires when the value for the
        # key was deleted.

        # NOTE(kgriffs): _keys_expires is only used
        # to track entries with a non-zero TTL.
        ttl = 5

        now = int(time.time())
        expiration = now + ttl

        with mock.patch('time.time') as time_mock:
            time_mock.return_value = now
            self.client.set('foo', 'bar', ttl=ttl)

            # NOTE(review): this reassignment duplicates the value
            # computed above (ttl == 5); redundant but harmless.
            expiration = now + 5

            keyset = self.client._keys_expires[expiration]
            self.assertEqual(len(keyset), 1)

            del self.client['foo']

            # The expiry bucket must no longer reference the deleted key.
            keyset = self.client._keys_expires[expiration]
            self.assertEqual(len(keyset), 0)
|
|
@ -1,189 +0,0 @@
|
|||
# Copyright 2013 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Unit Tests for crypto utils.
|
||||
"""
|
||||
|
||||
from oslotest import base as test_base
|
||||
import six
|
||||
|
||||
from openstack.common.crypto import utils as cryptoutils
|
||||
|
||||
# Shorthand: convert an int (0-255) to a single byte on both Python 2 and 3.
bchr = six.int2byte
|
||||
|
||||
|
||||
class CryptoUtilsTestCase(test_base.BaseTestCase):
    """Tests for HKDF and SymmetricCrypto in crypto.utils."""

    # Uses Tests from RFC5869
    def _test_HKDF(self, ikm, prk, okm, length,
                   salt=None, info=b'', hashtype='SHA256'):
        """Run one RFC 5869 vector: check extract() then expand()."""
        hkdf = cryptoutils.HKDF(hashtype=hashtype)

        tprk = hkdf.extract(ikm, salt=salt)
        self.assertEqual(prk, tprk)

        tokm = hkdf.expand(prk, info, length)
        self.assertEqual(okm, tokm)

    def test_HKDF_1(self):
        ikm = b'\x0b' * 22
        salt = b''.join(map(lambda x: bchr(x), range(0x00, 0x0d)))
        info = b''.join(map(lambda x: bchr(x), range(0xf0, 0xfa)))
        length = 42

        prk = (b'\x07\x77\x09\x36\x2c\x2e\x32\xdf\x0d\xdc\x3f\x0d\xc4\x7b'
               b'\xba\x63\x90\xb6\xc7\x3b\xb5\x0f\x9c\x31\x22\xec\x84\x4a'
               b'\xd7\xc2\xb3\xe5')

        okm = (b'\x3c\xb2\x5f\x25\xfa\xac\xd5\x7a\x90\x43\x4f\x64\xd0\x36'
               b'\x2f\x2a\x2d\x2d\x0a\x90\xcf\x1a\x5a\x4c\x5d\xb0\x2d\x56'
               b'\xec\xc4\xc5\xbf\x34\x00\x72\x08\xd5\xb8\x87\x18\x58\x65')

        self._test_HKDF(ikm, prk, okm, length, salt, info)

    def test_HKDF_2(self):
        ikm = b''.join(map(lambda x: bchr(x), range(0x00, 0x50)))
        salt = b''.join(map(lambda x: bchr(x), range(0x60, 0xb0)))
        info = b''.join(map(lambda x: bchr(x), range(0xb0, 0x100)))
        length = 82

        prk = (b'\x06\xa6\xb8\x8c\x58\x53\x36\x1a\x06\x10\x4c\x9c\xeb\x35'
               b'\xb4\x5c\xef\x76\x00\x14\x90\x46\x71\x01\x4a\x19\x3f\x40'
               b'\xc1\x5f\xc2\x44')

        okm = (b'\xb1\x1e\x39\x8d\xc8\x03\x27\xa1\xc8\xe7\xf7\x8c\x59\x6a'
               b'\x49\x34\x4f\x01\x2e\xda\x2d\x4e\xfa\xd8\xa0\x50\xcc\x4c'
               b'\x19\xaf\xa9\x7c\x59\x04\x5a\x99\xca\xc7\x82\x72\x71\xcb'
               b'\x41\xc6\x5e\x59\x0e\x09\xda\x32\x75\x60\x0c\x2f\x09\xb8'
               b'\x36\x77\x93\xa9\xac\xa3\xdb\x71\xcc\x30\xc5\x81\x79\xec'
               b'\x3e\x87\xc1\x4c\x01\xd5\xc1\xf3\x43\x4f\x1d\x87')

        self._test_HKDF(ikm, prk, okm, length, salt, info)

    def test_HKDF_3(self):
        ikm = b'\x0b' * 22
        length = 42

        prk = (b'\x19\xef\x24\xa3\x2c\x71\x7b\x16\x7f\x33\xa9\x1d\x6f\x64'
               b'\x8b\xdf\x96\x59\x67\x76\xaf\xdb\x63\x77\xac\x43\x4c\x1c'
               b'\x29\x3c\xcb\x04')

        okm = (b'\x8d\xa4\xe7\x75\xa5\x63\xc1\x8f\x71\x5f\x80\x2a\x06\x3c'
               b'\x5a\x31\xb8\xa1\x1f\x5c\x5e\xe1\x87\x9e\xc3\x45\x4e\x5f'
               b'\x3c\x73\x8d\x2d\x9d\x20\x13\x95\xfa\xa4\xb6\x1a\x96\xc8')

        self._test_HKDF(ikm, prk, okm, length)

    def test_HKDF_4(self):
        ikm = b'\x0b' * 11
        salt = b''.join(map(lambda x: bchr(x), range(0x00, 0x0d)))
        info = b''.join(map(lambda x: bchr(x), range(0xf0, 0xfa)))
        length = 42

        prk = (b'\x9b\x6c\x18\xc4\x32\xa7\xbf\x8f\x0e\x71\xc8\xeb\x88\xf4'
               b'\xb3\x0b\xaa\x2b\xa2\x43')

        okm = (b'\x08\x5a\x01\xea\x1b\x10\xf3\x69\x33\x06\x8b\x56\xef\xa5'
               b'\xad\x81\xa4\xf1\x4b\x82\x2f\x5b\x09\x15\x68\xa9\xcd\xd4'
               b'\xf1\x55\xfd\xa2\xc2\x2e\x42\x24\x78\xd3\x05\xf3\xf8\x96')

        self._test_HKDF(ikm, prk, okm, length, salt, info, hashtype='SHA')

    def test_HKDF_5(self):
        ikm = b''.join(map(lambda x: bchr(x), range(0x00, 0x50)))
        salt = b''.join(map(lambda x: bchr(x), range(0x60, 0xb0)))
        info = b''.join(map(lambda x: bchr(x), range(0xb0, 0x100)))
        length = 82

        prk = (b'\x8a\xda\xe0\x9a\x2a\x30\x70\x59\x47\x8d\x30\x9b\x26\xc4'
               b'\x11\x5a\x22\x4c\xfa\xf6')

        okm = (b'\x0b\xd7\x70\xa7\x4d\x11\x60\xf7\xc9\xf1\x2c\xd5\x91\x2a'
               b'\x06\xeb\xff\x6a\xdc\xae\x89\x9d\x92\x19\x1f\xe4\x30\x56'
               b'\x73\xba\x2f\xfe\x8f\xa3\xf1\xa4\xe5\xad\x79\xf3\xf3\x34'
               b'\xb3\xb2\x02\xb2\x17\x3c\x48\x6e\xa3\x7c\xe3\xd3\x97\xed'
               b'\x03\x4c\x7f\x9d\xfe\xb1\x5c\x5e\x92\x73\x36\xd0\x44\x1f'
               b'\x4c\x43\x00\xe2\xcf\xf0\xd0\x90\x0b\x52\xd3\xb4')

        self._test_HKDF(ikm, prk, okm, length, salt, info, hashtype='SHA')

    def test_HKDF_6(self):
        ikm = b'\x0b' * 22
        length = 42

        prk = (b'\xda\x8c\x8a\x73\xc7\xfa\x77\x28\x8e\xc6\xf5\xe7\xc2\x97'
               b'\x78\x6a\xa0\xd3\x2d\x01')

        okm = (b'\x0a\xc1\xaf\x70\x02\xb3\xd7\x61\xd1\xe5\x52\x98\xda\x9d'
               b'\x05\x06\xb9\xae\x52\x05\x72\x20\xa3\x06\xe0\x7b\x6b\x87'
               b'\xe8\xdf\x21\xd0\xea\x00\x03\x3d\xe0\x39\x84\xd3\x49\x18')

        self._test_HKDF(ikm, prk, okm, length, hashtype='SHA')

    def test_HKDF_7(self):
        ikm = b'\x0c' * 22
        length = 42

        prk = (b'\x2a\xdc\xca\xda\x18\x77\x9e\x7c\x20\x77\xad\x2e\xb1\x9d'
               b'\x3f\x3e\x73\x13\x85\xdd')

        okm = (b'\x2c\x91\x11\x72\x04\xd7\x45\xf3\x50\x0d\x63\x6a\x62\xf6'
               b'\x4f\x0a\xb3\xba\xe5\x48\xaa\x53\xd4\x23\xb0\xd1\xf2\x7e'
               b'\xbb\xa6\xf5\xe5\x67\x3a\x08\x1d\x70\xcc\xe7\xac\xfc\x48')

        self._test_HKDF(ikm, prk, okm, length, hashtype='SHA')

    def test_HKDF_8(self):
        ikm = b'\x0b' * 22
        prk = (b'\x19\xef\x24\xa3\x2c\x71\x7b\x16\x7f\x33\xa9\x1d\x6f\x64'
               b'\x8b\xdf\x96\x59\x67\x76\xaf\xdb\x63\x77\xac\x43\x4c\x1c'
               b'\x29\x3c\xcb\x04')

        # The original try/except-pass would silently succeed even if no
        # exception was raised; assertRaises actually enforces that
        # HKDFOutputLengthTooLong is thrown for an oversized request.
        self.assertRaises(cryptoutils.HKDFOutputLengthTooLong,
                          self._test_HKDF, ikm, prk, None, 1000000)

    def test_SymmetricCrypto_encrypt_string(self):
        msg = b'Plain Text'

        skc = cryptoutils.SymmetricCrypto()
        key = skc.new_key(16)
        cipher = skc.encrypt(key, msg)
        plain = skc.decrypt(key, cipher)
        self.assertEqual(msg, plain)

    def test_SymmetricCrypto_encrypt_blocks(self):
        # Exercise every message length across two cipher blocks to
        # cover all padding cases.
        cb = 16
        et = 'AES'

        skc = cryptoutils.SymmetricCrypto(enctype=et)
        key = skc.new_key(16)
        msg = skc.new_key(cb * 2)

        for i in range(cb * 2):
            cipher = skc.encrypt(key, msg[0:i], b64encode=False)
            plain = skc.decrypt(key, cipher, b64decode=False)
            self.assertEqual(msg[0:i], plain)

    def test_SymmetricCrypto_signing(self):
        # Known-answer test: signature precomputed for this key/message.
        msg = b'Authenticated Message'
        signature = b'KWjl6i30RMjc5PjnaccRwTPKTRCWM6sPpmGS2bxm5fQ='
        skey = b'L\xdd0\xf3\xb4\xc6\xe2p\xef\xc7\xbd\xaa\xc9eNC'

        skc = cryptoutils.SymmetricCrypto()
        validate = skc.sign(skey, msg)
        self.assertEqual(signature, validate)
|
|
@ -1,23 +0,0 @@
|
|||
# Copyright 2012 IBM Corp.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
class FakeDriver(object):
    """Trivial driver double with an optional constructor argument."""

    def __init__(self, first_arg=True):
        # Record the argument so tests can assert on how we were built.
        self.first_arg = first_arg
|
||||
|
||||
|
||||
class FakeDriver2(object):
    """Trivial driver double whose constructor argument is mandatory."""

    def __init__(self, first_arg):
        # Record the argument so tests can assert on how we were built.
        self.first_arg = first_arg
|
|
@ -1,72 +0,0 @@
|
|||
# Copyright 2012 Intel Inc, OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Fakes For filter and weight tests.
|
||||
"""
|
||||
|
||||
import gettext
|
||||
|
||||
from openstack.common.scheduler import weights
|
||||
|
||||
|
||||
class FakeWeigher1(weights.BaseHostWeigher):
    """Fake weigher; intentionally does not call super().__init__()."""

    def __init__(self):
        pass
|
||||
|
||||
|
||||
class FakeWeigher2(weights.BaseHostWeigher):
    """Second fake weigher; intentionally does not call super().__init__()."""

    def __init__(self):
        pass
|
||||
|
||||
|
||||
class FakeClass(object):
    """Plain do-nothing class used as a non-weigher in loading tests."""

    def __init__(self):
        pass
|
||||
|
||||
|
||||
class FakeTranslations(gettext.GNUTranslations):
    """A test GNUTranslations class that takes a map of msg -> translations."""

    def __init__(self, translations):
        # Intentionally skip the base initializer: lookups come straight
        # from the provided mapping, not from a compiled .mo file.
        self.translations = translations

    # used by Python 3
    def gettext(self, msgid):
        return self.translations.get(msgid, msgid)

    # used by Python 2
    def ugettext(self, msgid):
        return self.translations.get(msgid, msgid)

    @staticmethod
    def translator(locales_map):
        """Returns a mock gettext.translation function that uses
        individual TestTranslations to translate in the given locales.

        :param locales_map: A map from locale name to a translations map.
                            {
                                'es': {'Hi': 'Hola', 'Bye': 'Adios'},
                                'zh': {'Hi': 'Ni Hao', 'Bye': 'Zaijian'}
                            }
        """
        def _translation(domain, localedir=None,
                         languages=None, fallback=None):
            # Only the first requested language is consulted; unknown or
            # missing languages fall back to the identity translation.
            if languages and languages[0] in locales_map:
                return FakeTranslations(locales_map[languages[0]])
            return gettext.NullTranslations()
        return _translation
|
|
@ -1,53 +0,0 @@
|
|||
# Copyright 2012 Intel Inc, OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Fakes For filters tests.
|
||||
"""
|
||||
|
||||
import six
|
||||
|
||||
|
||||
class FakeHostManager(object):
    """Defines fake hosts.

    host1: free_ram_mb=1024-512-512=0, free_disk_gb=1024-512-512=0
    host2: free_ram_mb=2048-512=1536 free_disk_gb=2048-512=1536
    host3: free_ram_mb=4096-1024=3072 free_disk_gb=4096-1024=3072
    host4: free_ram_mb=8192 free_disk_gb=8192
    """

    # host -> reported free memory, in bytes.
    _MEMORY_FREE = {
        'host1': 1073741824,
        'host2': 2147483648,
        'host3': 3221225472,
        'host4': 999999999,
    }

    def __init__(self):
        self.service_states = {
            host: {'compute': {'host_memory_free': mem}}
            for host, mem in self._MEMORY_FREE.items()
        }
|
||||
|
||||
|
||||
class FakeHostState(object):
    """Minimal stand-in for a scheduler HostState.

    Exposes *host* plus one attribute per entry of *attribute_dict*.
    """

    def __init__(self, host, attribute_dict):
        self.host = host
        # dict.items() works on both Python 2 and 3; the six.iteritems
        # indirection is unnecessary here.
        for key, val in attribute_dict.items():
            setattr(self, key, val)
|
|
@ -1,168 +0,0 @@
|
|||
# Copyright (c) 2013 OpenStack Foundation.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import mock
|
||||
from oslotest import base as test_base
|
||||
from oslotest import moxstubout
|
||||
|
||||
from openstack.common.scheduler import base_filter
|
||||
|
||||
|
||||
class TestBaseFilter(test_base.BaseTestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(TestBaseFilter, self).setUp()
|
||||
self.mox = self.useFixture(moxstubout.MoxStubout()).mox
|
||||
self.filter = base_filter.BaseFilter()
|
||||
|
||||
def test_filter_one_is_called(self):
|
||||
filters = [1, 2, 3, 4]
|
||||
filter_properties = {'x': 'y'}
|
||||
self.mox.StubOutWithMock(self.filter, '_filter_one')
|
||||
|
||||
self.filter._filter_one(1, filter_properties).AndReturn(False)
|
||||
self.filter._filter_one(2, filter_properties).AndReturn(True)
|
||||
self.filter._filter_one(3, filter_properties).AndReturn(True)
|
||||
self.filter._filter_one(4, filter_properties).AndReturn(False)
|
||||
|
||||
self.mox.ReplayAll()
|
||||
|
||||
result = list(self.filter.filter_all(filters, filter_properties))
|
||||
self.assertEqual([2, 3], result)
|
||||
|
||||
|
||||
class FakeExtension(object):
|
||||
|
||||
def __init__(self, plugin):
|
||||
self.plugin = plugin
|
||||
|
||||
|
||||
class BaseFakeFilter(base_filter.BaseFilter):
|
||||
pass
|
||||
|
||||
|
||||
class FakeFilter1(BaseFakeFilter):
|
||||
"""Derives from BaseFakeFilter and has a fake entry point defined.
|
||||
|
||||
Entry point is returned by fake ExtensionManager.
|
||||
Should be included in the output of all_classes.
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class FakeFilter2(BaseFakeFilter):
|
||||
"""Derives from BaseFakeFilter but has no entry point.
|
||||
|
||||
Should be not included in all_classes.
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class FakeFilter3(base_filter.BaseFilter):
|
||||
"""Does not derive from BaseFakeFilter.
|
||||
|
||||
Should not be included.
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class FakeFilter4(BaseFakeFilter):
|
||||
"""Derives from BaseFakeFilter and has an entry point.
|
||||
|
||||
Should be included.
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class FakeFilter5(BaseFakeFilter):
|
||||
"""Derives from BaseFakeFilter but has no entry point.
|
||||
|
||||
Should not be included.
|
||||
"""
|
||||
run_filter_once_per_request = True
|
||||
pass
|
||||
|
||||
|
||||
class FakeExtensionManager(list):
|
||||
|
||||
def __init__(self, namespace):
|
||||
classes = [FakeFilter1, FakeFilter3, FakeFilter4]
|
||||
exts = map(FakeExtension, classes)
|
||||
super(FakeExtensionManager, self).__init__(exts)
|
||||
self.namespace = namespace
|
||||
|
||||
|
||||
class TestBaseFilterHandler(test_base.BaseTestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(TestBaseFilterHandler, self).setUp()
|
||||
self.stubs = self.useFixture(moxstubout.MoxStubout()).stubs
|
||||
self.stubs.Set(base_filter.base_handler.extension, 'ExtensionManager',
|
||||
FakeExtensionManager)
|
||||
self.handler = base_filter.BaseFilterHandler(BaseFakeFilter,
|
||||
'fake_filters')
|
||||
|
||||
def test_get_all_classes(self):
|
||||
# In order for a FakeFilter to be returned by get_all_classes, it has
|
||||
# to comply with these rules:
|
||||
# * It must be derived from BaseFakeFilter
|
||||
# AND
|
||||
# * It must have a python entrypoint assigned (returned by
|
||||
# FakeExtensionManager)
|
||||
expected = [FakeFilter1, FakeFilter4]
|
||||
result = self.handler.get_all_classes()
|
||||
self.assertEqual(expected, result)
|
||||
|
||||
def _get_filtered_objects(self, filter_classes, index=0):
|
||||
filter_objs_initial = [1, 2, 3, 4]
|
||||
filter_properties = {'x': 'y'}
|
||||
return self.handler.get_filtered_objects(filter_classes,
|
||||
filter_objs_initial,
|
||||
filter_properties,
|
||||
index)
|
||||
|
||||
@mock.patch.object(FakeFilter4, 'filter_all')
|
||||
@mock.patch.object(FakeFilter3, 'filter_all', return_value=None)
|
||||
def test_get_filtered_objects_return_none(self, fake3_filter_all,
|
||||
fake4_filter_all):
|
||||
filter_classes = [FakeFilter1, FakeFilter2, FakeFilter3, FakeFilter4]
|
||||
result = self._get_filtered_objects(filter_classes)
|
||||
self.assertIsNone(result)
|
||||
self.assertFalse(fake4_filter_all.called)
|
||||
|
||||
def test_get_filtered_objects(self):
|
||||
filter_objs_expected = [1, 2, 3, 4]
|
||||
filter_classes = [FakeFilter1, FakeFilter2, FakeFilter3, FakeFilter4]
|
||||
result = self._get_filtered_objects(filter_classes)
|
||||
self.assertEqual(filter_objs_expected, result)
|
||||
|
||||
def test_get_filtered_objects_with_filter_run_once(self):
|
||||
filter_objs_expected = [1, 2, 3, 4]
|
||||
filter_classes = [FakeFilter5]
|
||||
|
||||
with mock.patch.object(FakeFilter5, 'filter_all',
|
||||
return_value=filter_objs_expected
|
||||
) as fake5_filter_all:
|
||||
result = self._get_filtered_objects(filter_classes)
|
||||
self.assertEqual(filter_objs_expected, result)
|
||||
self.assertEqual(1, fake5_filter_all.call_count)
|
||||
|
||||
result = self._get_filtered_objects(filter_classes, index=1)
|
||||
self.assertEqual(filter_objs_expected, result)
|
||||
self.assertEqual(1, fake5_filter_all.call_count)
|
||||
|
||||
result = self._get_filtered_objects(filter_classes, index=2)
|
||||
self.assertEqual(filter_objs_expected, result)
|
||||
self.assertEqual(1, fake5_filter_all.call_count)
|
|
@ -1,704 +0,0 @@
|
|||
# Copyright 2011 OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Tests For Scheduler Host Filters.
|
||||
"""
|
||||
|
||||
from oslo_context import context
|
||||
from oslo_serialization import jsonutils
|
||||
from oslotest import base as test_base
|
||||
|
||||
from openstack.common.scheduler import filters
|
||||
from openstack.common.scheduler.filters import extra_specs_ops
|
||||
from tests.unit.scheduler import fake_hosts as fakes
|
||||
|
||||
|
||||
class TestFilter(filters.BaseHostFilter):
|
||||
pass
|
||||
|
||||
|
||||
class TestBogusFilter(object):
|
||||
"""Class that doesn't inherit from BaseHostFilter."""
|
||||
pass
|
||||
|
||||
|
||||
class ExtraSpecsOpsTestCase(test_base.BaseTestCase):
|
||||
def _do_extra_specs_ops_test(self, value, req, matches):
|
||||
assertion = self.assertTrue if matches else self.assertFalse
|
||||
assertion(extra_specs_ops.match(value, req))
|
||||
|
||||
def test_extra_specs_matches_simple(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='1',
|
||||
req='1',
|
||||
matches=True)
|
||||
|
||||
def test_extra_specs_fails_simple(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='',
|
||||
req='1',
|
||||
matches=False)
|
||||
|
||||
def test_extra_specs_fails_simple2(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='3',
|
||||
req='1',
|
||||
matches=False)
|
||||
|
||||
def test_extra_specs_fails_simple3(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='222',
|
||||
req='2',
|
||||
matches=False)
|
||||
|
||||
def test_extra_specs_fails_with_bogus_ops(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='4',
|
||||
req='> 2',
|
||||
matches=False)
|
||||
|
||||
def test_extra_specs_matches_with_op_eq(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='123',
|
||||
req='= 123',
|
||||
matches=True)
|
||||
|
||||
def test_extra_specs_matches_with_op_eq2(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='124',
|
||||
req='= 123',
|
||||
matches=True)
|
||||
|
||||
def test_extra_specs_fails_with_op_eq(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='34',
|
||||
req='= 234',
|
||||
matches=False)
|
||||
|
||||
def test_extra_specs_fails_with_op_eq3(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='34',
|
||||
req='=',
|
||||
matches=False)
|
||||
|
||||
def test_extra_specs_matches_with_op_seq(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='123',
|
||||
req='s== 123',
|
||||
matches=True)
|
||||
|
||||
def test_extra_specs_fails_with_op_seq(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='1234',
|
||||
req='s== 123',
|
||||
matches=False)
|
||||
|
||||
def test_extra_specs_matches_with_op_sneq(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='1234',
|
||||
req='s!= 123',
|
||||
matches=True)
|
||||
|
||||
def test_extra_specs_fails_with_op_sneq(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='123',
|
||||
req='s!= 123',
|
||||
matches=False)
|
||||
|
||||
def test_extra_specs_fails_with_op_sge(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='1000',
|
||||
req='s>= 234',
|
||||
matches=False)
|
||||
|
||||
def test_extra_specs_fails_with_op_sle(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='1234',
|
||||
req='s<= 1000',
|
||||
matches=False)
|
||||
|
||||
def test_extra_specs_fails_with_op_sl(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='2',
|
||||
req='s< 12',
|
||||
matches=False)
|
||||
|
||||
def test_extra_specs_fails_with_op_sg(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='12',
|
||||
req='s> 2',
|
||||
matches=False)
|
||||
|
||||
def test_extra_specs_matches_with_op_in(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='12311321',
|
||||
req='<in> 11',
|
||||
matches=True)
|
||||
|
||||
def test_extra_specs_matches_with_op_in2(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='12311321',
|
||||
req='<in> 12311321',
|
||||
matches=True)
|
||||
|
||||
def test_extra_specs_matches_with_op_in3(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='12311321',
|
||||
req='<in> 12311321 <in>',
|
||||
matches=True)
|
||||
|
||||
def test_extra_specs_fails_with_op_in(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='12310321',
|
||||
req='<in> 11',
|
||||
matches=False)
|
||||
|
||||
def test_extra_specs_fails_with_op_in2(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='12310321',
|
||||
req='<in> 11 <in>',
|
||||
matches=False)
|
||||
|
||||
def test_extra_specs_matches_with_op_is(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value=True,
|
||||
req='<is> True',
|
||||
matches=True)
|
||||
|
||||
def test_extra_specs_matches_with_op_is2(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value=False,
|
||||
req='<is> False',
|
||||
matches=True)
|
||||
|
||||
def test_extra_specs_matches_with_op_is3(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value=False,
|
||||
req='<is> Nonsense',
|
||||
matches=True)
|
||||
|
||||
def test_extra_specs_fails_with_op_is(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value=True,
|
||||
req='<is> False',
|
||||
matches=False)
|
||||
|
||||
def test_extra_specs_fails_with_op_is2(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value=False,
|
||||
req='<is> True',
|
||||
matches=False)
|
||||
|
||||
def test_extra_specs_matches_with_op_or(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='12',
|
||||
req='<or> 11 <or> 12',
|
||||
matches=True)
|
||||
|
||||
def test_extra_specs_matches_with_op_or2(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='12',
|
||||
req='<or> 11 <or> 12 <or>',
|
||||
matches=True)
|
||||
|
||||
def test_extra_specs_fails_with_op_or(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='13',
|
||||
req='<or> 11 <or> 12',
|
||||
matches=False)
|
||||
|
||||
def test_extra_specs_fails_with_op_or2(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='13',
|
||||
req='<or> 11 <or> 12 <or>',
|
||||
matches=False)
|
||||
|
||||
def test_extra_specs_matches_with_op_le(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='2',
|
||||
req='<= 10',
|
||||
matches=True)
|
||||
|
||||
def test_extra_specs_fails_with_op_le(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='3',
|
||||
req='<= 2',
|
||||
matches=False)
|
||||
|
||||
def test_extra_specs_matches_with_op_ge(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='3',
|
||||
req='>= 1',
|
||||
matches=True)
|
||||
|
||||
def test_extra_specs_fails_with_op_ge(self):
|
||||
self._do_extra_specs_ops_test(
|
||||
value='2',
|
||||
req='>= 3',
|
||||
matches=False)
|
||||
|
||||
|
||||
class HostFiltersTestCase(test_base.BaseTestCase):
|
||||
"""Test case for host filters."""
|
||||
|
||||
def setUp(self):
|
||||
super(HostFiltersTestCase, self).setUp()
|
||||
self.context = context.RequestContext('fake', 'fake')
|
||||
self.json_query = jsonutils.dumps(
|
||||
['and', ['>=', '$free_ram_mb', 1024],
|
||||
['>=', '$free_disk_mb', 200 * 1024]])
|
||||
namespace = 'openstack.common.scheduler.filters'
|
||||
filter_handler = filters.HostFilterHandler(namespace)
|
||||
classes = filter_handler.get_all_classes()
|
||||
self.class_map = {}
|
||||
for cls in classes:
|
||||
self.class_map[cls.__name__] = cls
|
||||
|
||||
def test_all_filters(self):
|
||||
# Double check at least a couple of known filters exist
|
||||
self.assertTrue('JsonFilter' in self.class_map)
|
||||
self.assertTrue('CapabilitiesFilter' in self.class_map)
|
||||
self.assertTrue('AvailabilityZoneFilter' in self.class_map)
|
||||
self.assertTrue('IgnoreAttemptedHostsFilter' in self.class_map)
|
||||
|
||||
def _do_test_type_filter_extra_specs(self, ecaps, especs, passes):
|
||||
filt_cls = self.class_map['CapabilitiesFilter']()
|
||||
capabilities = {'enabled': True}
|
||||
capabilities.update(ecaps)
|
||||
service = {'disabled': False}
|
||||
filter_properties = {'resource_type': {'name': 'fake_type',
|
||||
'extra_specs': especs}}
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'free_capacity_gb': 1024,
|
||||
'capabilities': capabilities,
|
||||
'service': service})
|
||||
assertion = self.assertTrue if passes else self.assertFalse
|
||||
assertion(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
def test_capability_filter_passes_extra_specs_simple(self):
|
||||
self._do_test_type_filter_extra_specs(
|
||||
ecaps={'opt1': '1', 'opt2': '2'},
|
||||
especs={'opt1': '1', 'opt2': '2'},
|
||||
passes=True)
|
||||
|
||||
def test_capability_filter_fails_extra_specs_simple(self):
|
||||
self._do_test_type_filter_extra_specs(
|
||||
ecaps={'opt1': '1', 'opt2': '2'},
|
||||
especs={'opt1': '1', 'opt2': '222'},
|
||||
passes=False)
|
||||
|
||||
def test_capability_filter_passes_extra_specs_complex(self):
|
||||
self._do_test_type_filter_extra_specs(
|
||||
ecaps={'opt1': 10, 'opt2': 5},
|
||||
especs={'opt1': '>= 2', 'opt2': '<= 8'},
|
||||
passes=True)
|
||||
|
||||
def test_capability_filter_fails_extra_specs_complex(self):
|
||||
self._do_test_type_filter_extra_specs(
|
||||
ecaps={'opt1': 10, 'opt2': 5},
|
||||
especs={'opt1': '>= 2', 'opt2': '>= 8'},
|
||||
passes=False)
|
||||
|
||||
def test_capability_filter_passes_scope_extra_specs(self):
|
||||
self._do_test_type_filter_extra_specs(
|
||||
ecaps={'scope_lv1': {'opt1': 10}},
|
||||
especs={'capabilities:scope_lv1:opt1': '>= 2'},
|
||||
passes=True)
|
||||
|
||||
def test_capability_filter_passes_fakescope_extra_specs(self):
|
||||
self._do_test_type_filter_extra_specs(
|
||||
ecaps={'scope_lv1': {'opt1': 10}, 'opt2': 5},
|
||||
especs={'scope_lv1:opt1': '= 2', 'opt2': '>= 3'},
|
||||
passes=True)
|
||||
|
||||
def test_capability_filter_fails_scope_extra_specs(self):
|
||||
self._do_test_type_filter_extra_specs(
|
||||
ecaps={'scope_lv1': {'opt1': 10}},
|
||||
especs={'capabilities:scope_lv1:opt1': '<= 2'},
|
||||
passes=False)
|
||||
|
||||
def test_capability_filter_passes_multi_level_scope_extra_specs(self):
|
||||
self._do_test_type_filter_extra_specs(
|
||||
ecaps={'scope_lv0': {'scope_lv1':
|
||||
{'scope_lv2': {'opt1': 10}}}},
|
||||
especs={'capabilities:scope_lv0:scope_lv1:scope_lv2:opt1': '>= 2'},
|
||||
passes=True)
|
||||
|
||||
def test_capability_filter_fails_wrong_scope_extra_specs(self):
|
||||
self._do_test_type_filter_extra_specs(
|
||||
ecaps={'scope_lv0': {'opt1': 10}},
|
||||
especs={'capabilities:scope_lv1:opt1': '>= 2'},
|
||||
passes=False)
|
||||
|
||||
def test_json_filter_passes(self):
|
||||
filt_cls = self.class_map['JsonFilter']()
|
||||
filter_properties = {'resource_type': {'memory_mb': 1024,
|
||||
'root_gb': 200,
|
||||
'ephemeral_gb': 0},
|
||||
'scheduler_hints': {'query': self.json_query}}
|
||||
capabilities = {'enabled': True}
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'free_ram_mb': 1024,
|
||||
'free_disk_mb': 200 * 1024,
|
||||
'capabilities': capabilities})
|
||||
self.assertTrue(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
def test_json_filter_passes_with_no_query(self):
|
||||
filt_cls = self.class_map['JsonFilter']()
|
||||
filter_properties = {'resource_type': {'memory_mb': 1024,
|
||||
'root_gb': 200,
|
||||
'ephemeral_gb': 0}}
|
||||
capabilities = {'enabled': True}
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'free_ram_mb': 0,
|
||||
'free_disk_mb': 0,
|
||||
'capabilities': capabilities})
|
||||
self.assertTrue(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
def test_json_filter_fails_on_memory(self):
|
||||
filt_cls = self.class_map['JsonFilter']()
|
||||
filter_properties = {'resource_type': {'memory_mb': 1024,
|
||||
'root_gb': 200,
|
||||
'ephemeral_gb': 0},
|
||||
'scheduler_hints': {'query': self.json_query}}
|
||||
capabilities = {'enabled': True}
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'free_ram_mb': 1023,
|
||||
'free_disk_mb': 200 * 1024,
|
||||
'capabilities': capabilities})
|
||||
self.assertFalse(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
def test_json_filter_fails_on_disk(self):
|
||||
filt_cls = self.class_map['JsonFilter']()
|
||||
filter_properties = {'resource_type': {'memory_mb': 1024,
|
||||
'root_gb': 200,
|
||||
'ephemeral_gb': 0},
|
||||
'scheduler_hints': {'query': self.json_query}}
|
||||
capabilities = {'enabled': True}
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'free_ram_mb': 1024,
|
||||
'free_disk_mb': (200 * 1024) - 1,
|
||||
'capabilities': capabilities})
|
||||
self.assertFalse(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
def test_json_filter_fails_on_caps_disabled(self):
|
||||
filt_cls = self.class_map['JsonFilter']()
|
||||
json_query = jsonutils.dumps(
|
||||
['and', ['>=', '$free_ram_mb', 1024],
|
||||
['>=', '$free_disk_mb', 200 * 1024],
|
||||
'$capabilities.enabled'])
|
||||
filter_properties = {'resource_type': {'memory_mb': 1024,
|
||||
'root_gb': 200,
|
||||
'ephemeral_gb': 0},
|
||||
'scheduler_hints': {'query': json_query}}
|
||||
capabilities = {'enabled': False}
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'free_ram_mb': 1024,
|
||||
'free_disk_mb': 200 * 1024,
|
||||
'capabilities': capabilities})
|
||||
self.assertFalse(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
def test_json_filter_fails_on_service_disabled(self):
|
||||
filt_cls = self.class_map['JsonFilter']()
|
||||
json_query = jsonutils.dumps(
|
||||
['and', ['>=', '$free_ram_mb', 1024],
|
||||
['>=', '$free_disk_mb', 200 * 1024],
|
||||
['not', '$service.disabled']])
|
||||
filter_properties = {'resource_type': {'memory_mb': 1024,
|
||||
'local_gb': 200},
|
||||
'scheduler_hints': {'query': json_query}}
|
||||
capabilities = {'enabled': True}
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'free_ram_mb': 1024,
|
||||
'free_disk_mb': 200 * 1024,
|
||||
'capabilities': capabilities})
|
||||
self.assertFalse(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
def test_json_filter_happy_day(self):
|
||||
"""Test json filter more thoroughly."""
|
||||
filt_cls = self.class_map['JsonFilter']()
|
||||
raw = ['and',
|
||||
'$capabilities.enabled',
|
||||
['=', '$capabilities.opt1', 'match'],
|
||||
['or',
|
||||
['and',
|
||||
['<', '$free_ram_mb', 30],
|
||||
['<', '$free_disk_mb', 300]],
|
||||
['and',
|
||||
['>', '$free_ram_mb', 30],
|
||||
['>', '$free_disk_mb', 300]]]]
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
|
||||
# Passes
|
||||
capabilities = {'enabled': True, 'opt1': 'match'}
|
||||
service = {'disabled': False}
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'free_ram_mb': 10,
|
||||
'free_disk_mb': 200,
|
||||
'capabilities': capabilities,
|
||||
'service': service})
|
||||
self.assertTrue(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
# Passes
|
||||
capabilities = {'enabled': True, 'opt1': 'match'}
|
||||
service = {'disabled': False}
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'free_ram_mb': 40,
|
||||
'free_disk_mb': 400,
|
||||
'capabilities': capabilities,
|
||||
'service': service})
|
||||
self.assertTrue(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
# Fails due to capabilities being disabled
|
||||
capabilities = {'enabled': False, 'opt1': 'match'}
|
||||
service = {'disabled': False}
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'free_ram_mb': 40,
|
||||
'free_disk_mb': 400,
|
||||
'capabilities': capabilities,
|
||||
'service': service})
|
||||
self.assertFalse(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
# Fails due to being exact memory/disk we don't want
|
||||
capabilities = {'enabled': True, 'opt1': 'match'}
|
||||
service = {'disabled': False}
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'free_ram_mb': 30,
|
||||
'free_disk_mb': 300,
|
||||
'capabilities': capabilities,
|
||||
'service': service})
|
||||
self.assertFalse(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
# Fails due to memory lower but disk higher
|
||||
capabilities = {'enabled': True, 'opt1': 'match'}
|
||||
service = {'disabled': False}
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'free_ram_mb': 20,
|
||||
'free_disk_mb': 400,
|
||||
'capabilities': capabilities,
|
||||
'service': service})
|
||||
self.assertFalse(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
# Fails due to capabilities 'opt1' not equal
|
||||
capabilities = {'enabled': True, 'opt1': 'no-match'}
|
||||
service = {'enabled': True}
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'free_ram_mb': 20,
|
||||
'free_disk_mb': 400,
|
||||
'capabilities': capabilities,
|
||||
'service': service})
|
||||
self.assertFalse(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
def test_json_filter_basic_operators(self):
|
||||
filt_cls = self.class_map['JsonFilter']()
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'capabilities': {'enabled': True}})
|
||||
# (operator, arguments, expected_result)
|
||||
ops_to_test = [
|
||||
['=', [1, 1], True],
|
||||
['=', [1, 2], False],
|
||||
['<', [1, 2], True],
|
||||
['<', [1, 1], False],
|
||||
['<', [2, 1], False],
|
||||
['>', [2, 1], True],
|
||||
['>', [2, 2], False],
|
||||
['>', [2, 3], False],
|
||||
['<=', [1, 2], True],
|
||||
['<=', [1, 1], True],
|
||||
['<=', [2, 1], False],
|
||||
['>=', [2, 1], True],
|
||||
['>=', [2, 2], True],
|
||||
['>=', [2, 3], False],
|
||||
['in', [1, 1], True],
|
||||
['in', [1, 1, 2, 3], True],
|
||||
['in', [4, 1, 2, 3], False],
|
||||
['not', [True], False],
|
||||
['not', [False], True],
|
||||
['or', [True, False], True],
|
||||
['or', [False, False], False],
|
||||
['and', [True, True], True],
|
||||
['and', [False, False], False],
|
||||
['and', [True, False], False],
|
||||
# Nested ((True or False) and (2 > 1)) == Passes
|
||||
['and', [['or', True, False], ['>', 2, 1]], True]]
|
||||
|
||||
for (op, args, expected) in ops_to_test:
|
||||
raw = [op] + args
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
self.assertEqual(expected,
|
||||
filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
# This results in [False, True, False, True] and if any are True
|
||||
# then it passes...
|
||||
raw = ['not', True, False, True, False]
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
self.assertTrue(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
# This results in [False, False, False] and if any are True
|
||||
# then it passes...which this doesn't
|
||||
raw = ['not', True, True, True]
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
self.assertFalse(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
def test_json_filter_unknown_operator_raises(self):
|
||||
filt_cls = self.class_map['JsonFilter']()
|
||||
raw = ['!=', 1, 2]
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'capabilities': {'enabled': True}})
|
||||
self.assertRaises(KeyError,
|
||||
filt_cls.host_passes, host, filter_properties)
|
||||
|
||||
def test_json_filter_empty_filters_pass(self):
|
||||
filt_cls = self.class_map['JsonFilter']()
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'capabilities': {'enabled': True}})
|
||||
|
||||
raw = []
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
self.assertTrue(filt_cls.host_passes(host, filter_properties))
|
||||
raw = {}
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
self.assertTrue(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
def test_json_filter_invalid_num_arguments_fails(self):
|
||||
filt_cls = self.class_map['JsonFilter']()
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'capabilities': {'enabled': True}})
|
||||
|
||||
raw = ['>', ['and', ['or', ['not', ['<', ['>=', ['<=', ['in', ]]]]]]]]
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
self.assertFalse(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
raw = ['>', 1]
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
self.assertFalse(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
def test_json_filter_unknown_variable_ignored(self):
|
||||
filt_cls = self.class_map['JsonFilter']()
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'capabilities': {'enabled': True}})
|
||||
|
||||
raw = ['=', '$........', 1, 1]
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
self.assertTrue(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
raw = ['=', '$foo', 2, 2]
|
||||
filter_properties = {
|
||||
'scheduler_hints': {
|
||||
'query': jsonutils.dumps(raw),
|
||||
},
|
||||
}
|
||||
self.assertTrue(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
@staticmethod
|
||||
def _make_zone_request(zone, is_admin=False):
|
||||
ctxt = context.RequestContext('fake', 'fake', is_admin=is_admin)
|
||||
return {
|
||||
'context': ctxt,
|
||||
'request_spec': {
|
||||
'resource_properties': {
|
||||
'availability_zone': zone
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def test_availability_zone_filter_same(self):
|
||||
filt_cls = self.class_map['AvailabilityZoneFilter']()
|
||||
service = {'availability_zone': 'nova'}
|
||||
request = self._make_zone_request('nova')
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'service': service})
|
||||
self.assertTrue(filt_cls.host_passes(host, request))
|
||||
|
||||
def test_availability_zone_filter_different(self):
|
||||
filt_cls = self.class_map['AvailabilityZoneFilter']()
|
||||
service = {'availability_zone': 'nova'}
|
||||
request = self._make_zone_request('bad')
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'service': service})
|
||||
self.assertFalse(filt_cls.host_passes(host, request))
|
||||
|
||||
def test_availability_zone_filter_empty(self):
|
||||
filt_cls = self.class_map['AvailabilityZoneFilter']()
|
||||
service = {'availability_zone': 'nova'}
|
||||
request = {}
|
||||
host = fakes.FakeHostState('host1',
|
||||
{'service': service})
|
||||
self.assertTrue(filt_cls.host_passes(host, request))
|
||||
|
||||
def test_ignore_attempted_hosts_filter_disabled(self):
|
||||
# Test case where re-scheduling is disabled.
|
||||
filt_cls = self.class_map['IgnoreAttemptedHostsFilter']()
|
||||
host = fakes.FakeHostState('host1', {})
|
||||
filter_properties = {}
|
||||
self.assertTrue(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
def test_ignore_attempted_hosts_filter_pass(self):
|
||||
# Node not previously tried.
|
||||
filt_cls = self.class_map['IgnoreAttemptedHostsFilter']()
|
||||
host = fakes.FakeHostState('host1', {})
|
||||
attempted = dict(num_attempts=2, hosts=['host2'])
|
||||
filter_properties = dict(retry=attempted)
|
||||
self.assertTrue(filt_cls.host_passes(host, filter_properties))
|
||||
|
||||
def test_ignore_attempted_hosts_filter_fail(self):
|
||||
# Node was already tried.
|
||||
filt_cls = self.class_map['IgnoreAttemptedHostsFilter']()
|
||||
host = fakes.FakeHostState('host1', {})
|
||||
attempted = dict(num_attempts=2, hosts=['host1'])
|
||||
filter_properties = dict(retry=attempted)
|
||||
self.assertFalse(filt_cls.host_passes(host, filter_properties))
|
|
@ -1,65 +0,0 @@
|
|||
# Copyright 2011-2012 OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Tests For Scheduler weights.
|
||||
"""
|
||||
|
||||
from oslotest import base as test_base
|
||||
|
||||
from openstack.common.scheduler import base_weight
|
||||
from tests.unit import fakes
|
||||
|
||||
|
||||
class TestWeightHandler(test_base.BaseTestCase):
|
||||
def test_get_all_classes(self):
|
||||
namespace = "openstack.common.tests.fakes.weights"
|
||||
handler = base_weight.BaseWeightHandler(
|
||||
base_weight.BaseWeigher, namespace)
|
||||
classes = handler.get_all_classes()
|
||||
self.assertTrue(fakes.FakeWeigher1 in classes)
|
||||
self.assertTrue(fakes.FakeWeigher2 in classes)
|
||||
self.assertFalse(fakes.FakeClass in classes)
|
||||
|
||||
def test_no_multiplier(self):
|
||||
class FakeWeigher(base_weight.BaseWeigher):
|
||||
def _weigh_object(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
self.assertEqual(1.0,
|
||||
FakeWeigher().weight_multiplier())
|
||||
|
||||
def test_no_weight_object(self):
|
||||
class FakeWeigher(base_weight.BaseWeigher):
|
||||
def weight_multiplier(self, *args, **kwargs):
|
||||
pass
|
||||
self.assertRaises(TypeError,
|
||||
FakeWeigher)
|
||||
|
||||
def test_normalization(self):
|
||||
# weight_list, expected_result, minval, maxval
|
||||
map_ = (
|
||||
((), (), None, None),
|
||||
((0.0, 0.0), (0.0, 0.0), None, None),
|
||||
((1.0, 1.0), (0.0, 0.0), None, None),
|
||||
|
||||
((20.0, 50.0), (0.0, 1.0), None, None),
|
||||
((20.0, 50.0), (0.0, 0.375), None, 100.0),
|
||||
((20.0, 50.0), (0.4, 1.0), 0.0, None),
|
||||
((20.0, 50.0), (0.2, 0.5), 0.0, 100.0),
|
||||
)
|
||||
for seq, result, minval, maxval in map_:
|
||||
ret = base_weight.normalize(seq, minval=minval, maxval=maxval)
|
||||
self.assertEqual(result, tuple(ret))
|
|
@ -1,699 +0,0 @@
|
|||
# Copyright 2012 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import sys
|
||||
|
||||
import fixtures
|
||||
import mock
|
||||
from oslotest import base as test_base
|
||||
import six
|
||||
|
||||
from openstack.common import cliutils
|
||||
|
||||
|
||||
class ValidateArgsTest(test_base.BaseTestCase):
|
||||
|
||||
def test_lambda_no_args(self):
|
||||
cliutils.validate_args(lambda: None)
|
||||
|
||||
def _test_lambda_with_args(self, *args, **kwargs):
|
||||
cliutils.validate_args(lambda x, y: None, *args, **kwargs)
|
||||
|
||||
def test_lambda_positional_args(self):
|
||||
self._test_lambda_with_args(1, 2)
|
||||
|
||||
def test_lambda_kwargs(self):
|
||||
self._test_lambda_with_args(x=1, y=2)
|
||||
|
||||
def test_lambda_mixed_kwargs(self):
|
||||
self._test_lambda_with_args(1, y=2)
|
||||
|
||||
def test_lambda_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_lambda_with_args)
|
||||
|
||||
def test_lambda_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_lambda_with_args, 1)
|
||||
|
||||
def test_lambda_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_lambda_with_args, y=2)
|
||||
|
||||
def _test_lambda_with_default(self, *args, **kwargs):
|
||||
cliutils.validate_args(lambda x, y, z=3: None, *args, **kwargs)
|
||||
|
||||
def test_lambda_positional_args_with_default(self):
|
||||
self._test_lambda_with_default(1, 2)
|
||||
|
||||
def test_lambda_kwargs_with_default(self):
|
||||
self._test_lambda_with_default(x=1, y=2)
|
||||
|
||||
def test_lambda_mixed_kwargs_with_default(self):
|
||||
self._test_lambda_with_default(1, y=2)
|
||||
|
||||
def test_lambda_positional_args_all_with_default(self):
|
||||
self._test_lambda_with_default(1, 2, 3)
|
||||
|
||||
def test_lambda_kwargs_all_with_default(self):
|
||||
self._test_lambda_with_default(x=1, y=2, z=3)
|
||||
|
||||
def test_lambda_mixed_kwargs_all_with_default(self):
|
||||
self._test_lambda_with_default(1, y=2, z=3)
|
||||
|
||||
def test_lambda_with_default_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_lambda_with_default)
|
||||
|
||||
def test_lambda_with_default_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_lambda_with_default, 1)
|
||||
|
||||
def test_lambda_with_default_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_lambda_with_default, y=2)
|
||||
|
||||
def test_lambda_with_default_missing_args4(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_lambda_with_default, y=2, z=3)
|
||||
|
||||
def test_function_no_args(self):
|
||||
def func():
|
||||
pass
|
||||
cliutils.validate_args(func)
|
||||
|
||||
def _test_function_with_args(self, *args, **kwargs):
|
||||
def func(x, y):
|
||||
pass
|
||||
cliutils.validate_args(func, *args, **kwargs)
|
||||
|
||||
def test_function_positional_args(self):
|
||||
self._test_function_with_args(1, 2)
|
||||
|
||||
def test_function_kwargs(self):
|
||||
self._test_function_with_args(x=1, y=2)
|
||||
|
||||
def test_function_mixed_kwargs(self):
|
||||
self._test_function_with_args(1, y=2)
|
||||
|
||||
def test_function_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_function_with_args)
|
||||
|
||||
def test_function_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_function_with_args, 1)
|
||||
|
||||
def test_function_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_function_with_args, y=2)
|
||||
|
||||
def _test_function_with_default(self, *args, **kwargs):
|
||||
def func(x, y, z=3):
|
||||
pass
|
||||
cliutils.validate_args(func, *args, **kwargs)
|
||||
|
||||
def test_function_positional_args_with_default(self):
|
||||
self._test_function_with_default(1, 2)
|
||||
|
||||
def test_function_kwargs_with_default(self):
|
||||
self._test_function_with_default(x=1, y=2)
|
||||
|
||||
def test_function_mixed_kwargs_with_default(self):
|
||||
self._test_function_with_default(1, y=2)
|
||||
|
||||
def test_function_positional_args_all_with_default(self):
|
||||
self._test_function_with_default(1, 2, 3)
|
||||
|
||||
def test_function_kwargs_all_with_default(self):
|
||||
self._test_function_with_default(x=1, y=2, z=3)
|
||||
|
||||
def test_function_mixed_kwargs_all_with_default(self):
|
||||
self._test_function_with_default(1, y=2, z=3)
|
||||
|
||||
def test_function_with_default_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_function_with_default)
|
||||
|
||||
def test_function_with_default_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_function_with_default, 1)
|
||||
|
||||
def test_function_with_default_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_function_with_default, y=2)
|
||||
|
||||
def test_function_with_default_missing_args4(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_function_with_default, y=2, z=3)
|
||||
|
||||
def test_bound_method_no_args(self):
|
||||
class Foo(object):
|
||||
def bar(self):
|
||||
pass
|
||||
cliutils.validate_args(Foo().bar)
|
||||
|
||||
def _test_bound_method_with_args(self, *args, **kwargs):
|
||||
class Foo(object):
|
||||
def bar(self, x, y):
|
||||
pass
|
||||
cliutils.validate_args(Foo().bar, *args, **kwargs)
|
||||
|
||||
def test_bound_method_positional_args(self):
|
||||
self._test_bound_method_with_args(1, 2)
|
||||
|
||||
def test_bound_method_kwargs(self):
|
||||
self._test_bound_method_with_args(x=1, y=2)
|
||||
|
||||
def test_bound_method_mixed_kwargs(self):
|
||||
self._test_bound_method_with_args(1, y=2)
|
||||
|
||||
def test_bound_method_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_bound_method_with_args)
|
||||
|
||||
def test_bound_method_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_bound_method_with_args, 1)
|
||||
|
||||
def test_bound_method_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_bound_method_with_args, y=2)
|
||||
|
||||
def _test_bound_method_with_default(self, *args, **kwargs):
|
||||
class Foo(object):
|
||||
def bar(self, x, y, z=3):
|
||||
pass
|
||||
cliutils.validate_args(Foo().bar, *args, **kwargs)
|
||||
|
||||
def test_bound_method_positional_args_with_default(self):
|
||||
self._test_bound_method_with_default(1, 2)
|
||||
|
||||
def test_bound_method_kwargs_with_default(self):
|
||||
self._test_bound_method_with_default(x=1, y=2)
|
||||
|
||||
def test_bound_method_mixed_kwargs_with_default(self):
|
||||
self._test_bound_method_with_default(1, y=2)
|
||||
|
||||
def test_bound_method_positional_args_all_with_default(self):
|
||||
self._test_bound_method_with_default(1, 2, 3)
|
||||
|
||||
def test_bound_method_kwargs_all_with_default(self):
|
||||
self._test_bound_method_with_default(x=1, y=2, z=3)
|
||||
|
||||
def test_bound_method_mixed_kwargs_all_with_default(self):
|
||||
self._test_bound_method_with_default(1, y=2, z=3)
|
||||
|
||||
def test_bound_method_with_default_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_bound_method_with_default)
|
||||
|
||||
def test_bound_method_with_default_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_bound_method_with_default, 1)
|
||||
|
||||
def test_bound_method_with_default_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_bound_method_with_default, y=2)
|
||||
|
||||
def test_bound_method_with_default_missing_args4(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_bound_method_with_default, y=2, z=3)
|
||||
|
||||
def test_unbound_method_no_args(self):
|
||||
class Foo(object):
|
||||
def bar(self):
|
||||
pass
|
||||
cliutils.validate_args(Foo.bar, Foo())
|
||||
|
||||
def _test_unbound_method_with_args(self, *args, **kwargs):
|
||||
class Foo(object):
|
||||
def bar(self, x, y):
|
||||
pass
|
||||
cliutils.validate_args(Foo.bar, Foo(), *args, **kwargs)
|
||||
|
||||
def test_unbound_method_positional_args(self):
|
||||
self._test_unbound_method_with_args(1, 2)
|
||||
|
||||
def test_unbound_method_kwargs(self):
|
||||
self._test_unbound_method_with_args(x=1, y=2)
|
||||
|
||||
def test_unbound_method_mixed_kwargs(self):
|
||||
self._test_unbound_method_with_args(1, y=2)
|
||||
|
||||
def test_unbound_method_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_unbound_method_with_args)
|
||||
|
||||
def test_unbound_method_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_unbound_method_with_args, 1)
|
||||
|
||||
def test_unbound_method_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_unbound_method_with_args, y=2)
|
||||
|
||||
def _test_unbound_method_with_default(self, *args, **kwargs):
|
||||
class Foo(object):
|
||||
def bar(self, x, y, z=3):
|
||||
pass
|
||||
cliutils.validate_args(Foo.bar, Foo(), *args, **kwargs)
|
||||
|
||||
def test_unbound_method_positional_args_with_default(self):
|
||||
self._test_unbound_method_with_default(1, 2)
|
||||
|
||||
def test_unbound_method_kwargs_with_default(self):
|
||||
self._test_unbound_method_with_default(x=1, y=2)
|
||||
|
||||
def test_unbound_method_mixed_kwargs_with_default(self):
|
||||
self._test_unbound_method_with_default(1, y=2)
|
||||
|
||||
def test_unbound_method_with_default_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_unbound_method_with_default)
|
||||
|
||||
def test_unbound_method_with_default_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_unbound_method_with_default, 1)
|
||||
|
||||
def test_unbound_method_with_default_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_unbound_method_with_default, y=2)
|
||||
|
||||
def test_unbound_method_with_default_missing_args4(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_unbound_method_with_default, y=2, z=3)
|
||||
|
||||
def test_class_method_no_args(self):
|
||||
class Foo(object):
|
||||
@classmethod
|
||||
def bar(cls):
|
||||
pass
|
||||
cliutils.validate_args(Foo.bar)
|
||||
|
||||
def _test_class_method_with_args(self, *args, **kwargs):
|
||||
class Foo(object):
|
||||
@classmethod
|
||||
def bar(cls, x, y):
|
||||
pass
|
||||
cliutils.validate_args(Foo.bar, *args, **kwargs)
|
||||
|
||||
def test_class_method_positional_args(self):
|
||||
self._test_class_method_with_args(1, 2)
|
||||
|
||||
def test_class_method_kwargs(self):
|
||||
self._test_class_method_with_args(x=1, y=2)
|
||||
|
||||
def test_class_method_mixed_kwargs(self):
|
||||
self._test_class_method_with_args(1, y=2)
|
||||
|
||||
def test_class_method_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_class_method_with_args)
|
||||
|
||||
def test_class_method_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_class_method_with_args, 1)
|
||||
|
||||
def test_class_method_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_class_method_with_args, y=2)
|
||||
|
||||
def _test_class_method_with_default(self, *args, **kwargs):
|
||||
class Foo(object):
|
||||
@classmethod
|
||||
def bar(cls, x, y, z=3):
|
||||
pass
|
||||
cliutils.validate_args(Foo.bar, *args, **kwargs)
|
||||
|
||||
def test_class_method_positional_args_with_default(self):
|
||||
self._test_class_method_with_default(1, 2)
|
||||
|
||||
def test_class_method_kwargs_with_default(self):
|
||||
self._test_class_method_with_default(x=1, y=2)
|
||||
|
||||
def test_class_method_mixed_kwargs_with_default(self):
|
||||
self._test_class_method_with_default(1, y=2)
|
||||
|
||||
def test_class_method_with_default_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_class_method_with_default)
|
||||
|
||||
def test_class_method_with_default_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_class_method_with_default, 1)
|
||||
|
||||
def test_class_method_with_default_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_class_method_with_default, y=2)
|
||||
|
||||
def test_class_method_with_default_missing_args4(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_class_method_with_default, y=2, z=3)
|
||||
|
||||
def test_static_method_no_args(self):
|
||||
class Foo(object):
|
||||
@staticmethod
|
||||
def bar():
|
||||
pass
|
||||
cliutils.validate_args(Foo.bar)
|
||||
|
||||
def _test_static_method_with_args(self, *args, **kwargs):
|
||||
class Foo(object):
|
||||
@staticmethod
|
||||
def bar(x, y):
|
||||
pass
|
||||
cliutils.validate_args(Foo.bar, *args, **kwargs)
|
||||
|
||||
def test_static_method_positional_args(self):
|
||||
self._test_static_method_with_args(1, 2)
|
||||
|
||||
def test_static_method_kwargs(self):
|
||||
self._test_static_method_with_args(x=1, y=2)
|
||||
|
||||
def test_static_method_mixed_kwargs(self):
|
||||
self._test_static_method_with_args(1, y=2)
|
||||
|
||||
def test_static_method_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_static_method_with_args)
|
||||
|
||||
def test_static_method_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_static_method_with_args, 1)
|
||||
|
||||
def test_static_method_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_static_method_with_args, y=2)
|
||||
|
||||
def _test_static_method_with_default(self, *args, **kwargs):
|
||||
class Foo(object):
|
||||
@staticmethod
|
||||
def bar(x, y, z=3):
|
||||
pass
|
||||
cliutils.validate_args(Foo.bar, *args, **kwargs)
|
||||
|
||||
def test_static_method_positional_args_with_default(self):
|
||||
self._test_static_method_with_default(1, 2)
|
||||
|
||||
def test_static_method_kwargs_with_default(self):
|
||||
self._test_static_method_with_default(x=1, y=2)
|
||||
|
||||
def test_static_method_mixed_kwargs_with_default(self):
|
||||
self._test_static_method_with_default(1, y=2)
|
||||
|
||||
def test_static_method_with_default_missing_args1(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_static_method_with_default)
|
||||
|
||||
def test_static_method_with_default_missing_args2(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_static_method_with_default, 1)
|
||||
|
||||
def test_static_method_with_default_missing_args3(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_static_method_with_default, y=2)
|
||||
|
||||
def test_static_method_with_default_missing_args4(self):
|
||||
self.assertRaises(cliutils.MissingArgs,
|
||||
self._test_static_method_with_default, y=2, z=3)
|
||||
|
||||
|
||||
class _FakeResult(object):
|
||||
def __init__(self, name, value):
|
||||
self.name = name
|
||||
self.value = value
|
||||
|
||||
|
||||
class PrintResultTestCase(test_base.BaseTestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(PrintResultTestCase, self).setUp()
|
||||
self.mock_add_row = mock.MagicMock()
|
||||
self.useFixture(fixtures.MonkeyPatch(
|
||||
"prettytable.PrettyTable.add_row",
|
||||
self.mock_add_row))
|
||||
self.mock_get_string = mock.MagicMock(return_value="")
|
||||
self.useFixture(fixtures.MonkeyPatch(
|
||||
"prettytable.PrettyTable.get_string",
|
||||
self.mock_get_string))
|
||||
self.mock_init = mock.MagicMock(return_value=None)
|
||||
self.useFixture(fixtures.MonkeyPatch(
|
||||
"prettytable.PrettyTable.__init__",
|
||||
self.mock_init))
|
||||
# NOTE(dtantsur): won't work with mocked __init__
|
||||
self.useFixture(fixtures.MonkeyPatch(
|
||||
"prettytable.PrettyTable.align",
|
||||
mock.MagicMock()))
|
||||
|
||||
def test_print_list_sort_by_str(self):
|
||||
objs = [_FakeResult("k1", 1),
|
||||
_FakeResult("k3", 2),
|
||||
_FakeResult("k2", 3)]
|
||||
|
||||
cliutils.print_list(objs, ["Name", "Value"], sortby_index=0)
|
||||
|
||||
self.assertEqual(self.mock_add_row.call_args_list,
|
||||
[mock.call(["k1", 1]),
|
||||
mock.call(["k3", 2]),
|
||||
mock.call(["k2", 3])])
|
||||
self.mock_get_string.assert_called_with(sortby="Name")
|
||||
self.mock_init.assert_called_once_with(["Name", "Value"])
|
||||
|
||||
def test_print_list_sort_by_integer(self):
|
||||
objs = [_FakeResult("k1", 1),
|
||||
_FakeResult("k2", 3),
|
||||
_FakeResult("k3", 2)]
|
||||
|
||||
cliutils.print_list(objs, ["Name", "Value"], sortby_index=1)
|
||||
|
||||
self.assertEqual(self.mock_add_row.call_args_list,
|
||||
[mock.call(["k1", 1]),
|
||||
mock.call(["k2", 3]),
|
||||
mock.call(["k3", 2])])
|
||||
self.mock_get_string.assert_called_with(sortby="Value")
|
||||
self.mock_init.assert_called_once_with(["Name", "Value"])
|
||||
|
||||
def test_print_list_sort_by_none(self):
|
||||
objs = [_FakeResult("k1", 1),
|
||||
_FakeResult("k3", 3),
|
||||
_FakeResult("k2", 2)]
|
||||
|
||||
cliutils.print_list(objs, ["Name", "Value"], sortby_index=None)
|
||||
|
||||
self.assertEqual(self.mock_add_row.call_args_list,
|
||||
[mock.call(["k1", 1]),
|
||||
mock.call(["k3", 3]),
|
||||
mock.call(["k2", 2])])
|
||||
self.mock_get_string.assert_called_with()
|
||||
self.mock_init.assert_called_once_with(["Name", "Value"])
|
||||
|
||||
def test_print_dict(self):
|
||||
cliutils.print_dict({"K": "k", "Key": "Value"})
|
||||
cliutils.print_dict({"K": "k", "Key": "Long\\nValue"})
|
||||
self.mock_add_row.assert_has_calls([
|
||||
mock.call(["K", "k"]),
|
||||
mock.call(["Key", "Value"]),
|
||||
mock.call(["K", "k"]),
|
||||
mock.call(["Key", "Long"]),
|
||||
mock.call(["", "Value"])],
|
||||
any_order=True)
|
||||
|
||||
def test_print_list_field_labels(self):
|
||||
objs = [_FakeResult("k1", 1),
|
||||
_FakeResult("k3", 3),
|
||||
_FakeResult("k2", 2)]
|
||||
field_labels = ["Another Name", "Another Value"]
|
||||
|
||||
cliutils.print_list(objs, ["Name", "Value"], sortby_index=None,
|
||||
field_labels=field_labels)
|
||||
|
||||
self.assertEqual(self.mock_add_row.call_args_list,
|
||||
[mock.call(["k1", 1]),
|
||||
mock.call(["k3", 3]),
|
||||
mock.call(["k2", 2])])
|
||||
self.mock_init.assert_called_once_with(field_labels)
|
||||
|
||||
def test_print_list_field_labels_sort(self):
|
||||
objs = [_FakeResult("k1", 1),
|
||||
_FakeResult("k3", 3),
|
||||
_FakeResult("k2", 2)]
|
||||
field_labels = ["Another Name", "Another Value"]
|
||||
|
||||
cliutils.print_list(objs, ["Name", "Value"], sortby_index=0,
|
||||
field_labels=field_labels)
|
||||
|
||||
self.assertEqual(self.mock_add_row.call_args_list,
|
||||
[mock.call(["k1", 1]),
|
||||
mock.call(["k3", 3]),
|
||||
mock.call(["k2", 2])])
|
||||
self.mock_init.assert_called_once_with(field_labels)
|
||||
self.mock_get_string.assert_called_with(sortby="Another Name")
|
||||
|
||||
def test_print_list_field_labels_too_many(self):
|
||||
objs = [_FakeResult("k1", 1),
|
||||
_FakeResult("k3", 3),
|
||||
_FakeResult("k2", 2)]
|
||||
field_labels = ["Another Name", "Another Value", "Redundant"]
|
||||
|
||||
self.assertRaises(ValueError, cliutils.print_list,
|
||||
objs, ["Name", "Value"], sortby_index=None,
|
||||
field_labels=field_labels)
|
||||
|
||||
|
||||
class PrintResultStringTestCase(test_base.BaseTestCase):
|
||||
|
||||
def test_print_list_string(self):
|
||||
objs = [_FakeResult("k1", 1)]
|
||||
field_labels = ["Another Name", "Another Value"]
|
||||
|
||||
orig = sys.stdout
|
||||
sys.stdout = six.StringIO()
|
||||
cliutils.print_list(objs, ["Name", "Value"], sortby_index=0,
|
||||
field_labels=field_labels)
|
||||
out = sys.stdout.getvalue()
|
||||
sys.stdout.close()
|
||||
sys.stdout = orig
|
||||
expected = '''\
|
||||
+--------------+---------------+
|
||||
| Another Name | Another Value |
|
||||
+--------------+---------------+
|
||||
| k1 | 1 |
|
||||
+--------------+---------------+
|
||||
'''
|
||||
self.assertEqual(expected, out)
|
||||
|
||||
def test_print_dict_string(self):
|
||||
orig = sys.stdout
|
||||
sys.stdout = six.StringIO()
|
||||
cliutils.print_dict({"K": "k", "Key": "Value"})
|
||||
out = sys.stdout.getvalue()
|
||||
sys.stdout.close()
|
||||
sys.stdout = orig
|
||||
expected = '''\
|
||||
+----------+-------+
|
||||
| Property | Value |
|
||||
+----------+-------+
|
||||
| K | k |
|
||||
| Key | Value |
|
||||
+----------+-------+
|
||||
'''
|
||||
self.assertEqual(expected, out)
|
||||
|
||||
def test_print_dict_string_custom_headers(self):
|
||||
orig = sys.stdout
|
||||
sys.stdout = six.StringIO()
|
||||
cliutils.print_dict({"K": "k", "Key": "Value"}, dict_property='Foo',
|
||||
dict_value='Bar')
|
||||
out = sys.stdout.getvalue()
|
||||
sys.stdout.close()
|
||||
sys.stdout = orig
|
||||
expected = '''\
|
||||
+-----+-------+
|
||||
| Foo | Bar |
|
||||
+-----+-------+
|
||||
| K | k |
|
||||
| Key | Value |
|
||||
+-----+-------+
|
||||
'''
|
||||
self.assertEqual(expected, out)
|
||||
|
||||
def test_print_dict_string_sorted(self):
|
||||
orig = sys.stdout
|
||||
sys.stdout = six.StringIO()
|
||||
cliutils.print_dict({"Foo": "k", "Bar": "Value"})
|
||||
out = sys.stdout.getvalue()
|
||||
sys.stdout.close()
|
||||
sys.stdout = orig
|
||||
expected = '''\
|
||||
+----------+-------+
|
||||
| Property | Value |
|
||||
+----------+-------+
|
||||
| Bar | Value |
|
||||
| Foo | k |
|
||||
+----------+-------+
|
||||
'''
|
||||
self.assertEqual(expected, out)
|
||||
|
||||
|
||||
class DecoratorsTestCase(test_base.BaseTestCase):
|
||||
|
||||
def test_arg(self):
|
||||
func_args = [("--image", ), ("--flavor", )]
|
||||
func_kwargs = [dict(default=None,
|
||||
metavar="<image>"),
|
||||
dict(default=None,
|
||||
metavar="<flavor>")]
|
||||
|
||||
@cliutils.arg(*func_args[1], **func_kwargs[1])
|
||||
@cliutils.arg(*func_args[0], **func_kwargs[0])
|
||||
def dummy_func():
|
||||
pass
|
||||
|
||||
self.assertTrue(hasattr(dummy_func, "arguments"))
|
||||
self.assertEqual(len(dummy_func.arguments), 2)
|
||||
for args_kwargs in zip(func_args, func_kwargs):
|
||||
self.assertIn(args_kwargs, dummy_func.arguments)
|
||||
|
||||
def test_unauthenticated(self):
|
||||
def dummy_func():
|
||||
pass
|
||||
|
||||
self.assertFalse(cliutils.isunauthenticated(dummy_func))
|
||||
dummy_func = cliutils.unauthenticated(dummy_func)
|
||||
self.assertTrue(cliutils.isunauthenticated(dummy_func))
|
||||
|
||||
|
||||
class EnvTestCase(test_base.BaseTestCase):
|
||||
|
||||
def test_env(self):
|
||||
env = {"alpha": "a", "beta": "b"}
|
||||
self.useFixture(fixtures.MonkeyPatch("os.environ", env))
|
||||
self.assertEqual(cliutils.env("beta"), env["beta"])
|
||||
self.assertEqual(cliutils.env("beta", "alpha"), env["beta"])
|
||||
self.assertEqual(cliutils.env("alpha", "beta"), env["alpha"])
|
||||
self.assertEqual(cliutils.env("gamma", "beta"), env["beta"])
|
||||
self.assertEqual(cliutils.env("gamma"), "")
|
||||
self.assertEqual(cliutils.env("gamma", default="c"), "c")
|
||||
|
||||
|
||||
class GetPasswordTestCase(test_base.BaseTestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(GetPasswordTestCase, self).setUp()
|
||||
|
||||
class FakeFile(object):
|
||||
def isatty(self):
|
||||
return True
|
||||
|
||||
self.useFixture(fixtures.MonkeyPatch("sys.stdin", FakeFile()))
|
||||
|
||||
def test_get_password(self):
|
||||
self.useFixture(fixtures.MonkeyPatch("getpass.getpass",
|
||||
lambda prompt: "mellon"))
|
||||
self.assertEqual(cliutils.get_password(), "mellon")
|
||||
|
||||
def test_get_password_verify(self):
|
||||
env = {"OS_VERIFY_PASSWORD": "True"}
|
||||
self.useFixture(fixtures.MonkeyPatch("os.environ", env))
|
||||
self.useFixture(fixtures.MonkeyPatch("getpass.getpass",
|
||||
lambda prompt: "mellon"))
|
||||
self.assertEqual(cliutils.get_password(), "mellon")
|
||||
|
||||
def test_get_password_verify_failure(self):
|
||||
env = {"OS_VERIFY_PASSWORD": "True"}
|
||||
self.useFixture(fixtures.MonkeyPatch("os.environ", env))
|
||||
self.useFixture(fixtures.MonkeyPatch("getpass.getpass",
|
||||
lambda prompt: prompt))
|
||||
self.assertIsNone(cliutils.get_password())
|
|
@ -1,199 +0,0 @@
|
|||
# Copyright (C) 2012 Yahoo! Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslotest import base as test_base
|
||||
import testscenarios
|
||||
|
||||
from openstack.common import imageutils
|
||||
|
||||
load_tests = testscenarios.load_tests_apply_scenarios
|
||||
|
||||
|
||||
class ImageUtilsRawTestCase(test_base.BaseTestCase):
|
||||
|
||||
_image_name = [
|
||||
('disk_config', dict(image_name='disk.config')),
|
||||
]
|
||||
|
||||
_file_format = [
|
||||
('raw', dict(file_format='raw')),
|
||||
]
|
||||
|
||||
_virtual_size = [
|
||||
('64M', dict(virtual_size='64M',
|
||||
exp_virtual_size=67108864)),
|
||||
('64M_with_byte_hint', dict(virtual_size='64M (67108844 bytes)',
|
||||
exp_virtual_size=67108844)),
|
||||
('64M_byte', dict(virtual_size='67108844',
|
||||
exp_virtual_size=67108844)),
|
||||
('4.4M', dict(virtual_size='4.4M',
|
||||
exp_virtual_size=4613735)),
|
||||
('4.4M_with_byte_hint', dict(virtual_size='4.4M (4592640 bytes)',
|
||||
exp_virtual_size=4592640)),
|
||||
('2K', dict(virtual_size='2K',
|
||||
exp_virtual_size=2048)),
|
||||
('2K_with_byte_hint', dict(virtual_size='2K (2048 bytes)',
|
||||
exp_virtual_size=2048)),
|
||||
]
|
||||
|
||||
_disk_size = [
|
||||
('96K', dict(disk_size='96K',
|
||||
exp_disk_size=98304)),
|
||||
('96K_byte', dict(disk_size='963434',
|
||||
exp_disk_size=963434)),
|
||||
('3.1M', dict(disk_size='3.1G',
|
||||
exp_disk_size=3328599655)),
|
||||
]
|
||||
|
||||
_garbage_before_snapshot = [
|
||||
('no_garbage', dict(garbage_before_snapshot=None)),
|
||||
('garbage_before_snapshot_list', dict(garbage_before_snapshot=False)),
|
||||
('garbage_after_snapshot_list', dict(garbage_before_snapshot=True)),
|
||||
]
|
||||
|
||||
_snapshot_count = [
|
||||
('no_snapshots', dict(snapshot_count=None)),
|
||||
('one_snapshots', dict(snapshot_count=1)),
|
||||
('three_snapshots', dict(snapshot_count=3)),
|
||||
]
|
||||
|
||||
@classmethod
|
||||
def generate_scenarios(cls):
|
||||
cls.scenarios = testscenarios.multiply_scenarios(
|
||||
cls._image_name,
|
||||
cls._file_format,
|
||||
cls._virtual_size,
|
||||
cls._disk_size,
|
||||
cls._garbage_before_snapshot,
|
||||
cls._snapshot_count)
|
||||
|
||||
def _initialize_img_info(self):
|
||||
return ('image: %s' % self.image_name,
|
||||
'file_format: %s' % self.file_format,
|
||||
'virtual_size: %s' % self.virtual_size,
|
||||
'disk_size: %s' % self.disk_size)
|
||||
|
||||
def _insert_snapshots(self, img_info):
|
||||
img_info = img_info + ('Snapshot list:',)
|
||||
img_info = img_info + ('ID '
|
||||
'TAG '
|
||||
'VM SIZE '
|
||||
'DATE '
|
||||
'VM CLOCK',)
|
||||
for i in range(self.snapshot_count):
|
||||
img_info = img_info + ('%d '
|
||||
'd9a9784a500742a7bb95627bb3aace38 '
|
||||
'0 2012-08-20 10:52:46 '
|
||||
'00:00:00.000' % (i + 1),)
|
||||
return img_info
|
||||
|
||||
def _base_validation(self, image_info):
|
||||
self.assertEqual(image_info.image, self.image_name)
|
||||
self.assertEqual(image_info.file_format, self.file_format)
|
||||
self.assertEqual(image_info.virtual_size, self.exp_virtual_size)
|
||||
self.assertEqual(image_info.disk_size, self.exp_disk_size)
|
||||
if self.snapshot_count is not None:
|
||||
self.assertEqual(len(image_info.snapshots), self.snapshot_count)
|
||||
|
||||
def test_qemu_img_info(self):
|
||||
img_info = self._initialize_img_info()
|
||||
if self.garbage_before_snapshot is True:
|
||||
img_info = img_info + ('blah BLAH: bb',)
|
||||
if self.snapshot_count is not None:
|
||||
img_info = self._insert_snapshots(img_info)
|
||||
if self.garbage_before_snapshot is False:
|
||||
img_info = img_info + ('junk stuff: bbb',)
|
||||
example_output = '\n'.join(img_info)
|
||||
image_info = imageutils.QemuImgInfo(example_output)
|
||||
self._base_validation(image_info)
|
||||
|
||||
ImageUtilsRawTestCase.generate_scenarios()
|
||||
|
||||
|
||||
class ImageUtilsQemuTestCase(ImageUtilsRawTestCase):
|
||||
|
||||
_file_format = [
|
||||
('qcow2', dict(file_format='qcow2')),
|
||||
]
|
||||
|
||||
_qcow2_cluster_size = [
|
||||
('65536', dict(cluster_size='65536', exp_cluster_size=65536)),
|
||||
]
|
||||
|
||||
_qcow2_encrypted = [
|
||||
('no_encryption', dict(encrypted=None)),
|
||||
('encrypted', dict(encrypted='yes')),
|
||||
]
|
||||
|
||||
_qcow2_backing_file = [
|
||||
('no_backing_file', dict(backing_file=None)),
|
||||
('backing_file_path',
|
||||
dict(backing_file='/var/lib/nova/a328c7998805951a_2',
|
||||
exp_backing_file='/var/lib/nova/a328c7998805951a_2')),
|
||||
('backing_file_path_with_actual_path',
|
||||
dict(backing_file='/var/lib/nova/a328c7998805951a_2 '
|
||||
'(actual path: /b/3a988059e51a_2)',
|
||||
exp_backing_file='/b/3a988059e51a_2')),
|
||||
]
|
||||
|
||||
@classmethod
|
||||
def generate_scenarios(cls):
|
||||
cls.scenarios = testscenarios.multiply_scenarios(
|
||||
cls._image_name,
|
||||
cls._file_format,
|
||||
cls._virtual_size,
|
||||
cls._disk_size,
|
||||
cls._garbage_before_snapshot,
|
||||
cls._snapshot_count,
|
||||
cls._qcow2_cluster_size,
|
||||
cls._qcow2_encrypted,
|
||||
cls._qcow2_backing_file)
|
||||
|
||||
def test_qemu_img_info(self):
|
||||
img_info = self._initialize_img_info()
|
||||
img_info = img_info + ('cluster_size: %s' % self.cluster_size,)
|
||||
if self.backing_file is not None:
|
||||
img_info = img_info + ('backing file: %s' %
|
||||
self.backing_file,)
|
||||
if self.encrypted is not None:
|
||||
img_info = img_info + ('encrypted: %s' % self.encrypted,)
|
||||
if self.garbage_before_snapshot is True:
|
||||
img_info = img_info + ('blah BLAH: bb',)
|
||||
if self.snapshot_count is not None:
|
||||
img_info = self._insert_snapshots(img_info)
|
||||
if self.garbage_before_snapshot is False:
|
||||
img_info = img_info + ('junk stuff: bbb',)
|
||||
example_output = '\n'.join(img_info)
|
||||
image_info = imageutils.QemuImgInfo(example_output)
|
||||
self._base_validation(image_info)
|
||||
self.assertEqual(image_info.cluster_size, self.exp_cluster_size)
|
||||
if self.backing_file is not None:
|
||||
self.assertEqual(image_info.backing_file,
|
||||
self.exp_backing_file)
|
||||
if self.encrypted is not None:
|
||||
self.assertEqual(image_info.encrypted, self.encrypted)
|
||||
|
||||
ImageUtilsQemuTestCase.generate_scenarios()
|
||||
|
||||
|
||||
class ImageUtilsBlankTestCase(test_base.BaseTestCase):
|
||||
def test_qemu_img_info_blank(self):
|
||||
example_output = '\n'.join(['image: None', 'file_format: None',
|
||||
'virtual_size: None', 'disk_size: None',
|
||||
'cluster_size: None',
|
||||
'backing_file: None'])
|
||||
image_info = imageutils.QemuImgInfo()
|
||||
self.assertEqual(str(image_info), example_output)
|
||||
self.assertEqual(len(image_info.snapshots), 0)
|
|
@ -1,55 +0,0 @@
|
|||
# Copyright 2013 Nebula, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import time
|
||||
|
||||
import mock
|
||||
from oslotest import base as test_base
|
||||
|
||||
from openstack.common import memorycache
|
||||
|
||||
|
||||
class MemorycacheTest(test_base.BaseTestCase):
|
||||
def setUp(self):
|
||||
self.client = memorycache.get_client()
|
||||
super(MemorycacheTest, self).setUp()
|
||||
|
||||
def test_set_get(self):
|
||||
self.client.set('foo', 'bar')
|
||||
self.assertEqual(self.client.get('foo'), 'bar')
|
||||
|
||||
def test_add_get(self):
|
||||
self.client.add('foo', 'bar')
|
||||
self.assertEqual(self.client.get('foo'), 'bar')
|
||||
|
||||
def test_set_add_get(self):
|
||||
self.client.set('foo', 'bar')
|
||||
self.client.add('foo', 'baz')
|
||||
self.assertEqual(self.client.get('foo'), 'bar')
|
||||
|
||||
def test_set_delete(self):
|
||||
self.client.set('foo', 'bar')
|
||||
self.client.delete('foo')
|
||||
self.assertIsNone(self.client.get('foo'))
|
||||
|
||||
def test_timeout(self):
|
||||
now = time.time()
|
||||
with mock.patch('time.time') as time_mock:
|
||||
time_mock.return_value = now
|
||||
self.client.set('foo', 'bar', time=3)
|
||||
time_mock.return_value = now + 1
|
||||
self.assertEqual(self.client.get('foo'), 'bar')
|
||||
time_mock.return_value = now + 3
|
||||
self.assertIsNone(self.client.get('foo'))
|
|
@ -1,35 +0,0 @@
|
|||
-----BEGIN CERTIFICATE-----
|
||||
MIIGDDCCA/SgAwIBAgIJAPSvwQYk4qI4MA0GCSqGSIb3DQEBBQUAMGExCzAJBgNV
|
||||
BAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMRUwEwYDVQQKEwxPcGVuc3RhY2sg
|
||||
Q0ExEjAQBgNVBAsTCUdsYW5jZSBDQTESMBAGA1UEAxMJR2xhbmNlIENBMB4XDTEy
|
||||
MDIwOTE3MTAwMloXDTIyMDIwNjE3MTAwMlowYTELMAkGA1UEBhMCQVUxEzARBgNV
|
||||
BAgTClNvbWUtU3RhdGUxFTATBgNVBAoTDE9wZW5zdGFjayBDQTESMBAGA1UECxMJ
|
||||
R2xhbmNlIENBMRIwEAYDVQQDEwlHbGFuY2UgQ0EwggIiMA0GCSqGSIb3DQEBAQUA
|
||||
A4ICDwAwggIKAoICAQDmf+fapWfzy1Uylus0KGalw4X/5xZ+ltPVOr+IdCPbstvi
|
||||
RTC5g+O+TvXeOP32V/cnSY4ho/+f2q730za+ZA/cgWO252rcm3Q7KTJn3PoqzJvX
|
||||
/l3EXe3/TCrbzgZ7lW3QLTCTEE2eEzwYG3wfDTOyoBq+F6ct6ADh+86gmpbIRfYI
|
||||
N+ixB0hVyz9427PTof97fL7qxxkjAayB28OfwHrkEBl7iblNhUC0RoH+/H9r5GEl
|
||||
GnWiebxfNrONEHug6PHgiaGq7/Dj+u9bwr7J3/NoS84I08ajMnhlPZxZ8bS/O8If
|
||||
ceWGZv7clPozyhABT/otDfgVcNH1UdZ4zLlQwc1MuPYN7CwxrElxc8Quf94ttGjb
|
||||
tfGTl4RTXkDofYdG1qBWW962PsGl2tWmbYDXV0q5JhV/IwbrE1X9f+OksJQne1/+
|
||||
dZDxMhdf2Q1V0P9hZZICu4+YhmTMs5Mc9myKVnzp4NYdX5fXoB/uNYph+G7xG5IK
|
||||
WLSODKhr1wFGTTcuaa8LhOH5UREVenGDJuc6DdgX9a9PzyJGIi2ngQ03TJIkCiU/
|
||||
4J/r/vsm81ezDiYZSp2j5JbME+ixW0GBLTUWpOIxUSHgUFwH5f7lQwbXWBOgwXQk
|
||||
BwpZTmdQx09MfalhBtWeu4/6BnOCOj7e/4+4J0eVxXST0AmVyv8YjJ2nz1F9oQID
|
||||
AQABo4HGMIHDMB0GA1UdDgQWBBTk7Krj4bEsTjHXaWEtI2GZ5ACQyTCBkwYDVR0j
|
||||
BIGLMIGIgBTk7Krj4bEsTjHXaWEtI2GZ5ACQyaFlpGMwYTELMAkGA1UEBhMCQVUx
|
||||
EzARBgNVBAgTClNvbWUtU3RhdGUxFTATBgNVBAoTDE9wZW5zdGFjayBDQTESMBAG
|
||||
A1UECxMJR2xhbmNlIENBMRIwEAYDVQQDEwlHbGFuY2UgQ0GCCQD0r8EGJOKiODAM
|
||||
BgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4ICAQA8Zrss/MiwFHGmDlercE0h
|
||||
UvzA54n/EvKP9nP3jHM2qW/VPfKdnFw99nEPFLhb+lN553vdjOpCYFm+sW0Z5Mi4
|
||||
qsFkk4AmXIIEFOPt6zKxMioLYDQ9Sw/BUv6EZGeANWr/bhmaE+dMcKJt5le/0jJm
|
||||
2ahsVB9fbFu9jBFeYb7Ba/x2aLkEGMxaDLla+6EQhj148fTnS1wjmX9G2cNzJvj/
|
||||
+C2EfKJIuDJDqw2oS2FGVpP37FA2Bz2vga0QatNneLkGKCFI3ZTenBznoN+fmurX
|
||||
TL3eJE4IFNrANCcdfMpdyLAtXz4KpjcehqpZMu70er3d30zbi1l0Ajz4dU+WKz/a
|
||||
NQES+vMkT2wqjXHVTjrNwodxw3oLK/EuTgwoxIHJuplx5E5Wrdx9g7Gl1PBIJL8V
|
||||
xiOYS5N7CakyALvdhP7cPubA2+TPAjNInxiAcmhdASS/Vrmpvrkat6XhGn8h9liv
|
||||
ysDOpMQmYQkmgZBpW8yBKK7JABGGsJADJ3E6J5MMWBX2RR4kFoqVGAzdOU3oyaTy
|
||||
I0kz5sfuahaWpdYJVlkO+esc0CRXw8fLDYivabK2tOgUEWeZsZGZ9uK6aV1VxTAY
|
||||
9Guu3BJ4Rv/KP/hk7mP8rIeCwotV66/2H8nq72ImQhzSVyWcxbFf2rJiFQJ3BFwA
|
||||
WoRMgEwjGJWqzhJZUYpUAQ==
|
||||
-----END CERTIFICATE-----
|
|
@ -1,30 +0,0 @@
|
|||
-----BEGIN CERTIFICATE-----
|
||||
MIIFLjCCAxYCAQEwDQYJKoZIhvcNAQEFBQAwYTELMAkGA1UEBhMCQVUxEzARBgNV
|
||||
BAgTClNvbWUtU3RhdGUxFTATBgNVBAoTDE9wZW5zdGFjayBDQTESMBAGA1UECxMJ
|
||||
R2xhbmNlIENBMRIwEAYDVQQDEwlHbGFuY2UgQ0EwHhcNMTIwMjA5MTcxMDUzWhcN
|
||||
MjIwMjA2MTcxMDUzWjBZMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0
|
||||
ZTESMBAGA1UEChMJT3BlbnN0YWNrMQ8wDQYDVQQLEwZHbGFuY2UxEDAOBgNVBAMT
|
||||
BzAuMC4wLjAwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXpUkQN6pu
|
||||
avo+gz3o1K4krVdPl1m7NjNJDyD/+ZH0EGNcEN7iag1qPE7JsjqGPNZsQK1dMoXb
|
||||
Sz+OSi9qvNeJnBcfwUx5qTAtwyAb9AxGkwuMafIU+lWbsclo+dPGsja01ywbXTCZ
|
||||
bF32iqnpOMYhfxWUdoQYiBkhxxhW9eMPKLS/KkP8/bx+Vaa2XJiAebqkd9nrksAA
|
||||
BeGc9mlafYBEmiChPdJEPw+1ePA4QVq9aPepDsqAKtGN8JLpmoC3BdxQQTbbwL3Q
|
||||
8fTXK4tCNUaVk4AbDy/McFq6y0ocQoBPJjihOY35mWG/OLtcI99yPOpWGnps/5aG
|
||||
/64DDJ2D67Fnaj6gKHV+6TXFO8KZxlnxtgtiZDJBZkneTBt9ArSOv+l6NBsumRz0
|
||||
iEJ4o4H1S2TSMnprAvX7WnGtc6Xi9gXahYcDHEelwwYzqAiTBv6hxSp4MZ2dNXa+
|
||||
KzOitC7ZbV2qsg0au0wjfE/oSQ3NvsvUr8nOmfutJTvHRAwbC1v4G/tuAsO7O0w2
|
||||
0u2B3u+pG06m5+rnEqp+rB9hmukRYTfgEFRRsVIvpFl/cwvPXKRcX03UIMx+lLr9
|
||||
Ft+ep7YooBhY3wY2kwCxD4lRYNmbwsCIVywZt40f/4ad98TkufR9NhsfycxGeqbr
|
||||
mTMFlZ8TTlmP82iohekKCOvoyEuTIWL2+wIDAQABMA0GCSqGSIb3DQEBBQUAA4IC
|
||||
AQBMUBgV0R+Qltf4Du7u/8IFmGAoKR/mktB7R1gRRAqsvecUt7kIwBexGdavGg1y
|
||||
0pU0+lgUZjJ20N1SlPD8gkNHfXE1fL6fmMjWz4dtYJjzRVhpufHPeBW4tl8DgHPN
|
||||
rBGAYQ+drDSXaEjiPQifuzKx8WS+DGA3ki4co5mPjVnVH1xvLIdFsk89z3b3YD1k
|
||||
yCJ/a9K36x6Z/c67JK7s6MWtrdRF9+MVnRKJ2PK4xznd1kBz16V+RA466wBDdARY
|
||||
vFbtkafbEqOb96QTonIZB7+fAldKDPZYnwPqasreLmaGOaM8sxtlPYAJ5bjDONbc
|
||||
AaXG8BMRQyO4FyH237otDKlxPyHOFV66BaffF5S8OlwIMiZoIvq+IcTZOdtDUSW2
|
||||
KHNLfe5QEDZdKjWCBrfqAfvNuG13m03WqfmcMHl3o/KiPJlx8l9Z4QEzZ9xcyQGL
|
||||
cncgeHM9wJtzi2cD/rTDNFsx/gxvoyutRmno7I3NRbKmpsXF4StZioU3USRspB07
|
||||
hYXOVnG3pS+PjVby7ThT3gvFHSocguOsxClx1epdUJAmJUbmM7NmOp5WVBVtMtC2
|
||||
Su4NG/xJciXitKzw+btb7C7RjO6OEqv/1X/oBDzKBWQAwxUC+lqmnM7W6oqWJFEM
|
||||
YfTLnrjs7Hj6ThMGcEnfvc46dWK3dz0RjsQzUxugPuEkLA==
|
||||
-----END CERTIFICATE-----
|
|
@ -1,3 +0,0 @@
|
|||
{
|
||||
"default": "role:fakeC"
|
||||
}
|
|
@ -1,3 +0,0 @@
|
|||
{
|
||||
"default": "role:fakeA"
|
||||
}
|
|
@ -1,3 +0,0 @@
|
|||
{
|
||||
"default": "role:fakeB"
|
||||
}
|
|
@ -1 +0,0 @@
|
|||
fake
|
|
@ -1,4 +0,0 @@
|
|||
{
|
||||
"default": "rule:admin",
|
||||
"admin": "is_admin:True"
|
||||
}
|
|
@ -1,51 +0,0 @@
|
|||
-----BEGIN RSA PRIVATE KEY-----
|
||||
MIIJKAIBAAKCAgEA16VJEDeqbmr6PoM96NSuJK1XT5dZuzYzSQ8g//mR9BBjXBDe
|
||||
4moNajxOybI6hjzWbECtXTKF20s/jkovarzXiZwXH8FMeakwLcMgG/QMRpMLjGny
|
||||
FPpVm7HJaPnTxrI2tNcsG10wmWxd9oqp6TjGIX8VlHaEGIgZIccYVvXjDyi0vypD
|
||||
/P28flWmtlyYgHm6pHfZ65LAAAXhnPZpWn2ARJogoT3SRD8PtXjwOEFavWj3qQ7K
|
||||
gCrRjfCS6ZqAtwXcUEE228C90PH01yuLQjVGlZOAGw8vzHBaustKHEKATyY4oTmN
|
||||
+Zlhvzi7XCPfcjzqVhp6bP+Whv+uAwydg+uxZ2o+oCh1fuk1xTvCmcZZ8bYLYmQy
|
||||
QWZJ3kwbfQK0jr/pejQbLpkc9IhCeKOB9Utk0jJ6awL1+1pxrXOl4vYF2oWHAxxH
|
||||
pcMGM6gIkwb+ocUqeDGdnTV2viszorQu2W1dqrINGrtMI3xP6EkNzb7L1K/Jzpn7
|
||||
rSU7x0QMGwtb+Bv7bgLDuztMNtLtgd7vqRtOpufq5xKqfqwfYZrpEWE34BBUUbFS
|
||||
L6RZf3MLz1ykXF9N1CDMfpS6/Rbfnqe2KKAYWN8GNpMAsQ+JUWDZm8LAiFcsGbeN
|
||||
H/+GnffE5Ln0fTYbH8nMRnqm65kzBZWfE05Zj/NoqIXpCgjr6MhLkyFi9vsCAwEA
|
||||
AQKCAgAA96baQcWr9SLmQOR4NOwLEhQAMWefpWCZhU3amB4FgEVR1mmJjnw868RW
|
||||
t0v36jH0Dl44us9K6o2Ab+jCi9JTtbWM2Osk6JNkwSlVtsSPVH2KxbbmTTExH50N
|
||||
sYE3tPj12rlB7isXpRrOzlRwzWZmJBHOtrFlAsdKFYCQc03vdXlKGkBv1BuSXYP/
|
||||
8W5ltSYXMspxehkOZvhaIejbFREMPbzDvGlDER1a7Q320qQ7kUr7ISvbY1XJUzj1
|
||||
f1HwgEA6w/AhED5Jv6wfgvx+8Yo9hYnflTPbsO1XRS4x7kJxGHTMlFuEsSF1ICYH
|
||||
Bcos0wUiGcBO2N6uAFuhe98BBn+nOwAPZYWwGkmVuK2psm2mXAHx94GT/XqgK/1r
|
||||
VWGSoOV7Fhjauc2Nv8/vJU18DXT3OY5hc4iXVeEBkuZwRb/NVUtnFoHxVO/Mp5Fh
|
||||
/W5KZaLWVrLghzvSQ/KUIM0k4lfKDZpY9ZpOdNgWDyZY8tNrXumUZZimzWdXZ9vR
|
||||
dBssmd8qEKs1AHGFnMDt56IjLGou6j0qnWsLdR1e/WEFsYzGXLVHCv6vXRNkbjqh
|
||||
WFw5nA+2Dw1YAsy+YkTfgx2pOe+exM/wxsVPa7tG9oZ374dywUi1k6VoHw5dkmJw
|
||||
1hbXqSLZtx2N51G+SpGmNAV4vLUF0y3dy2wnrzFkFT4uxh1w8QKCAQEA+h6LwHTK
|
||||
hgcJx6CQQ6zYRqXo4wdvMooY1FcqJOq7LvJUA2CX5OOLs8qN1TyFrOCuAUTurOrM
|
||||
ABlQ0FpsIaP8TOGz72dHe2eLB+dD6Bqjn10sEFMn54zWd/w9ympQrO9jb5X3ViTh
|
||||
sCcdYyXVS9Hz8nzbbIF+DaKlxF2Hh71uRDxXpMPxRcGbOIuKZXUj6RkTIulzqT6o
|
||||
uawlegWxch05QSgzq/1ASxtjTzo4iuDCAii3N45xqxnB+fV9NXEt4R2oOGquBRPJ
|
||||
LxKcOnaQKBD0YNX4muTq+zPlv/kOb8/ys2WGWDUrNkpyJXqhTve4KONjqM7+iL/U
|
||||
4WdJuiCjonzk/QKCAQEA3Lc+kNq35FNLxMcnCVcUgkmiCWZ4dyGZZPdqjOPww1+n
|
||||
bbudGPzY1nxOvE60dZM4or/tm6qlXYfb2UU3+OOJrK9s297EQybZ8DTZu2GHyitc
|
||||
NSFV3Gl4cgvKdbieGKkk9X2dV9xSNesNvX9lJEnQxuwHDTeo8ubLHtV88Ml1xokn
|
||||
7W+IFiyEuUIL4e5/fadbrI3EwMrbCF4+9VcfABx4PTNMzdc8LsncCMXE+jFX8AWp
|
||||
TsT2JezTe5o2WpvBoKMAYhJQNQiaWATn00pDVY/70H1vK3ljomAa1IUdOr/AhAF7
|
||||
3jL0MYMgXSHzXZOKAtc7yf+QfFWF1Ls8+sen1clJVwKCAQEAp59rB0r+Iz56RmgL
|
||||
5t7ifs5XujbURemY5E2aN+18DuVmenD0uvfoO1DnJt4NtCNLWhxpXEdq+jH9H/VJ
|
||||
fG4a+ydT4IC1vjVRTrWlo9qeh4H4suQX3S1c2kKY4pvHf25blH/Lp9bFzbkZD8Ze
|
||||
IRcOxxb4MsrBwL+dGnGYD9dbG63ZCtoqSxaKQSX7VS1hKKmeUopj8ivFBdIht5oz
|
||||
JogBQ/J+Vqg9u1gagRFCrYgdXTcOOtRix0lW336vL+6u0ax/fXe5MjvlW3+8Zc3p
|
||||
pIBgVrlvh9ccx8crFTIDg9m4DJRgqaLQV+0ifI2np3WK3RQvSQWYPetZ7sm69ltD
|
||||
bvUGvQKCAQAz5CEhjUqOs8asjOXwnDiGKSmfbCgGWi/mPQUf+rcwN9z1P5a/uTKB
|
||||
utgIDbj/q401Nkp2vrgCNV7KxitSqKxFnTjKuKUL5KZ4gvRtyZBTR751/1BgcauP
|
||||
pJYE91K0GZBG5zGG5pWtd4XTd5Af5/rdycAeq2ddNEWtCiRFuBeohbaNbBtimzTZ
|
||||
GV4R0DDJKf+zoeEQMqEsZnwG0mTHceoS+WylOGU92teQeG7HI7K5C5uymTwFzpgq
|
||||
ByegRd5QFgKRDB0vWsZuyzh1xI/wHdnmOpdYcUGre0zTijhFB7ALWQ32P6SJv3ps
|
||||
av78kSNxZ4j3BM7DbJf6W8sKasZazOghAoIBAHekpBcLq9gRv2+NfLYxWN2sTZVB
|
||||
1ldwioG7rWvk5YQR2akukecI3NRjtC5gG2vverawG852Y4+oLfgRMHxgp0qNStwX
|
||||
juTykzPkCwZn8AyR+avC3mkrtJyM3IigcYOu4/UoaRDFa0xvCC1EfumpnKXIpHag
|
||||
miSQZf2sVbgqb3/LWvHIg/ceOP9oGJve87/HVfQtBoLaIe5RXCWkqB7mcI/exvTS
|
||||
8ShaW6v2Fe5Bzdvawj7sbsVYRWe93Aq2tmIgSX320D2RVepb6mjD4nr0IUaM3Yed
|
||||
TFT7e2ikWXyDLLgVkDTU4Qe8fr3ZKGfanCIDzvgNw6H1gRi+2WQgOmjilMQ=
|
||||
-----END RSA PRIVATE KEY-----
|
Loading…
Reference in New Issue