Rename 'options' variables to 'conf'

In the common-config patch, I'm going to be using 'conf' as the name of
the variable holding configuration values rather than 'options'.

This patch does the renaming across the codebase without making any
functional changes.

Change-Id: I3a86fc01fc76825d6d1e86af882eb8245828ce5f
This commit is contained in:
Mark McLoughlin 2011-11-28 14:37:58 +00:00
parent ba44d1c384
commit 002d711fb7
43 changed files with 314 additions and 318 deletions

View File

@ -37,9 +37,9 @@ class Controller(controller.BaseController):
A controller for managing cached images. A controller for managing cached images.
""" """
def __init__(self, options): def __init__(self, conf):
self.options = options self.conf = conf
self.cache = image_cache.ImageCache(self.options) self.cache = image_cache.ImageCache(self.conf)
def get_cached_images(self, req): def get_cached_images(self, req):
""" """
@ -110,8 +110,8 @@ class CachedImageSerializer(wsgi.JSONResponseSerializer):
pass pass
def create_resource(options): def create_resource(conf):
"""Cached Images resource factory method""" """Cached Images resource factory method"""
deserializer = CachedImageDeserializer() deserializer = CachedImageDeserializer()
serializer = CachedImageSerializer() serializer = CachedImageSerializer()
return wsgi.Resource(Controller(options), deserializer, serializer) return wsgi.Resource(Controller(conf), deserializer, serializer)

View File

@ -43,9 +43,9 @@ get_images_re = re.compile(r'^(/v\d+)*/images/(.+)$')
class CacheFilter(wsgi.Middleware): class CacheFilter(wsgi.Middleware):
def __init__(self, app, options): def __init__(self, app, conf):
self.options = options self.conf = conf
self.cache = image_cache.ImageCache(options) self.cache = image_cache.ImageCache(conf)
self.serializer = images.ImageSerializer() self.serializer = images.ImageSerializer()
logger.info(_("Initialized image cache middleware")) logger.info(_("Initialized image cache middleware"))
super(CacheFilter, self).__init__(app) super(CacheFilter, self).__init__(app)

View File

@ -28,10 +28,10 @@ logger = logging.getLogger(__name__)
class CacheManageFilter(wsgi.Middleware): class CacheManageFilter(wsgi.Middleware):
def __init__(self, app, options): def __init__(self, app, conf):
map = app.map map = app.map
resource = cached_images.create_resource(options) resource = cached_images.create_resource(conf)
map.connect("/cached_images", map.connect("/cached_images",
controller=resource, controller=resource,

View File

@ -35,10 +35,10 @@ logger = logging.getLogger('glance.api.middleware.version_negotiation')
class VersionNegotiationFilter(wsgi.Middleware): class VersionNegotiationFilter(wsgi.Middleware):
def __init__(self, app, options): def __init__(self, app, conf):
self.versions_app = versions.Controller(options) self.versions_app = versions.Controller(conf)
self.version_uri_regex = re.compile(r"^v(\d+)\.?(\d+)?") self.version_uri_regex = re.compile(r"^v(\d+)\.?(\d+)?")
self.options = options self.conf = conf
super(VersionNegotiationFilter, self).__init__(app) super(VersionNegotiationFilter, self).__init__(app)
def process_request(self, req): def process_request(self, req):

View File

@ -76,11 +76,11 @@ class Controller(controller.BaseController):
DELETE /images/<ID> -- Delete the image with id <ID> DELETE /images/<ID> -- Delete the image with id <ID>
""" """
def __init__(self, options): def __init__(self, conf):
self.options = options self.conf = conf
glance.store.create_stores(options) glance.store.create_stores(conf)
self.notifier = notifier.Notifier(options) self.notifier = notifier.Notifier(conf)
registry.configure_registry_client(options) registry.configure_registry_client(conf)
def index(self, req): def index(self, req):
""" """
@ -290,7 +290,7 @@ class Controller(controller.BaseController):
raise HTTPBadRequest(explanation=msg) raise HTTPBadRequest(explanation=msg)
store_name = req.headers.get('x-image-meta-store', store_name = req.headers.get('x-image-meta-store',
self.options['default_store']) self.conf['default_store'])
store = self.get_store_or_400(req, store_name) store = self.get_store_or_400(req, store_name)
@ -557,7 +557,7 @@ class Controller(controller.BaseController):
# See https://bugs.launchpad.net/glance/+bug/747799 # See https://bugs.launchpad.net/glance/+bug/747799
try: try:
if image['location']: if image['location']:
schedule_delete_from_backend(image['location'], self.options, schedule_delete_from_backend(image['location'], self.conf,
req.context, id) req.context, id)
registry.delete_image_metadata(req.context, id) registry.delete_image_metadata(req.context, id)
except exception.NotFound, e: except exception.NotFound, e:
@ -706,8 +706,8 @@ class ImageSerializer(wsgi.JSONResponseSerializer):
return response return response
def create_resource(options): def create_resource(conf):
"""Images resource factory method""" """Images resource factory method"""
deserializer = ImageDeserializer() deserializer = ImageDeserializer()
serializer = ImageSerializer() serializer = ImageSerializer()
return wsgi.Resource(Controller(options), deserializer, serializer) return wsgi.Resource(Controller(conf), deserializer, serializer)

View File

@ -14,8 +14,8 @@ logger = logging.getLogger('glance.api.v1.members')
class Controller(object): class Controller(object):
def __init__(self, options): def __init__(self, conf):
self.options = options self.conf = conf
def index(self, req, image_id): def index(self, req, image_id):
""" """
@ -158,8 +158,8 @@ class Controller(object):
return dict(shared_images=members) return dict(shared_images=members)
def create_resource(options): def create_resource(conf):
"""Image members resource factory method""" """Image members resource factory method"""
deserializer = wsgi.JSONRequestDeserializer() deserializer = wsgi.JSONRequestDeserializer()
serializer = wsgi.JSONResponseSerializer() serializer = wsgi.JSONResponseSerializer()
return wsgi.Resource(Controller(options), deserializer, serializer) return wsgi.Resource(Controller(conf), deserializer, serializer)

View File

@ -30,11 +30,11 @@ class API(wsgi.Router):
"""WSGI router for Glance v1 API requests.""" """WSGI router for Glance v1 API requests."""
def __init__(self, options): def __init__(self, conf):
self.options = options self.conf = conf
mapper = routes.Mapper() mapper = routes.Mapper()
images_resource = images.create_resource(options) images_resource = images.create_resource(conf)
mapper.resource("image", "images", controller=images_resource, mapper.resource("image", "images", controller=images_resource,
collection={'detail': 'GET'}) collection={'detail': 'GET'})
@ -42,7 +42,7 @@ class API(wsgi.Router):
mapper.connect("/images/{id}", controller=images_resource, mapper.connect("/images/{id}", controller=images_resource,
action="meta", conditions=dict(method=["HEAD"])) action="meta", conditions=dict(method=["HEAD"]))
members_resource = members.create_resource(options) members_resource = members.create_resource(conf)
mapper.resource("member", "members", controller=members_resource, mapper.resource("member", "members", controller=members_resource,
parent_resource=dict(member_name='image', parent_resource=dict(member_name='image',

View File

@ -31,8 +31,8 @@ class Controller(object):
A controller that produces information on the Glance API versions. A controller that produces information on the Glance API versions.
""" """
def __init__(self, options): def __init__(self, conf):
self.options = options self.conf = conf
@webob.dec.wsgify @webob.dec.wsgify
def __call__(self, req): def __call__(self, req):
@ -63,5 +63,5 @@ class Controller(object):
return response return response
def get_href(self): def get_href(self):
return "http://%s:%s/v1/" % (self.options['bind_host'], return "http://%s:%s/v1/" % (self.conf['bind_host'],
self.options['bind_port']) self.conf['bind_port'])

View File

@ -53,8 +53,8 @@ class RequestContext(object):
class ContextMiddleware(wsgi.Middleware): class ContextMiddleware(wsgi.Middleware):
def __init__(self, app, options): def __init__(self, app, conf):
self.options = options self.conf = conf
super(ContextMiddleware, self).__init__(app) super(ContextMiddleware, self).__init__(app)
def make_context(self, *args, **kwargs): def make_context(self, *args, **kwargs):
@ -64,11 +64,11 @@ class ContextMiddleware(wsgi.Middleware):
# Determine the context class to use # Determine the context class to use
ctxcls = RequestContext ctxcls = RequestContext
if 'context_class' in self.options: if 'context_class' in self.conf:
ctxcls = utils.import_class(self.options['context_class']) ctxcls = utils.import_class(self.conf['context_class'])
# Determine whether to use tenant or owner # Determine whether to use tenant or owner
owner_is_tenant = config.get_option(self.options, 'owner_is_tenant', owner_is_tenant = config.get_option(self.conf, 'owner_is_tenant',
type='bool', default=True) type='bool', default=True)
kwargs.setdefault('owner_is_tenant', owner_is_tenant) kwargs.setdefault('owner_is_tenant', owner_is_tenant)

View File

@ -29,7 +29,7 @@ from glance.common import exception
class NoopStrategy(object): class NoopStrategy(object):
"""A notifier that does nothing when called.""" """A notifier that does nothing when called."""
def __init__(self, options): def __init__(self, conf):
pass pass
def warn(self, msg): def warn(self, msg):
@ -45,7 +45,7 @@ class NoopStrategy(object):
class LoggingStrategy(object): class LoggingStrategy(object):
"""A notifier that calls logging when called.""" """A notifier that calls logging when called."""
def __init__(self, options): def __init__(self, conf):
self.logger = logging.getLogger('glance.notifier.logging_notifier') self.logger = logging.getLogger('glance.notifier.logging_notifier')
def warn(self, msg): def warn(self, msg):
@ -61,9 +61,9 @@ class LoggingStrategy(object):
class RabbitStrategy(object): class RabbitStrategy(object):
"""A notifier that puts a message on a queue when called.""" """A notifier that puts a message on a queue when called."""
def __init__(self, options): def __init__(self, conf):
"""Initialize the rabbit notification strategy.""" """Initialize the rabbit notification strategy."""
self._options = options self._conf = conf
host = self._get_option('rabbit_host', 'str', 'localhost') host = self._get_option('rabbit_host', 'str', 'localhost')
port = self._get_option('rabbit_port', 'int', 5672) port = self._get_option('rabbit_port', 'int', 5672)
use_ssl = self._get_option('rabbit_use_ssl', 'bool', False) use_ssl = self._get_option('rabbit_use_ssl', 'bool', False)
@ -84,7 +84,7 @@ class RabbitStrategy(object):
def _get_option(self, name, datatype, default): def _get_option(self, name, datatype, default):
"""Retrieve a configuration option.""" """Retrieve a configuration option."""
return config.get_option(self._options, return config.get_option(self._conf,
name, name,
type=datatype, type=datatype,
default=default) default=default)
@ -115,11 +115,11 @@ class Notifier(object):
"default": NoopStrategy, "default": NoopStrategy,
} }
def __init__(self, options, strategy=None): def __init__(self, conf, strategy=None):
strategy = config.get_option(options, "notifier_strategy", strategy = config.get_option(conf, "notifier_strategy",
type="str", default="default") type="str", default="default")
try: try:
self.strategy = self.STRATEGIES[strategy](options) self.strategy = self.STRATEGIES[strategy](conf)
except KeyError: except KeyError:
raise exception.InvalidNotifierStrategy(strategy=strategy) raise exception.InvalidNotifierStrategy(strategy=strategy)

View File

@ -34,15 +34,15 @@ class ImageCache(object):
"""Provides an LRU cache for image data.""" """Provides an LRU cache for image data."""
def __init__(self, options): def __init__(self, conf):
self.options = options self.conf = conf
self.init_driver() self.init_driver()
def init_driver(self): def init_driver(self):
""" """
Create the driver for the cache Create the driver for the cache
""" """
driver_name = self.options.get('image_cache_driver', 'sqlite') driver_name = self.conf.get('image_cache_driver', 'sqlite')
driver_module = (__name__ + '.drivers.' + driver_name + '.Driver') driver_module = (__name__ + '.drivers.' + driver_name + '.Driver')
try: try:
self.driver_class = utils.import_class(driver_module) self.driver_class = utils.import_class(driver_module)
@ -64,7 +64,7 @@ class ImageCache(object):
fall back to using the SQLite driver which has no odd dependencies fall back to using the SQLite driver which has no odd dependencies
""" """
try: try:
self.driver = self.driver_class(self.options) self.driver = self.driver_class(self.conf)
self.driver.configure() self.driver.configure()
except exception.BadDriverConfiguration, config_err: except exception.BadDriverConfiguration, config_err:
driver_module = self.driver_class.__module__ driver_module = self.driver_class.__module__
@ -74,7 +74,7 @@ class ImageCache(object):
logger.info(_("Defaulting to SQLite driver.")) logger.info(_("Defaulting to SQLite driver."))
default_module = __name__ + '.drivers.sqlite.Driver' default_module = __name__ + '.drivers.sqlite.Driver'
self.driver_class = utils.import_class(default_module) self.driver_class = utils.import_class(default_module)
self.driver = self.driver_class(self.options) self.driver = self.driver_class(self.conf)
self.driver.configure() self.driver.configure()
def is_cached(self, image_id): def is_cached(self, image_id):
@ -150,7 +150,7 @@ class ImageCache(object):
size. Returns a tuple containing the total number of cached size. Returns a tuple containing the total number of cached
files removed and the total size of all pruned image files. files removed and the total size of all pruned image files.
""" """
max_size = int(self.options.get('image_cache_max_size', max_size = int(self.conf.get('image_cache_max_size',
DEFAULT_MAX_CACHE_SIZE)) DEFAULT_MAX_CACHE_SIZE))
current_size = self.driver.get_cache_size() current_size = self.driver.get_cache_size()
if max_size > current_size: if max_size > current_size:

View File

@ -27,9 +27,9 @@ logger = logging.getLogger(__name__)
class Cleaner(object): class Cleaner(object):
def __init__(self, options): def __init__(self, conf):
self.options = options self.conf = conf
self.cache = ImageCache(options) self.cache = ImageCache(conf)
def run(self): def run(self):
self.cache.clean() self.cache.clean()

View File

@ -33,15 +33,15 @@ logger = logging.getLogger(__name__)
class Driver(object): class Driver(object):
def __init__(self, options): def __init__(self, conf):
""" """
Initialize the attribute driver with a set of options. Initialize the attribute driver with a set of options.
:param options: Dictionary of configuration file options :param conf: Dictionary of configuration options
:raises `exception.BadDriverConfiguration` if configuration of the :raises `exception.BadDriverConfiguration` if configuration of the
driver fails for any reason. driver fails for any reason.
""" """
self.options = options or {} self.conf = conf or {}
def configure(self): def configure(self):
""" """
@ -62,7 +62,7 @@ class Driver(object):
try: try:
key = 'image_cache_dir' key = 'image_cache_dir'
self.base_dir = self.options[key] self.base_dir = self.conf[key]
except KeyError: except KeyError:
msg = _('Failed to read %s from config') % key msg = _('Failed to read %s from config') % key
logger.error(msg) logger.error(msg)

View File

@ -95,7 +95,7 @@ class Driver(base.Driver):
self.initialize_db() self.initialize_db()
def initialize_db(self): def initialize_db(self):
db = self.options.get('image_cache_sqlite_db', DEFAULT_SQLITE_DB) db = self.conf.get('image_cache_sqlite_db', DEFAULT_SQLITE_DB)
self.db_path = os.path.join(self.base_dir, db) self.db_path = os.path.join(self.base_dir, db)
try: try:
conn = sqlite3.connect(self.db_path, check_same_thread=False, conn = sqlite3.connect(self.db_path, check_same_thread=False,
@ -252,7 +252,7 @@ class Driver(base.Driver):
""" """
self.delete_invalid_files() self.delete_invalid_files()
incomplete_stall_time = int(self.options.get('image_cache_stall_time', incomplete_stall_time = int(self.conf.get('image_cache_stall_time',
DEFAULT_STALL_TIME)) DEFAULT_STALL_TIME))
now = time.time() now = time.time()
older_than = now - incomplete_stall_time older_than = now - incomplete_stall_time

View File

@ -424,7 +424,7 @@ class Driver(base.Driver):
""" """
self.reap_invalid() self.reap_invalid()
incomplete_stall_time = int(self.options.get('image_cache_stall_time', incomplete_stall_time = int(self.conf.get('image_cache_stall_time',
DEFAULT_STALL_TIME)) DEFAULT_STALL_TIME))
self.reap_stalled(incomplete_stall_time) self.reap_stalled(incomplete_stall_time)

View File

@ -42,14 +42,14 @@ logger = logging.getLogger(__name__)
class Prefetcher(object): class Prefetcher(object):
def __init__(self, options): def __init__(self, conf):
self.options = options self.conf = conf
glance.store.create_stores(options) glance.store.create_stores(conf)
self.cache = ImageCache(options) self.cache = ImageCache(conf)
registry.configure_registry_client(options) registry.configure_registry_client(conf)
def fetch_image_into_cache(self, image_id): def fetch_image_into_cache(self, image_id):
auth_tok = self.options.get('admin_token') auth_tok = self.conf.get('admin_token')
ctx = context.RequestContext(is_admin=True, show_deleted=True, ctx = context.RequestContext(is_admin=True, show_deleted=True,
auth_tok=auth_tok) auth_tok=auth_tok)
try: try:

View File

@ -27,9 +27,9 @@ logger = logging.getLogger(__name__)
class Pruner(object): class Pruner(object):
def __init__(self, options): def __init__(self, conf):
self.options = options self.conf = conf
self.cache = ImageCache(options) self.cache = ImageCache(conf)
def run(self): def run(self):
self.cache.prune() self.cache.prune()

View File

@ -35,13 +35,13 @@ logger = logging.getLogger(__name__)
class Queuer(object): class Queuer(object):
def __init__(self, options): def __init__(self, conf):
self.options = options self.conf = conf
self.cache = ImageCache(options) self.cache = ImageCache(conf)
registry.configure_registry_client(options) registry.configure_registry_client(conf)
def queue_image(self, image_id): def queue_image(self, image_id):
auth_tok = self.options.get('admin_token') auth_tok = self.conf.get('admin_token')
ctx = context.RequestContext(is_admin=True, show_deleted=True, ctx = context.RequestContext(is_admin=True, show_deleted=True,
auth_tok=auth_tok) auth_tok=auth_tok)
try: try:

View File

@ -34,16 +34,16 @@ _CLIENT_KWARGS = {}
_METADATA_ENCRYPTION_KEY = None _METADATA_ENCRYPTION_KEY = None
def configure_registry_client(options): def configure_registry_client(conf):
""" """
Sets up a registry client for use in registry lookups Sets up a registry client for use in registry lookups
:param options: Configuration options coming from controller :param conf: Configuration options coming from controller
""" """
global _CLIENT_KWARGS, _CLIENT_HOST, _CLIENT_PORT, _METADATA_ENCRYPTION_KEY global _CLIENT_KWARGS, _CLIENT_HOST, _CLIENT_PORT, _METADATA_ENCRYPTION_KEY
try: try:
host = options['registry_host'] host = conf['registry_host']
port = int(options['registry_port']) port = int(conf['registry_port'])
except (TypeError, ValueError): except (TypeError, ValueError):
msg = _("Configuration option was not valid") msg = _("Configuration option was not valid")
logger.error(msg) logger.error(msg)
@ -53,12 +53,12 @@ def configure_registry_client(options):
logger.error(msg) logger.error(msg)
raise exception.BadRegistryConnectionConfiguration(msg) raise exception.BadRegistryConnectionConfiguration(msg)
use_ssl = config.get_option(options, 'registry_client_protocol', use_ssl = config.get_option(conf, 'registry_client_protocol',
default='http').lower() == 'https' default='http').lower() == 'https'
key_file = options.get('registry_client_key_file') key_file = conf.get('registry_client_key_file')
cert_file = options.get('registry_client_cert_file') cert_file = conf.get('registry_client_cert_file')
ca_file = options.get('registry_client_ca_file') ca_file = conf.get('registry_client_ca_file')
_METADATA_ENCRYPTION_KEY = options.get('metadata_encryption_key') _METADATA_ENCRYPTION_KEY = conf.get('metadata_encryption_key')
_CLIENT_HOST = host _CLIENT_HOST = host
_CLIENT_PORT = port _CLIENT_PORT = port
_CLIENT_KWARGS = {'use_ssl': use_ssl, _CLIENT_KWARGS = {'use_ssl': use_ssl,

View File

@ -25,15 +25,15 @@ from glance.common import wsgi
class API(wsgi.Router): class API(wsgi.Router):
"""WSGI entry point for all Registry requests.""" """WSGI entry point for all Registry requests."""
def __init__(self, options): def __init__(self, conf):
mapper = routes.Mapper() mapper = routes.Mapper()
images_resource = images.create_resource(options) images_resource = images.create_resource(conf)
mapper.resource("image", "images", controller=images_resource, mapper.resource("image", "images", controller=images_resource,
collection={'detail': 'GET'}) collection={'detail': 'GET'})
mapper.connect("/", controller=images_resource, action="index") mapper.connect("/", controller=images_resource, action="index")
members_resource = members.create_resource(options) members_resource = members.create_resource(conf)
mapper.resource("member", "members", controller=members_resource, mapper.resource("member", "members", controller=members_resource,
parent_resource=dict(member_name='image', parent_resource=dict(member_name='image',
collection_name='images')) collection_name='images'))

View File

@ -49,9 +49,9 @@ SUPPORTED_PARAMS = ('limit', 'marker', 'sort_key', 'sort_dir')
class Controller(object): class Controller(object):
def __init__(self, options): def __init__(self, conf):
self.options = options self.conf = conf
db_api.configure_db(options) db_api.configure_db(conf)
def _get_images(self, context, **params): def _get_images(self, context, **params):
""" """
@ -181,7 +181,7 @@ class Controller(object):
def _get_limit(self, req): def _get_limit(self, req):
"""Parse a limit query param into something usable.""" """Parse a limit query param into something usable."""
try: try:
default = self.options['limit_param_default'] default = self.conf['limit_param_default']
except KeyError: except KeyError:
# if no value is configured, provide a sane default # if no value is configured, provide a sane default
default = 25 default = 25
@ -198,7 +198,7 @@ class Controller(object):
raise exc.HTTPBadRequest(_("limit param must be positive")) raise exc.HTTPBadRequest(_("limit param must be positive"))
try: try:
api_limit_max = int(self.options['api_limit_max']) api_limit_max = int(self.conf['api_limit_max'])
except (KeyError, ValueError): except (KeyError, ValueError):
api_limit_max = 1000 api_limit_max = 1000
msg = _("Failed to read api_limit_max from config. " msg = _("Failed to read api_limit_max from config. "
@ -417,8 +417,8 @@ def make_image_dict(image):
return image_dict return image_dict
def create_resource(options): def create_resource(conf):
"""Images resource factory method.""" """Images resource factory method."""
deserializer = wsgi.JSONRequestDeserializer() deserializer = wsgi.JSONRequestDeserializer()
serializer = wsgi.JSONResponseSerializer() serializer = wsgi.JSONResponseSerializer()
return wsgi.Resource(Controller(options), deserializer, serializer) return wsgi.Resource(Controller(conf), deserializer, serializer)

View File

@ -29,9 +29,9 @@ logger = logging.getLogger('glance.registry.api.v1.members')
class Controller(object): class Controller(object):
def __init__(self, options): def __init__(self, conf):
self.options = options self.conf = conf
db_api.configure_db(options) db_api.configure_db(conf)
def index(self, req, image_id): def index(self, req, image_id):
""" """
@ -296,8 +296,8 @@ def make_member_list(members, **attr_map):
if not memb.deleted] if not memb.deleted]
def create_resource(options): def create_resource(conf):
"""Image members resource factory method.""" """Image members resource factory method."""
deserializer = wsgi.JSONRequestDeserializer() deserializer = wsgi.JSONRequestDeserializer()
serializer = wsgi.JSONResponseSerializer() serializer = wsgi.JSONResponseSerializer()
return wsgi.Resource(Controller(options), deserializer, serializer) return wsgi.Resource(Controller(conf), deserializer, serializer)

View File

@ -20,7 +20,7 @@
import optparse import optparse
def add_options(parser): def add_conf(parser):
""" """
Adds any configuration options that the db layer might have. Adds any configuration options that the db layer might have.

View File

@ -58,22 +58,22 @@ STATUSES = ['active', 'saving', 'queued', 'killed', 'pending_delete',
'deleted'] 'deleted']
def configure_db(options): def configure_db(conf):
""" """
Establish the database, create an engine if needed, and Establish the database, create an engine if needed, and
register the models. register the models.
:param options: Mapping of configuration options :param conf: Mapping of configuration options
""" """
global _ENGINE, sa_logger, logger global _ENGINE, sa_logger, logger
if not _ENGINE: if not _ENGINE:
debug = config.get_option( debug = config.get_option(
options, 'debug', type='bool', default=False) conf, 'debug', type='bool', default=False)
verbose = config.get_option( verbose = config.get_option(
options, 'verbose', type='bool', default=False) conf, 'verbose', type='bool', default=False)
timeout = config.get_option( timeout = config.get_option(
options, 'sql_idle_timeout', type='int', default=3600) conf, 'sql_idle_timeout', type='int', default=3600)
sql_connection = config.get_option(options, 'sql_connection') sql_connection = config.get_option(conf, 'sql_connection')
try: try:
_ENGINE = create_engine(sql_connection, pool_recycle=timeout) _ENGINE = create_engine(sql_connection, pool_recycle=timeout)
except Exception, err: except Exception, err:

View File

@ -32,15 +32,15 @@ from glance.common import exception
logger = logging.getLogger('glance.registry.db.migration') logger = logging.getLogger('glance.registry.db.migration')
def db_version(options): def db_version(conf):
""" """
Return the database's current migration number Return the database's current migration number
:param options: options dict :param conf: conf dict
:retval version number :retval version number
""" """
repo_path = get_migrate_repo_path() repo_path = get_migrate_repo_path()
sql_connection = options['sql_connection'] sql_connection = conf['sql_connection']
try: try:
return versioning_api.db_version(sql_connection, repo_path) return versioning_api.db_version(sql_connection, repo_path)
except versioning_exceptions.DatabaseNotControlledError, e: except versioning_exceptions.DatabaseNotControlledError, e:
@ -49,78 +49,78 @@ def db_version(options):
raise exception.DatabaseMigrationError(msg) raise exception.DatabaseMigrationError(msg)
def upgrade(options, version=None): def upgrade(conf, version=None):
""" """
Upgrade the database's current migration level Upgrade the database's current migration level
:param options: options dict :param conf: conf dict
:param version: version to upgrade (defaults to latest) :param version: version to upgrade (defaults to latest)
:retval version number :retval version number
""" """
db_version(options) # Ensure db is under migration control db_version(conf) # Ensure db is under migration control
repo_path = get_migrate_repo_path() repo_path = get_migrate_repo_path()
sql_connection = options['sql_connection'] sql_connection = conf['sql_connection']
version_str = version or 'latest' version_str = version or 'latest'
logger.info(_("Upgrading %(sql_connection)s to version %(version_str)s") % logger.info(_("Upgrading %(sql_connection)s to version %(version_str)s") %
locals()) locals())
return versioning_api.upgrade(sql_connection, repo_path, version) return versioning_api.upgrade(sql_connection, repo_path, version)
def downgrade(options, version): def downgrade(conf, version):
""" """
Downgrade the database's current migration level Downgrade the database's current migration level
:param options: options dict :param conf: conf dict
:param version: version to downgrade to :param version: version to downgrade to
:retval version number :retval version number
""" """
db_version(options) # Ensure db is under migration control db_version(conf) # Ensure db is under migration control
repo_path = get_migrate_repo_path() repo_path = get_migrate_repo_path()
sql_connection = options['sql_connection'] sql_connection = conf['sql_connection']
logger.info(_("Downgrading %(sql_connection)s to version %(version)s") % logger.info(_("Downgrading %(sql_connection)s to version %(version)s") %
locals()) locals())
return versioning_api.downgrade(sql_connection, repo_path, version) return versioning_api.downgrade(sql_connection, repo_path, version)
def version_control(options): def version_control(conf):
""" """
Place a database under migration control Place a database under migration control
:param options: options dict :param conf: conf dict
""" """
sql_connection = options['sql_connection'] sql_connection = conf['sql_connection']
try: try:
_version_control(options) _version_control(conf)
except versioning_exceptions.DatabaseAlreadyControlledError, e: except versioning_exceptions.DatabaseAlreadyControlledError, e:
msg = (_("database '%(sql_connection)s' is already under migration " msg = (_("database '%(sql_connection)s' is already under migration "
"control") % locals()) "control") % locals())
raise exception.DatabaseMigrationError(msg) raise exception.DatabaseMigrationError(msg)
def _version_control(options): def _version_control(conf):
""" """
Place a database under migration control Place a database under migration control
:param options: options dict :param conf: conf dict
""" """
repo_path = get_migrate_repo_path() repo_path = get_migrate_repo_path()
sql_connection = options['sql_connection'] sql_connection = conf['sql_connection']
return versioning_api.version_control(sql_connection, repo_path) return versioning_api.version_control(sql_connection, repo_path)
def db_sync(options, version=None): def db_sync(conf, version=None):
""" """
Place a database under migration control and perform an upgrade Place a database under migration control and perform an upgrade
:param options: options dict :param conf: conf dict
:retval version number :retval version number
""" """
try: try:
_version_control(options) _version_control(conf)
except versioning_exceptions.DatabaseAlreadyControlledError, e: except versioning_exceptions.DatabaseAlreadyControlledError, e:
pass pass
upgrade(options, version=version) upgrade(conf, version=version)
def get_migrate_repo_path(): def get_migrate_repo_path():

View File

@ -87,7 +87,7 @@ def register_store(store_module, schemes):
location.register_scheme_map(scheme_map) location.register_scheme_map(scheme_map)
def create_stores(options): def create_stores(conf):
""" """
Construct the store objects with supplied configuration options Construct the store objects with supplied configuration options
""" """
@ -98,7 +98,7 @@ def create_stores(options):
raise BackendException('Unable to create store. Could not find ' raise BackendException('Unable to create store. Could not find '
'a class named Store in module %s.' 'a class named Store in module %s.'
% store_module) % store_module)
STORES[store_module] = store_class(options) STORES[store_module] = store_class(conf)
def get_store_from_scheme(scheme): def get_store_from_scheme(scheme):
@ -154,11 +154,11 @@ def get_store_from_location(uri):
return loc.store_name return loc.store_name
def schedule_delete_from_backend(uri, options, context, image_id, **kwargs): def schedule_delete_from_backend(uri, conf, context, image_id, **kwargs):
""" """
Given a uri and a time, schedule the deletion of an image. Given a uri and a time, schedule the deletion of an image.
""" """
use_delay = config.get_option(options, 'delayed_delete', type='bool', use_delay = config.get_option(conf, 'delayed_delete', type='bool',
default=False) default=False)
if not use_delay: if not use_delay:
registry.update_image_metadata(context, image_id, registry.update_image_metadata(context, image_id,
@ -169,8 +169,8 @@ def schedule_delete_from_backend(uri, options, context, image_id, **kwargs):
msg = _("Failed to delete image from store (%(uri)s).") % locals() msg = _("Failed to delete image from store (%(uri)s).") % locals()
logger.error(msg) logger.error(msg)
datadir = config.get_option(options, 'scrubber_datadir') datadir = config.get_option(conf, 'scrubber_datadir')
scrub_time = config.get_option(options, 'scrub_time', type='int', scrub_time = config.get_option(conf, 'scrub_time', type='int',
default=0) default=0)
delete_time = time.time() + scrub_time delete_time = time.time() + scrub_time
file_path = os.path.join(datadir, str(image_id)) file_path = os.path.join(datadir, str(image_id))

View File

@ -28,13 +28,13 @@ class Store(object):
CHUNKSIZE = (16 * 1024 * 1024) # 16M CHUNKSIZE = (16 * 1024 * 1024) # 16M
def __init__(self, options=None): def __init__(self, conf=None):
""" """
Initialize the Store Initialize the Store
:param options: Optional dictionary of configuration options :param conf: Optional dictionary of configuration options
""" """
self.options = options or {} self.conf = conf or {}
self.configure() self.configure()

View File

@ -115,7 +115,7 @@ class Store(glance.store.base.Store):
reason=reason) reason=reason)
def _option_get(self, param): def _option_get(self, param):
result = self.options.get(param) result = self.conf.get(param)
if not result: if not result:
reason = _("Could not find %s in configuration options.") % param reason = _("Could not find %s in configuration options.") % param
logger.error(reason) logger.error(reason)

View File

@ -109,16 +109,16 @@ class Store(glance.store.base.Store):
""" """
try: try:
self.chunk_size = int( self.chunk_size = int(
self.options.get( self.conf.get(
'rbd_store_chunk_size', 'rbd_store_chunk_size',
DEFAULT_CHUNKSIZE)) * 1024 * 1024 DEFAULT_CHUNKSIZE)) * 1024 * 1024
# these must not be unicode since they will be passed to a # these must not be unicode since they will be passed to a
# non-unicode-aware C library # non-unicode-aware C library
self.pool = str(self.options.get('rbd_store_pool', self.pool = str(self.conf.get('rbd_store_pool',
DEFAULT_POOL)) DEFAULT_POOL))
self.user = str(self.options.get('rbd_store_user', self.user = str(self.conf.get('rbd_store_user',
DEFAULT_USER)) DEFAULT_USER))
self.conf_file = str(self.options.get('rbd_store_ceph_conf', self.conf_file = str(self.conf.get('rbd_store_ceph_conf',
DEFAULT_CONFFILE)) DEFAULT_CONFFILE))
except Exception, e: except Exception, e:
reason = _("Error in store configuration: %s") % e reason = _("Error in store configuration: %s") % e

View File

@ -210,14 +210,14 @@ class Store(glance.store.base.Store):
else: # Defaults http else: # Defaults http
self.full_s3_host = 'http://' + self.s3_host self.full_s3_host = 'http://' + self.s3_host
if self.options.get('s3_store_object_buffer_dir'): if self.conf.get('s3_store_object_buffer_dir'):
self.s3_store_object_buffer_dir = self.options.get( self.s3_store_object_buffer_dir = self.conf.get(
's3_store_object_buffer_dir') 's3_store_object_buffer_dir')
else: else:
self.s3_store_object_buffer_dir = None self.s3_store_object_buffer_dir = None
def _option_get(self, param): def _option_get(self, param):
result = self.options.get(param) result = self.conf.get(param)
if not result: if not result:
reason = _("Could not find %(param)s in configuration " reason = _("Could not find %(param)s in configuration "
"options.") % locals() "options.") % locals()
@ -297,7 +297,7 @@ class Store(glance.store.base.Store):
host=loc.s3serviceurl, host=loc.s3serviceurl,
is_secure=(loc.scheme == 's3+https')) is_secure=(loc.scheme == 's3+https'))
create_bucket_if_missing(self.bucket, s3_conn, self.options) create_bucket_if_missing(self.bucket, s3_conn, self.conf)
bucket_obj = get_bucket(s3_conn, self.bucket) bucket_obj = get_bucket(s3_conn, self.bucket)
obj_name = str(image_id) obj_name = str(image_id)
@ -403,21 +403,21 @@ def get_bucket(conn, bucket_id):
return bucket return bucket
def create_bucket_if_missing(bucket, s3_conn, options): def create_bucket_if_missing(bucket, s3_conn, conf):
""" """
Creates a missing bucket in S3 if the Creates a missing bucket in S3 if the
``s3_store_create_bucket_on_put`` option is set. ``s3_store_create_bucket_on_put`` option is set.
:param bucket: Name of bucket to create :param bucket: Name of bucket to create
:param s3_conn: Connection to S3 :param s3_conn: Connection to S3
:param options: Option mapping :param conf: Option mapping
""" """
from boto.exception import S3ResponseError from boto.exception import S3ResponseError
try: try:
s3_conn.get_bucket(bucket) s3_conn.get_bucket(bucket)
except S3ResponseError, e: except S3ResponseError, e:
if e.status == httplib.NOT_FOUND: if e.status == httplib.NOT_FOUND:
add_bucket = config.get_option(options, add_bucket = config.get_option(conf,
's3_store_create_bucket_on_put', 's3_store_create_bucket_on_put',
type='bool', default=False) type='bool', default=False)
if add_bucket: if add_bucket:

View File

@ -65,23 +65,23 @@ class Daemon(object):
class Scrubber(object): class Scrubber(object):
CLEANUP_FILE = ".cleanup" CLEANUP_FILE = ".cleanup"
def __init__(self, options): def __init__(self, conf):
logger.info(_("Initializing scrubber with options: %s") % options) logger.info(_("Initializing scrubber with conf: %s") % conf)
self.options = options self.conf = conf
self.datadir = config.get_option(options, 'scrubber_datadir') self.datadir = config.get_option(conf, 'scrubber_datadir')
self.cleanup = config.get_option(options, 'cleanup_scrubber', self.cleanup = config.get_option(conf, 'cleanup_scrubber',
type='bool', default=False) type='bool', default=False)
host = config.get_option(options, 'registry_host') host = config.get_option(conf, 'registry_host')
port = config.get_option(options, 'registry_port', type='int') port = config.get_option(conf, 'registry_port', type='int')
self.registry = client.RegistryClient(host, port) self.registry = client.RegistryClient(host, port)
utils.safe_mkdirs(self.datadir) utils.safe_mkdirs(self.datadir)
if self.cleanup: if self.cleanup:
self.cleanup_time = config.get_option(options, self.cleanup_time = config.get_option(conf,
'cleanup_scrubber_time', 'cleanup_scrubber_time',
type='int', default=86400) type='int', default=86400)
store.create_stores(options) store.create_stores(conf)
def run(self, pool, event=None): def run(self, pool, event=None):
now = time.time() now = time.time()

View File

@ -187,7 +187,7 @@ class Store(glance.store.base.Store):
def configure(self): def configure(self):
self.snet = config.get_option( self.snet = config.get_option(
self.options, 'swift_enable_snet', type='bool', default=False) self.conf, 'swift_enable_snet', type='bool', default=False)
def configure_add(self): def configure_add(self):
""" """
@ -199,30 +199,30 @@ class Store(glance.store.base.Store):
self.auth_address = self._option_get('swift_store_auth_address') self.auth_address = self._option_get('swift_store_auth_address')
self.user = self._option_get('swift_store_user') self.user = self._option_get('swift_store_user')
self.key = self._option_get('swift_store_key') self.key = self._option_get('swift_store_key')
self.container = self.options.get('swift_store_container', self.container = self.conf.get('swift_store_container',
DEFAULT_CONTAINER) DEFAULT_CONTAINER)
try: try:
if self.options.get('swift_store_large_object_size'): if self.conf.get('swift_store_large_object_size'):
self.large_object_size = int( self.large_object_size = int(
self.options.get('swift_store_large_object_size') self.conf.get('swift_store_large_object_size')
) * (1024 * 1024) # Size specified in MB in conf files ) * (1024 * 1024) # Size specified in MB in conf files
else: else:
self.large_object_size = DEFAULT_LARGE_OBJECT_SIZE self.large_object_size = DEFAULT_LARGE_OBJECT_SIZE
if self.options.get('swift_store_large_object_chunk_size'): if self.conf.get('swift_store_large_object_chunk_size'):
self.large_object_chunk_size = int( self.large_object_chunk_size = int(
self.options.get('swift_store_large_object_chunk_size') self.conf.get('swift_store_large_object_chunk_size')
) * (1024 * 1024) # Size specified in MB in conf files ) * (1024 * 1024) # Size specified in MB in conf files
else: else:
self.large_object_chunk_size = DEFAULT_LARGE_OBJECT_CHUNK_SIZE self.large_object_chunk_size = DEFAULT_LARGE_OBJECT_CHUNK_SIZE
if self.options.get('swift_store_object_buffer_dir'): if self.conf.get('swift_store_object_buffer_dir'):
self.swift_store_object_buffer_dir = ( self.swift_store_object_buffer_dir = (
self.options.get('swift_store_object_buffer_dir')) self.conf.get('swift_store_object_buffer_dir'))
else: else:
self.swift_store_object_buffer_dir = None self.swift_store_object_buffer_dir = None
except Exception, e: except Exception, e:
reason = _("Error in configuration options: %s") % e reason = _("Error in configuration conf: %s") % e
logger.error(reason) logger.error(reason)
raise exception.BadStoreConfiguration(store_name="swift", raise exception.BadStoreConfiguration(store_name="swift",
reason=reason) reason=reason)
@ -283,7 +283,7 @@ class Store(glance.store.base.Store):
authurl=auth_url, user=user, key=key, snet=snet) authurl=auth_url, user=user, key=key, snet=snet)
def _option_get(self, param): def _option_get(self, param):
result = self.options.get(param) result = self.conf.get(param)
if not result: if not result:
reason = (_("Could not find %(param)s in configuration " reason = (_("Could not find %(param)s in configuration "
"options.") % locals()) "options.") % locals())
@ -330,7 +330,7 @@ class Store(glance.store.base.Store):
swift_conn = self._make_swift_connection( swift_conn = self._make_swift_connection(
auth_url=self.full_auth_address, user=self.user, key=self.key) auth_url=self.full_auth_address, user=self.user, key=self.key)
create_container_if_missing(self.container, swift_conn, self.options) create_container_if_missing(self.container, swift_conn, self.conf)
obj_name = str(image_id) obj_name = str(image_id)
location = StoreLocation({'scheme': self.scheme, location = StoreLocation({'scheme': self.scheme,
@ -482,20 +482,20 @@ class Store(glance.store.base.Store):
raise raise
def create_container_if_missing(container, swift_conn, options): def create_container_if_missing(container, swift_conn, conf):
""" """
Creates a missing container in Swift if the Creates a missing container in Swift if the
``swift_store_create_container_on_put`` option is set. ``swift_store_create_container_on_put`` option is set.
:param container: Name of container to create :param container: Name of container to create
:param swift_conn: Connection to Swift :param swift_conn: Connection to Swift
:param options: Option mapping :param conf: Option mapping
""" """
try: try:
swift_conn.head_container(container) swift_conn.head_container(container)
except swift_client.ClientException, e: except swift_client.ClientException, e:
if e.http_status == httplib.NOT_FOUND: if e.http_status == httplib.NOT_FOUND:
add_container = config.get_option(options, add_container = config.get_option(conf,
'swift_store_create_container_on_put', 'swift_store_create_container_on_put',
type='bool', default=False) type='bool', default=False)
if add_container: if add_container:

View File

@ -97,9 +97,9 @@ def stub_out_registry_and_store_server(stubs):
sql_connection = os.environ.get('GLANCE_SQL_CONNECTION', sql_connection = os.environ.get('GLANCE_SQL_CONNECTION',
"sqlite://") "sqlite://")
context_class = 'glance.registry.context.RequestContext' context_class = 'glance.registry.context.RequestContext'
options = {'sql_connection': sql_connection, 'verbose': VERBOSE, conf = {'sql_connection': sql_connection, 'verbose': VERBOSE,
'debug': DEBUG, 'context_class': context_class} 'debug': DEBUG, 'context_class': context_class}
api = context.ContextMiddleware(rserver.API(options), options) api = context.ContextMiddleware(rserver.API(conf), conf)
res = self.req.get_response(api) res = self.req.get_response(api)
# httplib.Response has a read() method...fake it out # httplib.Response has a read() method...fake it out
@ -145,7 +145,7 @@ def stub_out_registry_and_store_server(stubs):
self.req.body = body self.req.body = body
def getresponse(self): def getresponse(self):
options = {'verbose': VERBOSE, conf = {'verbose': VERBOSE,
'debug': DEBUG, 'debug': DEBUG,
'bind_host': '0.0.0.0', 'bind_host': '0.0.0.0',
'bind_port': '9999999', 'bind_port': '9999999',
@ -154,8 +154,8 @@ def stub_out_registry_and_store_server(stubs):
'default_store': 'file', 'default_store': 'file',
'filesystem_store_datadir': FAKE_FILESYSTEM_ROOTDIR} 'filesystem_store_datadir': FAKE_FILESYSTEM_ROOTDIR}
api = version_negotiation.VersionNegotiationFilter( api = version_negotiation.VersionNegotiationFilter(
context.ContextMiddleware(router.API(options), options), context.ContextMiddleware(router.API(conf), conf),
options) conf)
res = self.req.get_response(api) res = self.req.get_response(api)
# httplib.Response has a read() method...fake it out # httplib.Response has a read() method...fake it out
@ -218,9 +218,9 @@ def stub_out_registry_server(stubs, **kwargs):
def getresponse(self): def getresponse(self):
sql_connection = kwargs.get('sql_connection', "sqlite:///") sql_connection = kwargs.get('sql_connection', "sqlite:///")
context_class = 'glance.registry.context.RequestContext' context_class = 'glance.registry.context.RequestContext'
options = {'sql_connection': sql_connection, 'verbose': VERBOSE, conf = {'sql_connection': sql_connection, 'verbose': VERBOSE,
'debug': DEBUG, 'context_class': context_class} 'debug': DEBUG, 'context_class': context_class}
api = context.ContextMiddleware(rserver.API(options), options) api = context.ContextMiddleware(rserver.API(conf), conf)
res = self.req.get_response(api) res = self.req.get_response(api)
# httplib.Response has a read() method...fake it out # httplib.Response has a read() method...fake it out

View File

@ -42,7 +42,7 @@ UUID1 = _gen_uuid()
UUID2 = _gen_uuid() UUID2 = _gen_uuid()
OPTIONS = {'sql_connection': 'sqlite://', CONF = {'sql_connection': 'sqlite://',
'verbose': False, 'verbose': False,
'debug': False, 'debug': False,
'registry_host': '0.0.0.0', 'registry_host': '0.0.0.0',
@ -64,15 +64,14 @@ class TestRegistryDb(unittest.TestCase):
API controller results in a) an Exception being thrown and b) API controller results in a) an Exception being thrown and b)
a message being logged to the registry log file... a message being logged to the registry log file...
""" """
bad_options = {'verbose': True, bad_conf = {'verbose': True,
'debug': True, 'debug': True,
'sql_connection': 'baddriver:///'} 'sql_connection': 'baddriver:///'}
# We set this to None to trigger a reconfigure, otherwise # We set this to None to trigger a reconfigure, otherwise
# other modules may have already correctly configured the DB # other modules may have already correctly configured the DB
orig_engine = db_api._ENGINE orig_engine = db_api._ENGINE
db_api._ENGINE = None db_api._ENGINE = None
self.assertRaises(ImportError, db_api.configure_db, self.assertRaises(ImportError, db_api.configure_db, bad_conf)
bad_options)
exc_raised = False exc_raised = False
self.log_written = False self.log_written = False
@ -82,7 +81,7 @@ class TestRegistryDb(unittest.TestCase):
self.stubs.Set(db_api.logger, 'error', fake_log_error) self.stubs.Set(db_api.logger, 'error', fake_log_error)
try: try:
api_obj = rserver.API(bad_options) api_obj = rserver.API(bad_conf)
except ImportError: except ImportError:
exc_raised = True exc_raised = True
finally: finally:
@ -102,7 +101,7 @@ class TestRegistryAPI(unittest.TestCase):
self.stubs = stubout.StubOutForTesting() self.stubs = stubout.StubOutForTesting()
stubs.stub_out_registry_and_store_server(self.stubs) stubs.stub_out_registry_and_store_server(self.stubs)
stubs.stub_out_filesystem_backend() stubs.stub_out_filesystem_backend()
self.api = context.ContextMiddleware(rserver.API(OPTIONS), OPTIONS) self.api = context.ContextMiddleware(rserver.API(CONF), CONF)
self.FIXTURES = [ self.FIXTURES = [
{'id': UUID1, {'id': UUID1,
'name': 'fake image #1', 'name': 'fake image #1',
@ -137,7 +136,7 @@ class TestRegistryAPI(unittest.TestCase):
'location': "file:///tmp/glance-tests/2", 'location': "file:///tmp/glance-tests/2",
'properties': {}}] 'properties': {}}]
self.context = rcontext.RequestContext(is_admin=True) self.context = rcontext.RequestContext(is_admin=True)
db_api.configure_db(OPTIONS) db_api.configure_db(CONF)
self.destroy_fixtures() self.destroy_fixtures()
self.create_fixtures() self.create_fixtures()
@ -1936,7 +1935,7 @@ class TestGlanceAPI(unittest.TestCase):
stubs.stub_out_registry_and_store_server(self.stubs) stubs.stub_out_registry_and_store_server(self.stubs)
stubs.stub_out_filesystem_backend() stubs.stub_out_filesystem_backend()
sql_connection = os.environ.get('GLANCE_SQL_CONNECTION', "sqlite://") sql_connection = os.environ.get('GLANCE_SQL_CONNECTION', "sqlite://")
self.api = context.ContextMiddleware(router.API(OPTIONS), OPTIONS) self.api = context.ContextMiddleware(router.API(CONF), CONF)
self.FIXTURES = [ self.FIXTURES = [
{'id': UUID1, {'id': UUID1,
'name': 'fake image #1', 'name': 'fake image #1',
@ -1967,7 +1966,7 @@ class TestGlanceAPI(unittest.TestCase):
'location': "file:///tmp/glance-tests/2", 'location': "file:///tmp/glance-tests/2",
'properties': {}}] 'properties': {}}]
self.context = rcontext.RequestContext(is_admin=True) self.context = rcontext.RequestContext(is_admin=True)
db_api.configure_db(OPTIONS) db_api.configure_db(CONF)
self.destroy_fixtures() self.destroy_fixtures()
self.create_fixtures() self.create_fixtures()

View File

@ -35,7 +35,7 @@ from glance.registry import client as rclient
from glance.registry import context as rcontext from glance.registry import context as rcontext
from glance.tests import stubs from glance.tests import stubs
OPTIONS = {'sql_connection': 'sqlite://'} CONF = {'sql_connection': 'sqlite://'}
_gen_uuid = utils.generate_uuid _gen_uuid = utils.generate_uuid
@ -138,7 +138,7 @@ class TestRegistryClient(unittest.TestCase):
"""Establish a clean test environment""" """Establish a clean test environment"""
self.stubs = stubout.StubOutForTesting() self.stubs = stubout.StubOutForTesting()
stubs.stub_out_registry_and_store_server(self.stubs) stubs.stub_out_registry_and_store_server(self.stubs)
db_api.configure_db(OPTIONS) db_api.configure_db(CONF)
self.context = rcontext.RequestContext(is_admin=True) self.context = rcontext.RequestContext(is_admin=True)
self.FIXTURES = [ self.FIXTURES = [
{'id': UUID1, {'id': UUID1,
@ -1138,7 +1138,7 @@ class TestClient(unittest.TestCase):
self.stubs = stubout.StubOutForTesting() self.stubs = stubout.StubOutForTesting()
stubs.stub_out_registry_and_store_server(self.stubs) stubs.stub_out_registry_and_store_server(self.stubs)
stubs.stub_out_filesystem_backend() stubs.stub_out_filesystem_backend()
db_api.configure_db(OPTIONS) db_api.configure_db(CONF)
self.client = client.Client("0.0.0.0") self.client = client.Client("0.0.0.0")
self.FIXTURES = [ self.FIXTURES = [
{'id': UUID1, {'id': UUID1,

View File

@ -48,21 +48,21 @@ class TestOptionParsing(unittest.TestCase):
# of typed values # of typed values
parser = optparse.OptionParser() parser = optparse.OptionParser()
config.add_common_options(parser) config.add_common_options(parser)
parsed_options, args = config.parse_options(parser, []) parsed_conf, args = config.parse_options(parser, [])
expected_options = {'verbose': False, 'debug': False, expected_conf = {'verbose': False, 'debug': False,
'config_file': None} 'config_file': None}
self.assertEquals(expected_options, parsed_options) self.assertEquals(expected_conf, parsed_conf)
# test non-empty args and that parse_options() returns a mapping # test non-empty args and that parse_options() returns a mapping
# of typed values matching supplied args # of typed values matching supplied args
parser = optparse.OptionParser() parser = optparse.OptionParser()
config.add_common_options(parser) config.add_common_options(parser)
parsed_options, args = config.parse_options(parser, ['--verbose']) parsed_conf, args = config.parse_options(parser, ['--verbose'])
expected_options = {'verbose': True, 'debug': False, expected_conf = {'verbose': True, 'debug': False,
'config_file': None} 'config_file': None}
self.assertEquals(expected_options, parsed_options) self.assertEquals(expected_conf, parsed_conf)
# test non-empty args that contain unknown options raises # test non-empty args that contain unknown options raises
# a SystemExit exception. Not ideal, but unfortunately optparse # a SystemExit exception. Not ideal, but unfortunately optparse

View File

@ -29,7 +29,7 @@ from glance.store.location import get_location_from_uri
from glance.store.filesystem import Store, ChunkedFile from glance.store.filesystem import Store, ChunkedFile
from glance.tests import stubs from glance.tests import stubs
FILESYSTEM_OPTIONS = { FILESYSTEM_CONF = {
'verbose': True, 'verbose': True,
'debug': True, 'debug': True,
'filesystem_store_datadir': stubs.FAKE_FILESYSTEM_ROOTDIR} 'filesystem_store_datadir': stubs.FAKE_FILESYSTEM_ROOTDIR}
@ -43,7 +43,7 @@ class TestStore(unittest.TestCase):
stubs.stub_out_filesystem_backend() stubs.stub_out_filesystem_backend()
self.orig_chunksize = ChunkedFile.CHUNKSIZE self.orig_chunksize = ChunkedFile.CHUNKSIZE
ChunkedFile.CHUNKSIZE = 10 ChunkedFile.CHUNKSIZE = 10
self.store = Store(FILESYSTEM_OPTIONS) self.store = Store(FILESYSTEM_CONF)
def tearDown(self): def tearDown(self):
"""Clear the test environment""" """Clear the test environment"""
@ -116,9 +116,6 @@ class TestStore(unittest.TestCase):
raises an appropriate exception raises an appropriate exception
""" """
image_file = StringIO.StringIO("nevergonnamakeit") image_file = StringIO.StringIO("nevergonnamakeit")
options = {'verbose': True,
'debug': True,
'filesystem_store_datadir': stubs.FAKE_FILESYSTEM_ROOTDIR}
self.assertRaises(exception.Duplicate, self.assertRaises(exception.Duplicate,
self.store.add, self.store.add,
'2', image_file, 0) '2', image_file, 0)

View File

@ -136,7 +136,7 @@ class ImageCacheTestCase(object):
self.assertTrue(os.path.exists(incomplete_file_path)) self.assertTrue(os.path.exists(incomplete_file_path))
self.cache.options['image_cache_stall_time'] = 0 self.cache.conf['image_cache_stall_time'] = 0
self.cache.clean() self.cache.clean()
self.assertFalse(os.path.exists(incomplete_file_path)) self.assertFalse(os.path.exists(incomplete_file_path))
@ -250,12 +250,12 @@ class TestImageCacheXattr(unittest.TestCase,
self.inited = True self.inited = True
self.disabled = False self.disabled = False
self.options = {'image_cache_dir': self.cache_dir, self.conf = {'image_cache_dir': self.cache_dir,
'image_cache_driver': 'xattr', 'image_cache_driver': 'xattr',
'image_cache_max_size': 1024 * 5, 'image_cache_max_size': 1024 * 5,
'registry_host': '0.0.0.0', 'registry_host': '0.0.0.0',
'registry_port': 9191} 'registry_port': 9191}
self.cache = image_cache.ImageCache(self.options) self.cache = image_cache.ImageCache(self.conf)
if not xattr_writes_supported(self.cache_dir): if not xattr_writes_supported(self.cache_dir):
self.inited = True self.inited = True
@ -294,12 +294,12 @@ class TestImageCacheSqlite(unittest.TestCase,
self.disabled = False self.disabled = False
self.cache_dir = os.path.join("/", "tmp", "test.cache.%d" % self.cache_dir = os.path.join("/", "tmp", "test.cache.%d" %
random.randint(0, 1000000)) random.randint(0, 1000000))
self.options = {'image_cache_dir': self.cache_dir, self.conf = {'image_cache_dir': self.cache_dir,
'image_cache_driver': 'sqlite', 'image_cache_driver': 'sqlite',
'image_cache_max_size': 1024 * 5, 'image_cache_max_size': 1024 * 5,
'registry_host': '0.0.0.0', 'registry_host': '0.0.0.0',
'registry_port': 9191} 'registry_port': 9191}
self.cache = image_cache.ImageCache(self.options) self.cache = image_cache.ImageCache(self.conf)
def tearDown(self): def tearDown(self):
if os.path.exists(self.cache_dir): if os.path.exists(self.cache_dir):

View File

@ -124,10 +124,10 @@ class TestMigrations(unittest.TestCase):
that there are no errors in the version scripts for each engine that there are no errors in the version scripts for each engine
""" """
for key, engine in self.engines.items(): for key, engine in self.engines.items():
options = {'sql_connection': TestMigrations.TEST_DATABASES[key]} conf = {'sql_connection': TestMigrations.TEST_DATABASES[key]}
self._walk_versions(options) self._walk_versions(conf)
def _walk_versions(self, options): def _walk_versions(self, conf):
# Determine latest version script from the repo, then # Determine latest version script from the repo, then
# upgrade from 1 through to the latest, with no data # upgrade from 1 through to the latest, with no data
# in the databases. This just checks that the schema itself # in the databases. This just checks that the schema itself
@ -136,24 +136,24 @@ class TestMigrations(unittest.TestCase):
# Assert we are not under version control... # Assert we are not under version control...
self.assertRaises(exception.DatabaseMigrationError, self.assertRaises(exception.DatabaseMigrationError,
migration_api.db_version, migration_api.db_version,
options) conf)
# Place the database under version control # Place the database under version control
migration_api.version_control(options) migration_api.version_control(conf)
cur_version = migration_api.db_version(options) cur_version = migration_api.db_version(conf)
self.assertEqual(0, cur_version) self.assertEqual(0, cur_version)
for version in xrange(1, TestMigrations.REPOSITORY.latest + 1): for version in xrange(1, TestMigrations.REPOSITORY.latest + 1):
migration_api.upgrade(options, version) migration_api.upgrade(conf, version)
cur_version = migration_api.db_version(options) cur_version = migration_api.db_version(conf)
self.assertEqual(cur_version, version) self.assertEqual(cur_version, version)
# Now walk it back down to 0 from the latest, testing # Now walk it back down to 0 from the latest, testing
# the downgrade paths. # the downgrade paths.
for version in reversed( for version in reversed(
xrange(0, TestMigrations.REPOSITORY.latest)): xrange(0, TestMigrations.REPOSITORY.latest)):
migration_api.downgrade(options, version) migration_api.downgrade(conf, version)
cur_version = migration_api.db_version(options) cur_version = migration_api.db_version(conf)
self.assertEqual(cur_version, version) self.assertEqual(cur_version, version)
def test_no_data_loss_2_to_3_to_2(self): def test_no_data_loss_2_to_3_to_2(self):
@ -165,14 +165,14 @@ class TestMigrations(unittest.TestCase):
the image_properties table back into the base image table. the image_properties table back into the base image table.
""" """
for key, engine in self.engines.items(): for key, engine in self.engines.items():
options = {'sql_connection': TestMigrations.TEST_DATABASES[key]} conf = {'sql_connection': TestMigrations.TEST_DATABASES[key]}
self._no_data_loss_2_to_3_to_2(engine, options) self._no_data_loss_2_to_3_to_2(engine, conf)
def _no_data_loss_2_to_3_to_2(self, engine, options): def _no_data_loss_2_to_3_to_2(self, engine, conf):
migration_api.version_control(options) migration_api.version_control(conf)
migration_api.upgrade(options, 2) migration_api.upgrade(conf, 2)
cur_version = migration_api.db_version(options) cur_version = migration_api.db_version(conf)
self.assertEquals(2, cur_version) self.assertEquals(2, cur_version)
# We are now on version 2. Check that the images table does # We are now on version 2. Check that the images table does
@ -214,9 +214,9 @@ class TestMigrations(unittest.TestCase):
# Now let's upgrade to 3. This should move the type column # Now let's upgrade to 3. This should move the type column
# to the image_properties table as type properties. # to the image_properties table as type properties.
migration_api.upgrade(options, 3) migration_api.upgrade(conf, 3)
cur_version = migration_api.db_version(options) cur_version = migration_api.db_version(conf)
self.assertEquals(3, cur_version) self.assertEquals(3, cur_version)
images_table = Table('images', MetaData(), autoload=True, images_table = Table('images', MetaData(), autoload=True,
@ -240,7 +240,7 @@ class TestMigrations(unittest.TestCase):
# Downgrade to 2 and check that the type properties were moved # Downgrade to 2 and check that the type properties were moved
# to the main image table # to the main image table
migration_api.downgrade(options, 2) migration_api.downgrade(conf, 2)
images_table = Table('images', MetaData(), autoload=True, images_table = Table('images', MetaData(), autoload=True,
autoload_with=engine) autoload_with=engine)

View File

@ -26,20 +26,20 @@ class TestInvalidNotifier(unittest.TestCase):
"""Test that notifications are generated appropriately""" """Test that notifications are generated appropriately"""
def test_cannot_create(self): def test_cannot_create(self):
options = {"notifier_strategy": "invalid_notifier"} conf = {"notifier_strategy": "invalid_notifier"}
self.assertRaises(exception.InvalidNotifierStrategy, self.assertRaises(exception.InvalidNotifierStrategy,
notifier.Notifier, notifier.Notifier,
options) conf)
class TestLoggingNotifier(unittest.TestCase): class TestLoggingNotifier(unittest.TestCase):
"""Test the logging notifier is selected and works properly.""" """Test the logging notifier is selected and works properly."""
def setUp(self): def setUp(self):
options = {"notifier_strategy": "logging"} conf = {"notifier_strategy": "logging"}
self.called = False self.called = False
self.logger = logging.getLogger("glance.notifier.logging_notifier") self.logger = logging.getLogger("glance.notifier.logging_notifier")
self.notifier = notifier.Notifier(options) self.notifier = notifier.Notifier(conf)
def _called(self, msg): def _called(self, msg):
self.called = msg self.called = msg
@ -67,8 +67,8 @@ class TestNoopNotifier(unittest.TestCase):
"""Test that the noop notifier works...and does nothing?""" """Test that the noop notifier works...and does nothing?"""
def setUp(self): def setUp(self):
options = {"notifier_strategy": "noop"} conf = {"notifier_strategy": "noop"}
self.notifier = notifier.Notifier(options) self.notifier = notifier.Notifier(conf)
def test_warn(self): def test_warn(self):
self.notifier.warn("test_event", "test_message") self.notifier.warn("test_event", "test_message")
@ -86,8 +86,8 @@ class TestRabbitNotifier(unittest.TestCase):
def setUp(self): def setUp(self):
notifier.RabbitStrategy._send_message = self._send_message notifier.RabbitStrategy._send_message = self._send_message
self.called = False self.called = False
options = {"notifier_strategy": "rabbit"} conf = {"notifier_strategy": "rabbit"}
self.notifier = notifier.Notifier(options) self.notifier = notifier.Notifier(conf)
def _send_message(self, message, priority): def _send_message(self, message, priority):
self.called = { self.called = {

View File

@ -37,7 +37,7 @@ from glance.store.s3 import Store
FAKE_UUID = utils.generate_uuid() FAKE_UUID = utils.generate_uuid()
FIVE_KB = (5 * 1024) FIVE_KB = (5 * 1024)
S3_OPTIONS = {'verbose': True, S3_CONF = {'verbose': True,
'debug': True, 'debug': True,
's3_store_access_key': 'user', 's3_store_access_key': 'user',
's3_store_secret_key': 'key', 's3_store_secret_key': 'key',
@ -163,7 +163,7 @@ class TestStore(unittest.TestCase):
"""Establish a clean test environment""" """Establish a clean test environment"""
self.stubs = stubout.StubOutForTesting() self.stubs = stubout.StubOutForTesting()
stub_out_s3(self.stubs) stub_out_s3(self.stubs)
self.store = Store(S3_OPTIONS) self.store = Store(S3_CONF)
def tearDown(self): def tearDown(self):
"""Clear the test environment""" """Clear the test environment"""
@ -204,10 +204,10 @@ class TestStore(unittest.TestCase):
expected_s3_contents = "*" * expected_s3_size expected_s3_contents = "*" * expected_s3_size
expected_checksum = hashlib.md5(expected_s3_contents).hexdigest() expected_checksum = hashlib.md5(expected_s3_contents).hexdigest()
expected_location = format_s3_location( expected_location = format_s3_location(
S3_OPTIONS['s3_store_access_key'], S3_CONF['s3_store_access_key'],
S3_OPTIONS['s3_store_secret_key'], S3_CONF['s3_store_secret_key'],
S3_OPTIONS['s3_store_host'], S3_CONF['s3_store_host'],
S3_OPTIONS['s3_store_bucket'], S3_CONF['s3_store_bucket'],
expected_image_id) expected_image_id)
image_s3 = StringIO.StringIO(expected_s3_contents) image_s3 = StringIO.StringIO(expected_s3_contents)
@ -250,17 +250,17 @@ class TestStore(unittest.TestCase):
expected_s3_contents = "*" * expected_s3_size expected_s3_contents = "*" * expected_s3_size
expected_checksum = \ expected_checksum = \
hashlib.md5(expected_s3_contents).hexdigest() hashlib.md5(expected_s3_contents).hexdigest()
new_options = S3_OPTIONS.copy() new_conf = S3_CONF.copy()
new_options['s3_store_host'] = variation new_conf['s3_store_host'] = variation
expected_location = format_s3_location( expected_location = format_s3_location(
new_options['s3_store_access_key'], new_conf['s3_store_access_key'],
new_options['s3_store_secret_key'], new_conf['s3_store_secret_key'],
new_options['s3_store_host'], new_conf['s3_store_host'],
new_options['s3_store_bucket'], new_conf['s3_store_bucket'],
expected_image_id) expected_image_id)
image_s3 = StringIO.StringIO(expected_s3_contents) image_s3 = StringIO.StringIO(expected_s3_contents)
self.store = Store(new_options) self.store = Store(new_conf)
location, size, checksum = self.store.add(expected_image_id, location, size, checksum = self.store.add(expected_image_id,
image_s3, image_s3,
expected_s3_size) expected_s3_size)
@ -288,11 +288,11 @@ class TestStore(unittest.TestCase):
FAKE_UUID, image_s3, 0) FAKE_UUID, image_s3, 0)
def _option_required(self, key): def _option_required(self, key):
options = S3_OPTIONS.copy() conf = S3_CONF.copy()
del options[key] del conf[key]
try: try:
self.store = Store(options) self.store = Store(conf)
return self.store.add == self.store.add_disabled return self.store.add == self.store.add_disabled
except: except:
return False return False

View File

@ -38,7 +38,7 @@ Store = glance.store.swift.Store
FIVE_KB = (5 * 1024) FIVE_KB = (5 * 1024)
FIVE_GB = (5 * 1024 * 1024 * 1024) FIVE_GB = (5 * 1024 * 1024 * 1024)
MAX_SWIFT_OBJECT_SIZE = FIVE_GB MAX_SWIFT_OBJECT_SIZE = FIVE_GB
SWIFT_OPTIONS = {'verbose': True, SWIFT_CONF = {'verbose': True,
'debug': True, 'debug': True,
'swift_store_user': 'user', 'swift_store_user': 'user',
'swift_store_key': 'key', 'swift_store_key': 'key',
@ -182,7 +182,7 @@ class TestStore(unittest.TestCase):
"""Establish a clean test environment""" """Establish a clean test environment"""
self.stubs = stubout.StubOutForTesting() self.stubs = stubout.StubOutForTesting()
stub_out_swift_common_client(self.stubs) stub_out_swift_common_client(self.stubs)
self.store = Store(SWIFT_OPTIONS) self.store = Store(SWIFT_CONF)
def tearDown(self): def tearDown(self):
"""Clear the test environment""" """Clear the test environment"""
@ -288,12 +288,12 @@ class TestStore(unittest.TestCase):
expected_swift_contents = "*" * expected_swift_size expected_swift_contents = "*" * expected_swift_size
expected_checksum = \ expected_checksum = \
hashlib.md5(expected_swift_contents).hexdigest() hashlib.md5(expected_swift_contents).hexdigest()
new_options = SWIFT_OPTIONS.copy() new_conf = SWIFT_CONF.copy()
new_options['swift_store_auth_address'] = variation new_conf['swift_store_auth_address'] = variation
image_swift = StringIO.StringIO(expected_swift_contents) image_swift = StringIO.StringIO(expected_swift_contents)
self.store = Store(new_options) self.store = Store(new_conf)
location, size, checksum = self.store.add(image_id, image_swift, location, size, checksum = self.store.add(image_id, image_swift,
expected_swift_size) expected_swift_size)
@ -314,11 +314,11 @@ class TestStore(unittest.TestCase):
Tests that adding an image with a non-existing container Tests that adding an image with a non-existing container
raises an appropriate exception raises an appropriate exception
""" """
options = SWIFT_OPTIONS.copy() conf = SWIFT_CONF.copy()
options['swift_store_create_container_on_put'] = 'False' conf['swift_store_create_container_on_put'] = 'False'
options['swift_store_container'] = 'noexist' conf['swift_store_container'] = 'noexist'
image_swift = StringIO.StringIO("nevergonnamakeit") image_swift = StringIO.StringIO("nevergonnamakeit")
self.store = Store(options) self.store = Store(conf)
# We check the exception text to ensure the container # We check the exception text to ensure the container
# missing text is found in it, otherwise, we would have # missing text is found in it, otherwise, we would have
@ -337,9 +337,9 @@ class TestStore(unittest.TestCase):
Tests that adding an image with a non-existing container Tests that adding an image with a non-existing container
creates the container automatically if flag is set creates the container automatically if flag is set
""" """
options = SWIFT_OPTIONS.copy() conf = SWIFT_CONF.copy()
options['swift_store_create_container_on_put'] = 'True' conf['swift_store_create_container_on_put'] = 'True'
options['swift_store_container'] = 'noexist' conf['swift_store_container'] = 'noexist'
expected_swift_size = FIVE_KB expected_swift_size = FIVE_KB
expected_swift_contents = "*" * expected_swift_size expected_swift_contents = "*" * expected_swift_size
expected_checksum = hashlib.md5(expected_swift_contents).hexdigest() expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
@ -348,7 +348,7 @@ class TestStore(unittest.TestCase):
'/noexist/%s' % expected_image_id '/noexist/%s' % expected_image_id
image_swift = StringIO.StringIO(expected_swift_contents) image_swift = StringIO.StringIO(expected_swift_contents)
self.store = Store(options) self.store = Store(conf)
location, size, checksum = self.store.add(expected_image_id, location, size, checksum = self.store.add(expected_image_id,
image_swift, image_swift,
expected_swift_size) expected_swift_size)
@ -372,8 +372,8 @@ class TestStore(unittest.TestCase):
and then verify that there have been a number of calls to and then verify that there have been a number of calls to
put_object()... put_object()...
""" """
options = SWIFT_OPTIONS.copy() conf = SWIFT_CONF.copy()
options['swift_store_container'] = 'glance' conf['swift_store_container'] = 'glance'
expected_swift_size = FIVE_KB expected_swift_size = FIVE_KB
expected_swift_contents = "*" * expected_swift_size expected_swift_contents = "*" * expected_swift_size
expected_checksum = hashlib.md5(expected_swift_contents).hexdigest() expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
@ -387,7 +387,7 @@ class TestStore(unittest.TestCase):
try: try:
glance.store.swift.DEFAULT_LARGE_OBJECT_SIZE = 1024 glance.store.swift.DEFAULT_LARGE_OBJECT_SIZE = 1024
glance.store.swift.DEFAULT_LARGE_OBJECT_CHUNK_SIZE = 1024 glance.store.swift.DEFAULT_LARGE_OBJECT_CHUNK_SIZE = 1024
self.store = Store(options) self.store = Store(conf)
location, size, checksum = self.store.add(expected_image_id, location, size, checksum = self.store.add(expected_image_id,
image_swift, image_swift,
expected_swift_size) expected_swift_size)
@ -418,8 +418,8 @@ class TestStore(unittest.TestCase):
Bug lp:891738 Bug lp:891738
""" """
options = SWIFT_OPTIONS.copy() conf = SWIFT_CONF.copy()
options['swift_store_container'] = 'glance' conf['swift_store_container'] = 'glance'
# Set up a 'large' image of 5KB # Set up a 'large' image of 5KB
expected_swift_size = FIVE_KB expected_swift_size = FIVE_KB
@ -440,7 +440,7 @@ class TestStore(unittest.TestCase):
MAX_SWIFT_OBJECT_SIZE = 1024 MAX_SWIFT_OBJECT_SIZE = 1024
glance.store.swift.DEFAULT_LARGE_OBJECT_SIZE = 1024 glance.store.swift.DEFAULT_LARGE_OBJECT_SIZE = 1024
glance.store.swift.DEFAULT_LARGE_OBJECT_CHUNK_SIZE = 1024 glance.store.swift.DEFAULT_LARGE_OBJECT_CHUNK_SIZE = 1024
self.store = Store(options) self.store = Store(conf)
location, size, checksum = self.store.add(expected_image_id, location, size, checksum = self.store.add(expected_image_id,
image_swift, 0) image_swift, 0)
finally: finally:
@ -471,11 +471,11 @@ class TestStore(unittest.TestCase):
FAKE_UUID, image_swift, 0) FAKE_UUID, image_swift, 0)
def _option_required(self, key): def _option_required(self, key):
options = SWIFT_OPTIONS.copy() conf = SWIFT_CONF.copy()
del options[key] del conf[key]
try: try:
self.store = Store(options) self.store = Store(conf)
return self.store.add == self.store.add_disabled return self.store.add == self.store.add_disabled
except: except:
return False return False

View File

@ -46,9 +46,9 @@ class VersionsTest(unittest.TestCase):
def test_get_version_list(self): def test_get_version_list(self):
req = webob.Request.blank('/') req = webob.Request.blank('/')
req.accept = "application/json" req.accept = "application/json"
options = {'bind_host': '0.0.0.0', conf = {'bind_host': '0.0.0.0',
'bind_port': 9292} 'bind_port': 9292}
res = req.get_response(versions.Controller(options)) res = req.get_response(versions.Controller(conf))
self.assertEqual(res.status_int, 300) self.assertEqual(res.status_int, 300)
self.assertEqual(res.content_type, "application/json") self.assertEqual(res.content_type, "application/json")
results = json.loads(res.body)["versions"] results = json.loads(res.body)["versions"]