Don't use cfg.CONF in oslo.db

Use of a global config object is something we want to avoid when
making libraries. Parameters must be passed as function/class methods
arguments instead.

In order not to duplicate options declaration in each project, they
have been put into separate options module (which should not be a
part of oslo.db when it's released; we'll probably want to find a
place for it within oslo-incubator).

Partial-Bug: #1263908

Co-authored-by: Victor Sergeyev <vsergeyev@mirantis.com>

Change-Id: Ib5a4e31b4e78e2b4cb807a8c88b8072f4207eb22
This commit is contained in:
Roman Podoliaka 2014-01-23 18:42:56 +02:00
parent ce69e7f8f6
commit 630d3959b9
6 changed files with 453 additions and 359 deletions

View File

@ -15,11 +15,6 @@
"""Multiple DB API backend support.
Supported configuration options:
The following two parameters are in the 'database' group:
`backend`: DB backend name or full module path to DB backend module.
A DB backend module should implement a method named 'get_backend' which
takes no arguments. The method can return any object that implements DB
API methods.
@ -29,43 +24,11 @@ import functools
import logging
import time
from oslo.config import cfg
from openstack.common.db import exception
from openstack.common.gettextutils import _ # noqa
from openstack.common import importutils
db_opts = [
cfg.StrOpt('backend',
default='sqlalchemy',
deprecated_name='db_backend',
deprecated_group='DEFAULT',
help='The backend to use for db'),
cfg.BoolOpt('use_db_reconnect',
default=False,
help='Enable the experimental use of database reconnect '
'on connection lost'),
cfg.IntOpt('db_retry_interval',
default=1,
help='seconds between db connection retries'),
cfg.BoolOpt('db_inc_retry_interval',
default=True,
help='Whether to increase interval between db connection '
'retries, up to db_max_retry_interval'),
cfg.IntOpt('db_max_retry_interval',
default=10,
help='max seconds between db connection retries, if '
'db_inc_retry_interval is enabled'),
cfg.IntOpt('db_max_retries',
default=20,
help='maximum db connection retries before error is raised. '
'(setting -1 implies an infinite retry count)'),
]
CONF = cfg.CONF
CONF.register_opts(db_opts, 'database')
LOG = logging.getLogger(__name__)
@ -75,7 +38,7 @@ def safe_for_db_retry(f):
return f
def _wrap_db_retry(f):
class wrap_db_retry(object):
"""Retry db.api methods, if DBConnectionError() raised
Retry decorated db.api methods. If we enabled `use_db_reconnect`
@ -84,44 +47,88 @@ def _wrap_db_retry(f):
Decorator catches DBConnectionError() and retries the function in a
loop until it succeeds, or until maximum retries count will be reached.
"""
@functools.wraps(f)
def wrapper(*args, **kwargs):
next_interval = CONF.database.db_retry_interval
remaining = CONF.database.db_max_retries
while True:
try:
return f(*args, **kwargs)
except exception.DBConnectionError as e:
if remaining == 0:
LOG.exception(_('DB exceeded retry limit.'))
raise exception.DBError(e)
if remaining != -1:
remaining -= 1
LOG.exception(_('DB connection error.'))
# NOTE(vsergeyev): We are using patched time module, so this
# effectively yields the execution context to
# another green thread.
time.sleep(next_interval)
if CONF.database.db_inc_retry_interval:
next_interval = min(
next_interval * 2,
CONF.database.db_max_retry_interval
)
return wrapper
def __init__(self, retry_interval, max_retries, inc_retry_interval,
max_retry_interval):
super(wrap_db_retry, self).__init__()
self.retry_interval = retry_interval
self.max_retries = max_retries
self.inc_retry_interval = inc_retry_interval
self.max_retry_interval = max_retry_interval
def __call__(self, f):
@functools.wraps(f)
def wrapper(*args, **kwargs):
next_interval = self.retry_interval
remaining = self.max_retries
while True:
try:
return f(*args, **kwargs)
except exception.DBConnectionError as e:
if remaining == 0:
LOG.exception(_('DB exceeded retry limit.'))
raise exception.DBError(e)
if remaining != -1:
remaining -= 1
LOG.exception(_('DB connection error.'))
# NOTE(vsergeyev): We are using patched time module, so
# this effectively yields the execution
# context to another green thread.
time.sleep(next_interval)
if self.inc_retry_interval:
next_interval = min(
next_interval * 2,
self.max_retry_interval
)
return wrapper
class DBAPI(object):
def __init__(self, backend_mapping=None):
def __init__(self, backend_name, backend_mapping=None, **kwargs):
"""Initialize the choosen DB API backend.
:param backend_name: name of the backend to load
:type backend_name: str
:param backend_mapping: backend name -> module/class to load mapping
:type backend_mapping: dict
Keyword arguments:
:keyword use_db_reconnect: retry DB transactions on disconnect or not
:type use_db_reconnect: bool
:keyword retry_interval: seconds between transaction retries
:type retry_interval: int
:keyword inc_retry_interval: increase retry interval or not
:type inc_retry_interval: bool
:keyword max_retry_interval: max interval value between retries
:type max_retry_interval: int
:keyword max_retries: max number of retries before an error is raised
:type max_retries: int
"""
if backend_mapping is None:
backend_mapping = {}
backend_name = CONF.database.backend
# Import the untranslated name if we don't have a
# mapping.
backend_path = backend_mapping.get(backend_name, backend_name)
backend_mod = importutils.import_module(backend_path)
self.__backend = backend_mod.get_backend()
self.use_db_reconnect = kwargs.get('use_db_reconnect', False)
self.retry_interval = kwargs.get('retry_interval', 1)
self.inc_retry_interval = kwargs.get('inc_retry_interval', True)
self.max_retry_interval = kwargs.get('max_retry_interval', 10)
self.max_retries = kwargs.get('max_retries', 20)
def __getattr__(self, key):
attr = getattr(self.__backend, key)
@ -130,7 +137,11 @@ class DBAPI(object):
# NOTE(vsergeyev): If `use_db_reconnect` option is set to True, retry
# DB API methods, decorated with @safe_for_db_retry
# on disconnect.
if CONF.database.use_db_reconnect and hasattr(attr, 'enable_retry'):
attr = _wrap_db_retry(attr)
if self.use_db_reconnect and hasattr(attr, 'enable_retry'):
attr = wrap_db_retry(
retry_interval=self.retry_interval,
max_retries=self.max_retries,
inc_retry_interval=self.inc_retry_interval,
max_retry_interval=self.max_retry_interval)(attr)
return attr

View File

@ -0,0 +1,177 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import os
from oslo.config import cfg
# Options registered in the [DEFAULT] group: SQLite-specific settings.
# The 'connection' option in database_opts interpolates $sqlite_db.
sqlite_db_opts = [
    cfg.StrOpt('sqlite_db',
               default='oslo.sqlite',
               help='The file name to use with SQLite'),
    cfg.BoolOpt('sqlite_synchronous',
                default=True,
                help='If True, SQLite uses synchronous mode'),
]
# Options registered in the [database] group. Many carry DeprecatedOpt
# entries so values set under the legacy [DEFAULT]/[DATABASE]/[sql]
# sections (and the legacy sql_* names) are still honoured.
database_opts = [
    cfg.StrOpt('backend',
               default='sqlalchemy',
               deprecated_name='db_backend',
               deprecated_group='DEFAULT',
               help='The backend to use for db'),
    cfg.StrOpt('connection',
               default='sqlite:///' +
                       os.path.abspath(os.path.join(os.path.dirname(__file__),
                                                    '../', '$sqlite_db')),
               help='The SQLAlchemy connection string used to connect to the '
                    'database',
               secret=True,
               deprecated_opts=[cfg.DeprecatedOpt('sql_connection',
                                                  group='DEFAULT'),
                                cfg.DeprecatedOpt('sql_connection',
                                                  group='DATABASE'),
                                cfg.DeprecatedOpt('connection',
                                                  group='sql'), ]),
    cfg.IntOpt('idle_timeout',
               default=3600,
               deprecated_opts=[cfg.DeprecatedOpt('sql_idle_timeout',
                                                  group='DEFAULT'),
                                cfg.DeprecatedOpt('sql_idle_timeout',
                                                  group='DATABASE'),
                                cfg.DeprecatedOpt('idle_timeout',
                                                  group='sql')],
               help='Timeout before idle sql connections are reaped'),
    cfg.IntOpt('min_pool_size',
               default=1,
               deprecated_opts=[cfg.DeprecatedOpt('sql_min_pool_size',
                                                  group='DEFAULT'),
                                cfg.DeprecatedOpt('sql_min_pool_size',
                                                  group='DATABASE')],
               help='Minimum number of SQL connections to keep open in a '
                    'pool'),
    cfg.IntOpt('max_pool_size',
               default=None,
               deprecated_opts=[cfg.DeprecatedOpt('sql_max_pool_size',
                                                  group='DEFAULT'),
                                cfg.DeprecatedOpt('sql_max_pool_size',
                                                  group='DATABASE')],
               help='Maximum number of SQL connections to keep open in a '
                    'pool'),
    cfg.IntOpt('max_retries',
               default=10,
               deprecated_opts=[cfg.DeprecatedOpt('sql_max_retries',
                                                  group='DEFAULT'),
                                cfg.DeprecatedOpt('sql_max_retries',
                                                  group='DATABASE')],
               help='Maximum db connection retries during startup. '
                    '(setting -1 implies an infinite retry count)'),
    cfg.IntOpt('retry_interval',
               default=10,
               deprecated_opts=[cfg.DeprecatedOpt('sql_retry_interval',
                                                  group='DEFAULT'),
                                cfg.DeprecatedOpt('reconnect_interval',
                                                  group='DATABASE')],
               help='Interval between retries of opening a sql connection'),
    cfg.IntOpt('max_overflow',
               default=None,
               deprecated_opts=[cfg.DeprecatedOpt('sql_max_overflow',
                                                  group='DEFAULT'),
                                cfg.DeprecatedOpt('sqlalchemy_max_overflow',
                                                  group='DATABASE')],
               help='If set, use this value for max_overflow with sqlalchemy'),
    cfg.IntOpt('connection_debug',
               default=0,
               deprecated_opts=[cfg.DeprecatedOpt('sql_connection_debug',
                                                  group='DEFAULT')],
               help='Verbosity of SQL debugging information. 0=None, '
                    '100=Everything'),
    cfg.BoolOpt('connection_trace',
                default=False,
                deprecated_opts=[cfg.DeprecatedOpt('sql_connection_trace',
                                                   group='DEFAULT')],
                help='Add python stack traces to SQL as comment strings'),
    cfg.IntOpt('pool_timeout',
               default=None,
               deprecated_opts=[cfg.DeprecatedOpt('sqlalchemy_pool_timeout',
                                                  group='DATABASE')],
               help='If set, use this value for pool_timeout with sqlalchemy'),
    cfg.BoolOpt('use_db_reconnect',
                default=False,
                help='Enable the experimental use of database reconnect '
                     'on connection lost'),
    cfg.IntOpt('db_retry_interval',
               default=1,
               help='seconds between db connection retries'),
    cfg.BoolOpt('db_inc_retry_interval',
                default=True,
                help='Whether to increase interval between db connection '
                     'retries, up to db_max_retry_interval'),
    cfg.IntOpt('db_max_retry_interval',
               default=10,
               help='max seconds between db connection retries, if '
                    'db_inc_retry_interval is enabled'),
    cfg.IntOpt('db_max_retries',
               default=20,
               help='maximum db connection retries before error is raised. '
                    '(setting -1 implies an infinite retry count)'),
]

# NOTE(review): registering on the global cfg.CONF here keeps backward
# compatibility for consumers that read cfg.CONF directly; new code is
# expected to pass values explicitly instead of relying on this.
CONF = cfg.CONF
CONF.register_opts(sqlite_db_opts)
CONF.register_opts(database_opts, 'database')
def set_defaults(sql_connection, sqlite_db, max_pool_size=None,
                 max_overflow=None, pool_timeout=None):
    """Set defaults for configuration variables.

    Overrides the default values of the 'connection' and 'sqlite_db'
    options, and of any QueuePool option for which a non-None value
    is supplied.
    """
    cfg.set_defaults(database_opts, connection=sql_connection)
    cfg.set_defaults(sqlite_db_opts, sqlite_db=sqlite_db)

    # Update the QueuePool defaults only for the options actually given.
    pool_overrides = (
        ('max_pool_size', max_pool_size),
        ('max_overflow', max_overflow),
        ('pool_timeout', pool_timeout),
    )
    for opt_name, value in pool_overrides:
        if value is not None:
            cfg.set_defaults(database_opts, **{opt_name: value})
# (group name, option list) pairs consumed by list_opts(); a group name
# of None stands for the [DEFAULT] group.
_opts = [
    (None, sqlite_db_opts),
    ('database', database_opts),
]
def list_opts():
    """Return all oslo.config options this library may register.

    Each element of the returned list is a ``(group_name, options)``
    tuple; a group name of ``None`` corresponds to the [DEFAULT] section
    of a configuration file. This lets tools such as the Oslo sample
    config generator discover the options exposed to users.

    Deep copies are returned so callers cannot mutate the module-level
    option definitions.

    :returns: a list of (group_name, opts) tuples
    """
    discovered = []
    for group_name, opts in _opts:
        discovered.append((group_name, copy.deepcopy(opts)))
    return discovered

View File

@ -16,19 +16,6 @@
"""Session Handling for SQLAlchemy backend.
Initializing:
* Call `set_defaults()` with the minimal of the following kwargs:
``sql_connection``, ``sqlite_db``
Example:
.. code:: python
session.set_defaults(
sql_connection="sqlite:///var/lib/oslo/sqlite.db",
sqlite_db="/var/lib/oslo/sqlite.db")
Recommended ways to use sessions within this framework:
* Don't use them explicitly; this is like running with ``AUTOCOMMIT=1``.
@ -293,11 +280,9 @@ Efficient use of soft deletes:
import functools
import logging
import os.path
import re
import time
from oslo.config import cfg
import six
from sqlalchemy import exc as sqla_exc
from sqlalchemy.interfaces import PoolListener
@ -309,125 +294,10 @@ from openstack.common.db import exception
from openstack.common.gettextutils import _
from openstack.common import timeutils
sqlite_db_opts = [
cfg.StrOpt('sqlite_db',
default='oslo.sqlite',
help='The file name to use with SQLite'),
cfg.BoolOpt('sqlite_synchronous',
default=True,
help='If True, SQLite uses synchronous mode'),
]
database_opts = [
cfg.StrOpt('connection',
default='sqlite:///' +
os.path.abspath(os.path.join(os.path.dirname(__file__),
'../', '$sqlite_db')),
help='The SQLAlchemy connection string used to connect to the '
'database',
secret=True,
deprecated_opts=[cfg.DeprecatedOpt('sql_connection',
group='DEFAULT'),
cfg.DeprecatedOpt('sql_connection',
group='DATABASE'),
cfg.DeprecatedOpt('connection',
group='sql'), ]),
cfg.StrOpt('slave_connection',
default='',
secret=True,
help='The SQLAlchemy connection string used to connect to the '
'slave database'),
cfg.IntOpt('idle_timeout',
default=3600,
deprecated_opts=[cfg.DeprecatedOpt('sql_idle_timeout',
group='DEFAULT'),
cfg.DeprecatedOpt('sql_idle_timeout',
group='DATABASE'),
cfg.DeprecatedOpt('idle_timeout',
group='sql')],
help='Timeout before idle sql connections are reaped'),
cfg.IntOpt('min_pool_size',
default=1,
deprecated_opts=[cfg.DeprecatedOpt('sql_min_pool_size',
group='DEFAULT'),
cfg.DeprecatedOpt('sql_min_pool_size',
group='DATABASE')],
help='Minimum number of SQL connections to keep open in a '
'pool'),
cfg.IntOpt('max_pool_size',
default=None,
deprecated_opts=[cfg.DeprecatedOpt('sql_max_pool_size',
group='DEFAULT'),
cfg.DeprecatedOpt('sql_max_pool_size',
group='DATABASE')],
help='Maximum number of SQL connections to keep open in a '
'pool'),
cfg.IntOpt('max_retries',
default=10,
deprecated_opts=[cfg.DeprecatedOpt('sql_max_retries',
group='DEFAULT'),
cfg.DeprecatedOpt('sql_max_retries',
group='DATABASE')],
help='Maximum db connection retries during startup. '
'(setting -1 implies an infinite retry count)'),
cfg.IntOpt('retry_interval',
default=10,
deprecated_opts=[cfg.DeprecatedOpt('sql_retry_interval',
group='DEFAULT'),
cfg.DeprecatedOpt('reconnect_interval',
group='DATABASE')],
help='Interval between retries of opening a sql connection'),
cfg.IntOpt('max_overflow',
default=None,
deprecated_opts=[cfg.DeprecatedOpt('sql_max_overflow',
group='DEFAULT'),
cfg.DeprecatedOpt('sqlalchemy_max_overflow',
group='DATABASE')],
help='If set, use this value for max_overflow with sqlalchemy'),
cfg.IntOpt('connection_debug',
default=0,
deprecated_opts=[cfg.DeprecatedOpt('sql_connection_debug',
group='DEFAULT')],
help='Verbosity of SQL debugging information. 0=None, '
'100=Everything'),
cfg.BoolOpt('connection_trace',
default=False,
deprecated_opts=[cfg.DeprecatedOpt('sql_connection_trace',
group='DEFAULT')],
help='Add python stack traces to SQL as comment strings'),
cfg.IntOpt('pool_timeout',
default=None,
deprecated_opts=[cfg.DeprecatedOpt('sqlalchemy_pool_timeout',
group='DATABASE')],
help='If set, use this value for pool_timeout with sqlalchemy'),
]
CONF = cfg.CONF
CONF.register_opts(sqlite_db_opts)
CONF.register_opts(database_opts, 'database')
LOG = logging.getLogger(__name__)
def set_defaults(sql_connection, sqlite_db, max_pool_size=None,
max_overflow=None, pool_timeout=None):
"""Set defaults for configuration variables."""
cfg.set_defaults(database_opts,
connection=sql_connection)
cfg.set_defaults(sqlite_db_opts,
sqlite_db=sqlite_db)
# Update the QueuePool defaults
if max_pool_size is not None:
cfg.set_defaults(database_opts,
max_pool_size=max_pool_size)
if max_overflow is not None:
cfg.set_defaults(database_opts,
max_overflow=max_overflow)
if pool_timeout is not None:
cfg.set_defaults(database_opts,
pool_timeout=pool_timeout)
class SqliteForeignKeysListener(PoolListener):
"""Ensures that the foreign key constraints are enforced in SQLite.
@ -661,25 +531,24 @@ def _raise_if_db_connection_lost(error, engine):
def create_engine(sql_connection, sqlite_fk=False,
mysql_traditional_mode=False):
mysql_traditional_mode=False, idle_timeout=3600,
connection_debug=0, max_pool_size=None, max_overflow=None,
pool_timeout=None, sqlite_synchronous=True,
connection_trace=False, max_retries=10, retry_interval=10):
"""Return a new SQLAlchemy engine."""
# NOTE(geekinutah): At this point we could be connecting to the normal
# db handle or the slave db handle. Things like
# _wrap_db_error aren't going to work well if their
# backends don't match. Let's check.
_assert_matching_drivers()
connection_dict = sqlalchemy.engine.url.make_url(sql_connection)
engine_args = {
"pool_recycle": CONF.database.idle_timeout,
"pool_recycle": idle_timeout,
"echo": False,
'convert_unicode': True,
}
# Map our SQL debug level to SQLAlchemy's options
if CONF.database.connection_debug >= 100:
if connection_debug >= 100:
engine_args['echo'] = 'debug'
elif CONF.database.connection_debug >= 50:
elif connection_debug >= 50:
engine_args['echo'] = True
if "sqlite" in connection_dict.drivername:
@ -687,16 +556,16 @@ def create_engine(sql_connection, sqlite_fk=False,
engine_args["listeners"] = [SqliteForeignKeysListener()]
engine_args["poolclass"] = NullPool
if CONF.database.connection == "sqlite://":
if sql_connection == "sqlite://":
engine_args["poolclass"] = StaticPool
engine_args["connect_args"] = {'check_same_thread': False}
else:
if CONF.database.max_pool_size is not None:
engine_args['pool_size'] = CONF.database.max_pool_size
if CONF.database.max_overflow is not None:
engine_args['max_overflow'] = CONF.database.max_overflow
if CONF.database.pool_timeout is not None:
engine_args['pool_timeout'] = CONF.database.pool_timeout
if max_pool_size is not None:
engine_args['pool_size'] = max_pool_size
if max_overflow is not None:
engine_args['max_overflow'] = max_overflow
if pool_timeout is not None:
engine_args['pool_timeout'] = pool_timeout
engine = sqlalchemy.create_engine(sql_connection, **engine_args)
@ -716,13 +585,12 @@ def create_engine(sql_connection, sqlite_fk=False,
"Please encourage the application "
"developers to enable this mode."))
elif 'sqlite' in connection_dict.drivername:
if not CONF.sqlite_synchronous:
if not sqlite_synchronous:
sqlalchemy.event.listen(engine, 'connect',
_synchronous_switch_listener)
sqlalchemy.event.listen(engine, 'connect', _add_regexp_listener)
if (CONF.database.connection_trace and
engine.dialect.dbapi.__name__ == 'MySQLdb'):
if connection_trace and engine.dialect.dbapi.__name__ == 'MySQLdb':
_patch_mysqldb_with_stacktrace_comments()
try:
@ -731,7 +599,7 @@ def create_engine(sql_connection, sqlite_fk=False,
if not _is_db_connection_error(e.args[0]):
raise
remaining = CONF.database.max_retries
remaining = max_retries
if remaining == -1:
remaining = 'infinite'
while True:
@ -739,7 +607,7 @@ def create_engine(sql_connection, sqlite_fk=False,
LOG.warning(msg % remaining)
if remaining != 'infinite':
remaining -= 1
time.sleep(CONF.database.retry_interval)
time.sleep(retry_interval)
try:
engine.connect()
break
@ -826,18 +694,6 @@ def _patch_mysqldb_with_stacktrace_comments():
setattr(MySQLdb.cursors.BaseCursor, '_do_query', _do_query)
def _assert_matching_drivers():
"""Make sure slave handle and normal handle have the same driver."""
# NOTE(geekinutah): There's no use case for writing to one backend and
# reading from another. Who knows what the future holds?
if CONF.database.slave_connection == '':
return
normal = sqlalchemy.engine.url.make_url(CONF.database.connection)
slave = sqlalchemy.engine.url.make_url(CONF.database.slave_connection)
assert normal.drivername == slave.drivername
class EngineFacade(object):
"""A helper class for removing of global engine instances from oslo.db.
@ -868,7 +724,7 @@ class EngineFacade(object):
def __init__(self, sql_connection,
sqlite_fk=False, mysql_traditional_mode=False,
autocommit=True, expire_on_commit=False):
autocommit=True, expire_on_commit=False, **kwargs):
"""Initialize engine and sessionmaker instances.
:param sqlite_fk: enable foreign keys in SQLite
@ -883,6 +739,28 @@ class EngineFacade(object):
:param expire_on_commit: expire session objects on commit
:type expire_on_commit: bool
Keyword arguments:
:keyword idle_timeout: timeout before idle sql connections are reaped
(defaults to 3600)
:keyword connection_debug: verbosity of SQL debugging information.
0=None, 100=Everything (defaults to 0)
:keyword max_pool_size: maximum number of SQL connections to keep open
in a pool (defaults to SQLAlchemy settings)
:keyword max_overflow: if set, use this value for max_overflow with
sqlalchemy (defaults to SQLAlchemy settings)
:keyword pool_timeout: if set, use this value for pool_timeout with
sqlalchemy (defaults to SQLAlchemy settings)
:keyword sqlite_synchronous: if True, SQLite uses synchronous mode
(defaults to True)
:keyword connection_trace: add python stack traces to SQL as comment
strings (defaults to False)
:keyword max_retries: maximum db connection retries during startup.
(setting -1 implies an infinite retry count)
(defaults to 10)
:keyword retry_interval: interval between retries of opening a sql
connection (defaults to 10)
"""
super(EngineFacade, self).__init__()
@ -890,7 +768,16 @@ class EngineFacade(object):
self._engine = create_engine(
sql_connection=sql_connection,
sqlite_fk=sqlite_fk,
mysql_traditional_mode=mysql_traditional_mode)
mysql_traditional_mode=mysql_traditional_mode,
idle_timeout=kwargs.get('idle_timeout', 3600),
connection_debug=kwargs.get('connection_debug', 0),
max_pool_size=kwargs.get('max_pool_size', None),
max_overflow=kwargs.get('max_overflow', None),
pool_timeout=kwargs.get('pool_timeout', None),
sqlite_synchronous=kwargs.get('sqlite_synchronous', True),
connection_trace=kwargs.get('connection_trace', False),
max_retries=kwargs.get('max_retries', 10),
retry_interval=kwargs.get('retry_interval', 10))
self._session_maker = get_maker(
engine=self._engine,
autocommit=autocommit,

View File

@ -0,0 +1,120 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from openstack.common.fixture import config
from tests import utils as test_utils
cfg.CONF.import_opt('connection', 'openstack.common.db.options',
group='database')
class DbApiOptionsTestCase(test_utils.BaseTestCase):
    # Verifies that the [database] options (and all their deprecated
    # [DEFAULT]/[DATABASE]/[sql] aliases) are parsed from config files.

    def setUp(self):
        super(DbApiOptionsTestCase, self).setUp()

        # The Config fixture provides an isolated ConfigOpts instance
        # (self.conf) plus a helper for overriding values (self.config).
        config_fixture = self.useFixture(config.Config())
        self.conf = config_fixture.conf
        self.config = config_fixture.config

    def test_deprecated_session_parameters(self):
        """Legacy sql_* names in [DEFAULT] map onto [database] options."""
        path = self.create_tempfiles([["tmp", """[DEFAULT]
sql_connection=x://y.z
sql_min_pool_size=10
sql_max_pool_size=20
sql_max_retries=30
sql_retry_interval=40
sql_max_overflow=50
sql_connection_debug=60
sql_connection_trace=True
"""]])[0]
        self.conf(['--config-file', path])
        self.assertEqual(self.conf.database.connection, 'x://y.z')
        self.assertEqual(self.conf.database.min_pool_size, 10)
        self.assertEqual(self.conf.database.max_pool_size, 20)
        self.assertEqual(self.conf.database.max_retries, 30)
        self.assertEqual(self.conf.database.retry_interval, 40)
        self.assertEqual(self.conf.database.max_overflow, 50)
        self.assertEqual(self.conf.database.connection_debug, 60)
        self.assertEqual(self.conf.database.connection_trace, True)

    def test_session_parameters(self):
        """Current option names in the [database] group are honoured."""
        path = self.create_tempfiles([["tmp", """[database]
connection=x://y.z
min_pool_size=10
max_pool_size=20
max_retries=30
retry_interval=40
max_overflow=50
connection_debug=60
connection_trace=True
pool_timeout=7
"""]])[0]
        self.conf(['--config-file', path])
        self.assertEqual(self.conf.database.connection, 'x://y.z')
        self.assertEqual(self.conf.database.min_pool_size, 10)
        self.assertEqual(self.conf.database.max_pool_size, 20)
        self.assertEqual(self.conf.database.max_retries, 30)
        self.assertEqual(self.conf.database.retry_interval, 40)
        self.assertEqual(self.conf.database.max_overflow, 50)
        self.assertEqual(self.conf.database.connection_debug, 60)
        self.assertEqual(self.conf.database.connection_trace, True)
        self.assertEqual(self.conf.database.pool_timeout, 7)

    def test_dbapi_database_deprecated_parameters(self):
        """Legacy names in the [DATABASE] section are still accepted."""
        path = self.create_tempfiles([['tmp', '[DATABASE]\n'
                                      'sql_connection=fake_connection\n'
                                      'sql_idle_timeout=100\n'
                                      'sql_min_pool_size=99\n'
                                      'sql_max_pool_size=199\n'
                                      'sql_max_retries=22\n'
                                      'reconnect_interval=17\n'
                                      'sqlalchemy_max_overflow=101\n'
                                      'sqlalchemy_pool_timeout=5\n'
                                      ]])[0]
        self.conf(['--config-file', path])
        self.assertEqual(self.conf.database.connection, 'fake_connection')
        self.assertEqual(self.conf.database.idle_timeout, 100)
        self.assertEqual(self.conf.database.min_pool_size, 99)
        self.assertEqual(self.conf.database.max_pool_size, 199)
        self.assertEqual(self.conf.database.max_retries, 22)
        self.assertEqual(self.conf.database.retry_interval, 17)
        self.assertEqual(self.conf.database.max_overflow, 101)
        self.assertEqual(self.conf.database.pool_timeout, 5)

    def test_dbapi_database_deprecated_parameters_sql(self):
        """Options under the legacy [sql] section are still accepted."""
        path = self.create_tempfiles([['tmp', '[sql]\n'
                                      'connection=test_sql_connection\n'
                                      'idle_timeout=99\n'
                                      ]])[0]
        self.conf(['--config-file', path])
        self.assertEqual(self.conf.database.connection, 'test_sql_connection')
        self.assertEqual(self.conf.database.idle_timeout, 99)

    def test_deprecated_dbapi_parameters(self):
        """The legacy db_backend name in [DEFAULT] maps to backend."""
        path = self.create_tempfiles([['tmp', '[DEFAULT]\n'
                                      'db_backend=test_123\n'
                                      ]])[0]
        self.conf(['--config-file', path])
        self.assertEqual(self.conf.database.backend, 'test_123')

    def test_dbapi_parameters(self):
        """The backend option is read from the [database] group."""
        path = self.create_tempfiles([['tmp', '[database]\n'
                                      'backend=test_123\n'
                                      ]])[0]
        self.conf(['--config-file', path])
        self.assertEqual(self.conf.database.backend, 'test_123')

View File

@ -29,7 +29,6 @@ from openstack.common.db import exception as db_exc
from openstack.common.db.sqlalchemy import models
from openstack.common.db.sqlalchemy import session
from openstack.common.db.sqlalchemy import test_base
from openstack.common.fixture import config
from openstack.common import test
@ -43,88 +42,6 @@ class TmpTable(BASE, models.ModelBase):
foo = Column(Integer)
class SessionParametersTestCase(test_base.DbTestCase):
def setUp(self):
super(SessionParametersTestCase, self).setUp()
config_fixture = self.useFixture(config.Config())
self.conf = config_fixture.conf
def test_deprecated_session_parameters(self):
path = self.create_tempfiles([["tmp", """[DEFAULT]
sql_connection=x://y.z
sql_min_pool_size=10
sql_max_pool_size=20
sql_max_retries=30
sql_retry_interval=40
sql_max_overflow=50
sql_connection_debug=60
sql_connection_trace=True
"""]])[0]
self.conf(['--config-file', path])
self.assertEqual(self.conf.database.connection, 'x://y.z')
self.assertEqual(self.conf.database.min_pool_size, 10)
self.assertEqual(self.conf.database.max_pool_size, 20)
self.assertEqual(self.conf.database.max_retries, 30)
self.assertEqual(self.conf.database.retry_interval, 40)
self.assertEqual(self.conf.database.max_overflow, 50)
self.assertEqual(self.conf.database.connection_debug, 60)
self.assertEqual(self.conf.database.connection_trace, True)
def test_session_parameters(self):
path = self.create_tempfiles([["tmp", """[database]
connection=x://y.z
min_pool_size=10
max_pool_size=20
max_retries=30
retry_interval=40
max_overflow=50
connection_debug=60
connection_trace=True
pool_timeout=7
"""]])[0]
self.conf(['--config-file', path])
self.assertEqual(self.conf.database.connection, 'x://y.z')
self.assertEqual(self.conf.database.min_pool_size, 10)
self.assertEqual(self.conf.database.max_pool_size, 20)
self.assertEqual(self.conf.database.max_retries, 30)
self.assertEqual(self.conf.database.retry_interval, 40)
self.assertEqual(self.conf.database.max_overflow, 50)
self.assertEqual(self.conf.database.connection_debug, 60)
self.assertEqual(self.conf.database.connection_trace, True)
self.assertEqual(self.conf.database.pool_timeout, 7)
def test_dbapi_database_deprecated_parameters(self):
path = self.create_tempfiles([['tmp', '[DATABASE]\n'
'sql_connection=fake_connection\n'
'sql_idle_timeout=100\n'
'sql_min_pool_size=99\n'
'sql_max_pool_size=199\n'
'sql_max_retries=22\n'
'reconnect_interval=17\n'
'sqlalchemy_max_overflow=101\n'
'sqlalchemy_pool_timeout=5\n'
]])[0]
self.conf(['--config-file', path])
self.assertEqual(self.conf.database.connection, 'fake_connection')
self.assertEqual(self.conf.database.idle_timeout, 100)
self.assertEqual(self.conf.database.min_pool_size, 99)
self.assertEqual(self.conf.database.max_pool_size, 199)
self.assertEqual(self.conf.database.max_retries, 22)
self.assertEqual(self.conf.database.retry_interval, 17)
self.assertEqual(self.conf.database.max_overflow, 101)
self.assertEqual(self.conf.database.pool_timeout, 5)
def test_dbapi_database_deprecated_parameters_sql(self):
path = self.create_tempfiles([['tmp', '[sql]\n'
'connection=test_sql_connection\n'
'idle_timeout=99\n'
]])[0]
self.conf(['--config-file', path])
self.assertEqual(self.conf.database.connection, 'test_sql_connection')
self.assertEqual(self.conf.database.idle_timeout, 99)
class SessionErrorWrapperTestCase(test_base.DbTestCase):
def setUp(self):
super(SessionErrorWrapperTestCase, self).setUp()

View File

@ -19,7 +19,6 @@ import mock
from openstack.common.db import api
from openstack.common.db import exception
from openstack.common.fixture import config
from openstack.common import importutils
from tests import utils as test_utils
@ -63,41 +62,14 @@ class DBAPI(object):
class DBAPITestCase(test_utils.BaseTestCase):
def setUp(self):
super(DBAPITestCase, self).setUp()
config_fixture = self.useFixture(config.Config())
self.conf = config_fixture.conf
self.config = config_fixture.config
def test_deprecated_dbapi_parameters(self):
path = self.create_tempfiles([['tmp', '[DEFAULT]\n'
'db_backend=test_123\n'
]])[0]
self.conf(['--config-file', path])
self.assertEqual(self.conf.database.backend, 'test_123')
def test_dbapi_parameters(self):
path = self.create_tempfiles([['tmp', '[database]\n'
'backend=test_123\n'
]])[0]
self.conf(['--config-file', path])
self.assertEqual(self.conf.database.backend, 'test_123')
def test_dbapi_full_path_module_method(self):
self.config(backend='tests.unit.db.test_api',
group='database')
dbapi = api.DBAPI()
dbapi = api.DBAPI('tests.unit.db.test_api')
result = dbapi.api_class_call1(1, 2, kwarg1='meow')
expected = ((1, 2), {'kwarg1': 'meow'})
self.assertEqual(expected, result)
def test_dbapi_unknown_invalid_backend(self):
self.config(backend='tests.unit.db.not_existent',
group='database')
self.assertRaises(ImportError, api.DBAPI)
self.assertRaises(ImportError, api.DBAPI, 'tests.unit.db.not_existent')
class DBReconnectTestCase(DBAPITestCase):
@ -110,30 +82,36 @@ class DBReconnectTestCase(DBAPITestCase):
patcher.start()
self.addCleanup(patcher.stop)
self.dbapi = api.DBAPI(
{'sqlalchemy': __name__}
)
def test_raise_connection_error(self):
self.dbapi = api.DBAPI('sqlalchemy', {'sqlalchemy': __name__})
self.test_db_api.error_counter = 5
self.assertRaises(exception.DBConnectionError, self.dbapi._api_raise)
def test_raise_connection_error_decorated(self):
self.dbapi = api.DBAPI('sqlalchemy', {'sqlalchemy': __name__})
self.test_db_api.error_counter = 5
self.assertRaises(exception.DBConnectionError,
self.dbapi.api_raise_enable_retry)
self.assertEqual(4, self.test_db_api.error_counter, 'Unexpected retry')
def test_raise_connection_error_enabled_config(self):
self.config(group='database', use_db_reconnect=True)
def test_raise_connection_error_enabled(self):
self.dbapi = api.DBAPI('sqlalchemy',
{'sqlalchemy': __name__},
use_db_reconnect=True)
self.test_db_api.error_counter = 5
self.assertRaises(exception.DBConnectionError,
self.dbapi.api_raise_default)
self.assertEqual(4, self.test_db_api.error_counter, 'Unexpected retry')
def test_retry_one(self):
self.config(group='database', use_db_reconnect=True)
self.config(group='database', db_retry_interval=1)
self.dbapi = api.DBAPI('sqlalchemy',
{'sqlalchemy': __name__},
use_db_reconnect=True,
retry_interval=1)
try:
func = self.dbapi.api_raise_enable_retry
self.test_db_api.error_counter = 1
@ -146,9 +124,11 @@ class DBReconnectTestCase(DBAPITestCase):
'Counter not decremented, retry logic probably failed.')
def test_retry_two(self):
self.config(group='database', use_db_reconnect=True)
self.config(group='database', db_inc_retry_interval=False)
self.config(group='database', db_retry_interval=1)
self.dbapi = api.DBAPI('sqlalchemy',
{'sqlalchemy': __name__},
use_db_reconnect=True,
retry_interval=1,
inc_retry_interval=False)
try:
func = self.dbapi.api_raise_enable_retry
@ -162,10 +142,12 @@ class DBReconnectTestCase(DBAPITestCase):
'Counter not decremented, retry logic probably failed.')
def test_retry_until_failure(self):
self.config(group='database', use_db_reconnect=True)
self.config(group='database', db_inc_retry_interval=False)
self.config(group='database', db_max_retries=3)
self.config(group='database', db_retry_interval=1)
self.dbapi = api.DBAPI('sqlalchemy',
{'sqlalchemy': __name__},
use_db_reconnect=True,
retry_interval=1,
inc_retry_interval=False,
max_retries=3)
func = self.dbapi.api_raise_enable_retry
self.test_db_api.error_counter = 5