Fix sqlalchemy migration

1. Fix the case of table names
2. Fix the sqlalchemy migration tests

NOTE: The sqlalchemy migration tests depend on oslo_db's migration
test base, which requires a MySQL user 'openstack_citest' with password
'openstack_citest'; otherwise the tests are skipped.

Closes-Bug: #1654105

Change-Id: Ia672440a948fa4784f6dd1aa6d5fed0bc3915663
Fei Long Wang 2017-01-05 13:26:46 +13:00
parent 828bab9d66
commit 1ddd9ca5c0
6 changed files with 82 additions and 35 deletions

View File

@@ -153,6 +153,15 @@ For example, you want to run functional tests with keystone authentication
 enabled, input a valid set of credentials to ``[auth]`` section in
 configuration file and set ``auth_on`` parameter to ``True``.
 
+Using local MySQL database
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+To use a testing environment with database support similar to upstream CI,
+run ``zaqar/tools/test-setup.sh`` to create the required MySQL user
+``openstack_citest`` (its password is the same string). The user is needed
+by oslo.db's migration test base, on which Zaqar's sqlalchemy migration
+tests are built.
+
 .. rubric:: Footnotes
 
 .. [#f1] See http://docs.openstack.org/infra/system-config/jenkins.html

View File

@@ -11,6 +11,7 @@ mock>=2.0 # BSD
 redis>=2.10.0 # MIT
 pymongo!=3.1,>=3.0.2 # Apache-2.0
 websocket-client>=0.32.0 # LGPLv2+
+PyMySQL>=0.7.6 # MIT License
 
 # Unit testing
 coverage>=4.0 # Apache-2.0
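
PyMySQL is added because oslo.db's opportunistic MySQL test fixtures connect
through the ``mysql+pymysql://`` dialect; without a MySQL driver installed,
the migration tests are skipped. A minimal sketch of the connection those
fixtures need, with an illustrative URL matching the user that
tools/test-setup.sh (added below) provisions:

    import sqlalchemy as sa

    # Illustrative only: oslo.db builds this URL itself for its
    # opportunistic MySQL backend; the credentials match test-setup.sh.
    url = ('mysql+pymysql://openstack_citest:openstack_citest'
           '@127.0.0.1/openstack_citest')
    engine = sa.create_engine(url)
    engine.connect().close()  # raises if MySQL, the user, or PyMySQL is absent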

tools/test-setup.sh (new executable file, 33 lines)
View File

@@ -0,0 +1,33 @@
#!/bin/bash -xe
# This script will be run by OpenStack CI before unit tests are run,
# it sets up the test system as needed.
# Developers should setup their test systems in a similar way.
# This setup needs to be run as a user that can run sudo.
# The root password for the MySQL database; pass it in via
# MYSQL_ROOT_PW.
DB_ROOT_PW=${MYSQL_ROOT_PW:-insecure_slave}
# This user and its password are used by the tests, if you change it,
# your tests might fail.
DB_USER=openstack_citest
DB_PW=openstack_citest
sudo -H mysqladmin -u root password $DB_ROOT_PW
# It's best practice to remove anonymous users from the database. If
# an anonymous user exists, then it matches first for connections and
# other connections from that host will not work.
sudo -H mysql -u root -p$DB_ROOT_PW -h localhost -e "
DELETE FROM mysql.user WHERE User='';
FLUSH PRIVILEGES;
GRANT ALL PRIVILEGES ON *.*
TO '$DB_USER'@'%' identified by '$DB_PW' WITH GRANT OPTION;"
# Now create our database.
mysql -u $DB_USER -p$DB_PW -h 127.0.0.1 -e "
SET default_storage_engine=MYISAM;
DROP DATABASE IF EXISTS openstack_citest;
CREATE DATABASE openstack_citest CHARACTER SET utf8;"

View File

@@ -33,20 +33,20 @@ MYSQL_CHARSET = 'utf8'
 def upgrade():
-    op.create_table('queues',
+    op.create_table('Queues',
                     sa.Column('id', sa.INTEGER, primary_key=True),
                     sa.Column('project', sa.String(64)),
                     sa.Column('name', sa.String(64)),
                     sa.Column('metadata', sa.LargeBinary),
                     sa.UniqueConstraint('project', 'name'))
 
-    op.create_table('poolgroup',
+    op.create_table('PoolGroup',
                     sa.Column('name', sa.String(64), primary_key=True))
 
-    op.create_table('pools',
+    op.create_table('Pools',
                     sa.Column('name', sa.String(64), primary_key=True),
                     sa.Column('group', sa.String(64),
-                              sa.ForeignKey('poolgroup.name',
+                              sa.ForeignKey('PoolGroup.name',
                                             ondelete='CASCADE'),
                               nullable=True),
                     sa.Column('uri', sa.String(255),
@@ -54,18 +54,18 @@ def upgrade():
                     sa.Column('weight', sa.INTEGER, nullable=False),
                     sa.Column('options', sa.Text()))
 
-    op.create_table('flavors',
+    op.create_table('Flavors',
                     sa.Column('name', sa.String(64), primary_key=True),
                     sa.Column('project', sa.String(64)),
                     sa.Column('pool_group', sa.String(64),
-                              sa.ForeignKey('poolgroup.name',
+                              sa.ForeignKey('PoolGroup.name',
                                             ondelete='CASCADE'),
                               nullable=False),
                     sa.Column('capabilities', sa.Text()))
 
-    op.create_table('catalogue',
+    op.create_table('Catalogue',
                     sa.Column('pool', sa.String(64),
-                              sa.ForeignKey('pools.name',
+                              sa.ForeignKey('Pools.name',
                                             ondelete='CASCADE')),
                     sa.Column('project', sa.String(64)),
                     sa.Column('queue', sa.String(64), nullable=False),
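
Table-name case matters here: on most Linux MySQL installations
(lower_case_table_names=0) names are case sensitive, so the migration must
create exactly the mixed-case names that Zaqar's SQLAlchemy models declare,
otherwise oslo.db's model/migration comparison reports spurious differences.
A hedged sketch of the kind of check this amounts to, assuming an engine
pointing at an already-migrated database:

    import sqlalchemy as sa

    def check_table_case(engine):
        # The mixed-case names must match the model definitions exactly;
        # the alembic_version bookkeeping table is ignored.
        expected = {'Queues', 'PoolGroup', 'Pools', 'Flavors', 'Catalogue'}
        found = set(sa.inspect(engine).get_table_names()) - {'alembic_version'}
        assert expected <= found, expected - found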

View File

@@ -30,8 +30,6 @@ postgres=# create database openstack_citest with owner openstack_citest;
 """
 
-import os
-
 from oslo_db.sqlalchemy import test_base
 from oslo_db.sqlalchemy import utils as db_utils
@@ -95,17 +93,17 @@ class ZaqarMigrationsCheckers(object):
             'metadata'
         ]
         self.assertColumnsExist(
-            engine, 'queues', queues_columns)
+            engine, 'Queues', queues_columns)
         self.assertColumnCount(
-            engine, 'queues', queues_columns)
+            engine, 'Queues', queues_columns)
 
         poolgroup_columns = [
             'name',
         ]
         self.assertColumnsExist(
-            engine, 'poolgroup', poolgroup_columns)
+            engine, 'PoolGroup', poolgroup_columns)
         self.assertColumnCount(
-            engine, 'poolgroup', poolgroup_columns)
+            engine, 'PoolGroup', poolgroup_columns)
 
         pools_columns = [
             'name',
@@ -115,9 +113,9 @@ class ZaqarMigrationsCheckers(object):
             'options',
         ]
         self.assertColumnsExist(
-            engine, 'pools', pools_columns)
+            engine, 'Pools', pools_columns)
         self.assertColumnCount(
-            engine, 'pools', pools_columns)
+            engine, 'Pools', pools_columns)
 
         flavors_columns = [
             'name',
@@ -126,9 +124,9 @@ class ZaqarMigrationsCheckers(object):
             'capabilities',
         ]
         self.assertColumnsExist(
-            engine, 'flavors', flavors_columns)
+            engine, 'Flavors', flavors_columns)
         self.assertColumnCount(
-            engine, 'flavors', flavors_columns)
+            engine, 'Flavors', flavors_columns)
 
         catalogue_columns = [
             'pool',
@@ -136,19 +134,19 @@ class ZaqarMigrationsCheckers(object):
             'queue',
         ]
         self.assertColumnsExist(
-            engine, 'catalogue', catalogue_columns)
+            engine, 'Catalogue', catalogue_columns)
         self.assertColumnCount(
-            engine, 'catalogue', catalogue_columns)
+            engine, 'Catalogue', catalogue_columns)
 
         self._data_001(engine, data)
 
     def _data_001(self, engine, data):
-        datasize = 512 * 1024  # 512kB
-        data = os.urandom(datasize)
-        t = db_utils.get_table(engine, 'job_binary_internal')
-        engine.execute(t.insert(), data=data, id='123', name='name')
-        new_data = engine.execute(t.select()).fetchone().data
-        self.assertEqual(data, new_data)
+        project = 'myproject'
+        t = db_utils.get_table(engine, 'Queues')
+        engine.execute(t.insert(), id='123', name='name',
+                       project='myproject', metadata={})
+        new_project = engine.execute(t.select()).fetchone().project
+        self.assertEqual(project, new_project)
         engine.execute(t.delete())
 
     def _check_002(self, engine, data):
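
The assertColumnsExist/assertColumnCount calls above, and the rewritten
_data_001, all go through oslo.db's table reflection. A rough, hypothetical
helper showing what those column checks reduce to (the real assertion
methods are defined elsewhere in this checker class):

    from oslo_db.sqlalchemy import utils as db_utils

    def reflected_columns(engine, table_name):
        # Reflect the named table and return its column names; e.g. for
        # 'Queues' after migration 001 this should be
        # {'id', 'project', 'name', 'metadata'}.
        table = db_utils.get_table(engine, table_name)
        return {column.name for column in table.columns}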

View File

@@ -25,7 +25,6 @@
 import io
 import os
 
-import sqlalchemy as sa
 import alembic
 from alembic import command
@@ -40,10 +39,17 @@ from zaqar.i18n import _LE
 import zaqar.storage.sqlalchemy.migration
 from zaqar.storage.sqlalchemy import tables
 
 LOG = logging.getLogger(__name__)
 CONF = cfg.CONF
 
+sqlalchemy_opts = [cfg.StrOpt('uri',
+                              help='The SQLAlchemy connection string to'
+                                   ' use to connect to the database.',
+                              secret=True)]
+
+CONF.register_opts(sqlalchemy_opts,
+                   group='drivers:management_store:sqlalchemy')
+
 
 class BaseWalkMigrationTestCase(object):
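
Registering the option group in the test module matters because oslo.config
refuses to override options it does not know about. A standalone
illustration of that behaviour (the URL is a placeholder, not Zaqar's
runtime configuration):

    from oslo_config import cfg

    CONF = cfg.CONF
    # Overriding 'uri' before it is registered would raise
    # cfg.NoSuchOptError, which is why the registration above happens at
    # import time, before any test calls set_override().
    CONF.register_opts([cfg.StrOpt('uri', secret=True)],
                       group='drivers:management_store:sqlalchemy')
    CONF.set_override('uri', 'sqlite:///:memory:',
                      group='drivers:management_store:sqlalchemy',
                      enforce_type=True)
    print(CONF['drivers:management_store:sqlalchemy'].uri)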
@@ -63,10 +69,10 @@ class BaseWalkMigrationTestCase(object):
         should use oslo_config and openstack.commom.db.sqlalchemy.session with
         database functionality (reset default settings and session cleanup).
         """
         CONF.set_override('uri', str(engine.url),
                           group='drivers:management_store:sqlalchemy',
                           enforce_type=True)
-        sa.cleanup()
 
     def _alembic_command(self, alembic_command, engine, *args, **kwargs):
         """Most of alembic command return data into output.
@@ -77,12 +83,12 @@ class BaseWalkMigrationTestCase(object):
         CONF.set_override('uri', str(engine.url),
                           group='drivers:management_store:sqlalchemy',
                           enforce_type=True)
-        sa.cleanup()
         getattr(command, alembic_command)(*args, **kwargs)
         res = buf.getvalue().strip()
         LOG.debug('Alembic command {command} returns: {result}'.format(
             command=alembic_command, result=res))
-        sa.cleanup()
         return res
 
     def _get_versions(self):
@@ -167,11 +173,9 @@ class TestModelsMigrationsSync(t_m.ModelsMigrationsSync):
     Allows to check if the DB schema obtained by applying of migration
     scripts is equal to the one produced from models definitions.
     """
+    mg_path = os.path.dirname(zaqar.storage.sqlalchemy.migration.__file__)
     ALEMBIC_CONFIG = alembic_config.Config(
-        os.path.join(
-            os.path.dirname(zaqar.storage.sqlalchemy.migration.__file__),
-            'alembic.ini')
+        os.path.join(mg_path, 'alembic.ini')
     )
 
     ALEMBIC_CONFIG.zaqar_config = CONF
@@ -182,6 +186,8 @@ class TestModelsMigrationsSync(t_m.ModelsMigrationsSync):
         CONF.set_override('uri', str(engine.url),
                           group='drivers:management_store:sqlalchemy',
                           enforce_type=True)
+        script_location = os.path.join(self.mg_path, 'alembic_migrations')
+        self.ALEMBIC_CONFIG.set_main_option('script_location', script_location)
         alembic.command.upgrade(self.ALEMBIC_CONFIG, 'head')
 
     def get_metadata(self):
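
For context, oslo.db's ModelsMigrationsSync (imported here as t_m) upgrades a
scratch database to the latest alembic revision via db_sync() and then
compares that schema with the MetaData returned by get_metadata(); with the
old lowercase table names, the mixed-case model tables showed up as spurious
differences, which is what the rename fixes. A much-simplified sketch of the
idea, not the oslo.db implementation:

    import sqlalchemy as sa

    def schema_matches_models(engine, metadata):
        # oslo.db performs a far deeper column/type/constraint comparison;
        # this only illustrates the table-name part that the rename fixes.
        migrated = (set(sa.inspect(engine).get_table_names())
                    - {'alembic_version'})
        declared = set(metadata.tables)
        return migrated == declared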