Fix wrong data type in database migration

The `deleted` column in volume_type_extra_specs is a boolean;
the same column in share_type_extra_specs is an integer. However,
the code migrating between volume_types and share_types
assumes the types are the other way around. This breaks downgrading
from that migration.
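
For context, the mismatch looks roughly like this; a minimal sketch
with simplified column sets, not the actual manila schema definitions:

    # Minimal sketch of the type mismatch (simplified columns; not
    # the real manila schema definitions).
    import sqlalchemy as sa

    metadata = sa.MetaData()

    # Legacy table: the soft-delete flag is a boolean.
    volume_type_extra_specs = sa.Table(
        'volume_type_extra_specs', metadata,
        sa.Column('id', sa.Integer, primary_key=True, nullable=False),
        sa.Column('deleted', sa.Boolean),
    )

    # New table: the same flag is an integer.
    share_type_extra_specs = sa.Table(
        'share_type_extra_specs', metadata,
        sa.Column('id', sa.Integer, primary_key=True, nullable=False),
        sa.Column('deleted', sa.Integer),
    )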

Further, the `id` column in share_type_extra_specs is an Integer
primary key. Such columns have auto-increment enabled by default,
so we shouldn't copy the `id` from volume_type_extra_specs when we
copy the data.
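
To see why, here is a runnable sketch (SQLAlchemy 1.x style against
in-memory SQLite, with a hypothetical `spec_key` column standing in
for the real column set): rows inserted without an `id` get fresh,
database-assigned values.

    # Runnable sketch (SQLAlchemy 1.x, in-memory SQLite): the
    # destination's Integer primary key auto-increments, so rows
    # are inserted without an 'id' and the database assigns one.
    import sqlalchemy as sa

    engine = sa.create_engine('sqlite://')
    metadata = sa.MetaData()
    dest = sa.Table(
        'share_type_extra_specs', metadata,
        sa.Column('id', sa.Integer, primary_key=True, nullable=False),
        sa.Column('spec_key', sa.String(length=255)),
    )
    metadata.create_all(engine)

    # Copied rows deliberately omit 'id'; reusing ids from the source
    # table could collide with values already auto-assigned here.
    engine.execute(dest.insert(), [{'spec_key': 'foo'},
                                   {'spec_key': 'xyzzy'}])
    print([tuple(row) for row in engine.execute(dest.select())])
    # -> [(1, 'foo'), (2, 'xyzzy')]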

Fix these bugs and add a unit test.

Change-Id: Ic32a8a17b1b6a773de6dcf7792d9b37f6aed4a3c
Needed-by: Ib0ad5fbfdf6297665c208149b08c8d21b3c232be
Closes-Bug: 1643535
Author: Goutham Pacha Ravi
Date:   2016-11-21 21:18:52 +05:30
parent: 7294b2d78e
commit: f552549303
2 changed files with 93 additions and 3 deletions


@@ -115,7 +115,7 @@ def _copy_records(destination_table, up_migration=True):
         sa.Column('created_at', sa.DateTime),
         sa.Column('updated_at', sa.DateTime),
         sa.Column('deleted_at', sa.DateTime),
-        sa.Column('deleted', sa.Integer if up_migration else sa.Boolean),
+        sa.Column('deleted', sa.Boolean if up_migration else sa.Integer),
         sa.Column('id', sa.Integer, primary_key=True, nullable=False),
         sa.Column(data_from[0] + '_type_id', sa.String(length=36)),
         sa.Column(data_from[1] + 'key', sa.String(length=255)),
@@ -126,13 +126,12 @@ def _copy_records(destination_table, up_migration=True):
         if up_migration:
             deleted = strutils.int_from_bool_as_string(es.deleted)
         else:
-            deleted = strutils.bool_from_string(es.deleted)
+            deleted = strutils.bool_from_string(es.deleted, default=True)
         extra_specs.append({
             'created_at': es.created_at,
             'updated_at': es.updated_at,
             'deleted_at': es.deleted_at,
             'deleted': deleted,
-            'id': es.id,
             data_to[0] + '_type_id': getattr(es, data_from[0] + '_type_id'),
             data_to[1] + 'key': getattr(es, data_from[1] + 'key'),
             data_to[1] + 'value': getattr(es, data_from[1] + 'value'),
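
For reference, the oslo.utils helpers used above behave as follows.
Assuming the usual OpenStack soft-delete convention for integer
`deleted` columns (0 for live rows, a non-zero value otherwise), 0
parses cleanly as False, but a non-zero value is not a recognized
boolean string, which is why the downgrade path needs `default=True`:

    # Behavior of the oslo.utils string helpers used in the migration.
    from oslo_utils import strutils

    # Up migration: boolean -> integer.
    print(strutils.int_from_bool_as_string(True))   # 1
    print(strutils.int_from_bool_as_string(False))  # 0

    # Down migration: integer -> boolean. 0 parses as False, but a
    # non-zero value such as 7 is not a recognized boolean string and
    # falls through to the default -- hence default=True in the fix.
    print(strutils.bool_from_string(0, default=True))  # False
    print(strutils.bool_from_string(7, default=True))  # True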


@@ -121,6 +121,97 @@ class BaseMigrationChecks(object):
         """
 
 
+@map_to_migration('38e632621e5a')
+class ShareTypeMigrationChecks(BaseMigrationChecks):
+    def _get_fake_data(self):
+        extra_specs = []
+        self.share_type_ids = []
+        volume_types = [
+            {
+                'id': uuidutils.generate_uuid(),
+                'deleted': 'False',
+                'name': 'vol-type-A',
+            },
+            {
+                'id': uuidutils.generate_uuid(),
+                'deleted': 'False',
+                'name': 'vol-type-B',
+            },
+        ]
+        for idx, volume_type in enumerate(volume_types):
+            extra_specs.append({
+                'volume_type_id': volume_type['id'],
+                'key': 'foo',
+                'value': 'bar%s' % idx,
+                'deleted': False,
+            })
+            extra_specs.append({
+                'volume_type_id': volume_type['id'],
+                'key': 'xyzzy',
+                'value': 'spoon_%s' % idx,
+                'deleted': False,
+            })
+            self.share_type_ids.append(volume_type['id'])
+        return volume_types, extra_specs
+
+    def setup_upgrade_data(self, engine):
+        (self.volume_types, self.extra_specs) = self._get_fake_data()
+        volume_types_table = utils.load_table('volume_types', engine)
+        engine.execute(volume_types_table.insert(self.volume_types))
+        extra_specs_table = utils.load_table('volume_type_extra_specs',
+                                             engine)
+        engine.execute(extra_specs_table.insert(self.extra_specs))
+
+    def check_upgrade(self, engine, data):
+        # Verify table transformations
+        share_types_table = utils.load_table('share_types', engine)
+        share_types_specs_table = utils.load_table(
+            'share_type_extra_specs', engine)
+        self.test_case.assertRaises(sa_exc.NoSuchTableError, utils.load_table,
+                                    'volume_types', engine)
+        self.test_case.assertRaises(sa_exc.NoSuchTableError, utils.load_table,
+                                    'volume_type_extra_specs', engine)
+
+        # Verify presence of data
+        share_type_ids = [
+            st['id'] for st in engine.execute(share_types_table.select())
+            if st['id'] in self.share_type_ids
+        ]
+        self.test_case.assertEqual(sorted(self.share_type_ids),
+                                   sorted(share_type_ids))
+        extra_specs = [
+            {'type': es['share_type_id'], 'key': es['spec_key']}
+            for es in engine.execute(share_types_specs_table.select())
+            if es['share_type_id'] in self.share_type_ids
+        ]
+        self.test_case.assertEqual(4, len(extra_specs))
+
+    def check_downgrade(self, engine):
+        # Verify table transformations
+        volume_types_table = utils.load_table('volume_types', engine)
+        volume_types_specs_table = utils.load_table(
+            'volume_type_extra_specs', engine)
+        self.test_case.assertRaises(sa_exc.NoSuchTableError, utils.load_table,
+                                    'share_types', engine)
+        self.test_case.assertRaises(sa_exc.NoSuchTableError, utils.load_table,
+                                    'share_type_extra_specs', engine)
+
+        # Verify presence of data
+        volume_type_ids = [
+            vt['id'] for vt in engine.execute(volume_types_table.select())
+            if vt['id'] in self.share_type_ids
+        ]
+        self.test_case.assertEqual(sorted(self.share_type_ids),
+                                   sorted(volume_type_ids))
+        extra_specs = [
+            {'type': es['volume_type_id'], 'key': es['key']}
+            for es in engine.execute(volume_types_specs_table.select())
+            if es['volume_type_id'] in self.share_type_ids
+        ]
+        self.test_case.assertEqual(4, len(extra_specs))
+
+
 @map_to_migration('1f0bd302c1a6')
 class AvailabilityZoneMigrationChecks(BaseMigrationChecks):