Add online data migrations for conductor version

This modifies the backfill_version_column method that was used in Pike
so that it backfills the version column for the Conductor object, since
we missed setting the Conductor's version column in Pike. This method is
invoked when the operator runs ``ironic-dbsync online_data_migrations``.

Change-Id: I42fec0ac9c8b684219f14822648e627fce529d36
Partial-Bug: 1526283
Ruby Loo 2017-08-24 18:04:13 -04:00
parent e63947b5d2
commit f1d442b722
6 changed files with 94 additions and 71 deletions


@@ -58,8 +58,8 @@ dbapi = db_api.get_instance()
# of migrated objects.
# """
ONLINE_MIGRATIONS = (
# Added in Pike
# TODO(rloo): remove in Queens
# Added in Pike, modified in Queens
# TODO(rloo): remove in Rocky
dbapi.backfill_version_column,
)
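
For context, here is a rough sketch of how the callables registered in a tuple
like ONLINE_MIGRATIONS are consumed when the operator runs
``ironic-dbsync online_data_migrations``. The runner function and its budget
handling are simplified assumptions for illustration, not ironic-dbsync's
exact code:

    # Illustrative runner: call each registered migration with the remaining
    # budget and sum the (found, migrated) tuples it returns. A max_count of
    # 0 is treated as "no limit", matching the semantics documented below.
    def run_online_migrations(migrations, context, max_count=0):
        total_found = total_migrated = 0
        remaining = max_count
        for migrate in migrations:
            found, migrated = migrate(context, remaining)
            total_found += found
            total_migrated += migrated
            if max_count:
                remaining -= migrated
                if remaining <= 0:
                    break
        return total_found, total_migrated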


@@ -482,6 +482,7 @@ class Connection(object):
'hostname': the unique hostname which identifies
this Conductor service.
'drivers': a list of supported drivers.
'version': the version of the object.Conductor
}
:param update_existing: When false, registration will raise an
exception when a conflicting online record
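
For illustration, a ``values`` dict matching the keys listed above might look
like the following; the hostname, driver name, and version string are made-up
examples, not values taken from a real deployment:

    # Made-up example of the `values` argument for register_conductor().
    values = {
        'hostname': 'conductor-host-1.example.com',
        'drivers': ['ipmi'],   # list of supported drivers
        'version': '1.2',      # version of the Conductor object (illustrative)
    }
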
@@ -899,14 +900,20 @@ class Connection(object):
@abc.abstractmethod
def backfill_version_column(self, max_count):
"""Backfill the version column with Ocata versions.
"""Backfill the Conductor version column with Pike version.
The version column was added to all the resource tables in this Pike
The version column was added to all the resource tables in the Pike
release (via 'ironic-dbsync upgrade'). After upgrading (from Ocata to
Pike), the 'ironic-dbsync online_data_migrations' command will invoke
this method to populate (backfill) the version columns. The version
used will be the object version from the pinning set in config (i.e.
prior to this column being added).
Pike), the 'ironic-dbsync online_data_migrations' command would have
populated (backfilled) the version column for all objects.
Unfortunately, in the Pike release, we forgot to set the value for the
conductor's version column. For the Queens release, we are setting
the conductor version; however, we still need to backfill in case new
conductors were added between the time the operator ran Pike's
'ironic-dbsync online_data_migrations' and their upgrade to Queens.
The version used will be the conductor object version from the Pike
release.
:param max_count: The maximum number of objects to migrate. Must be
>= 0. If zero, all the objects will be migrated.
@@ -914,4 +921,4 @@ class Connection(object):
migrated (at the beginning of this call) and 2. the number
of migrated objects.
"""
# TODO(rloo) Delete this in Queens cycle.
# TODO(rloo) Delete this in Rocky cycle.
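
The (total, migrated) return value described above lends itself to calling the
migration in batches. A minimal sketch, assuming a hypothetical dbapi object
that exposes the SQLAlchemy implementation below (which also takes a context
argument); the function name and default batch size are illustrative:

    # Hypothetical driver loop: keep requesting up to `batch` migrations until
    # nothing found at the start of a call remains unmigrated.
    def migrate_in_batches(dbapi, context, batch=50):
        done = 0
        while True:
            total, migrated = dbapi.backfill_version_column(context, batch)
            done += migrated
            # `total` counts objects still unmigrated at the start of this
            # call, so we are finished once everything found was migrated
            # (or nothing was found at all).
            if total <= migrated:
                return done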


@@ -1160,13 +1160,20 @@ class Connection(api.Connection):
@oslo_db_api.retry_on_deadlock
def backfill_version_column(self, context, max_count):
"""Backfill the version column with Ocata versions.
"""Backfill the Conductor version column with Pike version.
The version column was added to all the resource tables in this Pike
The version column was added to all the resource tables in the Pike
release (via 'ironic-dbsync upgrade'). After upgrading (from Ocata to
Pike), the 'ironic-dbsync online_data_migrations' command will invoke
this method to populate (backfill) the version columns. The version
used will be the object version prior to this column being added.
Pike), the 'ironic-dbsync online_data_migrations' command would have
populated (backfilled) the version column for all objects.
Unfortunately, in the Pike release, we forgot to set the value for the
conductor's version column. For the Queens release, we are setting
the conductor version; however, we still need to backfill in case new
conductors were added between the time the operator ran Pike's
'ironic-dbsync online_data_migrations' and their upgrade to Queens.
The version used will be the conductor object version from the Pike
release.
:param context: the admin context (not used)
:param max_count: The maximum number of objects to migrate. Must be
@@ -1175,15 +1182,17 @@ class Connection(api.Connection):
migrated (at the beginning of this call) and 2. the number
of migrated objects.
"""
# TODO(rloo): Delete this in Queens cycle.
prior_release = '7.0'
# TODO(rloo): Delete this in Rocky cycle.
prior_release = 'pike'
mapping = release_mappings.RELEASE_MAPPING[prior_release]['objects']
total_to_migrate = 0
total_migrated = 0
# backfill only objects that were in the prior release
sql_models = [model for model in models.Base.__subclasses__()
if model.__name__ in mapping]
# backfill only the Conductor.
# NOTE(rloo) This code was used in Pike to backfill all the objects.
# To make it easier to review, etc., we are re-using that code with
# minimal code changes to only backfill the 'Conductor' object.
sql_models = [models.Conductor]
for model in sql_models:
query = model_query(model).filter(model.version.is_(None))
total_to_migrate += query.count()
@@ -1211,6 +1220,9 @@ class Connection(api.Connection):
num_migrated = 0
with _session_for_write():
query = model_query(model).filter(model.version.is_(None))
# NOTE(rloo) Caution here; after doing query.count(), it is
# possible that the value is different in the
# next invocation of the query.
if max_to_migrate < query.count():
# Only want to update max_to_migrate objects; cannot use
# sql's limit(), so we generate a new query with

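As a self-contained sketch of the backfill pattern used in the implementation
above: select rows whose version column is NULL, cap the batch at max_count,
and set them to the prior release's object version. The Conductor model, the
'1.2' version string, and the in-memory SQLite engine are illustrative
stand-ins (assuming SQLAlchemy >= 1.4), not ironic's real schema or values:

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import declarative_base, sessionmaker

    Base = declarative_base()

    class Conductor(Base):
        __tablename__ = 'conductors'
        id = Column(Integer, primary_key=True)
        hostname = Column(String(255))
        version = Column(String(15), nullable=True)

    def backfill_conductor_version(session, max_count, pike_version='1.2'):
        # Returns (total unmigrated at the start, number migrated), mirroring
        # the DB API contract documented above.
        query = session.query(Conductor).filter(Conductor.version.is_(None))
        total = query.count()
        # Collect ids first: as the code above notes, sql's limit() cannot be
        # combined with the bulk update, so the rows to touch are chosen
        # before updating.
        batch = query.limit(max_count) if max_count else query
        ids = [c.id for c in batch]
        migrated = 0
        if ids:
            migrated = (session.query(Conductor)
                        .filter(Conductor.id.in_(ids))
                        .update({'version': pike_version},
                                synchronize_session=False))
            session.commit()
        return total, migrated

    if __name__ == '__main__':
        engine = create_engine('sqlite://')
        Base.metadata.create_all(engine)
        session = sessionmaker(bind=engine)()
        session.add_all([Conductor(hostname='c%d' % i) for i in range(5)])
        session.commit()
        print(backfill_conductor_version(session, max_count=2))  # -> (5, 2)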

@@ -57,81 +57,62 @@ class BackfillVersionTestCase(base.DbTestCase):
super(BackfillVersionTestCase, self).setUp()
self.context = context.get_admin_context()
self.dbapi = db_api.get_instance()
obj_mapping = release_mappings.RELEASE_MAPPING['ocata']['objects']
self.node_ver = obj_mapping['Node'][0]
self.chassis_ver = obj_mapping['Chassis'][0]
obj_mapping = release_mappings.RELEASE_MAPPING['pike']['objects']
self.conductor_ver = obj_mapping['Conductor'][0]
def test_empty_db(self):
self.assertEqual((0, 0),
self.dbapi.backfill_version_column(self.context, 10))
def test_version_exists(self):
utils.create_test_node()
utils.create_test_conductor()
self.assertEqual((0, 0),
self.dbapi.backfill_version_column(self.context, 10))
def test_one_node(self):
node = utils.create_test_node(version=None)
self.assertIsNone(node.version)
node = self.dbapi.get_node_by_uuid(node.uuid)
self.assertIsNone(node.version)
def test_one_conductor(self):
conductors = self._create_conductors(1)
self.assertEqual((1, 1),
self.dbapi.backfill_version_column(self.context, 10))
res = self.dbapi.get_node_by_uuid(node.uuid)
self.assertEqual(self.node_ver, res.version)
res = self.dbapi.get_conductor(conductors[0])
self.assertEqual(self.conductor_ver, res.version)
def test_max_count_zero(self):
orig_node = utils.create_test_node(version=None)
orig_chassis = utils.create_test_chassis(version=None)
self.assertIsNone(orig_node.version)
self.assertIsNone(orig_chassis.version)
conductors = self._create_conductors(2)
self.assertEqual((2, 2),
self.dbapi.backfill_version_column(self.context, 0))
node = self.dbapi.get_node_by_uuid(orig_node.uuid)
self.assertEqual(self.node_ver, node.version)
chassis = self.dbapi.get_chassis_by_uuid(orig_chassis.uuid)
self.assertEqual(self.chassis_ver, chassis.version)
for hostname in conductors:
conductor = self.dbapi.get_conductor(hostname)
self.assertEqual(self.conductor_ver, conductor.version)
def test_no_version_max_count_1(self):
orig_node = utils.create_test_node(version=None)
orig_chassis = utils.create_test_chassis(version=None)
self.assertIsNone(orig_node.version)
self.assertIsNone(orig_chassis.version)
self.assertEqual((2, 1),
self.dbapi.backfill_version_column(self.context, 1))
node = self.dbapi.get_node_by_uuid(orig_node.uuid)
chassis = self.dbapi.get_chassis_by_uuid(orig_chassis.uuid)
self.assertTrue(node.version is None or chassis.version is None)
self.assertTrue(node.version == self.node_ver or
chassis.version == self.chassis_ver)
def _create_conductors(self, num, version=None):
conductors = []
for i in range(0, num):
conductor = utils.create_test_conductor(
version=version,
hostname='test_name_%d' % i,
uuid=uuidutils.generate_uuid())
conductors.append(conductor.hostname)
for hostname in conductors:
conductor = self.dbapi.get_conductor(hostname)
self.assertEqual(version, conductor.version)
return conductors
def _create_nodes(self, num_nodes, version=None):
nodes = []
for i in range(0, num_nodes):
node = utils.create_test_node(version=version,
uuid=uuidutils.generate_uuid())
nodes.append(node.uuid)
for uuid in nodes:
node = self.dbapi.get_node_by_uuid(uuid)
self.assertIsNone(node.version)
return nodes
def test_no_version_max_count_2_some_nodes(self):
nodes = self._create_nodes(5)
def test_no_version_max_count_2_some_conductors(self):
conductors = self._create_conductors(5)
self.assertEqual((5, 2),
self.dbapi.backfill_version_column(self.context, 2))
self.assertEqual((3, 3),
self.dbapi.backfill_version_column(self.context, 10))
for uuid in nodes:
node = self.dbapi.get_node_by_uuid(uuid)
self.assertEqual(self.node_ver, node.version)
for hostname in conductors:
conductor = self.dbapi.get_conductor(hostname)
self.assertEqual(self.conductor_ver, conductor.version)
def test_no_version_max_count_same_nodes(self):
nodes = self._create_nodes(5)
def test_no_version_max_count_same(self):
conductors = self._create_conductors(5)
self.assertEqual((5, 5),
self.dbapi.backfill_version_column(self.context, 5))
for uuid in nodes:
node = self.dbapi.get_node_by_uuid(uuid)
self.assertEqual(self.node_ver, node.version)
for hostname in conductors:
conductor = self.dbapi.get_conductor(hostname)
self.assertEqual(self.conductor_ver, conductor.version)


@@ -368,6 +368,23 @@ def get_test_conductor(**kw):
}
def create_test_conductor(**kw):
"""Create test conductor entry in DB and return Conductor DB object.
Function to be used to create test Conductor objects in the database.
:param kw: kwargs with overriding values for conductor's attributes.
:returns: Test Conductor DB object.
"""
conductor = get_test_conductor(**kw)
# Let DB generate ID if it isn't specified explicitly
if 'id' not in kw:
del conductor['id']
dbapi = db_api.get_instance()
return dbapi.register_conductor(conductor)
def get_test_ucs_info():
return {
"ucs_username": "admin",


@@ -0,0 +1,6 @@
---
upgrade:
- |
The ``conductors`` database table's ``version`` column is
populated as part of the data migration (via the command
``ironic-dbsync online_data_migrations``).