diff --git a/masakari/engine/drivers/taskflow/driver.py b/masakari/engine/drivers/taskflow/driver.py
index 6c020a3c..fe25d4cb 100644
--- a/masakari/engine/drivers/taskflow/driver.py
+++ b/masakari/engine/drivers/taskflow/driver.py
@@ -282,27 +282,29 @@ class TaskFlowDriver(driver.NotificationDriver):
         progress_details = []
         flow_details = conn.get_flows_for_book(
             notification.notification_uuid)
-        for flow in flow_details:
-            od = OrderedDict()
-            atom_details = list(conn.get_atoms_for_flow(flow.uuid))
+        if flow_details:
+            for flow in flow_details:
+                od = OrderedDict()
+                atom_details = list(conn.get_atoms_for_flow(flow.uuid))
 
-            for task in task_list:
-                for atom in atom_details:
-                    if task == atom.name:
-                        od[atom.name] = atom
+                for task in task_list:
+                    for atom in atom_details:
+                        if task == atom.name:
+                            od[atom.name] = atom
 
-            for key, value in od.items():
-                # Add progress_details only if tasks are executed and meta
-                # is available in which progress_details are stored.
-                if value.meta:
-                    progress_details_obj = (
-                        objects.NotificationProgressDetails.create(
-                            value.name,
-                            value.meta['progress'],
-                            value.meta['progress_details']['details']
-                            ['progress_details'],
-                            value.state))
+                for key, value in od.items():
+                    # Add progress_details only if tasks are executed and
+                    # meta is available in which progress_details are
+                    # stored.
+                    if value.meta and value.meta.get('progress_details'):
+                        progress_details_obj = (
+                            objects.NotificationProgressDetails.create(
+                                value.name,
+                                value.meta['progress'],
+                                value.meta['progress_details']['details']
+                                ['progress_details'],
+                                value.state))
 
-                    progress_details.append(progress_details_obj)
+                        progress_details.append(progress_details_obj)
 
         return progress_details
diff --git a/masakari/engine/manager.py b/masakari/engine/manager.py
index 7f5cf298..0bc71f28 100644
--- a/masakari/engine/manager.py
+++ b/masakari/engine/manager.py
@@ -349,7 +349,7 @@ class MasakariManager(manager.Manager):
             notification['recovery_workflow_details'] = progress_details
         except Exception:
             msg = (_('Failed to fetch notification recovery workflow details '
-                     'for %s'), notification.notification_uuid)
+                     'for %s') % notification.notification_uuid)
             LOG.exception(msg)
             raise exception.MasakariException(msg)
 
diff --git a/masakari/tests/unit/engine/drivers/taskflow/test_taskflow_driver.py b/masakari/tests/unit/engine/drivers/taskflow/test_taskflow_driver.py
index 481b6360..4625b2ec 100644
--- a/masakari/tests/unit/engine/drivers/taskflow/test_taskflow_driver.py
+++ b/masakari/tests/unit/engine/drivers/taskflow/test_taskflow_driver.py
@@ -14,6 +14,9 @@
 #    under the License.
 
 import mock
+from oslo_utils import timeutils
+from taskflow.persistence import models
+from taskflow.persistence import path_based
 
 from masakari import context
 from masakari.engine.drivers.taskflow import base
@@ -22,9 +25,13 @@ from masakari.engine.drivers.taskflow import host_failure
 from masakari import exception
 from masakari.objects import fields
 from masakari import test
+from masakari.tests.unit import fakes
 from masakari.tests import uuidsentinel
 
 
+NOW = timeutils.utcnow().replace(microsecond=0)
+
+
 class FakeFlow(object):
     """Fake flow class of taskflow."""
 
@@ -215,3 +222,123 @@ class TaskflowDriverTestCase(test.TestCase):
         self.assertFalse(mock_rh_flow.called)
         # Ensures that 'auto' flow executes as 'reserved_host' flow fails
         self.assertTrue(mock_auto_flow.called)
+
+    @mock.patch.object(path_based.PathBasedConnection, 'get_atoms_for_flow')
+    @mock.patch.object(path_based.PathBasedConnection, 'get_flows_for_book')
+    def test_get_notification_recovery_workflow_details(
+            self, mock_get_flows_for_book, mock_get_atoms_for_flow):
+
+        notification = fakes.create_fake_notification(
+            payload={
+                'event': 'LIFECYCLE', 'instance_uuid': uuidsentinel.fake_ins,
+                'vir_domain_event': 'STOPPED_FAILED'},
+            source_host_uuid=uuidsentinel.fake_host,
+            notification_uuid=uuidsentinel.fake_notification)
+
+        fd = models.FlowDetail('test', uuid=notification.notification_uuid)
+        atom1 = models.TaskDetail('StopInstanceTask',
+                                  uuid=uuidsentinel.atom_id_1)
+        atom1.meta = {
+            'progress': 1.0,
+            'progress_details': {
+                'at_progress': 1.0,
+                'details': {
+                    'progress_details': [
+                        {'timestamp': '2019-03-11 05:22:20.329171',
+                         'message': 'Stopping instance: '
+                                    '87c8ebc3-2a70-49f0-9280-d34662dc203d',
+                         'progress': 0.0},
+                        {'timestamp': '2019-03-11 05:22:28.902665',
+                         'message': "Stopped instance: "
+                                    "'87c8ebc3-2a70-49f0-9280-d34662dc203d'",
+                         'progress': 1.0}]}}}
+        atom1.state = 'SUCCESS'
+
+        atom2 = models.TaskDetail('ConfirmInstanceActiveTask',
+                                  uuid=uuidsentinel.atom_id_2)
+        atom2.meta = {
+            'progress': 1.0,
+            'progress_details': {
+                'at_progress': 1.0,
+                'details': {
+                    'progress_details': [
+                        {'timestamp': '2019-03-11 05:22:29.597303',
+                         'message': "Confirming instance "
+                                    "'87c8ebc3-2a70-49f0-9280-d34662dc203d' "
+                                    "vm_state is ACTIVE",
+                         'progress': 0.0},
+                        {'timestamp': '2019-03-11 05:22:31.916620',
+                         'message': "Confirmed instance "
+                                    "'87c8ebc3-2a70-49f0-9280-d34662dc203d'"
+                                    " vm_state is ACTIVE", 'progress': 1.0}]
+                }}}
+        atom2.state = 'SUCCESS'
+
+        atom3 = models.TaskDetail('StartInstanceTask',
+                                  uuid=uuidsentinel.atom_id_3)
+        atom3.meta = {
+            'progress': 1.0,
+            'progress_details': {
+                'at_progress': 1.0,
+                'details': {'progress_details': [
+                    {'timestamp': '2019-03-11 05:22:29.130876',
+                     'message': "Starting instance: "
+                                "'87c8ebc3-2a70-49f0-9280-d34662dc203d'",
+                     'progress': 0.0},
+                    {'timestamp': '2019-03-11 05:22:29.525882', 'message':
+                        "Instance started: "
+                        "'87c8ebc3-2a70-49f0-9280-d34662dc203d'", 'progress':
+                        1.0}]}}}
+        atom3.state = 'SUCCESS'
+
+        def fd_generator():
+            yield fd
+
+        def atom_detail_generator():
+            for atom in [atom1, atom2, atom3]:
+                yield atom
+
+        flow_details = fd_generator()
+        atom_details = atom_detail_generator()
+        mock_get_flows_for_book.return_value = flow_details
+        mock_get_atoms_for_flow.return_value = atom_details
+        driver.PERSISTENCE_BACKEND = 'memory://'
+
+        progress_details = (
+            self.taskflow_driver.get_notification_recovery_workflow_details(
+                self.ctxt, 'auto', notification))
+
+        # list of NotificationProgressDetails objects
+        expected_result = []
+        expected_result.append((
+            fakes.create_fake_notification_progress_details(
+                name=atom1.name,
+                uuid=atom1.uuid,
+                progress=atom1.meta['progress'],
+                state=atom1.state,
+                progress_details=atom1.meta['progress_details']
+                ['details']['progress_details'])))
+        expected_result.append((
+            fakes.create_fake_notification_progress_details(
+                name=atom3.name,
+                uuid=atom3.uuid,
+                progress=atom3.meta['progress'],
+                state=atom3.state,
+                progress_details=atom3.meta['progress_details']
+                ['details']['progress_details'])))
+        expected_result.append((
+            fakes.create_fake_notification_progress_details(
+                name=atom2.name,
+                uuid=atom2.uuid,
+                progress=atom2.meta['progress'],
+                state=atom2.state,
+                progress_details=atom2.meta['progress_details']
+                ['details']['progress_details'])))
+
+        self.assertIsNotNone(progress_details)
+        mock_get_flows_for_book.assert_called_once()
+        mock_get_atoms_for_flow.assert_called_once()
+
+        self.assertItemsEqual(expected_result[0], progress_details[0])
+        self.assertItemsEqual(expected_result[1], progress_details[1])
+        self.assertItemsEqual(expected_result[2], progress_details[2])
diff --git a/masakari/tests/unit/fakes.py b/masakari/tests/unit/fakes.py
index 29bea478..ace5d981 100644
--- a/masakari/tests/unit/fakes.py
+++ b/masakari/tests/unit/fakes.py
@@ -188,3 +188,11 @@ def create_fake_failover_segment(name='fake_segment', id=1, description=None,
     return objects.FailoverSegment(
         name=name, id=id, description=description, service_type=service_type,
         recovery_method=recovery_method, uuid=uuid)
+
+
+def create_fake_notification_progress_details(
+        name, uuid, progress, state, progress_details):
+
+    return objects.NotificationProgressDetails(
+        name=name, uuid=uuid, progress=progress, state=state,
+        progress_details=progress_details)