Skip PortNotFound when unbinding port

There might be cases where a user deletes a port before doing vif_detach.
With this patch, an info message is logged for such cases.

Change-Id: I60deab450b427f1a1b4ccb0bb5963ec30d255d48
Closes-Bug: #1685592
(cherry picked from commit 940c87d6c4)
Author: Vasyl Saienko, 2017-03-30 11:18:59 +03:00
Committed by: Pierre Riteau
parent dfa2fb6dcd
commit 71d1c26376
3 changed files with 29 additions and 0 deletions


@@ -88,6 +88,9 @@ def unbind_neutron_port(port_id, client=None):
     try:
         client.update_port(port_id, body)
+    # NOTE(vsaienko): Ignore if port was deleted before calling vif detach.
+    except neutron_exceptions.PortNotFoundClient:
+        LOG.info('Port %s was not found while unbinding.', port_id)
     except neutron_exceptions.NeutronClientException as e:
         msg = (_('Unable to clear binding profile for '
                  'neutron port %(port_id)s. Error: '

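For readers outside the Ironic codebase, the change amounts to tolerating a missing port when clearing its binding. Below is a minimal standalone sketch of that pattern, assuming python-neutronclient is installed; the unbind_port function name and the plain logging logger are illustrative stand-ins for Ironic's actual helpers, not the project's code:

import logging

from neutronclient.common import exceptions as neutron_exceptions

LOG = logging.getLogger(__name__)


def unbind_port(client, port_id):
    """Clear a neutron port's host binding, tolerating a deleted port."""
    body = {'port': {'binding:host_id': '', 'binding:profile': {}}}
    try:
        client.update_port(port_id, body)
    except neutron_exceptions.PortNotFoundClient:
        # The port was deleted (e.g. by the user) before vif detach;
        # that is harmless here, so log it and carry on.
        LOG.info('Port %s was not found while unbinding.', port_id)
    except neutron_exceptions.NeutronClientException:
        # Any other neutron failure is still an error; the real code
        # wraps it in Ironic's NetworkError before re-raising.
        raise

The design point is that only PortNotFoundClient is swallowed; every other NeutronClientException still propagates and fails the operation as before.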

@@ -686,3 +686,21 @@ class TestUnbindPort(base.TestCase):
         mock_client.assert_called_once_with()
         mock_client.return_value.update_port.assert_called_once_with(port_id,
                                                                      body)
+
+    @mock.patch.object(neutron, 'LOG')
+    def test_unbind_neutron_port_not_found(self, mock_log, mock_client):
+        port_id = 'fake-port-id'
+        mock_client.return_value.update_port.side_effect = (
+            neutron_client_exc.PortNotFoundClient())
+        body = {
+            'port': {
+                'binding:host_id': '',
+                'binding:profile': {}
+            }
+        }
+        neutron.unbind_neutron_port(port_id)
+        mock_client.assert_called_once_with()
+        mock_client.return_value.update_port.assert_called_once_with(port_id,
+                                                                     body)
+        mock_log.info.assert_called_once_with('Port %s was not found while '
+                                              'unbinding.', port_id)

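The test drives the new path with mock's side_effect attribute: assigning an exception instance to a mocked call makes that call raise it. A tiny self-contained illustration of that mechanism, outside Ironic's test base classes (the port id and body here are arbitrary placeholders):

from unittest import mock

from neutronclient.common import exceptions as neutron_client_exc

client = mock.Mock()
client.update_port.side_effect = neutron_client_exc.PortNotFoundClient()

try:
    client.update_port('fake-port-id', {'port': {}})
except neutron_client_exc.PortNotFoundClient:
    print('update_port raised PortNotFoundClient, as configured')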

@@ -0,0 +1,8 @@
+---
+fixes:
+  - Fixes a failure when deploying a node. This happened when a port or port
+    group's internal_info['tenant_vif_port_id'] still existed after the
+    corresponding neutron port was removed and prior to deletion of the
+    instance, causing future deployments of the bare metal node to fail. The
+    situation is now logged and does not block future deployments. See
+    https://bugs.launchpad.net/ironic/+bug/1685592 for details.