Fix update_port_postcommit and port not found with DVR

Updating DVR router interface ports was throwing errors
in the l2pop mechanism driver's update_port_postcommit
function.

The PortContext's regular port binding does not carry the
status of the port. For DVR router interface ports, the
per-host status is stored in the DVRPortBinding table.
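
For illustration, here is a minimal sketch of reading that
per-host status (the helper name dvr_port_statuses is made
up; get_dvr_port_bindings is the ml2 db function this patch
uses):

    # Sketch only: DVR router interface ports have one binding row per
    # host in the DVRPortBinding table, each with its own status, unlike
    # the single PortBinding row used for regular ports.
    from neutron.plugins.ml2 import db as ml2_db

    def dvr_port_statuses(session, port_id):
        # get_dvr_port_bindings() is the same helper used in the plugin
        # change below; each returned binding carries a host and a status.
        return {binding.host: binding.status
                for binding in ml2_db.get_dvr_port_bindings(session, port_id)}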

In the update_port method there was no DVR-specific code
to retrieve the port binding information from the
DVRPortBinding table.

This used to work because the PortContext in driver_context
simply returned the port status for all router interfaces.

With the recent refactor of driver_context this behavior
changed: the PortContext now returns _binding.status for
DVR router interface ports and _port.status for non-DVR
ports.

When update_port called update_port_postcommit with a
PortContext for a DVR router interface port, l2pop raised
an error saying that PortBinding does not have the
attribute 'status'.
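
A rough sketch of the dispatch described above; the class
and attribute layout are simplified stand-ins, not the
actual driver_context code:

    # Simplified sketch of the refactored behavior, not the real PortContext.
    class PortContextSketch(object):
        def __init__(self, port, binding):
            self._port = port          # port dict, always has 'status'
            self._binding = binding    # PortBinding or DVRPortBinding row

        @property
        def status(self):
            if self._port['device_owner'] == 'network:router_interface_distributed':
                # Only DVRPortBinding rows have a status column; passing a
                # regular PortBinding here raises the AttributeError that
                # l2pop reported.
                return self._binding.status
            return self._port['status']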

This caused the addition of a second IPv6 subnet to an
existing network to fail, because for IPv6 adding another
subnet to the network just updates the existing router
port with the new IPv6 prefix instead of creating an
additional port.

In the IPv4 case a separate port is still created for each
subnet that is added, so the problem did not show up there.
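
Roughly, the difference looks like the following;
attach_subnet_to_router is a hypothetical helper, not
Neutron's real add_router_interface logic:

    # Hypothetical sketch of the subnet-attach behavior described above.
    def attach_subnet_to_router(plugin, context, existing_port, subnet):
        if subnet['ip_version'] == 6 and existing_port is not None:
            # IPv6: reuse the router port already on the network and add the
            # new prefix, i.e. an update_port() that ends up in l2pop's
            # update_port_postcommit, where the failure occurred.
            fixed_ips = list(existing_port['fixed_ips'])
            fixed_ips.append({'subnet_id': subnet['id']})
            return plugin.update_port(context, existing_port['id'],
                                      {'port': {'fixed_ips': fixed_ips}})
        # IPv4: a separate router interface port is created for each subnet.
        return plugin.create_port(
            context, {'port': {'network_id': subnet['network_id'],
                               'fixed_ips': [{'subnet_id': subnet['id']}],
                               'device_owner': 'network:router_interface'}})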

This patch fixes the above issue in l2pop and allows
updates to DVR router interface ports to succeed.

Also, _find_ipv6_router_port_by_network was returning all
DVR ports, including the DVR CSNAT internal ports, which
are not router interface ports.

This patch also fixes that problem by returning False when
a DVR SNAT port is found.
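
The effect on the IPv6 router-port lookup is roughly this;
the loop and the constant value shown are assumptions,
since the real helper lives in l3_db and is unchanged by
this patch:

    # Hypothetical sketch of the lookup, not the actual l3_db helper.
    DEVICE_OWNER_DVR_SNAT = 'network:router_centralized_snat'  # assumed value

    def find_ipv6_router_port_by_network(mixin, router_ports, net_id):
        for port in router_ports:
            # With the override below, DVR SNAT (CSNAT) ports now report
            # False here and are no longer treated as router interfaces.
            if (port['network_id'] == net_id and
                    mixin._port_has_ipv6_address(port)):
                return port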

Closes-Bug: #1465434

Change-Id: Id243a4b3f30071226411ace6d12550fc099901cc
Author: Swaminathan Vasudevan  2015-06-29 16:19:49 -07:00
Parent: d4c5e961ad
Commit: 9eed4167b6
4 changed files with 131 additions and 13 deletions


@@ -311,6 +311,13 @@ class L3_NAT_with_dvr_db_mixin(l3_db.L3_NAT_db_mixin,
             context, router_interface_info, 'add')
         return router_interface_info
 
+    def _port_has_ipv6_address(self, port):
+        """Overridden to return False if DVR SNAT port."""
+        if port['device_owner'] == DEVICE_OWNER_DVR_SNAT:
+            return False
+        return super(L3_NAT_with_dvr_db_mixin,
+                     self)._port_has_ipv6_address(port)
+
     def remove_router_interface(self, context, router_id, interface_info):
         remove_by_port, remove_by_subnet = (
             self._validate_interface_info(interface_info, for_removal=True)


@@ -1101,6 +1101,7 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
         attrs = port[attributes.PORT]
         need_port_update_notify = False
         session = context.session
+        bound_mech_contexts = []
 
         # REVISIT: Serialize this operation with a semaphore to
         # prevent deadlock waiting to acquire a DB lock held by
@@ -1141,11 +1142,36 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
             mech_context = driver_context.PortContext(
                 self, context, updated_port, network, binding, levels,
                 original_port=original_port)
-            new_host_port = self._get_host_port_if_changed(mech_context, attrs)
+            # For DVR router interface ports we need to retrieve the
+            # DVRPortBinding context instead of the normal port context.
+            # The normal PortBinding context does not have the status
+            # of the ports, which is required by l2pop to process the
+            # postcommit events.
+            # NOTE: Sometimes during the update_port call, the DVR router
+            # interface port may not have the port binding, so we cannot
+            # create a generic binding list that will address both the
+            # DVR and non-DVR cases here.
+            # TODO(Swami): This code needs to be revisited.
+            if port_db['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
+                dvr_binding_list = db.get_dvr_port_bindings(session, id)
+                for dvr_binding in dvr_binding_list:
+                    levels = db.get_binding_levels(session, id,
+                                                   dvr_binding.host)
+                    dvr_mech_context = driver_context.PortContext(
+                        self, context, updated_port, network,
+                        dvr_binding, levels, original_port=original_port)
+                    self.mechanism_manager.update_port_precommit(
+                        dvr_mech_context)
+                    bound_mech_contexts.append(dvr_mech_context)
+            else:
+                self.mechanism_manager.update_port_precommit(mech_context)
+                bound_mech_contexts.append(mech_context)
+            new_host_port = self._get_host_port_if_changed(
+                mech_context, attrs)
             need_port_update_notify |= self._process_port_binding(
                 mech_context, attrs)
-            self.mechanism_manager.update_port_precommit(mech_context)
 
         # Notifications must be sent after the above transaction is complete
         kwargs = {
             'context': context,
@@ -1154,11 +1180,18 @@
         }
         registry.notify(resources.PORT, events.AFTER_UPDATE, self, **kwargs)
 
-        # TODO(apech) - handle errors raised by update_port, potentially
-        # by re-calling update_port with the previous attributes. For
-        # now the error is propogated to the caller, which is expected to
-        # either undo/retry the operation or delete the resource.
-        self.mechanism_manager.update_port_postcommit(mech_context)
+        # Note that DVR Interface ports will have bindings on
+        # multiple hosts, and so will have multiple mech_contexts,
+        # while other ports typically have just one.
+        # Since bound_mech_contexts has both the DVR and non-DVR
+        # contexts we can manage just with a single for loop.
+        try:
+            for mech_context in bound_mech_contexts:
+                self.mechanism_manager.update_port_postcommit(
+                    mech_context)
+        except ml2_exc.MechanismDriverError:
+            LOG.error(_LE("mechanism_manager.update_port_postcommit "
+                          "failed for port %s"), id)
 
         self.check_and_notify_security_group_member_changed(
             context, original_port, updated_port)
@@ -1167,7 +1200,13 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
         if original_port['admin_state_up'] != updated_port['admin_state_up']:
             need_port_update_notify = True
 
+        # NOTE: In the case of DVR ports, the port binding is done after
+        # router scheduling when sync_routers is called, and so this call
+        # below may not be required for DVR routed interfaces. But still,
+        # since we don't have the mech_context for the DVR router interfaces
+        # at certain times, we just pass the port context and return it, so
+        # that we don't disturb other methods that are expecting a return
+        # value.
         bound_context = self._bind_port_if_needed(
             mech_context,
             allow_notify=True,


@@ -244,6 +244,31 @@ class L3DvrTestCase(testlib_api.SqlTestCase):
         self.assertTrue(cfips.called)
         self.assertTrue(gvm.called)
 
+    def setup_port_has_ipv6_address(self, port):
+        with mock.patch.object(l3_dvr_db.l3_db.L3_NAT_db_mixin,
+                               '_port_has_ipv6_address') as pv6:
+            pv6.return_value = True
+            result = self.mixin._port_has_ipv6_address(port)
+            return result, pv6
+
+    def test__port_has_ipv6_address_for_dvr_snat_port(self):
+        port = {
+            'id': 'my_port_id',
+            'device_owner': l3_const.DEVICE_OWNER_ROUTER_SNAT,
+        }
+        result, pv6 = self.setup_port_has_ipv6_address(port)
+        self.assertFalse(result)
+        self.assertFalse(pv6.called)
+
+    def test__port_has_ipv6_address_for_non_snat_ports(self):
+        port = {
+            'id': 'my_port_id',
+            'device_owner': l3_const.DEVICE_OWNER_DVR_INTERFACE,
+        }
+        result, pv6 = self.setup_port_has_ipv6_address(port)
+        self.assertTrue(result)
+        self.assertTrue(pv6.called)
+
     def test__delete_floatingip_agent_gateway_port(self):
         port = {
             'id': 'my_port_id',


@@ -1489,10 +1489,7 @@ class TestFaultyMechansimDriver(Ml2PluginV2FaultyDriverTestCase):
                     data = {'port': {'name': new_name}}
                     req = self.new_update_request('ports', data, port_id)
                     res = req.get_response(self.api)
-                    self.assertEqual(500, res.status_int)
-                    error = self.deserialize(self.fmt, res)
-                    self.assertEqual('MechanismDriverError',
-                                     error['NeutronError']['type'])
+                    self.assertEqual(200, res.status_int)
                     # Test if other mechanism driver was called
                     self.assertTrue(upp.called)
                     port = self._show('ports', port_id)
@@ -1500,6 +1497,56 @@
                     self._delete('ports', port['port']['id'])
 
+    def test_update_dvr_router_interface_port(self):
+        """Test validate dvr router interface update succeeds."""
+        host_id = 'host'
+        binding = models.DVRPortBinding(
+            port_id='port_id',
+            host=host_id,
+            router_id='old_router_id',
+            vif_type=portbindings.VIF_TYPE_OVS,
+            vnic_type=portbindings.VNIC_NORMAL,
+            status=constants.PORT_STATUS_DOWN)
+        with mock.patch.object(
+            mech_test.TestMechanismDriver,
+            'update_port_postcommit',
+            side_effect=ml2_exc.MechanismDriverError) as port_post,\
+                mock.patch.object(
+                    mech_test.TestMechanismDriver,
+                    'update_port_precommit') as port_pre,\
+                mock.patch.object(ml2_db,
+                                  'get_dvr_port_bindings') as dvr_bindings:
+            dvr_bindings.return_value = [binding]
+            port_pre.return_value = True
+            with self.network() as network:
+                with self.subnet(network=network) as subnet:
+                    subnet_id = subnet['subnet']['id']
+                    data = {'port': {
+                        'network_id': network['network']['id'],
+                        'tenant_id':
+                        network['network']['tenant_id'],
+                        'name': 'port1',
+                        'device_owner':
+                        'network:router_interface_distributed',
+                        'admin_state_up': 1,
+                        'fixed_ips':
+                        [{'subnet_id': subnet_id}]}}
+                    port_req = self.new_create_request('ports', data)
+                    port_res = port_req.get_response(self.api)
+                    self.assertEqual(201, port_res.status_int)
+                    port = self.deserialize(self.fmt, port_res)
+                    port_id = port['port']['id']
+                    new_name = 'a_brand_new_name'
+                    data = {'port': {'name': new_name}}
+                    req = self.new_update_request('ports', data, port_id)
+                    res = req.get_response(self.api)
+                    self.assertEqual(200, res.status_int)
+                    self.assertTrue(dvr_bindings.called)
+                    self.assertTrue(port_pre.called)
+                    self.assertTrue(port_post.called)
+                    port = self._show('ports', port_id)
+                    self.assertEqual(new_name, port['port']['name'])
+
 
 class TestMl2PluginCreateUpdateDeletePort(base.BaseTestCase):
 
     def setUp(self):