diff --git a/neutron/api/rpc/handlers/l3_rpc.py b/neutron/api/rpc/handlers/l3_rpc.py
index 3da7cf247a2..3332d114a1c 100644
--- a/neutron/api/rpc/handlers/l3_rpc.py
+++ b/neutron/api/rpc/handlers/l3_rpc.py
@@ -193,8 +193,8 @@ class L3RpcCallback(object):
             # Ports that are DVR interfaces have multiple bindings (based on
             # of hosts on which DVR router interfaces are spawned). Such
             # bindings are created/updated here by invoking
-            # update_dvr_port_binding
-            self.plugin.update_dvr_port_binding(context, port['id'],
+            # update_distributed_port_binding
+            self.plugin.update_distributed_port_binding(context, port['id'],
                                                 {'port':
                                                  {portbindings.HOST_ID: host,
                                                   'device_id': router_id}
diff --git a/neutron/db/l3_dvrscheduler_db.py b/neutron/db/l3_dvrscheduler_db.py
index b87d4e04d81..1a3c1e20478 100644
--- a/neutron/db/l3_dvrscheduler_db.py
+++ b/neutron/db/l3_dvrscheduler_db.py
@@ -184,9 +184,8 @@ class L3_DVRsch_db_mixin(l3agent_sch_db.L3AgentSchedulerDbMixin):
                 admin_context, filters=filter_rtr)
             for port in int_ports:
                 dvr_binding = (ml2_db.
-                               get_dvr_port_binding_by_host(context.session,
-                                                            port['id'],
-                                                            port_host))
+                               get_distributed_port_binding_by_host(
+                                   context.session, port['id'], port_host))
                 if dvr_binding:
                     # unbind this port from router
                     dvr_binding['router_id'] = None
diff --git a/neutron/plugins/ml2/db.py b/neutron/plugins/ml2/db.py
index 32feaf8996c..a5bf670f765 100644
--- a/neutron/plugins/ml2/db.py
+++ b/neutron/plugins/ml2/db.py
@@ -131,7 +131,7 @@ def clear_binding_levels(session, port_id, host):
                                     'host': host})
 
 
-def ensure_dvr_port_binding(session, port_id, host, router_id=None):
+def ensure_distributed_port_binding(session, port_id, host, router_id=None):
     record = (session.query(models.DistributedPortBinding).
               filter_by(port_id=port_id, host=host).first())
     if record:
@@ -149,15 +149,15 @@ def ensure_dvr_port_binding(session, port_id, host, router_id=None):
             session.add(record)
             return record
     except db_exc.DBDuplicateEntry:
-        LOG.debug("DVR Port %s already bound", port_id)
+        LOG.debug("Distributed Port %s already bound", port_id)
         return (session.query(models.DistributedPortBinding).
                 filter_by(port_id=port_id, host=host).one())
 
 
-def delete_dvr_port_binding_if_stale(session, binding):
+def delete_distributed_port_binding_if_stale(session, binding):
     if not binding.router_id and binding.status == n_const.PORT_STATUS_DOWN:
         with session.begin(subtransactions=True):
-            LOG.debug("DVR: Deleting binding %s", binding)
+            LOG.debug("Distributed port: Deleting binding %s", binding)
             session.delete(binding)
 
 
@@ -264,9 +264,9 @@ def get_port_binding_host(session, port_id):
     return query.host
 
 
-def generate_dvr_port_status(session, port_id):
+def generate_distributed_port_status(session, port_id):
     # an OR'ed value of status assigned to parent port from the
-    # dvrportbinding bucket
+    # distributedportbinding bucket
     query = session.query(models.DistributedPortBinding)
     final_status = n_const.PORT_STATUS_BUILD
     for bind in query.filter(models.DistributedPortBinding.port_id == port_id):
@@ -277,24 +277,24 @@
     return final_status
 
 
-def get_dvr_port_binding_by_host(session, port_id, host):
+def get_distributed_port_binding_by_host(session, port_id, host):
     with session.begin(subtransactions=True):
         binding = (session.query(models.DistributedPortBinding).
                   filter(models.DistributedPortBinding.port_id.startswith(port_id),
                          models.DistributedPortBinding.host == host).first())
     if not binding:
-        LOG.debug("No binding for DVR port %(port_id)s with host "
+        LOG.debug("No binding for distributed port %(port_id)s with host "
                   "%(host)s", {'port_id': port_id, 'host': host})
     return binding
 
 
-def get_dvr_port_bindings(session, port_id):
+def get_distributed_port_bindings(session, port_id):
     with session.begin(subtransactions=True):
         bindings = (session.query(models.DistributedPortBinding).
                     filter(models.DistributedPortBinding.port_id.startswith(
                         port_id)).all())
     if not bindings:
-        LOG.debug("No bindings for DVR port %s", port_id)
+        LOG.debug("No bindings for distributed port %s", port_id)
     return bindings
 
 
diff --git a/neutron/plugins/ml2/drivers/l2pop/db.py b/neutron/plugins/ml2/drivers/l2pop/db.py
index 7e7f5b1ed53..d1dd530d5c9 100644
--- a/neutron/plugins/ml2/drivers/l2pop/db.py
+++ b/neutron/plugins/ml2/drivers/l2pop/db.py
@@ -70,7 +70,7 @@ def _get_active_network_ports(session, network_id):
     return query
 
 
-def get_nondvr_active_network_ports(session, network_id):
+def get_nondistributed_active_network_ports(session, network_id):
     query = _get_active_network_ports(session, network_id)
     query = query.filter(models_v2.Port.device_owner !=
                          const.DEVICE_OWNER_DVR_INTERFACE)
@@ -78,7 +78,7 @@
             if get_agent_ip(agent)]
 
 
-def get_dvr_active_network_ports(session, network_id):
+def get_distributed_active_network_ports(session, network_id):
     with session.begin(subtransactions=True):
         query = session.query(ml2_models.DistributedPortBinding,
                               agents_db.Agent)
diff --git a/neutron/plugins/ml2/drivers/l2pop/mech_driver.py b/neutron/plugins/ml2/drivers/l2pop/mech_driver.py
index 622f6388cd3..3b7b9acb8cc 100644
--- a/neutron/plugins/ml2/drivers/l2pop/mech_driver.py
+++ b/neutron/plugins/ml2/drivers/l2pop/mech_driver.py
@@ -169,9 +169,10 @@ class L2populationMechanismDriver(api.MechanismDriver):
                               'network_type': segment['network_type'],
                               'ports': {}}}
         tunnel_network_ports = (
-            l2pop_db.get_dvr_active_network_ports(session, network_id))
+            l2pop_db.get_distributed_active_network_ports(session, network_id))
         fdb_network_ports = (
-            l2pop_db.get_nondvr_active_network_ports(session, network_id))
+            l2pop_db.get_nondistributed_active_network_ports(session,
+                                                             network_id))
         ports = agent_fdb_entries[network_id]['ports']
         ports.update(self._get_tunnels(
             fdb_network_ports + tunnel_network_ports,
diff --git a/neutron/plugins/ml2/plugin.py b/neutron/plugins/ml2/plugin.py
index 17d47aae22d..0d2c1ae38ce 100644
--- a/neutron/plugins/ml2/plugin.py
+++ b/neutron/plugins/ml2/plugin.py
@@ -466,7 +466,7 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
                 # in the distributed case. Since the PortBinding
                 # instance will then be needed, it does not make sense
                 # to optimize this code to avoid fetching it.
-                cur_binding = db.get_dvr_port_binding_by_host(
+                cur_binding = db.get_distributed_port_binding_by_host(
                     session, port_id, orig_binding.host)
                 cur_context = driver_context.PortContext(
                     self, plugin_context, port, network, cur_binding, None,
@@ -1318,16 +1318,17 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
             # DVR and non-DVR cases here.
             # TODO(Swami): This code need to be revisited.
             if port_db['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
-                dvr_binding_list = db.get_dvr_port_bindings(session, id)
-                for dvr_binding in dvr_binding_list:
+                dist_binding_list = db.get_distributed_port_bindings(session,
+                                                                     id)
+                for dist_binding in dist_binding_list:
                     levels = db.get_binding_levels(session, id,
-                                                   dvr_binding.host)
-                    dvr_mech_context = driver_context.PortContext(
+                                                   dist_binding.host)
+                    dist_mech_context = driver_context.PortContext(
                         self, context, updated_port, network,
-                        dvr_binding, levels, original_port=original_port)
+                        dist_binding, levels, original_port=original_port)
                     self.mechanism_manager.update_port_precommit(
-                        dvr_mech_context)
-                    bound_mech_contexts.append(dvr_mech_context)
+                        dist_mech_context)
+                    bound_mech_contexts.append(dist_mech_context)
             else:
                 self.mechanism_manager.update_port_precommit(mech_context)
             self._setup_dhcp_agent_provisioning_component(
@@ -1376,7 +1377,7 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
             need_notify=need_port_update_notify)
         return bound_context.current
 
-    def _process_dvr_port_binding(self, mech_context, context, attrs):
+    def _process_distributed_port_binding(self, mech_context, context, attrs):
         session = mech_context._plugin_context.session
         binding = mech_context._binding
         port = mech_context.current
@@ -1393,7 +1394,7 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
         binding.host = attrs and attrs.get(portbindings.HOST_ID)
         binding.router_id = attrs and attrs.get('device_id')
 
-    def update_dvr_port_binding(self, context, id, port):
+    def update_distributed_port_binding(self, context, id, port):
         attrs = port[attributes.PORT]
 
         host = attrs and attrs.get(portbindings.HOST_ID)
@@ -1404,7 +1405,7 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
             return
 
         session = context.session
-        binding = db.get_dvr_port_binding_by_host(session, id, host)
+        binding = db.get_distributed_port_binding_by_host(session, id, host)
         device_id = attrs and attrs.get('device_id')
         router_id = binding and binding.get('router_id')
         update_required = (not binding or
@@ -1415,7 +1416,7 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
                 with session.begin(subtransactions=True):
                     orig_port = self.get_port(context, id)
                     if not binding:
-                        binding = db.ensure_dvr_port_binding(
+                        binding = db.ensure_distributed_port_binding(
                             session, id, host, router_id=device_id)
                     network = self.get_network(context,
                                                orig_port['network_id'])
@@ -1423,8 +1424,8 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
                     mech_context = driver_context.PortContext(self,
                         context, orig_port, network,
                         binding, levels, original_port=orig_port)
-                    self._process_dvr_port_binding(mech_context, context,
-                                                   attrs)
+                    self._process_distributed_port_binding(
+                        mech_context, context, attrs)
             except (os_db_exception.DBReferenceError, exc.PortNotFound):
                 LOG.debug("DVR Port %s has been deleted concurrently", id)
                 return
@@ -1468,7 +1469,8 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
         bound_mech_contexts = []
         device_owner = port['device_owner']
         if device_owner == const.DEVICE_OWNER_DVR_INTERFACE:
-            bindings = db.get_dvr_port_bindings(context.session, id)
+            bindings = db.get_distributed_port_bindings(context.session,
+                                                        id)
             for bind in bindings:
                 levels = db.get_binding_levels(context.session, id,
                                                bind.host)
@@ -1541,7 +1543,7 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
             network = self.get_network(plugin_context, port['network_id'])
         if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
-            binding = db.get_dvr_port_binding_by_host(
+            binding = db.get_distributed_port_binding_by_host(
                 session, port['id'], host)
             if not binding:
                 LOG.error(_LE("Binding info for DVR port %s not found"),
@@ -1604,7 +1606,7 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
                 self.mechanism_manager.update_port_precommit(mech_context)
                 updated = True
         elif port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
-            binding = db.get_dvr_port_binding_by_host(
+            binding = db.get_distributed_port_binding_by_host(
                 session, port['id'], host)
             if not binding:
                 return
@@ -1623,7 +1625,8 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
                 original_port = self._make_port_dict(port)
                 network = network or self.get_network(
                     context, original_port['network_id'])
-                port.status = db.generate_dvr_port_status(session, port['id'])
+                port.status = db.generate_distributed_port_status(session,
+                                                                  port['id'])
                 updated_port = self._make_port_dict(port)
                 levels = db.get_binding_levels(session, port_id, host)
                 mech_context = (driver_context.PortContext(
@@ -1645,7 +1648,7 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
                             **kwargs)
 
         if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
-            db.delete_dvr_port_binding_if_stale(session, binding)
+            db.delete_distributed_port_binding_if_stale(session, binding)
 
         return port['id']
@@ -1657,7 +1660,8 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
             LOG.debug("No Port match for: %s", port_id)
             return
         if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
-            bindings = db.get_dvr_port_bindings(context.session, port_id)
+            bindings = db.get_distributed_port_bindings(context.session,
+                                                        port_id)
             for b in bindings:
                 if b.host == host:
                     return port
diff --git a/neutron/tests/unit/plugins/ml2/drivers/l2pop/test_db.py b/neutron/tests/unit/plugins/ml2/drivers/l2pop/test_db.py
index 8dd5da67431..ee9558fc061 100644
--- a/neutron/tests/unit/plugins/ml2/drivers/l2pop/test_db.py
+++ b/neutron/tests/unit/plugins/ml2/drivers/l2pop/test_db.py
@@ -71,44 +71,44 @@ class TestL2PopulationDBTestCase(testlib_api.SqlTestCase):
             self.ctx.session.add(port_binding_cls(**binding_kwarg))
 
-    def test_get_dvr_active_network_ports(self):
+    def test_get_distributed_active_network_ports(self):
         self._setup_port_binding()
         # Register a L2 agent + A bunch of other agents on the same host
         helpers.register_l3_agent()
         helpers.register_dhcp_agent()
         helpers.register_ovs_agent()
-        tunnel_network_ports = l2pop_db.get_dvr_active_network_ports(
+        tunnel_network_ports = l2pop_db.get_distributed_active_network_ports(
             self.ctx.session, 'network_id')
         self.assertEqual(1, len(tunnel_network_ports))
         _, agent = tunnel_network_ports[0]
         self.assertEqual(constants.AGENT_TYPE_OVS, agent.agent_type)
 
-    def test_get_dvr_active_network_ports_no_candidate(self):
+    def test_get_distributed_active_network_ports_no_candidate(self):
         self._setup_port_binding()
         # Register a bunch of non-L2 agents on the same host
         helpers.register_l3_agent()
         helpers.register_dhcp_agent()
-        tunnel_network_ports = l2pop_db.get_dvr_active_network_ports(
+        tunnel_network_ports = l2pop_db.get_distributed_active_network_ports(
             self.ctx.session, 'network_id')
         self.assertEqual(0, len(tunnel_network_ports))
 
-    def test_get_nondvr_active_network_ports(self):
+    def test_get_nondistributed_active_network_ports(self):
         self._setup_port_binding(dvr=False)
         # Register a L2 agent + A bunch of other agents on the same host
         helpers.register_l3_agent()
         helpers.register_dhcp_agent()
         helpers.register_ovs_agent()
-        fdb_network_ports = l2pop_db.get_nondvr_active_network_ports(
+        fdb_network_ports = l2pop_db.get_nondistributed_active_network_ports(
             self.ctx.session, 'network_id')
         self.assertEqual(1, len(fdb_network_ports))
         _, agent = fdb_network_ports[0]
         self.assertEqual(constants.AGENT_TYPE_OVS, agent.agent_type)
 
-    def test_get_nondvr_active_network_ports_no_candidate(self):
+    def test_get_nondistributed_active_network_ports_no_candidate(self):
         self._setup_port_binding(dvr=False)
         # Register a bunch of non-L2 agents on the same host
         helpers.register_l3_agent()
         helpers.register_dhcp_agent()
-        fdb_network_ports = l2pop_db.get_nondvr_active_network_ports(
+        fdb_network_ports = l2pop_db.get_nondistributed_active_network_ports(
             self.ctx.session, 'network_id')
         self.assertEqual(0, len(fdb_network_ports))
diff --git a/neutron/tests/unit/plugins/ml2/drivers/l2pop/test_mech_driver.py b/neutron/tests/unit/plugins/ml2/drivers/l2pop/test_mech_driver.py
index 7e9d2b4ebc0..c0568e516c3 100644
--- a/neutron/tests/unit/plugins/ml2/drivers/l2pop/test_mech_driver.py
+++ b/neutron/tests/unit/plugins/ml2/drivers/l2pop/test_mech_driver.py
@@ -920,9 +920,11 @@ class TestL2PopulationMechDriver(base.BaseTestCase):
         with mock.patch.object(l2pop_db, 'get_agent_ip',
                                side_effect=agent_ip_side_effect),\
-                mock.patch.object(l2pop_db, 'get_nondvr_active_network_ports',
+                mock.patch.object(l2pop_db,
+                                  'get_nondistributed_active_network_ports',
                                   return_value=fdb_network_ports),\
-                mock.patch.object(l2pop_db, 'get_dvr_active_network_ports',
+                mock.patch.object(l2pop_db,
+                                  'get_distributed_active_network_ports',
                                   return_value=tunnel_network_ports):
             session = mock.Mock()
             agent = mock.Mock()
diff --git a/neutron/tests/unit/plugins/ml2/test_db.py b/neutron/tests/unit/plugins/ml2/test_db.py
index 32d32ccf75d..b680f0a646a 100644
--- a/neutron/tests/unit/plugins/ml2/test_db.py
+++ b/neutron/tests/unit/plugins/ml2/test_db.py
@@ -292,7 +292,8 @@ class Ml2DvrDBTestCase(testlib_api.SqlTestCase):
             self.ctx.session.add(router)
         return router
 
-    def _setup_dvr_binding(self, network_id, port_id, router_id, host_id):
+    def _setup_distributed_binding(self, network_id,
+                                   port_id, router_id, host_id):
         with self.ctx.session.begin(subtransactions=True):
             record = models.DistributedPortBinding(
                 port_id=port_id,
@@ -304,81 +305,85 @@ class Ml2DvrDBTestCase(testlib_api.SqlTestCase):
             self.ctx.session.add(record)
             return record
 
-    def test_ensure_dvr_port_binding_deals_with_db_duplicate(self):
+    def test_ensure_distributed_port_binding_deals_with_db_duplicate(self):
         network_id = 'foo_network_id'
         port_id = 'foo_port_id'
         router_id = 'foo_router_id'
         host_id = 'foo_host_id'
         self._setup_neutron_network(network_id, [port_id])
-        self._setup_dvr_binding(network_id, port_id, router_id, host_id)
+        self._setup_distributed_binding(network_id, port_id,
+                                        router_id, host_id)
         with mock.patch.object(query.Query, 'first') as query_first:
             query_first.return_value = []
             with mock.patch.object(ml2_db.LOG, 'debug') as log_trace:
-                binding = ml2_db.ensure_dvr_port_binding(
+                binding = ml2_db.ensure_distributed_port_binding(
                     self.ctx.session, port_id, host_id, router_id)
         self.assertTrue(query_first.called)
         self.assertTrue(log_trace.called)
         self.assertEqual(port_id, binding.port_id)
 
-    def test_ensure_dvr_port_binding(self):
+    def test_ensure_distributed_port_binding(self):
         network_id = 'foo_network_id'
         port_id = 'foo_port_id'
         self._setup_neutron_network(network_id, [port_id])
         router = self._setup_neutron_router()
-        ml2_db.ensure_dvr_port_binding(
+        ml2_db.ensure_distributed_port_binding(
             self.ctx.session, port_id, 'foo_host', router.id)
         expected = (self.ctx.session.query(models.DistributedPortBinding).
                     filter_by(port_id=port_id).one())
         self.assertEqual(port_id, expected.port_id)
 
-    def test_ensure_dvr_port_binding_multiple_bindings(self):
+    def test_ensure_distributed_port_binding_multiple_bindings(self):
         network_id = 'foo_network_id'
         port_id = 'foo_port_id'
         self._setup_neutron_network(network_id, [port_id])
         router = self._setup_neutron_router()
-        ml2_db.ensure_dvr_port_binding(
+        ml2_db.ensure_distributed_port_binding(
             self.ctx.session, port_id, 'foo_host_1', router.id)
-        ml2_db.ensure_dvr_port_binding(
+        ml2_db.ensure_distributed_port_binding(
             self.ctx.session, port_id, 'foo_host_2', router.id)
         bindings = (self.ctx.session.query(models.DistributedPortBinding).
                     filter_by(port_id=port_id).all())
         self.assertEqual(2, len(bindings))
 
-    def test_delete_dvr_port_binding_if_stale(self):
+    def test_delete_distributed_port_binding_if_stale(self):
         network_id = 'foo_network_id'
         port_id = 'foo_port_id'
         self._setup_neutron_network(network_id, [port_id])
-        binding = self._setup_dvr_binding(
+        binding = self._setup_distributed_binding(
             network_id, port_id, None, 'foo_host_id')
-        ml2_db.delete_dvr_port_binding_if_stale(self.ctx.session, binding)
+        ml2_db.delete_distributed_port_binding_if_stale(self.ctx.session,
+                                                        binding)
         count = (self.ctx.session.query(models.DistributedPortBinding).
                  filter_by(port_id=binding.port_id).count())
         self.assertFalse(count)
 
-    def test_get_dvr_port_binding_by_host_not_found(self):
-        port = ml2_db.get_dvr_port_binding_by_host(
+    def test_get_distributed_port_binding_by_host_not_found(self):
+        port = ml2_db.get_distributed_port_binding_by_host(
             self.ctx.session, 'foo_port_id', 'foo_host_id')
         self.assertIsNone(port)
 
-    def test_get_dvr_port_bindings_not_found(self):
-        port = ml2_db.get_dvr_port_bindings(self.ctx.session, 'foo_port_id')
+    def test_get_distributed_port_bindings_not_found(self):
+        port = ml2_db.get_distributed_port_bindings(self.ctx.session,
+                                                    'foo_port_id')
         self.assertFalse(len(port))
 
-    def test_get_dvr_port_bindings(self):
+    def test_get_distributed_port_bindings(self):
         network_id = 'foo_network_id'
         port_id_1 = 'foo_port_id_1'
         port_id_2 = 'foo_port_id_2'
         self._setup_neutron_network(network_id, [port_id_1, port_id_2])
         router = self._setup_neutron_router()
-        self._setup_dvr_binding(
+        self._setup_distributed_binding(
             network_id, port_id_1, router.id, 'foo_host_id_1')
-        self._setup_dvr_binding(
+        self._setup_distributed_binding(
             network_id, port_id_1, router.id, 'foo_host_id_2')
-        ports = ml2_db.get_dvr_port_bindings(self.ctx.session, 'foo_port_id')
+        ports = ml2_db.get_distributed_port_bindings(self.ctx.session,
+                                                     'foo_port_id')
         self.assertEqual(2, len(ports))
 
-    def test_dvr_port_binding_deleted_by_port_deletion(self):
+    def test_distributed_port_binding_deleted_by_port_deletion(self):
         with self.ctx.session.begin(subtransactions=True):
             self.ctx.session.add(models_v2.Network(id='network_id'))
             device_owner = constants.DEVICE_OWNER_DVR_INTERFACE
@@ -408,5 +413,6 @@ class Ml2DvrDBTestCase(testlib_api.SqlTestCase):
             with self.ctx.session.begin(subtransactions=True):
                 self.ctx.session.delete(port)
         self.assertEqual([], warning_list)
-        ports = ml2_db.get_dvr_port_bindings(self.ctx.session, 'port_id')
+        ports = ml2_db.get_distributed_port_bindings(self.ctx.session,
+                                                     'port_id')
         self.assertEqual(0, len(ports))
diff --git a/neutron/tests/unit/plugins/ml2/test_plugin.py b/neutron/tests/unit/plugins/ml2/test_plugin.py
index e7141b64aaa..5ad19cfe8ea 100644
--- a/neutron/tests/unit/plugins/ml2/test_plugin.py
+++ b/neutron/tests/unit/plugins/ml2/test_plugin.py
@@ -1303,7 +1303,7 @@ class TestMl2PortBinding(Ml2PluginV2TestCase,
         self.assertTrue(update_mock.mock_calls)
         self.assertEqual('test', binding.host)
 
-    def test_process_dvr_port_binding_update_router_id(self):
+    def test_process_distributed_port_binding_update_router_id(self):
         host_id = 'host'
         binding = models.DistributedPortBinding(
             port_id='port_id',
@@ -1323,12 +1323,13 @@ class TestMl2PortBinding(Ml2PluginV2TestCase,
                                return_value=[]):
             mech_context = driver_context.PortContext(
                 self, context, mock_port, mock_network, binding, None)
-            plugin._process_dvr_port_binding(mech_context, context, attrs)
+            plugin._process_distributed_port_binding(mech_context,
+                                                     context, attrs)
 
         self.assertEqual(new_router_id, mech_context._binding.router_id)
         self.assertEqual(host_id, mech_context._binding.host)
 
-    def test_update_dvr_port_binding_on_concurrent_port_delete(self):
+    def test_update_distributed_port_binding_on_concurrent_port_delete(self):
         plugin = manager.NeutronManager.get_plugin()
         with self.port() as port:
             port = {
@@ -1336,20 +1337,21 @@ class TestMl2PortBinding(Ml2PluginV2TestCase,
                 portbindings.HOST_ID: 'foo_host',
             }
             with mock.patch.object(plugin, 'get_port', new=plugin.delete_port):
-                res = plugin.update_dvr_port_binding(
+                res = plugin.update_distributed_port_binding(
                     self.context, 'foo_port_id', {'port': port})
             self.assertIsNone(res)
 
-    def test_update_dvr_port_binding_on_non_existent_port(self):
+    def test_update_distributed_port_binding_on_non_existent_port(self):
         plugin = manager.NeutronManager.get_plugin()
         port = {
             'id': 'foo_port_id',
             portbindings.HOST_ID: 'foo_host',
         }
-        with mock.patch.object(ml2_db, 'ensure_dvr_port_binding') as mock_dvr:
-            plugin.update_dvr_port_binding(
+        with mock.patch.object(
+            ml2_db, 'ensure_distributed_port_binding') as mock_dist:
+            plugin.update_distributed_port_binding(
                 self.context, 'foo_port_id', {'port': port})
-            self.assertFalse(mock_dvr.called)
+            self.assertFalse(mock_dist.called)
 
 
 class TestMl2PortBindingNoSG(TestMl2PortBinding):
@@ -1973,8 +1975,8 @@ class TestFaultyMechansimDriver(Ml2PluginV2FaultyDriverTestCase):
 
             self._delete('ports', port['port']['id'])
 
-    def test_update_dvr_router_interface_port(self):
-        """Test validate dvr router interface update succeeds."""
+    def test_update_distributed_router_interface_port(self):
+        """Test validate distributed router interface update succeeds."""
         host_id = 'host'
         binding = models.DistributedPortBinding(
             port_id='port_id',
@@ -1990,9 +1992,9 @@ class TestFaultyMechansimDriver(Ml2PluginV2FaultyDriverTestCase):
                 mock.patch.object(
                     mech_test.TestMechanismDriver,
                     'update_port_precommit') as port_pre,\
-                mock.patch.object(ml2_db,
-                                  'get_dvr_port_bindings') as dvr_bindings:
-            dvr_bindings.return_value = [binding]
+                mock.patch.object(
+                    ml2_db, 'get_distributed_port_bindings') as dist_bindings:
+            dist_bindings.return_value = [binding]
             port_pre.return_value = True
             with self.network() as network:
                 with self.subnet(network=network) as subnet:
@@ -2017,7 +2019,7 @@ class TestFaultyMechansimDriver(Ml2PluginV2FaultyDriverTestCase):
                     req = self.new_update_request('ports', data, port_id)
                     res = req.get_response(self.api)
                     self.assertEqual(200, res.status_int)
-                    self.assertTrue(dvr_bindings.called)
+                    self.assertTrue(dist_bindings.called)
                     self.assertTrue(port_pre.called)
                     self.assertTrue(port_post.called)
                     port = self._show('ports', port_id)
diff --git a/neutron/tests/unit/plugins/ml2/test_port_binding.py b/neutron/tests/unit/plugins/ml2/test_port_binding.py
index 09bf9bf4e5d..b4e177a64ee 100644
--- a/neutron/tests/unit/plugins/ml2/test_port_binding.py
+++ b/neutron/tests/unit/plugins/ml2/test_port_binding.py
@@ -212,7 +212,7 @@ class PortBindingTestCase(test_plugin.NeutronDbPluginV2TestCase):
             port_dict = plugin.get_port(ctx, port['port']['id'])
             self.assertEqual(const.PORT_STATUS_DOWN, port_dict['status'])
 
-    def test_dvr_binding(self):
+    def test_distributed_binding(self):
         ctx = context.get_admin_context()
         with self.port(device_owner=const.DEVICE_OWNER_DVR_INTERFACE) as port:
             port_id = port['port']['id']
@@ -223,9 +223,9 @@ class PortBindingTestCase(test_plugin.NeutronDbPluginV2TestCase):
             self.assertEqual('DOWN', port['port']['status'])
 
             # Update port to bind for a host.
-            self.plugin.update_dvr_port_binding(ctx, port_id, {'port': {
-                portbindings.HOST_ID: 'host-ovs-no_filter',
-                'device_id': 'router1'}})
+            self.plugin.update_distributed_port_binding(ctx, port_id, {'port':
+                {portbindings.HOST_ID: 'host-ovs-no_filter',
+                 'device_id': 'router1'}})
 
             # Get port and verify VIF type and status unchanged.
             port = self._show('ports', port_id)
@@ -267,15 +267,15 @@ class PortBindingTestCase(test_plugin.NeutronDbPluginV2TestCase):
                              port['port'][portbindings.VIF_TYPE])
             self.assertEqual('DOWN', port['port']['status'])
 
-    def test_dvr_binding_multi_host_status(self):
+    def test_distributed_binding_multi_host_status(self):
         ctx = context.get_admin_context()
         with self.port(device_owner=const.DEVICE_OWNER_DVR_INTERFACE) as port:
             port_id = port['port']['id']
 
             # Update port to bind for 1st host.
-            self.plugin.update_dvr_port_binding(ctx, port_id, {'port': {
-                portbindings.HOST_ID: 'host-ovs-no_filter',
-                'device_id': 'router1'}})
+            self.plugin.update_distributed_port_binding(ctx, port_id, {'port':
+                {portbindings.HOST_ID: 'host-ovs-no_filter',
+                 'device_id': 'router1'}})
 
             # Mark 1st device up.
             self.plugin.endpoints[0].update_device_up(
@@ -287,9 +287,9 @@ class PortBindingTestCase(test_plugin.NeutronDbPluginV2TestCase):
             self.assertEqual('ACTIVE', port['port']['status'])
 
             # Update port to bind for a 2nd host.
-            self.plugin.update_dvr_port_binding(ctx, port_id, {'port': {
-                portbindings.HOST_ID: 'host-bridge-filter',
-                'device_id': 'router1'}})
+            self.plugin.update_distributed_port_binding(ctx, port_id, {'port':
+                {portbindings.HOST_ID: 'host-bridge-filter',
+                 'device_id': 'router1'}})
 
             # Mark 2nd device up.
             self.plugin.endpoints[0].update_device_up(
@@ -318,7 +318,7 @@ class PortBindingTestCase(test_plugin.NeutronDbPluginV2TestCase):
             port = self._show('ports', port_id)
             self.assertEqual('DOWN', port['port']['status'])
 
-    def test_dvr_binding_update_unbound_host(self):
+    def test_distributed_binding_update_unbound_host(self):
         ctx = context.get_admin_context()
         with self.port(device_owner=const.DEVICE_OWNER_DVR_INTERFACE) as port:
             port_id = port['port']['id']
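
Reviewer sketch (not part of the patch): the snippet below shows one way the renamed ML2 DB helpers could be combined, mirroring the lookup-then-ensure flow in Ml2Plugin.update_distributed_port_binding above. Only get_distributed_port_binding_by_host and ensure_distributed_port_binding come from this change; the wrapper name refresh_distributed_binding and the sample argument values are hypothetical.

# Illustrative only -- assumes an open SQLAlchemy session; every name below
# other than the two ml2_db helpers is hypothetical.
from neutron.plugins.ml2 import db as ml2_db


def refresh_distributed_binding(session, port_id, host, router_id):
    # Fetch the per-host binding row for this distributed (DVR) port, if any.
    binding = ml2_db.get_distributed_port_binding_by_host(
        session, port_id, host)
    if not binding:
        # No binding for this host yet: create one, recording the router.
        binding = ml2_db.ensure_distributed_port_binding(
            session, port_id, host, router_id=router_id)
    return binding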