Fix DVR update for a subnet attached to multiple routers

Fix dvr_update_router_addvm to notify every router attached
to the subnet on which the VM boots.

In the DVR case, when a subnet is attached to only one router,
the subnet has a single distributed router interface, whose
device_owner is "network:router_interface_distributed". get_ports
in this method therefore returns only that one port, and breaking
out of the for loop is unnecessary.

But when a subnet is attached to multiple routers, get_ports
returns all of the distributed router interfaces, and every
router holding one of those interfaces must be notified when an
instance boots on the subnet. Breaking out of the loop after the
first port would skip the remaining routers, so the break is
removed.
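
A condensed sketch of the fixed loop, as it lands in the diff
below: with no early break, each distributed router that has an
interface on the subnet gets notified.

    for port in ports:
        router_id = port['device_id']
        router_dict = self.get_router(context, router_id)
        if router_dict.get('distributed', False):
            payload = {'subnet_id': subnet}
            self.l3_rpc_notifier.routers_updated(
                context, [router_id], None, payload)
            LOG.debug('DVR: dvr_update_router_addvm %s ', router_id)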

Change-Id: I3a5808e5b6e8b78abd1a5b924395844507da0764
Closes-Bug: #1427122
Co-Authored-By: Ryan Moats <rmoats@us.ibm.com>
Author: lzklibj
Date: 2015-03-02 02:13:41 -08:00
Committed by: Carl Baldwin
parent 5d5aa1b045
commit 24fa37e055
2 changed files with 35 additions and 7 deletions


@@ -106,7 +106,6 @@ class L3_DVRsch_db_mixin(l3agent_sch_db.L3AgentSchedulerDbMixin):
             filter_sub = {'fixed_ips': {'subnet_id': [subnet]},
                           'device_owner':
                           [n_const.DEVICE_OWNER_DVR_INTERFACE]}
-            router_id = None
             ports = self._core_plugin.get_ports(context, filters=filter_sub)
             for port in ports:
                 router_id = port['device_id']
@@ -115,8 +114,7 @@ class L3_DVRsch_db_mixin(l3agent_sch_db.L3AgentSchedulerDbMixin):
                     payload = {'subnet_id': subnet}
                     self.l3_rpc_notifier.routers_updated(
                         context, [router_id], None, payload)
-                    break
-        LOG.debug('DVR: dvr_update_router_addvm %s ', router_id)
+                    LOG.debug('DVR: dvr_update_router_addvm %s ', router_id)
 
     def get_dvr_routers_by_portid(self, context, port_id):
         """Gets the dvr routers on vmport subnets."""

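For context, the get_ports filter in the loop above selects every
distributed router interface on the VM's subnet. A minimal sketch of
what it matches, with illustrative port values taken from the test
below:

    # DEVICE_OWNER_DVR_INTERFACE == 'network:router_interface_distributed'
    filter_sub = {'fixed_ips': {'subnet_id': [subnet]},
                  'device_owner':
                  [n_const.DEVICE_OWNER_DVR_INTERFACE]}
    # With routers r1 and r2 both attached to the subnet, get_ports
    # returns one DVR interface port per router, e.g.:
    #   [{'device_id': 'r1', ...}, {'device_id': 'r2', ...}]
    # and the loop notifies each of them.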

@@ -948,7 +948,8 @@ class L3DvrSchedulerTestCase(testlib_api.SqlTestCase):
                 }
             ]
         }
-        dvr_port = {
+        dvr_ports = [
+            {
                 'id': 'dvr_port1',
                 'device_id': 'r1',
                 'device_owner': 'network:router_interface_distributed',
@@ -958,23 +959,52 @@ class L3DvrSchedulerTestCase(testlib_api.SqlTestCase):
                         'ip_address': '10.10.10.1'
                     }
                 ]
-        }
+            },
+            {
+                'id': 'dvr_port2',
+                'device_id': 'r2',
+                'device_owner': 'network:router_interface_distributed',
+                'fixed_ips': [
+                    {
+                        'subnet_id': '80947d4a-fbc8-484b-9f92-623a6bfcf3e0',
+                        'ip_address': '10.10.10.123'
+                    }
+                ]
+            }
+        ]
         r1 = {
             'id': 'r1',
             'distributed': True,
         }
+        r2 = {
+            'id': 'r2',
+            'distributed': True,
+        }
 
         with mock.patch(
             'neutron.db.db_base_plugin_v2.NeutronDbPluginV2' '.get_ports',
-            return_value=[dvr_port]),\
+            return_value=dvr_ports),\
                 mock.patch(
                     'neutron.manager.NeutronManager.get_service_plugins',
                     return_value=mock.Mock()),\
                 mock.patch('neutron.db.l3_db.L3_NAT_db_mixin.get_router',
-                           return_value=r1),\
+                           router_id='r1', return_value=r1),\
+                mock.patch('neutron.db.l3_db.L3_NAT_db_mixin.get_router',
+                           router_id='r2', return_value=r2),\
                 mock.patch('neutron.api.rpc.agentnotifiers.l3_rpc_agent_api'
                            '.L3AgentNotifyAPI'):
             self.dut.dvr_update_router_addvm(self.adminContext, port)
+            self.assertEqual(
+                self.dut.l3_rpc_notifier.routers_updated.call_count, 2)
+            payload = {'subnet_id': port['fixed_ips'][0]['subnet_id']}
+            expected_calls = [
+                mock.call.routers_updated(
+                    self.adminContext, ['r1'], None, payload),
+                mock.call.routers_updated(
+                    self.adminContext, ['r2'], None, payload)
+            ]
+            self.dut.l3_rpc_notifier.routers_updated.assert_has_calls(
+                expected_calls, any_order=True)
 
     def test_get_dvr_routers_by_portid(self):
         dvr_port = {
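
The test above verifies the two notifications with unittest.mock's
assert_has_calls(..., any_order=True), which checks that each
expected call occurred without constraining their order. A
self-contained illustration of that assertion pattern (standard
library mock, not part of the patch):

    from unittest import mock

    notifier = mock.Mock()
    payload = {'subnet_id': 's1'}
    # Record two calls, as dvr_update_router_addvm would make:
    notifier.routers_updated('ctx', ['r1'], None, payload)
    notifier.routers_updated('ctx', ['r2'], None, payload)
    # Passes even though the expected calls are listed in reverse:
    notifier.routers_updated.assert_has_calls(
        [mock.call('ctx', ['r2'], None, payload),
         mock.call('ctx', ['r1'], None, payload)],
        any_order=True)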