Fix GET /servers/detail host_status performance regression
Change I82b11b8866ac82b05eae04351605d52fa8b91453 moved the host_status extended server attribute processing from an extension to the main servers view builder. This, however, caused a regression in the detailed listing of servers because it didn't incorporate the caching mechanism used previously by the extension, so now, for each server with details when microversion 2.16 or greater is used (and the request passes the policy check), we get the host status per server even if we have multiple servers on the same host. This moves the host_status processing out of the show() method when listing servers with details and processes them in aggregate, similar to security groups and attached volumes. One catch is the show() method handles instances from down cells for us, so we have to handle that separately in the new host_status processing, but it's trivial (just don't get host_status for instances without a host field set). NOTE(mriedem): This backport does not revert commit 0cecd2ac32
since that change was only in Train. Change-Id: I8278d4ea993ed1600919e34c9759600c8c7dbb41 Closes-Bug: #1830260 (cherry picked from commit ab7d923ae7)
This commit is contained in:
parent
950e044af1
commit
ef10d8d9a6
|
@ -369,10 +369,6 @@ class ViewBuilder(common.ViewBuilder):
|
|||
show_extra_specs = False
|
||||
show_extended_attr = context.can(
|
||||
esa_policies.BASE_POLICY_NAME, fatal=False)
|
||||
show_host_status = False
|
||||
if (api_version_request.is_supported(request, min_version='2.16')):
|
||||
show_host_status = context.can(
|
||||
servers_policies.SERVERS % 'show:host_status', fatal=False)
|
||||
|
||||
instance_uuids = [inst['uuid'] for inst in instances]
|
||||
bdms = self._get_instance_bdms_in_multiple_cells(context,
|
||||
|
@ -385,11 +381,17 @@ class ViewBuilder(common.ViewBuilder):
|
|||
servers_dict = self._list_view(self.show, request, instances,
|
||||
coll_name, show_extra_specs,
|
||||
show_extended_attr=show_extended_attr,
|
||||
show_host_status=show_host_status,
|
||||
# We process host_status in aggregate.
|
||||
show_host_status=False,
|
||||
show_sec_grp=False,
|
||||
bdms=bdms,
|
||||
cell_down_support=cell_down_support)
|
||||
|
||||
if (api_version_request.is_supported(request, min_version='2.16') and
|
||||
context.can(servers_policies.SERVERS % 'show:host_status',
|
||||
fatal=False)):
|
||||
self._add_host_status(list(servers_dict["servers"]), instances)
|
||||
|
||||
self._add_security_grps(request, list(servers_dict["servers"]),
|
||||
instances)
|
||||
return servers_dict
|
||||
|
@ -558,6 +560,27 @@ class ViewBuilder(common.ViewBuilder):
|
|||
|
||||
return fault_dict
|
||||
|
||||
def _add_host_status(self, servers, instances):
|
||||
"""Adds the ``host_status`` field to the list of servers
|
||||
|
||||
This method takes care to filter instances from down cells since they
|
||||
do not have a host set and as such we cannot determine the host status.
|
||||
|
||||
:param servers: list of detailed server dicts for the API response
|
||||
body; this list is modified by reference by updating the server
|
||||
dicts within the list
|
||||
:param instances: list of Instance objects
|
||||
"""
|
||||
# Filter out instances from down cells which do not have a host field.
|
||||
instances = [instance for instance in instances if 'host' in instance]
|
||||
# Get the dict, keyed by instance.uuid, of host status values.
|
||||
host_statuses = self.compute_api.get_instances_host_statuses(instances)
|
||||
for server in servers:
|
||||
# Filter out anything that is not in the resulting dict because
|
||||
# we had to filter the list of instances above for down cells.
|
||||
if server['id'] in host_statuses:
|
||||
server['host_status'] = host_statuses[server['id']]
|
||||
|
||||
def _add_security_grps(self, req, servers, instances):
|
||||
if not len(servers):
|
||||
return
|
||||
|
|
|
@ -2026,6 +2026,7 @@ class ServersControllerTestV216(ServersControllerTest):
|
|||
super(ServersControllerTestV216, self).setUp()
|
||||
self.mock_get.side_effect = fakes.fake_compute_get(
|
||||
id=2, uuid=FAKE_UUID,
|
||||
host="node-fake",
|
||||
node="node-fake",
|
||||
reservation_id="r-1", launch_index=0,
|
||||
kernel_id=UUID1, ramdisk_id=UUID2,
|
||||
|
@ -2046,9 +2047,10 @@ class ServersControllerTestV216(ServersControllerTest):
|
|||
task_state=None,
|
||||
vm_state=vm_states.ACTIVE,
|
||||
power_state=1)
|
||||
self.useFixture(fixtures.MockPatchObject(
|
||||
compute_api.API, 'get_instance_host_status',
|
||||
return_value='UP')).mock
|
||||
self.mock_get_instance_host_status = self.useFixture(
|
||||
fixtures.MockPatchObject(
|
||||
compute_api.API, 'get_instance_host_status',
|
||||
return_value='UP')).mock
|
||||
|
||||
def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
|
||||
status="ACTIVE", progress=100):
|
||||
|
@ -2061,6 +2063,9 @@ class ServersControllerTestV216(ServersControllerTest):
|
|||
server_dict['server']['locked'] = False
|
||||
server_dict['server']["host_status"] = "UP"
|
||||
server_dict['server']["OS-EXT-SRV-ATTR:hostname"] = "server2"
|
||||
server_dict['server']['hostId'] = nova_utils.generate_hostid(
|
||||
'node-fake', server_dict['server']['tenant_id'])
|
||||
server_dict['server']["OS-EXT-SRV-ATTR:host"] = "node-fake"
|
||||
server_dict['server'][
|
||||
"OS-EXT-SRV-ATTR:hypervisor_hostname"] = "node-fake"
|
||||
server_dict['server']["OS-EXT-SRV-ATTR:kernel_id"] = UUID1
|
||||
|
@ -2097,6 +2102,7 @@ class ServersControllerTestV216(ServersControllerTest):
|
|||
for i in range(2):
|
||||
server = fakes.stub_instance_obj(context,
|
||||
id=2, uuid=FAKE_UUID,
|
||||
host="node-fake",
|
||||
node="node-fake",
|
||||
reservation_id="r-1", launch_index=0,
|
||||
kernel_id=UUID1, ramdisk_id=UUID2,
|
||||
|
@ -2129,6 +2135,7 @@ class ServersControllerTestV216(ServersControllerTest):
|
|||
|
||||
req = self.req('/fake/servers/detail')
|
||||
servers_list = self.controller.detail(req)
|
||||
self.assertEqual(2, len(servers_list['servers']))
|
||||
image_bookmark = "http://localhost/fake/images/10"
|
||||
flavor_bookmark = "http://localhost/fake/flavors/2"
|
||||
expected_server = self._get_server_data_dict(FAKE_UUID,
|
||||
|
@ -2137,6 +2144,9 @@ class ServersControllerTestV216(ServersControllerTest):
|
|||
progress=0)
|
||||
|
||||
self.assertIn(expected_server['server'], servers_list['servers'])
|
||||
# We should have only gotten the host status once per host (and the
|
||||
# 2 servers in the response are using the same host).
|
||||
self.mock_get_instance_host_status.assert_called_once()
|
||||
|
||||
|
||||
class ServersControllerTestV219(ServersControllerTest):
|
||||
|
|
Loading…
Reference in New Issue