Merge "Introduce ``cache_it`` and ``cache_clear``"

Zuul 2018-10-24 11:46:10 +00:00 committed by Gerrit Code Review
commit 6d0d5896ec
22 changed files with 442 additions and 302 deletions

View File

@ -358,8 +358,6 @@ class ResourceCollectionBase(ResourceBase):
adapter=utils.get_members_identities)
"""A tuple with the members identities"""
_members = None # caching variable
def __init__(self, connector, path, redfish_version=None):
"""A class representing the base of any Redfish resource collection
@ -395,24 +393,20 @@ class ResourceCollectionBase(ResourceBase):
return self._resource_type(self._conn, identity,
redfish_version=self.redfish_version)
@utils.cache_it
def get_members(self):
"""Return a list of ``_resource_type`` objects present in collection
:returns: A list of ``_resource_type`` objects
"""
if self._members is None:
self._members = [self.get_member(id_)
for id_ in self.members_identities]
return [self.get_member(id_) for id_ in self.members_identities]
for m in self._members:
m.refresh(force=False)
return self._members
def _do_refresh(self, force=False):
def _do_refresh(self, force):
"""Do refresh related activities.
Undefine the `_members` attribute here for fresh evaluation in
subsequent calls to `get_members()` method. Other similar activities
Invalidate / undefine the cache attributes here for fresh evaluation
in subsequent calls to the `get_members()` method. Other similar
activities may follow in the future, if needed.
"""
self._members = None
super(ResourceCollectionBase, self)._do_refresh(force=force)
utils.cache_clear(self, force)
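To make the behavioural change concrete, here is a hypothetical caller-side sketch (the ``my_collection`` name is assumed; the semantics follow the ``cache_it``/``cache_clear`` helpers introduced in ``sushy/utils.py`` later in this change):

    members = my_collection.get_members()          # first call: built and cached
    assert members is my_collection.get_members()  # cached list object is reused

    my_collection.invalidate()
    my_collection.refresh(force=False)   # light refresh; _do_refresh() runs
                                         # utils.cache_clear(self, force=False)
    # the cached members are kept but marked stale; the next get_members()
    # call returns the same objects after refreshing each stale member in place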

View File

@ -77,8 +77,6 @@ class Manager(base.ResourceBase):
_actions = ActionsField('Actions', required=True)
_virtual_media = None
def __init__(self, connector, identity, redfish_version=None):
"""A class representing a Manager
@ -89,9 +87,9 @@ class Manager(base.ResourceBase):
"""
super(Manager, self).__init__(connector, identity, redfish_version)
def _do_refresh(self, force=False):
if self._virtual_media is not None:
self._virtual_media.invalidate(force)
def _do_refresh(self, force):
super(Manager, self)._do_refresh(force=force)
utils.cache_clear(self, force)
def get_supported_graphical_console_types(self):
"""Get the supported values for Graphical Console connection types.
@ -188,15 +186,11 @@ class Manager(base.ResourceBase):
LOG.info('The Manager %s is being reset', self.identity)
@property
@utils.cache_it
def virtual_media(self):
if self._virtual_media is None:
self._virtual_media = virtual_media.VirtualMediaCollection(
self._conn,
utils.get_sub_resource_path_by(self, 'VirtualMedia'),
redfish_version=self.redfish_version)
self._virtual_media.refresh(force=False)
return self._virtual_media
return virtual_media.VirtualMediaCollection(
self._conn, utils.get_sub_resource_path_by(self, 'VirtualMedia'),
redfish_version=self.redfish_version)
class ManagerCollection(base.ResourceCollectionBase):

View File

@ -18,6 +18,7 @@ import logging
from sushy import exceptions
from sushy.resources import base
from sushy.resources.sessionservice import session
from sushy import utils
LOG = logging.getLogger(__name__)
@ -36,8 +37,6 @@ class SessionService(base.ResourceBase):
service_enabled = base.Field('ServiceEnabled')
"""Tells us if session service is enabled"""
_sessions = None # ref to SessionCollection instance
session_timeout = base.Field('SessionTimeout')
"""The session service timeout"""
@ -66,29 +65,26 @@ class SessionService(base.ResourceBase):
return sessions_col.get('@odata.id')
@property
@utils.cache_it
def sessions(self):
"""Property to provide reference to the `SessionCollection` instance
It is calculated once, the first time it is queried. On refresh,
this property gets reset.
"""
if self._sessions is None:
self._sessions = session.SessionCollection(
self._conn, self._get_sessions_collection_path(),
redfish_version=self.redfish_version)
return session.SessionCollection(
self._conn, self._get_sessions_collection_path(),
redfish_version=self.redfish_version)
self._sessions.refresh(force=False)
return self._sessions
def _do_refresh(self, force=False):
def _do_refresh(self, force):
"""Do custom resource specific refresh activities
On refresh, all sub-resources are marked as stale, i.e.
greedy-refresh not done for them unless forced by ``force``
argument.
"""
if self._sessions is not None:
self._sessions.invalidate(force)
super(SessionService, self)._do_refresh(force=force)
utils.cache_clear(self, force)
def close_session(self, session_uri):
"""This function is for closing a session based on its id.

View File

@ -19,6 +19,7 @@ from sushy import exceptions
from sushy.resources import base
from sushy.resources import common
from sushy.resources import settings
from sushy import utils
LOG = logging.getLogger(__name__)
@ -56,7 +57,13 @@ class Bios(base.ResourceBase):
_actions = ActionsField('Actions')
_pending_settings_resource = None
@property
@utils.cache_it
def _pending_settings_resource(self):
"""Pending BIOS settings resource"""
return Bios(
self._conn, self._settings.resource_uri,
redfish_version=self.redfish_version)
@property
def pending_attributes(self):
@ -65,13 +72,6 @@ class Bios(base.ResourceBase):
BIOS attributes that have been committed to the system,
but a system restart is necessary for them to take effect
"""
if not self._pending_settings_resource:
self._pending_settings_resource = Bios(
self._conn,
self._settings.resource_uri,
redfish_version=self.redfish_version)
self._pending_settings_resource.refresh(force=False)
return self._pending_settings_resource.attributes
def set_attribute(self, key, value):
@ -97,8 +97,8 @@ class Bios(base.ResourceBase):
"""
self._settings.commit(self._conn,
{'Attributes': value})
if self._pending_settings_resource:
self._pending_settings_resource.invalidate()
utils.cache_clear(self, force_refresh=False,
only_these=['_pending_settings_resource'])
def _get_reset_bios_action_element(self):
actions = self._actions
@ -155,5 +155,5 @@ class Bios(base.ResourceBase):
greedy-refresh not done for them unless forced by ``force``
argument.
"""
if self._pending_settings_resource is not None:
self._pending_settings_resource.invalidate(force)
super(Bios, self)._do_refresh(force=force)
utils.cache_clear(self, force)
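A hedged sketch of the selective invalidation now done by ``set_attribute``/``set_attributes`` (the ``sys_bios`` name is assumed):

    sys_bios.pending_attributes      # first access builds and caches the
                                     # pending-settings Bios sub-resource
    sys_bios.set_attribute('ProcTurboMode', 'Disabled')
    # cache_clear(..., only_these=['_pending_settings_resource']) marks only
    # that cached sub-resource stale; other cached values stay untouched
    sys_bios.pending_attributes      # next access re-fetches the pending settings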

View File

@ -18,6 +18,7 @@ import logging
from sushy.resources import base
from sushy.resources import common
from sushy.resources import constants as res_cons
from sushy import utils
LOG = logging.getLogger(__name__)
@ -49,13 +50,12 @@ class EthernetInterface(base.ResourceBase):
class EthernetInterfaceCollection(base.ResourceCollectionBase):
_summary = None
@property
def _resource_type(self):
return EthernetInterface
@property
@utils.cache_it
def summary(self):
"""Summary of MAC addresses and interfaces state
@ -67,14 +67,12 @@ class EthernetInterfaceCollection(base.ResourceCollectionBase):
{'aa:bb:cc:dd:ee:ff': sushy.STATE_ENABLED,
'aa:bb:aa:aa:aa:aa': sushy.STATE_DISABLED}
"""
if self._summary is None:
mac_dict = {}
for eth in self.get_members():
if eth.mac_address is not None and eth.status is not None:
if eth.status.health == res_cons.HEALTH_OK:
mac_dict[eth.mac_address] = eth.status.state
self._summary = mac_dict
return self._summary
mac_dict = {}
for eth in self.get_members():
if eth.mac_address is not None and eth.status is not None:
if eth.status.health == res_cons.HEALTH_OK:
mac_dict[eth.mac_address] = eth.status.state
return mac_dict
def _do_refresh(self, force=False):
"""Do custom resource specific refresh activities
@ -84,4 +82,4 @@ class EthernetInterfaceCollection(base.ResourceCollectionBase):
argument.
"""
super(EthernetInterfaceCollection, self)._do_refresh(force)
self._summary = None
utils.cache_clear(self, force)
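A hypothetical caller-side sketch for the cached summary (the ``eth_if_col`` name is assumed):

    summary = eth_if_col.summary            # computed from get_members(), cached
    assert summary is eth_if_col.summary    # second access returns the cached dict

    eth_if_col.invalidate()
    eth_if_col.refresh(force=False)         # cache_clear() resets the cached dict
    # the next .summary access rebuilds the MAC-address/state mapping from
    # freshly retrieved members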

View File

@ -18,6 +18,7 @@ import logging
from sushy.resources import base
from sushy.resources import common
from sushy.resources.system import mappings as sys_maps
from sushy import utils
# Representation of Summary of Processor information
ProcessorSummary = collections.namedtuple('ProcessorSummary',
@ -104,10 +105,8 @@ class ProcessorCollection(base.ResourceCollectionBase):
def _resource_type(self):
return Processor
_summary = None
"""The summary of processors of the system in general detail"""
@property
@utils.cache_it
def summary(self):
"""Property to provide ProcessorSummary info
@ -117,25 +116,21 @@ class ProcessorCollection(base.ResourceCollectionBase):
:returns: A namedtuple containing the ``count`` of processors
in regards to logical CPUs, and their ``architecture``.
"""
if self._summary is None:
count, architecture = 0, None
for proc in self.get_members():
# Note(deray): It attempts to detect the number of CPU cores.
# It returns the number of logical CPUs.
if proc.total_threads is not None:
count += proc.total_threads
count, architecture = 0, None
for proc in self.get_members():
# Note(deray): It attempts to detect the number of CPU cores.
# It returns the number of logical CPUs.
if proc.total_threads is not None:
count += proc.total_threads
# Note(deray): Bail out of checking the architecture info
# if you have already got hold of any one of the processors'
# architecture information.
if (architecture is None
and proc.processor_architecture is not None):
architecture = proc.processor_architecture
# Note(deray): Bail out of checking the architecture info
# if you have already got hold of any one of the processors'
# architecture information.
if (architecture is None
and proc.processor_architecture is not None):
architecture = proc.processor_architecture
self._summary = ProcessorSummary(count=count,
architecture=architecture)
return self._summary
return ProcessorSummary(count=count, architecture=architecture)
def __init__(self, connector, path, redfish_version=None):
"""A class representing a ProcessorCollection
@ -155,6 +150,5 @@ class ProcessorCollection(base.ResourceCollectionBase):
greedy-refresh not done for them unless forced by ``force``
argument.
"""
super(ProcessorCollection, self)._do_refresh(force)
# Reset summary attribute
self._summary = None
super(ProcessorCollection, self)._do_refresh(force=force)
utils.cache_clear(self, force)

View File

@ -58,28 +58,22 @@ class SimpleStorage(base.ResourceBase):
class SimpleStorageCollection(base.ResourceCollectionBase):
"""Represents a collection of simple storage associated with system."""
_disks_sizes_bytes = None
@property
def _resource_type(self):
return SimpleStorage
@property
@utils.cache_it
def disks_sizes_bytes(self):
"""Sizes of each Disk in bytes in SimpleStorage collection resource.
Returns the list of cached values until it (or its parent resource)
is refreshed.
"""
if self._disks_sizes_bytes is None:
self._disks_sizes_bytes = sorted(
device.capacity_bytes
for simpl_stor in self.get_members()
for device in simpl_stor.devices
if device.status.state == res_cons.STATE_ENABLED
)
return self._disks_sizes_bytes
return sorted(device.capacity_bytes
for simpl_stor in self.get_members()
for device in simpl_stor.devices
if device.status.state == res_cons.STATE_ENABLED)
@property
def max_size_bytes(self):
@ -90,8 +84,6 @@ class SimpleStorageCollection(base.ResourceCollectionBase):
"""
return utils.max_safe(self.disks_sizes_bytes)
def _do_refresh(self, force=False):
super(SimpleStorageCollection, self)._do_refresh(force)
# Note(deray): undefine the attribute here for fresh creation in
# subsequent calls to it's exposed property.
self._disks_sizes_bytes = None
def _do_refresh(self, force):
super(SimpleStorageCollection, self)._do_refresh(force=force)
utils.cache_clear(self, force)

View File

@ -42,10 +42,6 @@ class Storage(base.ResourceBase):
adapter=utils.get_members_identities)
"""A tuple with the drive identities"""
_drives_sizes_bytes = None
_drives = None
_volumes = None # reference to VolumeCollection instance
def get_drive(self, drive_identity):
"""Given the drive identity return a ``Drive`` object
@ -57,6 +53,7 @@ class Storage(base.ResourceBase):
redfish_version=self.redfish_version)
@property
@utils.cache_it
def drives(self):
"""Return a list of `Drive` objects present in the storage resource.
@ -67,22 +64,17 @@ class Storage(base.ResourceBase):
:returns: A list of `Drive` objects
:raises: ResourceNotFoundError
"""
if self._drives is None:
self._drives = [
self.get_drive(id_) for id_ in self.drives_identities]
return self._drives
return [self.get_drive(id_) for id_ in self.drives_identities]
@property
@utils.cache_it
def drives_sizes_bytes(self):
"""Sizes of all Drives in bytes in Storage resource.
Returns the list of cached values until it (or its parent resource)
is refreshed.
"""
if self._drives_sizes_bytes is None:
self._drives_sizes_bytes = sorted(
drv.capacity_bytes for drv in self.drives)
return self._drives_sizes_bytes
return sorted(drv.capacity_bytes for drv in self.drives)
@property
def drives_max_size_bytes(self):
@ -90,6 +82,7 @@ class Storage(base.ResourceBase):
return utils.max_safe(self.drives_sizes_bytes)
@property
@utils.cache_it
def volumes(self):
"""Property to reference `VolumeCollection` instance
@ -98,50 +91,35 @@ class Storage(base.ResourceBase):
point). Here only the actual refresh of the sub-resource happens,
if resource is stale.
"""
if self._volumes is None:
self._volumes = volume.VolumeCollection(
self._conn, utils.get_sub_resource_path_by(self, 'Volumes'),
redfish_version=self.redfish_version)
return volume.VolumeCollection(
self._conn, utils.get_sub_resource_path_by(self, 'Volumes'),
redfish_version=self.redfish_version)
self._volumes.refresh(force=False)
return self._volumes
def _do_refresh(self, force=False):
def _do_refresh(self, force):
"""Do resource specific refresh activities."""
# Note(deray): undefine the attribute here for fresh evaluation in
# subsequent calls to it's exposed property.
self._drives_sizes_bytes = None
self._drives = None
# invalidate the nested resource
if self._volumes is not None:
self._volumes.invalidate(force)
# Note(deray): invalidate / undefine the attributes here for fresh
# evaluation in subsequent calls to their exposed properties.
super(Storage, self)._do_refresh(force=force)
utils.cache_clear(self, force)
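A hedged sketch of how the differently-typed cache entries on ``Storage`` behave across a refresh (the ``storage`` name is assumed):

    drives = storage.drives               # list of Drive resources, cached
    sizes = storage.drives_sizes_bytes    # plain list of ints, cached separately

    storage.invalidate()
    storage.refresh(force=False)          # _do_refresh() -> utils.cache_clear()
    # the cached Drive objects are kept but marked stale, while the plain list
    # of sizes is reset to None and gets recomputed on the next access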
class StorageCollection(base.ResourceCollectionBase):
"""This class represents the collection of Storage resources"""
_drives_sizes_bytes = None
_volumes_sizes_bytes = None
@property
def _resource_type(self):
return Storage
@property
@utils.cache_it
def drives_sizes_bytes(self):
"""Sizes of each Drive in bytes in Storage collection resource.
Returns the list of cached values until it (or its parent resource)
is refreshed.
"""
if self._drives_sizes_bytes is None:
self._drives_sizes_bytes = sorted(
drive_size
for storage_ in self.get_members()
for drive_size in storage_.drives_sizes_bytes
)
return self._drives_sizes_bytes
return sorted(drive_size for storage_ in self.get_members()
for drive_size in storage_.drives_sizes_bytes)
@property
def max_drive_size_bytes(self):
@ -153,19 +131,15 @@ class StorageCollection(base.ResourceCollectionBase):
return utils.max_safe(self.drives_sizes_bytes)
@property
@utils.cache_it
def volumes_sizes_bytes(self):
"""Sizes of each Volume in bytes in Storage collection resource.
Returns the list of cached values until it (or its parent resource)
is refreshed.
"""
if self._volumes_sizes_bytes is None:
self._volumes_sizes_bytes = sorted(
volume_size
for storage_ in self.get_members()
for volume_size in storage_.volumes.volumes_sizes_bytes)
return self._volumes_sizes_bytes
return sorted(volume_size for storage_ in self.get_members()
for volume_size in storage_.volumes.volumes_sizes_bytes)
@property
def max_volume_size_bytes(self):
@ -176,10 +150,7 @@ class StorageCollection(base.ResourceCollectionBase):
"""
return utils.max_safe(self.volumes_sizes_bytes)
def _do_refresh(self, force=False):
def _do_refresh(self, force):
"""Do resource specific refresh activities"""
super(StorageCollection, self)._do_refresh(force)
# Note(deray): undefine the attributes here for fresh evaluation in
# subsequent calls to their exposed properties.
self._drives_sizes_bytes = None
self._volumes_sizes_bytes = None
super(StorageCollection, self)._do_refresh(force=force)
utils.cache_clear(self, force)

View File

@ -37,24 +37,19 @@ class Volume(base.ResourceBase):
class VolumeCollection(base.ResourceCollectionBase):
"""This class represents the Storage Volume collection"""
_volumes_sizes_bytes = None
@property
def _resource_type(self):
return Volume
@property
@utils.cache_it
def volumes_sizes_bytes(self):
"""Sizes of all Volumes in bytes in VolumeCollection resource.
Returns the list of cached values until it (or its parent resource)
is refreshed.
"""
if self._volumes_sizes_bytes is None:
self._volumes_sizes_bytes = sorted(
vol.capacity_bytes
for vol in self.get_members())
return self._volumes_sizes_bytes
return sorted(vol.capacity_bytes for vol in self.get_members())
@property
def max_volume_size_bytes(self):
@ -68,7 +63,6 @@ class VolumeCollection(base.ResourceCollectionBase):
# NOTE(etingof): for backward compatibility
max_size_bytes = max_volume_size_bytes
def _do_refresh(self, force=False):
super(VolumeCollection, self)._do_refresh(force)
# invalidate the attribute
self._volumes_sizes_bytes = None
def _do_refresh(self, force):
super(VolumeCollection, self)._do_refresh(force=force)
utils.cache_clear(self, force)

View File

@ -123,21 +123,6 @@ class System(base.ResourceBase):
_actions = ActionsField('Actions', required=True)
# reference to ProcessorCollection instance
_processors = None
# reference to EthernetInterfaceCollection instance
_ethernet_interfaces = None
# reference to BIOS instance
_bios = None
# reference to SimpleStorageCollection instance
_simple_storage = None
# reference to StorageCollection instance
_storage = None
def __init__(self, connector, identity, redfish_version=None):
"""A class representing a ComputerSystem
@ -264,6 +249,7 @@ class System(base.ResourceBase):
return utils.get_sub_resource_path_by(self, 'Processors')
@property
@utils.cache_it
def processors(self):
"""Property to reference `ProcessorCollection` instance
@ -271,15 +257,12 @@ class System(base.ResourceBase):
this property is marked as stale (greedy-refresh not done).
Here the actual refresh of the sub-resource happens, if stale.
"""
if self._processors is None:
self._processors = processor.ProcessorCollection(
self._conn, self._get_processor_collection_path(),
redfish_version=self.redfish_version)
self._processors.refresh(force=False)
return self._processors
return processor.ProcessorCollection(
self._conn, self._get_processor_collection_path(),
redfish_version=self.redfish_version)
@property
@utils.cache_it
def ethernet_interfaces(self):
"""Property to reference `EthernetInterfaceCollection` instance
@ -287,17 +270,13 @@ class System(base.ResourceBase):
this property is marked as stale (greedy-refresh not done).
Here the actual refresh of the sub-resource happens, if stale.
"""
if self._ethernet_interfaces is None:
self._ethernet_interfaces = (
ethernet_interface.EthernetInterfaceCollection(
self._conn,
utils.get_sub_resource_path_by(self, "EthernetInterfaces"),
redfish_version=self.redfish_version))
self._ethernet_interfaces.refresh(force=False)
return self._ethernet_interfaces
return ethernet_interface.EthernetInterfaceCollection(
self._conn,
utils.get_sub_resource_path_by(self, "EthernetInterfaces"),
redfish_version=self.redfish_version)
@property
@utils.cache_it
def bios(self):
"""Property to reference `Bios` instance
@ -305,16 +284,13 @@ class System(base.ResourceBase):
this property is marked as stale (greedy-refresh not done).
Here the actual refresh of the sub-resource happens, if stale.
"""
if self._bios is None:
self._bios = bios.Bios(
self._conn,
utils.get_sub_resource_path_by(self, 'Bios'),
redfish_version=self.redfish_version)
self._bios.refresh(force=False)
return self._bios
return bios.Bios(
self._conn,
utils.get_sub_resource_path_by(self, 'Bios'),
redfish_version=self.redfish_version)
@property
@utils.cache_it
def simple_storage(self):
"""A collection of simple storage associated with system.
@ -330,16 +306,12 @@ class System(base.ResourceBase):
is missing.
:returns: `SimpleStorageCollection` instance
"""
if self._simple_storage is None:
self._simple_storage = sys_simple_storage.SimpleStorageCollection(
self._conn,
utils.get_sub_resource_path_by(self, "SimpleStorage"),
redfish_version=self.redfish_version)
self._simple_storage.refresh(force=False)
return self._simple_storage
return sys_simple_storage.SimpleStorageCollection(
self._conn, utils.get_sub_resource_path_by(self, "SimpleStorage"),
redfish_version=self.redfish_version)
@property
@utils.cache_it
def storage(self):
"""A collection of storage subsystems associated with system.
@ -356,31 +328,19 @@ class System(base.ResourceBase):
is missing.
:returns: `StorageCollection` instance
"""
if self._storage is None:
self._storage = sys_storage.StorageCollection(
self._conn, utils.get_sub_resource_path_by(self, "Storage"),
redfish_version=self.redfish_version)
return sys_storage.StorageCollection(
self._conn, utils.get_sub_resource_path_by(self, "Storage"),
redfish_version=self.redfish_version)
self._storage.refresh(force=False)
return self._storage
def _do_refresh(self, force=False):
def _do_refresh(self, force):
"""Do custom resource specific refresh activities
On refresh, all sub-resources are marked as stale, i.e.
greedy-refresh not done for them unless forced by ``force``
argument.
"""
if self._processors is not None:
self._processors.invalidate(force)
if self._ethernet_interfaces is not None:
self._ethernet_interfaces.invalidate(force)
if self._bios is not None:
self._bios.invalidate(force)
if self._simple_storage is not None:
self._simple_storage.invalidate(force)
if self._storage is not None:
self._storage.invalidate(force)
super(System, self)._do_refresh(force=force)
utils.cache_clear(self, force)
class SystemCollection(base.ResourceCollectionBase):
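For illustration, a hedged sketch of what the consolidated ``_do_refresh`` relies on (the ``system`` name is assumed; caching attributes follow the ``_cache_<method>`` naming from ``sushy/utils.py`` and only exist once the corresponding properties have been accessed):

    system.processors               # first access caches the ProcessorCollection
    system.bios                     # first access caches the Bios sub-resource
    sorted(system._cache_attr_names)
    # -> ['_cache_bios', '_cache_processors']  (grows as more properties are used)

    system.invalidate()
    system.refresh(force=False)     # one cache_clear() call marks every cached
                                    # sub-resource stale instead of five separate
                                    # invalidate() calls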

View File

@ -54,7 +54,6 @@ class ManagerTestCase(base.TestCase):
self.assertEqual(sushy.MANAGER_TYPE_BMC, self.manager.manager_type)
self.assertEqual('58893887-8974-2487-2389-841168418919',
self.manager.uuid)
self.assertIsNone(self.manager._virtual_media)
def test_get_supported_graphical_console_types(self):
# | GIVEN |
@ -243,8 +242,8 @@ class ManagerTestCase(base.TestCase):
self.conn.get.return_value.json.return_value = json.load(f)
# | WHEN & THEN |
self.assertIsInstance(self.manager.virtual_media,
virtual_media.VirtualMediaCollection)
vrt_media = self.manager.virtual_media
self.assertIsInstance(vrt_media, virtual_media.VirtualMediaCollection)
# On refreshing the manager instance...
with open('sushy/tests/unit/json_samples/manager.json', 'r') as f:
@ -254,8 +253,7 @@ class ManagerTestCase(base.TestCase):
self.manager.refresh(force=False)
# | WHEN & THEN |
self.assertIsNotNone(self.manager._virtual_media)
self.assertTrue(self.manager._virtual_media._is_stale)
self.assertTrue(vrt_media._is_stale)
# | GIVEN |
with open('sushy/tests/unit/json_samples/'
@ -265,7 +263,7 @@ class ManagerTestCase(base.TestCase):
# | WHEN & THEN |
self.assertIsInstance(self.manager.virtual_media,
virtual_media.VirtualMediaCollection)
self.assertFalse(self.manager._virtual_media._is_stale)
self.assertFalse(vrt_media._is_stale)
class ManagerCollectionTestCase(base.TestCase):

View File

@ -53,7 +53,6 @@ class SessionServiceTestCase(base.TestCase):
self.assertEqual('Session Service', self.sess_serv_inst.name)
self.assertEqual(30, self.sess_serv_inst.session_timeout)
self.assertEqual(exp_path, self.sess_serv_inst.path)
self.assertIsNone(self.sess_serv_inst._sessions)
def test__parse_attributes_missing_timeout(self):
self.sess_serv_inst.json.pop('SessionTimeout')
@ -134,8 +133,6 @@ class SessionServiceTestCase(base.TestCase):
self.conn.get.return_value.json.side_effect = successive_return_values
def test_sessions(self):
# check for the underneath variable value
self.assertIsNone(self.sess_serv_inst._sessions)
# | GIVEN |
self._setUp_sessions()
# | WHEN |
@ -166,8 +163,7 @@ class SessionServiceTestCase(base.TestCase):
self.sess_serv_inst.refresh(force=True)
# | WHEN & THEN |
self.assertIsNotNone(self.sess_serv_inst._sessions)
self.assertFalse(self.sess_serv_inst._sessions._is_stale)
self.assertFalse(self.sess_serv_inst.sessions._is_stale)
def test_close_session(self):
self.sess_serv_inst.close_session('session/identity')

View File

@ -103,7 +103,6 @@ class StorageTestCase(base.TestCase):
def test_drives_after_refresh(self):
self.storage.refresh()
self.assertIsNone(self.storage._drives)
self.conn.get.return_value.json.reset_mock()
successive_return_values = []
@ -120,7 +119,6 @@ class StorageTestCase(base.TestCase):
self.assertIsInstance(drv, drive.Drive)
def test_drives_max_size_bytes(self):
self.assertIsNone(self.storage._drives_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
successive_return_values = []
@ -139,7 +137,6 @@ class StorageTestCase(base.TestCase):
def test_drives_max_size_bytes_after_refresh(self):
self.storage.refresh()
self.assertIsNone(self.storage._drives_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
successive_return_values = []
@ -152,8 +149,6 @@ class StorageTestCase(base.TestCase):
self.assertEqual(899527000000, self.storage.drives_max_size_bytes)
def test_volumes(self):
# check for the underneath variable value
self.assertIsNone(self.storage._volumes)
# | GIVEN |
self.conn.get.return_value.json.reset_mock()
with open('sushy/tests/unit/json_samples/volume_collection.json') as f:
@ -184,8 +179,8 @@ class StorageTestCase(base.TestCase):
with open('sushy/tests/unit/json_samples/volume_collection.json') as f:
self.conn.get.return_value.json.return_value = json.load(f)
# | WHEN & THEN |
self.assertIsInstance(self.storage.volumes,
volume.VolumeCollection)
vols = self.storage.volumes
self.assertIsInstance(vols, volume.VolumeCollection)
# On refreshing the system instance...
with open('sushy/tests/unit/json_samples/storage.json') as f:
@ -195,8 +190,7 @@ class StorageTestCase(base.TestCase):
self.storage.refresh(force=False)
# | WHEN & THEN |
self.assertIsNotNone(self.storage._volumes)
self.assertTrue(self.storage._volumes._is_stale)
self.assertTrue(vols._is_stale)
# | GIVEN |
with open('sushy/tests/unit/json_samples/volume_collection.json') as f:
@ -204,7 +198,6 @@ class StorageTestCase(base.TestCase):
# | WHEN & THEN |
self.assertIsInstance(self.storage.volumes,
volume.VolumeCollection)
self.assertFalse(self.storage._volumes._is_stale)
class StorageCollectionTestCase(base.TestCase):
@ -260,7 +253,6 @@ class StorageCollectionTestCase(base.TestCase):
899527000000], self.stor_col.drives_sizes_bytes)
def test_max_drive_size_bytes(self):
self.assertIsNone(self.stor_col._drives_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
successive_return_values = []
@ -281,7 +273,6 @@ class StorageCollectionTestCase(base.TestCase):
def test_max_drive_size_bytes_after_refresh(self):
self.stor_col.refresh(force=False)
self.assertIsNone(self.stor_col._drives_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
successive_return_values = []
@ -311,7 +302,6 @@ class StorageCollectionTestCase(base.TestCase):
self.stor_col.volumes_sizes_bytes)
def test_max_volume_size_bytes(self):
self.assertIsNone(self.stor_col._volumes_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
successive_return_values = []
@ -332,7 +322,6 @@ class StorageCollectionTestCase(base.TestCase):
def test_max_volume_size_bytes_after_refresh(self):
self.stor_col.refresh(force=False)
self.assertIsNone(self.stor_col._volumes_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
successive_return_values = []

View File

@ -86,7 +86,6 @@ class VolumeCollectionTestCase(base.TestCase):
self.assertEqual(3, len(members))
def test_max_size_bytes(self):
self.assertIsNone(self.stor_vol_col._volumes_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
successive_return_values = []
@ -107,7 +106,6 @@ class VolumeCollectionTestCase(base.TestCase):
def test_max_size_bytes_after_refresh(self):
self.stor_vol_col.refresh()
self.assertIsNone(self.stor_vol_col._volumes_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
successive_return_values = []

View File

@ -66,13 +66,20 @@ class BiosTestCase(base.TestCase):
data={'Attributes': {'ProcTurboMode': 'Disabled'}})
def test_set_attribute_on_refresh(self):
self.conn.get.reset_mock()
# make it instantiate pending attributes
self.sys_bios.pending_attributes
self.assertTrue(self.conn.get.called)
self.conn.get.reset_mock()
self.sys_bios.pending_attributes
self.assertFalse(self.conn.get.called)
self.sys_bios.set_attribute('ProcTurboMode', 'Disabled')
self.assertTrue(self.sys_bios._pending_settings_resource._is_stale)
# make it refresh pending attributes on next retrieval
self.sys_bios.pending_attributes
self.assertFalse(self.sys_bios._pending_settings_resource._is_stale)
self.assertTrue(self.conn.get.called)
def test_set_attributes(self):
self.sys_bios.set_attributes({'ProcTurboMode': 'Disabled',
@ -83,14 +90,21 @@ class BiosTestCase(base.TestCase):
'UsbControl': 'UsbDisabled'}})
def test_set_attributes_on_refresh(self):
self.conn.get.reset_mock()
# make it instantiate pending attributes
self.sys_bios.pending_attributes
self.assertTrue(self.conn.get.called)
self.conn.get.reset_mock()
self.sys_bios.pending_attributes
self.assertFalse(self.conn.get.called)
self.sys_bios.set_attributes({'ProcTurboMode': 'Disabled',
'UsbControl': 'UsbDisabled'})
self.assertTrue(self.sys_bios._pending_settings_resource._is_stale)
# make it refresh pending attributes on next retrieval
self.sys_bios.pending_attributes
self.assertFalse(self.sys_bios._pending_settings_resource._is_stale)
self.assertTrue(self.conn.get.called)
def test__get_reset_bios_action_element(self):
value = self.sys_bios._get_reset_bios_action_element()

View File

@ -93,7 +93,6 @@ class EthernetInterfaceCollectionTestCase(base.TestCase):
self.assertEqual(1, len(members))
def test_summary(self):
self.assertIsNone(self.sys_eth_col._summary)
self.conn.get.return_value.json.reset_mock()
with open('sushy/tests/unit/json_samples/'
'ethernet_interfaces.json') as f:

View File

@ -127,8 +127,6 @@ class ProcessorCollectionTestCase(base.TestCase):
self.conn.get.return_value.json.side_effect = successive_return_values
def test_summary(self):
# check for the underneath variable value
self.assertIsNone(self.sys_processor_col._summary)
# | GIVEN |
self._setUp_processor_summary()
# | WHEN |
@ -161,10 +159,8 @@ class ProcessorCollectionTestCase(base.TestCase):
with open('sushy/tests/unit/json_samples/'
'processor_collection.json') as f:
self.conn.get.return_value.json.return_value = json.load(f)
self.sys_processor_col.refresh(force=True)
# | WHEN & THEN |
self.assertIsNone(self.sys_processor_col._summary)
self.sys_processor_col.invalidate()
self.sys_processor_col.refresh(force=False)
# | GIVEN |
self._setUp_processor_summary()

View File

@ -97,7 +97,6 @@ class SimpleStorageCollectionTestCase(base.TestCase):
self.simpl_stor_col.disks_sizes_bytes)
def test_max_size_bytes(self):
self.assertIsNone(self.simpl_stor_col._disks_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
with open('sushy/tests/unit/json_samples/'
@ -113,7 +112,6 @@ class SimpleStorageCollectionTestCase(base.TestCase):
def test_max_size_bytes_after_refresh(self):
self.simpl_stor_col.refresh()
self.assertIsNone(self.simpl_stor_col._disks_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
with open('sushy/tests/unit/json_samples/'

View File

@ -21,7 +21,6 @@ import sushy
from sushy import exceptions
from sushy.resources import constants as res_cons
from sushy.resources.system import bios
from sushy.resources.system import ethernet_interface
from sushy.resources.system import mappings as sys_map
from sushy.resources.system import processor
from sushy.resources.system import simple_storage
@ -68,9 +67,6 @@ class SystemTestCase(base.TestCase):
self.sys_inst.power_state)
self.assertEqual(96, self.sys_inst.memory_summary.size_gib)
self.assertEqual("OK", self.sys_inst.memory_summary.health)
self.assertIsNone(self.sys_inst._processors)
self.assertIsNone(self.sys_inst._ethernet_interfaces)
self.assertIsNone(self.sys_inst._bios)
def test__parse_attributes_missing_actions(self):
self.sys_inst.json.pop('Actions')
@ -270,8 +266,6 @@ class SystemTestCase(base.TestCase):
self.assertIsNone(self.sys_inst.memory_summary)
def test_processors(self):
# check for the underneath variable value
self.assertIsNone(self.sys_inst._processors)
# | GIVEN |
self.conn.get.return_value.json.reset_mock()
with open('sushy/tests/unit/json_samples/'
@ -308,10 +302,6 @@ class SystemTestCase(base.TestCase):
self.sys_inst.invalidate()
self.sys_inst.refresh(force=False)
# | WHEN & THEN |
self.assertIsNotNone(self.sys_inst._processors)
self.assertTrue(self.sys_inst._processors._is_stale)
# | GIVEN |
with open('sushy/tests/unit/json_samples/'
'processor_collection.json') as f:
@ -319,7 +309,6 @@ class SystemTestCase(base.TestCase):
# | WHEN & THEN |
self.assertIsInstance(self.sys_inst.processors,
processor.ProcessorCollection)
self.assertFalse(self.sys_inst._processors._is_stale)
def _setUp_processor_summary(self):
self.conn.get.return_value.json.reset_mock()
@ -374,13 +363,10 @@ class SystemTestCase(base.TestCase):
self.conn.get.return_value.json.side_effect = [eth_coll_return_value,
eth_return_value]
self.assertIsNone(self.sys_inst._ethernet_interfaces)
actual_macs = self.sys_inst.ethernet_interfaces.summary
expected_macs = (
{'12:44:6A:3B:04:11': res_cons.STATE_ENABLED})
self.assertEqual(expected_macs, actual_macs)
self.assertIsInstance(self.sys_inst._ethernet_interfaces,
ethernet_interface.EthernetInterfaceCollection)
def test_bios(self):
self.conn.get.return_value.json.reset_mock()
@ -389,7 +375,6 @@ class SystemTestCase(base.TestCase):
bios_return_value = json.load(f)
self.conn.get.return_value.json.side_effect = [bios_return_value]
self.assertIsNone(self.sys_inst._bios)
self.assertIsInstance(self.sys_inst.bios, bios.Bios)
self.assertEqual('BIOS Configuration Current Settings',
self.sys_inst.bios.name)
@ -401,8 +386,6 @@ class SystemTestCase(base.TestCase):
self.sys_inst.simple_storage
def test_simple_storage(self):
# check for the underneath variable value
self.assertIsNone(self.sys_inst._simple_storage)
# | GIVEN |
self.conn.get.return_value.json.reset_mock()
with open('sushy/tests/unit/json_samples/'
@ -439,10 +422,6 @@ class SystemTestCase(base.TestCase):
self.sys_inst.invalidate()
self.sys_inst.refresh(force=False)
# | WHEN & THEN |
self.assertIsNotNone(self.sys_inst._simple_storage)
self.assertTrue(self.sys_inst._simple_storage._is_stale)
# | GIVEN |
with open('sushy/tests/unit/json_samples/'
'simple_storage_collection.json') as f:
@ -450,7 +429,6 @@ class SystemTestCase(base.TestCase):
# | WHEN & THEN |
self.assertIsInstance(self.sys_inst.simple_storage,
simple_storage.SimpleStorageCollection)
self.assertFalse(self.sys_inst._simple_storage._is_stale)
def test_storage_for_missing_attr(self):
self.sys_inst.json.pop('Storage')
@ -459,8 +437,6 @@ class SystemTestCase(base.TestCase):
self.sys_inst.storage
def test_storage(self):
# check for the underneath variable value
self.assertIsNone(self.sys_inst._storage)
# | GIVEN |
self.conn.get.return_value.json.reset_mock()
with open('sushy/tests/unit/json_samples/'
@ -495,18 +471,12 @@ class SystemTestCase(base.TestCase):
self.sys_inst.invalidate()
self.sys_inst.refresh(force=False)
# | WHEN & THEN |
self.assertIsNotNone(self.sys_inst._storage)
self.assertTrue(self.sys_inst._storage._is_stale)
# | GIVEN |
with open('sushy/tests/unit/json_samples/'
'storage_collection.json') as f:
self.conn.get.return_value.json.return_value = json.load(f)
# | WHEN & THEN |
self.assertIsInstance(self.sys_inst.storage,
storage.StorageCollection)
self.assertFalse(self.sys_inst._storage._is_stale)
self.assertIsInstance(self.sys_inst.storage, storage.StorageCollection)
class SystemCollectionTestCase(base.TestCase):

View File

@ -141,6 +141,7 @@ class ResourceCollectionBaseTestCase(base.TestCase):
self.assertIsInstance(val, TestResource)
self.assertTrue(val.identity in member_ids)
self.assertEqual('1.0.x', val.redfish_version)
self.assertFalse(val._is_stale)
return result
@ -148,15 +149,33 @@ class ResourceCollectionBaseTestCase(base.TestCase):
self._validate_get_members_result(('1', '2'))
def test_get_members_on_refresh(self):
self._validate_get_members_result(('1', '2'))
all_members = self._validate_get_members_result(('1', '2'))
# Now emulating the resource invalidate and refresh action!
# Call resource invalidate
self.test_resource_collection.invalidate()
self.assertTrue(self.test_resource_collection._is_stale)
# Now invoke refresh action on resource. This can be viewed as
# "light refresh" which involves only the resource's fresh retrieval
# and not its nested resources (these are only marked as stale).
self.test_resource_collection.refresh(force=False)
self._validate_get_members_result(('3', '4'))
# resource itself is fresh
self.assertFalse(self.test_resource_collection._is_stale)
# members are marked as stale
for m in all_members:
self.assertTrue(m._is_stale)
self._validate_get_members_result(('1', '2'))
# members are also now freshly retrieved
for m in all_members:
self.assertFalse(m._is_stale)
# Again invalidate and do a forced refresh on resource
self.test_resource_collection.invalidate(force_refresh=True)
# Now, even the members are also freshly retrieved. This can be viewed
# as "cascading refresh" which involves not only the resource's fresh
# retrieval but also its nested resources.
for m in all_members:
self.assertFalse(m._is_stale)
def test_get_members_caching(self):
result = self._validate_get_members_result(('1', '2'))

View File

@ -18,6 +18,7 @@ import json
import mock
from sushy import exceptions
from sushy.resources import base as resource_base
from sushy.resources.system import system
from sushy.tests.unit import base
from sushy import utils
@ -96,3 +97,112 @@ class UtilsTestCase(base.TestCase):
self.assertEqual(821, utils.max_safe([15, 300, 270, None, 821, None]))
self.assertEqual(0, utils.max_safe([]))
self.assertIsNone(utils.max_safe([], default=None))
class NestedResource(resource_base.ResourceBase):
def _parse_attributes(self):
pass
class BaseResource(resource_base.ResourceBase):
def _parse_attributes(self):
pass
def _do_some_crunch_work_to_get_a(self):
return 'a'
@utils.cache_it
def get_a(self):
return self._do_some_crunch_work_to_get_a()
def _do_some_crunch_work_to_get_b(self):
return 'b'
@utils.cache_it
def get_b(self):
return self._do_some_crunch_work_to_get_b()
@property
@utils.cache_it
def nested_resource(self):
return NestedResource(
self._conn, "path/to/nested_resource",
redfish_version=self.redfish_version)
@property
@utils.cache_it
def few_nested_resources(self):
return [NestedResource(self._conn, "/nested_res1",
redfish_version=self.redfish_version),
NestedResource(self._conn, "/nested_res2",
redfish_version=self.redfish_version)]
def _do_refresh(self, force):
utils.cache_clear(self, force)
class CacheTestCase(base.TestCase):
def setUp(self):
super(CacheTestCase, self).setUp()
self.conn = mock.Mock()
self.res = BaseResource(connector=self.conn, path='/Foo',
redfish_version='1.0.2')
def test_cache_nested_resource_retrieval(self):
nested_res = self.res.nested_resource
few_nested_res = self.res.few_nested_resources
self.assertIsInstance(nested_res, NestedResource)
self.assertIs(nested_res, self.res.nested_resource)
self.assertIsInstance(few_nested_res, list)
for n_res in few_nested_res:
self.assertIsInstance(n_res, NestedResource)
self.assertIs(few_nested_res, self.res.few_nested_resources)
self.res.invalidate()
self.res.refresh(force=False)
self.assertIsNotNone(self.res._cache_nested_resource)
self.assertTrue(self.res._cache_nested_resource._is_stale)
self.assertIsNotNone(self.res._cache_few_nested_resources)
for n_res in self.res._cache_few_nested_resources:
self.assertTrue(n_res._is_stale)
self.assertIsInstance(self.res.nested_resource, NestedResource)
self.assertFalse(self.res._cache_nested_resource._is_stale)
self.assertIsInstance(self.res.few_nested_resources, list)
for n_res in self.res._cache_few_nested_resources:
self.assertFalse(n_res._is_stale)
def test_cache_non_resource_retrieval(self):
with mock.patch.object(
self.res, '_do_some_crunch_work_to_get_a',
wraps=self.res._do_some_crunch_work_to_get_a,
autospec=True) as do_work_to_get_a_spy:
result = self.res.get_a()
self.assertTrue(do_work_to_get_a_spy.called)
do_work_to_get_a_spy.reset_mock()
# verify subsequent invocation
self.assertEqual(result, self.res.get_a())
self.assertFalse(do_work_to_get_a_spy.called)
def test_cache_clear_only_selected_attr(self):
self.res.nested_resource
self.res.get_a()
self.res.get_b()
utils.cache_clear(self.res, False, only_these=['get_a'])
# cache cleared (set to None)
self.assertIsNone(self.res._cache_get_a)
# cache retained
self.assertEqual('b', self.res._cache_get_b)
self.assertFalse(self.res._cache_nested_resource._is_stale)
def test_cache_clear_failure(self):
self.assertRaises(
TypeError, utils.cache_clear, self.res, False, only_these=10)

View File

@ -13,12 +13,17 @@
# License for the specific language governing permissions and limitations
# under the License.
import collections
import logging
import six
from sushy import exceptions
LOG = logging.getLogger(__name__)
CACHE_ATTR_NAMES_VAR_NAME = '_cache_attr_names'
def revert_dictionary(dictionary):
"""Given a dictionary revert it's mapping
@ -97,6 +102,7 @@ def max_safe(iterable, default=0):
This function is just a wrapper over builtin max() w/o ``key`` argument.
The ``default`` argument specifies an object to return if the provided
``iterable`` is empty. Also it filters out the None type values.
:param iterable: an iterable
:param default: 0 by default
"""
@ -106,3 +112,157 @@ def max_safe(iterable, default=0):
except ValueError:
# TypeError is not caught here as that should be thrown.
return default
def setdefaultattr(obj, name, default):
    """Python's ``dict.setdefault`` applied on Python objects.

    If ``name`` is an existing attribute of ``obj``, return its value.
    Otherwise, set the attribute on ``obj`` to ``default`` and return
    ``default``.

    :param obj: a python object
    :param name: name of attribute
    :param default: default value to be set
    """
    try:
        return getattr(obj, name)
    except AttributeError:
        setattr(obj, name, default)
        return default

def cache_it(res_accessor_method):
    """Utility decorator to cache the return value of the decorated method.

    This decorator is to be used with any Sushy resource class method.
    It internally creates an attribute on the resource named
    ``_cache_<decorated_method_name>``, referred to as the "caching
    attribute". This attribute holds the result of the first method
    invocation, and every subsequent call to the method returns that
    cached value. The decorated method is expected to contain its own
    evaluation logic.

    The decorator also assigns a variable named ``_cache_attr_names`` on
    the resource, which maintains a collection of all the existing
    "caching attribute" names.

    To invalidate or clear the cache use :py:func:`~cache_clear`.

    Usage:

    .. code-block:: python

      class SomeResource(base.ResourceBase):

          ...

          @cache_it
          def get_summary(self):
              # do some calculation and return the result,
              # and this result will be cached.
              return result

          ...

          def _do_refresh(self, force):
              cache_clear(self, force)

    If the returned value is a Sushy resource instance, or an Iterable
    whose elements are Sushy resource instances, the decorator also calls
    ``refresh()`` on the cached resource (or on each resource element).
    This avoids recreating a resource instance that already exists and
    instead only re-retrieves the resource JSON data. The ``force``
    argument is deliberately set to False to do only a "light refresh"
    of the resource (fresh retrieval of the resource itself) rather than
    the exhaustive "cascading refresh" (the resource together with all of
    its nested subresources, recursively).

    .. code-block:: python

      class SomeResource(base.ResourceBase):

          ...

          @property
          @cache_it
          def nested_resource(self):
              return NestedResource(
                  self._conn, "Path/to/NestedResource",
                  redfish_version=self.redfish_version)

          ...

          def _do_refresh(self, force):
              # selective attribute clearing
              cache_clear(self, force, only_these=['nested_resource'])

    Note that this is not thread safe, so guard your code against
    concurrency issues when using this decorator.

    :param res_accessor_method: the resource accessor decorated method.
    """
    cache_attr_name = '_cache_' + res_accessor_method.__name__

    @six.wraps(res_accessor_method)
    def func_wrapper(res_selfie):
        cache_attr_val = getattr(res_selfie, cache_attr_name, None)
        if cache_attr_val is None:
            cache_attr_val = res_accessor_method(res_selfie)
            setattr(res_selfie, cache_attr_name, cache_attr_val)

            # Note(deray): Each resource instance maintains a collection of
            # all the cache attribute names in a private attribute.
            cache_attr_names = setdefaultattr(
                res_selfie, CACHE_ATTR_NAMES_VAR_NAME, set())
            cache_attr_names.add(cache_attr_name)

        from sushy.resources import base
        if isinstance(cache_attr_val, base.ResourceBase):
            cache_attr_val.refresh(force=False)
        elif isinstance(cache_attr_val, collections.Iterable):
            for elem in cache_attr_val:
                if isinstance(elem, base.ResourceBase):
                    elem.refresh(force=False)

        return cache_attr_val

    return func_wrapper

def cache_clear(res_selfie, force_refresh, only_these=None):
    """Clear some or all cached values of the resource.

    If a caching attribute refers to a resource instance then its
    ``invalidate()`` method is called. Otherwise it is set to None.
    To force a refresh of the resource and its sub-resources ("cascading
    refresh"), set ``force_refresh`` to True.

    This is the complementary method of the ``cache_it`` decorator.

    :param res_selfie: the resource instance.
    :param force_refresh: force_refresh argument of the ``invalidate()``
        method.
    :param only_these: an Iterable of specific method names for which
        the cached values alone need to be cleared. When None, all the
        cached values are cleared.
    """
    cache_attr_names = setdefaultattr(
        res_selfie, CACHE_ATTR_NAMES_VAR_NAME, set())
    if only_these is not None:
        if not isinstance(only_these, collections.Iterable):
            raise TypeError("'only_these' must be Iterable.")

        cache_attr_names = cache_attr_names.intersection(
            '_cache_' + attr for attr in only_these)

    for cache_attr_name in cache_attr_names:
        cache_attr_val = getattr(res_selfie, cache_attr_name)

        from sushy.resources import base
        if isinstance(cache_attr_val, base.ResourceBase):
            cache_attr_val.invalidate(force_refresh)
        elif isinstance(cache_attr_val, collections.Iterable):
            for elem in cache_attr_val:
                if isinstance(elem, base.ResourceBase):
                    elem.invalidate(force_refresh)
                else:
                    setattr(res_selfie, cache_attr_name, None)
                    break
        else:
            setattr(res_selfie, cache_attr_name, None)
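To summarise the clearing semantics, a hedged end-to-end sketch using the ``BaseResource``/``NestedResource`` test doubles defined in the unit tests above (assumes ``mock`` and ``sushy.utils`` are imported):

    res = BaseResource(connector=mock.Mock(), path='/Foo',
                       redfish_version='1.0.2')
    res.get_a()                    # plain value cached in res._cache_get_a
    res.nested_resource            # resource instance cached as an object
    res.few_nested_resources       # each element of the cached list is tracked

    utils.cache_clear(res, force_refresh=False)
    assert res._cache_get_a is None                  # plain values are dropped
    assert res._cache_nested_resource._is_stale      # resources are invalidated
    assert all(n._is_stale for n in res._cache_few_nested_resources)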