Merge "Return sizes of storage devices"

Zuul 2018-09-21 10:56:57 +00:00 committed by Gerrit Code Review
commit 63d0b37461
6 changed files with 161 additions and 54 deletions
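The change replaces the cached `*_max_size_bytes` computations in these collections with sorted per-device size lists (`disks_sizes_bytes`, `drives_sizes_bytes`, `volumes_sizes_bytes`) and derives the maxima from them via `utils.max_safe`. That helper is not part of this diff; a minimal sketch of the behavior assumed here (fall back to 0 instead of raising on an empty sequence) would be:

    def max_safe(iterable, default=0):
        """Return max() of the iterable, or ``default`` when it is empty."""
        try:
            return max(iterable)
        except ValueError:
            # max() raises ValueError on an empty sequence, e.g. a system
            # that reports no enabled storage devices.
            return default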

View File

@ -58,30 +58,40 @@ class SimpleStorage(base.ResourceBase):
class SimpleStorageCollection(base.ResourceCollectionBase):
"""Represents a collection of simple storage associated with system."""
_max_size_bytes = None
_disks_sizes_bytes = None
@property
def _resource_type(self):
return SimpleStorage
@property
def max_size_bytes(self):
"""Max size available (in bytes) among all enabled device resources.
def disks_sizes_bytes(self):
"""Sizes of each Disk in bytes in SimpleStorage collection resource.
It returns the cached value until it (or its parent resource) is
Returns the list of cached values until it (or its parent resource)
is refreshed.
"""
if self._disks_sizes_bytes is None:
self._disks_sizes_bytes = sorted(
device.capacity_bytes
for simpl_stor in self.get_members()
for device in simpl_stor.devices
if device.status.state == res_cons.STATE_ENABLED
)
return self._disks_sizes_bytes
@property
def max_size_bytes(self):
"""Max size available (in bytes) among all enabled Disk resources.
Returns the cached value until it (or its parent resource) is
refreshed.
"""
if self._max_size_bytes is None:
self._max_size_bytes = (
utils.max_safe(device.capacity_bytes
for simpl_stor in self.get_members()
for device in simpl_stor.devices
if (device.status.state ==
res_cons.STATE_ENABLED)))
return self._max_size_bytes
return utils.max_safe(self.disks_sizes_bytes)
def _do_refresh(self, force=False):
super(SimpleStorageCollection, self)._do_refresh(force)
# Note(deray): undefine the attribute here for fresh creation in
# subsequent calls to its exposed property.
self._max_size_bytes = None
self._disks_sizes_bytes = None
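A short consumer-side sketch of the new SimpleStorage properties (hypothetical endpoint and system identity; it assumes the System resource exposes this collection as `system.simple_storage`):

    import sushy

    # Hypothetical Redfish endpoint and system identity, for illustration only.
    root = sushy.Sushy('http://localhost:8000/redfish/v1',
                       username='foo', password='bar')
    system = root.get_system('/redfish/v1/Systems/437XR1138R2')

    simple_storage = system.simple_storage
    # Sorted capacities (bytes) of every enabled device in the collection.
    print(simple_storage.disks_sizes_bytes)   # e.g. [4000000000000, 8000000000000]
    # Largest enabled device; utils.max_safe() turns an empty list into 0.
    print(simple_storage.max_size_bytes)

    # Both values are cached until the collection (or its parent) is refreshed.
    simple_storage.refresh()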

View File

@ -42,7 +42,7 @@ class Storage(base.ResourceBase):
adapter=utils.get_members_identities)
"""A tuple with the drive identities"""
_drives_max_size_bytes = None
_drives_sizes_bytes = None
_drives = None
_volumes = None # reference to VolumeCollection instance
@ -72,13 +72,22 @@ class Storage(base.ResourceBase):
self.get_drive(id_) for id_ in self.drives_identities]
return self._drives
@property
def drives_sizes_bytes(self):
"""Sizes of all Drives in bytes in Storage resource.
Returns the list of cached values until it (or its parent resource)
is refreshed.
"""
if self._drives_sizes_bytes is None:
self._drives_sizes_bytes = sorted(
drv.capacity_bytes for drv in self.drives)
return self._drives_sizes_bytes
@property
def drives_max_size_bytes(self):
"""Max size available in bytes among all Drives of this collection."""
if self._drives_max_size_bytes is None:
self._drives_max_size_bytes = (
utils.max_safe(drv.capacity_bytes for drv in self.drives))
return self._drives_max_size_bytes
return utils.max_safe(self.drives_sizes_bytes)
@property
def volumes(self):
@ -101,7 +110,7 @@ class Storage(base.ResourceBase):
"""Do resource specific refresh activities."""
# Note(deray): undefine the attribute here for fresh evaluation in
# subsequent calls to its exposed property.
self._drives_max_size_bytes = None
self._drives_sizes_bytes = None
self._drives = None
# invalidate the nested resource
if self._volumes is not None:
@ -111,34 +120,66 @@ class Storage(base.ResourceBase):
class StorageCollection(base.ResourceCollectionBase):
"""This class represents the collection of Storage resources"""
_max_drive_size_bytes = None
_max_volume_size_bytes = None
_drives_sizes_bytes = None
_volumes_sizes_bytes = None
@property
def _resource_type(self):
return Storage
@property
def drives_sizes_bytes(self):
"""Sizes of each Drive in bytes in Storage collection resource.
Returns the list of cached values until it (or its parent resource)
is refreshed.
"""
if self._drives_sizes_bytes is None:
self._drives_sizes_bytes = sorted(
drive_size
for storage_ in self.get_members()
for drive_size in storage_.drives_sizes_bytes
)
return self._drives_sizes_bytes
@property
def max_drive_size_bytes(self):
"""Max size available (in bytes) among all device resources."""
if self._max_drive_size_bytes is None:
self._max_drive_size_bytes = max(
storage_.drives_max_size_bytes
for storage_ in self.get_members())
return self._max_drive_size_bytes
"""Max size available (in bytes) among all Drive resources.
Returns the cached value until it (or its parent resource) is
refreshed.
"""
return utils.max_safe(self.drives_sizes_bytes)
@property
def volumes_sizes_bytes(self):
"""Sizes of each Volume in bytes in Storage collection resource.
Returns the list of cached values until it (or its parent resource)
is refreshed.
"""
if self._volumes_sizes_bytes is None:
self._volumes_sizes_bytes = sorted(
volume_size
for storage_ in self.get_members()
for volume_size in storage_.volumes.volumes_sizes_bytes)
return self._volumes_sizes_bytes
@property
def max_volume_size_bytes(self):
"""Max size available (in bytes) among all Volumes under this."""
if self._max_volume_size_bytes is None:
self._max_volume_size_bytes = max(
storage_.volumes.max_size_bytes
for storage_ in self.get_members())
return self._max_volume_size_bytes
"""Max size available (in bytes) among all Volume resources.
Returns the cached value until it (or its parent resource) is
refreshed.
"""
return utils.max_safe(self.volumes_sizes_bytes)
def _do_refresh(self, force=False):
"""Do resource specific refresh activities"""
super(StorageCollection, self)._do_refresh(force)
# Note(deray): undefine the attributes here for fresh evaluation in
# subsequent calls to their exposed properties.
self._max_drive_size_bytes = None
self._max_volume_size_bytes = None
self._drives_sizes_bytes = None
self._volumes_sizes_bytes = None
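The same pattern at the Storage and StorageCollection level (continuing the hypothetical session from the SimpleStorage sketch above; it assumes the System resource exposes the collection as `system.storage`):

    # `system` obtained as in the SimpleStorage sketch above (hypothetical).
    storage_col = system.storage

    # Per-drive and per-volume capacities across all Storage members,
    # each as a sorted list of bytes.
    print(storage_col.drives_sizes_bytes)    # e.g. [899527000000, ...]
    print(storage_col.volumes_sizes_bytes)

    # Maxima derived from those lists with utils.max_safe().
    print(storage_col.max_drive_size_bytes)
    print(storage_col.max_volume_size_bytes)

    # An individual Storage member exposes the same pair for its own drives.
    storage = storage_col.get_member(
        '/redfish/v1/Systems/437XR1138R2/Storage/1')  # hypothetical identity
    print(storage.drives_sizes_bytes)
    print(storage.drives_max_size_bytes)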

View File

@ -37,22 +37,38 @@ class Volume(base.ResourceBase):
class VolumeCollection(base.ResourceCollectionBase):
"""This class represents the Storage Volume collection"""
_max_size_bytes = None
_volumes_sizes_bytes = None
@property
def _resource_type(self):
return Volume
@property
def max_size_bytes(self):
"""Max size available in bytes among all Volumes of this collection."""
if self._max_size_bytes is None:
self._max_size_bytes = (
utils.max_safe([vol.capacity_bytes
for vol in self.get_members()]))
return self._max_size_bytes
def volumes_sizes_bytes(self):
"""Sizes of all Volumes in bytes in VolumeCollection resource.
Returns the list of cached values until it (or its parent resource)
is refreshed.
"""
if self._volumes_sizes_bytes is None:
self._volumes_sizes_bytes = sorted(
vol.capacity_bytes
for vol in self.get_members())
return self._volumes_sizes_bytes
@property
def max_volume_size_bytes(self):
"""Max size available (in bytes) among all Volume resources.
Returns the cached value until it (or its parent resource) is
refreshed.
"""
return utils.max_safe(self.volumes_sizes_bytes)
# NOTE(etingof): for backward compatibility
max_size_bytes = max_volume_size_bytes
def _do_refresh(self, force=False):
super(VolumeCollection, self)._do_refresh(force)
# invalidate the attribute
self._max_size_bytes = None
self._volumes_sizes_bytes = None
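And for the VolumeCollection hanging off an individual Storage resource (same hypothetical `storage` object as above); note that the pre-existing `max_size_bytes` name is kept as an alias of the new `max_volume_size_bytes`:

    # `storage` obtained as in the Storage sketch above (hypothetical).
    volumes = storage.volumes

    # Sorted capacities (bytes) of every volume in the collection.
    print(volumes.volumes_sizes_bytes)
    # e.g. [107374182400, 899527000000, 1073741824000]

    # The new name and the backward-compatible alias return the same value.
    assert volumes.max_volume_size_bytes == volumes.max_size_bytes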

View File

@ -120,7 +120,7 @@ class StorageTestCase(base.TestCase):
self.assertIsInstance(drv, drive.Drive)
def test_drives_max_size_bytes(self):
self.assertIsNone(self.storage._drives_max_size_bytes)
self.assertIsNone(self.storage._drives_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
successive_return_values = []
@ -139,7 +139,7 @@ class StorageTestCase(base.TestCase):
def test_drives_max_size_bytes_after_refresh(self):
self.storage.refresh()
self.assertIsNone(self.storage._drives_max_size_bytes)
self.assertIsNone(self.storage._drives_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
successive_return_values = []
@ -244,8 +244,23 @@ class StorageCollectionTestCase(base.TestCase):
self.assertIsInstance(members, list)
self.assertEqual(1, len(members))
def test_drives_sizes_bytes(self):
self.conn.get.return_value.json.reset_mock()
successive_return_values = []
with open('sushy/tests/unit/json_samples/storage.json') as f:
successive_return_values.append(json.load(f))
# repeating the 3rd one to provide mock data for 4th iteration.
for fname in STORAGE_DRIVE_FILE_NAMES + [STORAGE_DRIVE_FILE_NAMES[-1]]:
with open(fname) as f:
successive_return_values.append(json.load(f))
self.conn.get.return_value.json.side_effect = successive_return_values
self.assertEqual([899527000000, 899527000000, 899527000000,
899527000000], self.stor_col.drives_sizes_bytes)
def test_max_drive_size_bytes(self):
self.assertIsNone(self.stor_col._max_drive_size_bytes)
self.assertIsNone(self.stor_col._drives_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
successive_return_values = []
@ -266,7 +281,7 @@ class StorageCollectionTestCase(base.TestCase):
def test_max_drive_size_bytes_after_refresh(self):
self.stor_col.refresh(force=False)
self.assertIsNone(self.stor_col._max_drive_size_bytes)
self.assertIsNone(self.stor_col._drives_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
successive_return_values = []
@ -280,8 +295,23 @@ class StorageCollectionTestCase(base.TestCase):
self.assertEqual(899527000000, self.stor_col.max_drive_size_bytes)
def test_volumes_sizes_bytes(self):
self.conn.get.return_value.json.reset_mock()
successive_return_values = []
with open('sushy/tests/unit/json_samples/storage.json') as f:
successive_return_values.append(json.load(f))
# repeating the 3rd one to provide mock data for 4th iteration.
for fname in STORAGE_VOLUME_FILE_NAMES:
with open(fname) as f:
successive_return_values.append(json.load(f))
self.conn.get.return_value.json.side_effect = successive_return_values
self.assertEqual([107374182400, 899527000000, 1073741824000],
self.stor_col.volumes_sizes_bytes)
def test_max_volume_size_bytes(self):
self.assertIsNone(self.stor_col._max_volume_size_bytes)
self.assertIsNone(self.stor_col._volumes_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
successive_return_values = []
@ -302,7 +332,7 @@ class StorageCollectionTestCase(base.TestCase):
def test_max_volume_size_bytes_after_refresh(self):
self.stor_col.refresh(force=False)
self.assertIsNone(self.stor_col._max_volume_size_bytes)
self.assertIsNone(self.stor_col._volumes_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
successive_return_values = []

View File

@ -86,7 +86,7 @@ class VolumeCollectionTestCase(base.TestCase):
self.assertEqual(3, len(members))
def test_max_size_bytes(self):
self.assertIsNone(self.stor_vol_col._max_size_bytes)
self.assertIsNone(self.stor_vol_col._volumes_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
successive_return_values = []
@ -107,7 +107,7 @@ class VolumeCollectionTestCase(base.TestCase):
def test_max_size_bytes_after_refresh(self):
self.stor_vol_col.refresh()
self.assertIsNone(self.stor_vol_col._max_size_bytes)
self.assertIsNone(self.stor_vol_col._volumes_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
successive_return_values = []

View File

@ -86,8 +86,18 @@ class SimpleStorageCollectionTestCase(base.TestCase):
self.assertIsInstance(members, list)
self.assertEqual(1, len(members))
def test_disks_sizes_bytes(self):
self.conn.get.return_value.json.reset_mock()
with open('sushy/tests/unit/json_samples/'
'simple_storage.json') as f:
self.conn.get.return_value.json.return_value = json.load(f)
self.assertEqual([4000000000000, 8000000000000],
self.simpl_stor_col.disks_sizes_bytes)
def test_max_size_bytes(self):
self.assertIsNone(self.simpl_stor_col._max_size_bytes)
self.assertIsNone(self.simpl_stor_col._disks_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
with open('sushy/tests/unit/json_samples/'
@ -103,7 +113,7 @@ class SimpleStorageCollectionTestCase(base.TestCase):
def test_max_size_bytes_after_refresh(self):
self.simpl_stor_col.refresh()
self.assertIsNone(self.simpl_stor_col._max_size_bytes)
self.assertIsNone(self.simpl_stor_col._disks_sizes_bytes)
self.conn.get.return_value.json.reset_mock()
with open('sushy/tests/unit/json_samples/'