Merge "Add a generic retry decorator to cinder/utils"
This commit is contained in:
commit
a8499b24a9
|
@ -16,6 +16,7 @@
|
|||
import datetime
|
||||
import hashlib
|
||||
import os
|
||||
import time
|
||||
import uuid
|
||||
|
||||
import mock
|
||||
|
@ -785,7 +786,6 @@ class AuditPeriodTest(test.TestCase):
|
|||
|
||||
def setUp(self):
|
||||
super(AuditPeriodTest, self).setUp()
|
||||
#a fairly random time to test with
|
||||
test_time = datetime.datetime(second=23,
|
||||
minute=12,
|
||||
hour=8,
|
||||
|
@ -1380,3 +1380,91 @@ class IsBlkDeviceTestCase(test.TestCase):
|
|||
def test_fail_is_blk_device(self, mock_os_stat, mock_S_ISBLK):
    """is_blk_device() must report False when the stat check blows up.

    ``mock_os_stat`` and ``mock_S_ISBLK`` are injected by ``mock.patch``
    decorators on the test (outside this diff hunk) — presumably configured
    to raise; verify against the class decorators.
    """
    # Any exception inside is_blk_device() is swallowed and treated as
    # "not a block device", so this must return False rather than raise.
    dev = 'device_exception'
    self.assertFalse(utils.is_blk_device(dev))
|
||||
|
||||
|
||||
class WrongException(Exception):
    """Exception type the retry decorator is *not* configured to catch.

    Used to verify that unexpected exception types propagate immediately
    instead of being retried.
    """
|
||||
|
||||
|
||||
class TestRetryDecorator(test.TestCase):
    """Unit tests for the cinder.utils.retry decorator."""

    def setUp(self):
        super(TestRetryDecorator, self).setUp()

    def test_no_retry_required(self):
        """A first-try success must return immediately without sleeping."""
        self.counter = 0

        with mock.patch.object(time, 'sleep') as mock_sleep:
            @utils.retry(exception.VolumeBackendAPIException,
                         interval=2,
                         retries=3,
                         backoff_rate=2)
            def succeeds():
                self.counter += 1
                return 'success'

            ret = succeeds()
            # No failure occurred, so no backoff sleep and a single call.
            self.assertFalse(mock_sleep.called)
            self.assertEqual('success', ret)
            self.assertEqual(1, self.counter)

    def test_retries_once(self):
        """One failure followed by success sleeps exactly once."""
        self.counter = 0
        interval = 2
        backoff_rate = 2
        retries = 3

        with mock.patch.object(time, 'sleep') as mock_sleep:
            @utils.retry(exception.VolumeBackendAPIException,
                         interval,
                         retries,
                         backoff_rate)
            def fails_once():
                self.counter += 1
                if self.counter < 2:
                    raise exception.VolumeBackendAPIException(data='fake')
                else:
                    return 'success'

            ret = fails_once()
            self.assertEqual('success', ret)
            self.assertEqual(2, self.counter)
            self.assertEqual(1, mock_sleep.call_count)
            mock_sleep.assert_called_with(interval * backoff_rate)

    def test_limit_is_reached(self):
        """After 'retries' failed attempts the exception propagates."""
        self.counter = 0
        retries = 3
        interval = 2
        backoff_rate = 4

        with mock.patch.object(time, 'sleep') as mock_sleep:
            @utils.retry(exception.VolumeBackendAPIException,
                         interval,
                         retries,
                         backoff_rate)
            def always_fails():
                self.counter += 1
                raise exception.VolumeBackendAPIException(data='fake')

            self.assertRaises(exception.VolumeBackendAPIException,
                              always_fails)
            self.assertEqual(retries, self.counter)

            # Sleep intervals grow geometrically:
            # interval, interval * backoff_rate, interval * backoff_rate**2...
            # NOTE: xrange() replaced with range() for Python 3
            # compatibility; the iteration count is tiny, so there is no
            # memory concern on Python 2.
            expected_sleep_arg = []
            for i in range(retries):
                if i > 0:
                    interval *= backoff_rate
                expected_sleep_arg.append(float(interval))

            # List comprehension instead of map() so the expected calls are
            # a concrete list under both Python 2 and Python 3.
            mock_sleep.assert_has_calls(
                [mock.call(arg) for arg in expected_sleep_arg])

    def test_wrong_exception_no_retry(self):
        """Exception types outside the configured set must not be retried."""
        with mock.patch.object(time, 'sleep') as mock_sleep:
            @utils.retry(exception.VolumeBackendAPIException)
            def raise_unexpected_error():
                raise WrongException("wrong exception")

            self.assertRaises(WrongException, raise_unexpected_error)
            self.assertFalse(mock_sleep.called)
|
||||
|
|
|
@ -40,6 +40,7 @@ from oslo_concurrency import processutils
|
|||
from oslo_config import cfg
|
||||
from oslo_utils import importutils
|
||||
from oslo_utils import timeutils
|
||||
import retrying
|
||||
import six
|
||||
|
||||
from cinder.brick.initiator import connector
|
||||
|
@ -766,3 +767,39 @@ def is_blk_device(dev):
|
|||
except Exception:
|
||||
LOG.debug('Path %s not found in is_blk_device check' % dev)
|
||||
return False
|
||||
|
||||
|
||||
def retry(exceptions, interval=1, retries=3, backoff_rate=2):
    """Return a decorator that retries a call with exponential backoff.

    :param exceptions: exception class (or tuple of classes) that triggers
                       a retry; any other exception propagates immediately.
    :param interval: base sleep interval in seconds between attempts.
    :param retries: total number of attempts before giving up.
    :param backoff_rate: multiplier applied to the wait on each attempt.
    :raises ValueError: if ``retries`` is less than 1.
    """
    # Validate up front so a misconfigured decorator fails at definition
    # time rather than on the first call.
    if retries < 1:
        raise ValueError('Retries must be greater than or '
                         'equal to 1 (received: %s). ' % retries)

    def _is_retriable(exc):
        # Only retry the exception type(s) the caller asked for.
        return isinstance(exc, exceptions)

    def _wait_time(previous_attempt_number, delay_since_first_attempt_ms):
        # Exponential backoff: interval * backoff_rate ** attempt number.
        wait_for = max(0, interval * (backoff_rate ** previous_attempt_number))
        LOG.debug("Sleeping for %s seconds", wait_for)
        # The retrying library expects the wait expressed in milliseconds.
        return wait_for * 1000.0

    def _should_stop(previous_attempt_number, delay_since_first_attempt_ms):
        LOG.debug("Failed attempt %s", previous_attempt_number)
        LOG.debug("Have been at this for %s seconds",
                  delay_since_first_attempt_ms / 1000.0)
        # Stop once the configured number of attempts has been exhausted.
        return previous_attempt_number == retries

    def _decorator(func):

        @six.wraps(func)
        def _wrapper(*args, **kwargs):
            retryer = retrying.Retrying(retry_on_exception=_is_retriable,
                                        wait_func=_wait_time,
                                        stop_func=_should_stop)
            return retryer.call(func, *args, **kwargs)

        return _wrapper

    return _decorator
|
||||
|
|
Loading…
Reference in New Issue