Fix erroneous status check when creating a backup

Currently, when creating a backup from a snapshot, the volume's status
is still checked instead of the snapshot's, so the operation is
rejected when the source volume is in-use even though the snapshot
itself is available.

This patch is a quick fix: when a snapshot is the backup source, check
the snapshot's status and only check the volume's status (and the
force flag) when backing up the volume directly.

Change-Id: I3a477ed622f076de9451741d198ac1d62c8d5b40
Related-Bug: #1670541
Author: wangxiyuan
Date:   2017-03-29 14:40:33 +08:00
Commit: 4b9233c7e5 (parent 3bf740353c)
3 changed files with 56 additions and 27 deletions


@@ -215,20 +215,20 @@ class API(base.Base):
                        % {'vol1': volume_id,
                           'vol2': snapshot.volume_id})
                 raise exception.InvalidVolume(reason=msg)
-        if volume['status'] not in ["available", "in-use"]:
+            if snapshot['status'] not in ["available"]:
+                msg = (_('Snapshot to be backed up must be available, '
+                         'but the current status is "%s".')
+                       % snapshot['status'])
+                raise exception.InvalidSnapshot(reason=msg)
+        elif volume['status'] not in ["available", "in-use"]:
             msg = (_('Volume to be backed up must be available '
                      'or in-use, but the current status is "%s".')
                    % volume['status'])
             raise exception.InvalidVolume(reason=msg)
-        elif volume['status'] in ["in-use"] and not snapshot_id and not force:
+        elif volume['status'] in ["in-use"] and not force:
             msg = _('Backing up an in-use volume must use '
                     'the force flag.')
             raise exception.InvalidVolume(reason=msg)
-        elif snapshot_id and snapshot['status'] not in ["available"]:
-            msg = (_('Snapshot to be backed up must be available, '
-                     'but the current status is "%s".')
-                   % snapshot['status'])
-            raise exception.InvalidSnapshot(reason=msg)
 
         previous_status = volume['status']
         volume_host = volume_utils.extract_host(volume.host, 'host')
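In effect, the hunk above (Cinder's backup API, class API(base.Base)) reorders the create-time checks so that a snapshot-sourced backup is validated against the snapshot's status and no longer against the volume's. A minimal standalone sketch of the resulting ordering, with plain dicts standing in for Cinder's volume and snapshot objects (hypothetical helper name, not the actual Cinder code):

    # Sketch only: the validation order after this change (hypothetical helper).
    def validate_backup_source(volume, snapshot=None, force=False):
        if snapshot is not None:
            # Backup from a snapshot: only the snapshot's status matters.
            if snapshot['status'] != 'available':
                raise ValueError('Snapshot to be backed up must be available, '
                                 'but the current status is "%s".'
                                 % snapshot['status'])
        elif volume['status'] not in ('available', 'in-use'):
            raise ValueError('Volume to be backed up must be available or '
                             'in-use, but the current status is "%s".'
                             % volume['status'])
        elif volume['status'] == 'in-use' and not force:
            # A direct backup of an attached volume still requires force.
            raise ValueError('Backing up an in-use volume must use '
                             'the force flag.')

    # With this ordering the following call no longer raises, even though
    # the source volume is attached:
    validate_backup_source({'status': 'in-use'}, snapshot={'status': 'available'})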


@@ -360,9 +360,18 @@ class BackupManager(manager.ThreadPoolManager):
         snapshot = objects.Snapshot.get_by_id(
             context, snapshot_id) if snapshot_id else None
         previous_status = volume.get('previous_status', None)
-        LOG.info('Create backup started, backup: %(backup_id)s '
-                 'volume: %(volume_id)s.',
-                 {'backup_id': backup.id, 'volume_id': volume_id})
+        if snapshot_id:
+            log_message = ('Create backup started, backup: %(backup_id)s '
+                           'volume: %(volume_id)s snapshot: %(snapshot_id)s.'
+                           % {'backup_id': backup.id,
+                              'volume_id': volume_id,
+                              'snapshot_id': snapshot_id})
+        else:
+            log_message = ('Create backup started, backup: %(backup_id)s '
+                           'volume: %(volume_id)s.'
+                           % {'backup_id': backup.id,
+                              'volume_id': volume_id})
+        LOG.info(log_message)
         self._notify_about_backup_usage(context, backup, "create.start")
@@ -371,19 +380,8 @@ class BackupManager(manager.ThreadPoolManager):
         backup.availability_zone = self.az
         backup.save()
 
-        expected_status = 'available' if snapshot_id else "backing-up"
-        actual_status = volume['status']
-        if actual_status != expected_status:
-            err = _('Create backup aborted, expected volume status '
-                    '%(expected_status)s but got %(actual_status)s.') % {
-                'expected_status': expected_status,
-                'actual_status': actual_status,
-            }
-            self._update_backup_error(backup, err)
-            raise exception.InvalidVolume(reason=err)
-
         expected_status = "backing-up"
         if snapshot_id:
             expected_status = fields.SnapshotStatus.BACKING_UP
             actual_status = snapshot['status']
             if actual_status != expected_status:
                 err = _('Create backup aborted, expected snapshot status '
@@ -393,6 +391,16 @@ class BackupManager(manager.ThreadPoolManager):
                 }
                 self._update_backup_error(backup, err)
                 raise exception.InvalidSnapshot(reason=err)
+        else:
+            actual_status = volume['status']
+            if actual_status != expected_status:
+                err = _('Create backup aborted, expected volume status '
+                        '%(expected_status)s but got %(actual_status)s.') % {
+                    'expected_status': expected_status,
+                    'actual_status': actual_status,
+                }
+                self._update_backup_error(backup, err)
+                raise exception.InvalidVolume(reason=err)
 
         expected_status = fields.BackupStatus.CREATING
         actual_status = backup.status
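The BackupManager hunks above apply the same split when create_backup re-validates state on the backup host: a snapshot-sourced backup requires the snapshot to be in backing-up, otherwise the volume must be. A minimal sketch of that branching (hypothetical helper name, simplified; the real code also flags the backup as errored via _update_backup_error before raising):

    # Sketch only: manager-side expected-status check after this change.
    def check_backing_up(volume, snapshot=None):
        expected_status = 'backing-up'
        if snapshot is not None:
            source, actual_status = 'snapshot', snapshot['status']
        else:
            source, actual_status = 'volume', volume['status']
        if actual_status != expected_status:
            raise ValueError('Create backup aborted, expected %s status '
                             '%s but got %s.'
                             % (source, expected_status, actual_status))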


@@ -620,11 +620,14 @@ class BackupTestCase(BaseBackupTest):
         self.assertEqual(vol_size, backup['size'])
 
     @mock.patch('cinder.backup.manager.BackupManager._run_backup')
-    @ddt.data(fields.SnapshotStatus.BACKING_UP,
-              fields.SnapshotStatus.AVAILABLE)
-    def test_create_backup_with_snapshot(self, snapshot_status,
+    @ddt.data((fields.SnapshotStatus.BACKING_UP, 'available'),
+              (fields.SnapshotStatus.BACKING_UP, 'in-use'),
+              (fields.SnapshotStatus.AVAILABLE, 'available'),
+              (fields.SnapshotStatus.AVAILABLE, 'in-use'))
+    @ddt.unpack
+    def test_create_backup_with_snapshot(self, snapshot_status, volume_status,
                                           mock_run_backup):
-        vol_id = self._create_volume_db_entry(status='available')
+        vol_id = self._create_volume_db_entry(status=volume_status)
         snapshot = self._create_snapshot_db_entry(volume_id=vol_id,
                                                   status=snapshot_status)
         backup = self._create_backup_db_entry(volume_id=vol_id,
@@ -635,7 +638,7 @@ class BackupTestCase(BaseBackupTest):
             vol = objects.Volume.get_by_id(self.ctxt, vol_id)
             snapshot = objects.Snapshot.get_by_id(self.ctxt, snapshot.id)
-            self.assertEqual('available', vol.status)
+            self.assertEqual(volume_status, vol.status)
             self.assertEqual(fields.SnapshotStatus.AVAILABLE, snapshot.status)
         else:
             self.assertRaises(exception.InvalidSnapshot,
@@ -1518,6 +1521,24 @@ class BackupAPITestCase(BaseBackupTest):
         backup = self.api.create(self.ctxt, None, None, volume_id, None)
         self.assertEqual('testhost', backup.host)
 
+    @mock.patch.object(api.API, '_get_available_backup_service_host',
+                       return_value='fake_host')
+    @mock.patch('cinder.backup.rpcapi.BackupAPI.create_backup')
+    def test_create_backup_from_snapshot_with_volume_in_use(
+            self, mock_create, mock_get_service):
+        self.ctxt.user_id = 'fake_user'
+        self.ctxt.project_id = 'fake_project'
+        volume_id = self._create_volume_db_entry(status='in-use')
+        snapshot = self._create_snapshot_db_entry(volume_id=volume_id)
+
+        backup = self.api.create(self.ctxt, None, None, volume_id, None,
+                                 snapshot_id=snapshot.id)
+        self.assertEqual(fields.BackupStatus.CREATING, backup.status)
+        volume = objects.Volume.get_by_id(self.ctxt, volume_id)
+        snapshot = objects.Snapshot.get_by_id(self.ctxt, snapshot.id)
+        self.assertEqual(fields.SnapshotStatus.BACKING_UP, snapshot.status)
+        self.assertEqual('in-use', volume.status)
+
     @mock.patch.object(api.API, '_get_available_backup_service_host',
                        return_value='fake_host')
     @mock.patch('cinder.backup.rpcapi.BackupAPI.create_backup')