From 39105d2a0a51af6cc2d9251eada364127273e550 Mon Sep 17 00:00:00 2001
From: Kushal Agrawal
Date: Tue, 8 May 2018 16:16:09 +0530
Subject: [PATCH] Retain existing blob when pre_upload_hook or save fails

When a blob is re-uploaded and pre_upload_hook or the store API raises
an exception, the existing blob content needs to be retained instead of
being removed.

Change-Id: I51effe4c76de9a353dec99c634c173840c792198
---
 glare/engine.py                     | 15 ++++++++++++++-
 glare/tests/sample_artifact.py      | 18 ++++++++++++++++++
 glare/tests/unit/api/test_upload.py | 10 ++++++++++
 3 files changed, 42 insertions(+), 1 deletion(-)

diff --git a/glare/engine.py b/glare/engine.py
index 6913234..3fb62f1 100644
--- a/glare/engine.py
+++ b/glare/engine.py
@@ -581,6 +581,11 @@ class Engine(object):
                            'status': existing_blob_status}
                     raise exception.Conflict(message=msg)
             utils.validate_change_allowed(af, field_name)
+
+            if existing_blob is not None:
+                blob_info = deepcopy(existing_blob)
+                blob_info['status'] = 'saving'
+
             blob_info['size'] = self._calculate_allowed_space(
                 context, af, field_name, content_length, blob_key)
 
@@ -628,8 +633,16 @@ class Engine(object):
             # if upload failed remove blob from db and storage
             with excutils.save_and_reraise_exception(logger=LOG):
                 LOG.error("Exception occured: %s", Exception)
+                # Drop the created blob_info if the blob data upload fails.
+                if existing_blob is None:
+                    blob_info = None
+                else:
+                    # Restore the previous blob's size and mark the
+                    # existing blob as active again.
+                    blob_info['size'] = existing_blob['size']
+                    blob_info['status'] = 'active'
                 self._save_blob_info(
-                    context, af, field_name, blob_key, None)
+                    context, af, field_name, blob_key, blob_info)
 
         LOG.info("Successfully finished blob uploading for artifact "
                  "%(artifact)s blob field %(blob)s.",
diff --git a/glare/tests/sample_artifact.py b/glare/tests/sample_artifact.py
index 3ef731b..c3df5df 100644
--- a/glare/tests/sample_artifact.py
+++ b/glare/tests/sample_artifact.py
@@ -13,9 +13,12 @@
 #    under the License.
 
 """Sample artifact object for testing purposes"""
+import io
 
+from glare.common.utils import CooperativeReader
 from oslo_versionedobjects import fields
 
+from glare.common.exception import GlareException
 from glare.objects import base as base_artifact
 from glare.objects.meta import fields as glare_fields
 from glare.objects.meta import validators
@@ -143,6 +146,21 @@ class SampleArtifact(base_artifact.BaseArtifact):
     def get_display_type_name(cls):
         return "Sample Artifact"
+
+    @classmethod
+    def pre_upload_hook(cls, context, af, field_name, blob_key, fd):
+        flobj = io.BytesIO(CooperativeReader(fd).read())
+        data = flobj.read()
+        print("Data in pre_upload_hook: %s" % data)
+        if data == b'invalid_data':
+            raise GlareException("Invalid data found")
+        # Try to seek the original fd back to 0.
+        try:
+            fd.seek(0)
+        except Exception:  # fd may be non-seekable.
+            flobj.seek(0)
+            return flobj
+        return fd
 
     def to_dict(self):
         res = self.obj_to_primitive()['versioned_object.data']
         res['__some_meta_information__'] = res['name'].upper()
diff --git a/glare/tests/unit/api/test_upload.py b/glare/tests/unit/api/test_upload.py
index e2f7ffd..9feaf77 100644
--- a/glare/tests/unit/api/test_upload.py
+++ b/glare/tests/unit/api/test_upload.py
@@ -66,6 +66,16 @@ class TestArtifactUpload(base.BaseTestArtifactAPI):
         self.assertEqual(5, artifact['blob']['size'])
         self.assertEqual('active', artifact['blob']['status'])
 
+        # Fail in pre_upload_hook validation and retain the existing data.
+        self.assertRaises(exc.GlareException, self.controller.upload_blob,
+                          self.req, 'sample_artifact',
+                          self.sample_artifact['id'], 'blob',
+                          BytesIO(b'invalid_data'), 'application/octet-stream')
+        artifact = self.controller.show(self.req, 'sample_artifact',
+                                        self.sample_artifact['id'])
+        self.assertEqual(5, artifact['blob']['size'])
+        self.assertEqual('active', artifact['blob']['status'])
+
     def test_upload_saving_blob(self):
         self.controller.upload_blob(
             self.req, 'sample_artifact', self.sample_artifact['id'], 'blob',
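
Reviewer note (not part of the patch): the sketch below restates the
retain-on-failure flow that the engine.py change implements, in a
simplified, standalone form. The helpers upload() and save_blob_info()
are hypothetical stand-ins for the real store save call and
Engine._save_blob_info(); only the control flow is meant to mirror the
diff above.

from copy import deepcopy


def upload_with_retain(af, field_name, blob_key, data, existing_blob,
                       upload, save_blob_info):
    """Upload new blob data; on failure restore the previous blob record."""
    if existing_blob is not None:
        # Keep a copy of the old record and mark it as being overwritten.
        blob_info = deepcopy(existing_blob)
        blob_info['status'] = 'saving'
    else:
        blob_info = {'status': 'saving', 'size': None}

    try:
        blob_info['size'] = upload(data)
    except Exception:
        if existing_blob is None:
            # Nothing to retain: drop the half-created record.
            blob_info = None
        else:
            # Roll back to the previous blob's metadata.
            blob_info['size'] = existing_blob['size']
            blob_info['status'] = 'active'
        save_blob_info(af, field_name, blob_key, blob_info)
        raise

    blob_info['status'] = 'active'
    save_blob_info(af, field_name, blob_key, blob_info)
    return blob_info

With this shape, a failed re-upload (for example the b'invalid_data'
case exercised by the new unit test) leaves the previous blob metadata
(size 5, status 'active') untouched, which is what the test asserts.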