Limit RAM usage in unpack_zip_archive_in_memory.

Co-authored-by: Mike Fedosin <mikhail.fedosin.ext@nokia.com>

Change-Id: Ib93d539584bcc1b7f3f1095ccd2c8f1d74113db3
(cherry picked from commit 04a7178fb4)
This commit is contained in:
Idan Narotzki 2017-07-26 14:58:27 +00:00
parent 6d71044a0a
commit 0ead65d9b7
2 changed files with 16 additions and 6 deletions

View File

@ -313,6 +313,8 @@ class Engine(object):
try:
# call upload hook first
fd, path = af.validate_upload(context, af, field_name, fd)
except exception.GlareException:
raise
except Exception as e:
raise exception.BadRequest(message=str(e))
@ -447,6 +449,8 @@ class Engine(object):
# call download hook first
data, path = af.validate_download(
context, af, field_name, data)
except exception.GlareException:
raise
except Exception as e:
raise exception.BadRequest(message=str(e))

View File

@ -24,13 +24,17 @@ from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from glare.common import exception
from glare.common import store_api
from glare.common import utils
from glare.i18n import _
from glare.objects.meta import fields as glare_fields
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
INMEMORY_OBJECT_SIZE_LIMIT = 134217728 # 128 megabytes
def create_temporary_file(stream, suffix=''):
"""Create a temporary local file from a stream.
@ -111,12 +115,14 @@ def unpack_zip_archive_in_memory(context, af, field_name, fd):
:param fd: zip archive
:return io.BytesIO object - simple stream of in-memory bytes
"""
flobj = io.BytesIO(fd.read())
while True:
data = fd.read(65536)
if data == b'': # end of file reached
break
flobj.write(data)
flobj = io.BytesIO(fd.read(INMEMORY_OBJECT_SIZE_LIMIT))
# Raise exception if something left
data = fd.read(1)
if data:
msg = _("The zip you are trying to unpack is too big. "
"The system upper limit is %s") % INMEMORY_OBJECT_SIZE_LIMIT
raise exception.RequestEntityTooLarge(msg)
zip_ref = zipfile.ZipFile(flobj, 'r')
for name in zip_ref.namelist():