Merge "Add sha1, sha256 support for blobs"

This commit is contained in:
Jenkins 2016-09-26 11:32:15 +00:00 committed by Gerrit Code Review
commit a4706969d8
11 changed files with 159 additions and 100 deletions

View File

@ -481,14 +481,18 @@ class ResponseSerializer(api_versioning.VersionedResource,
def _serialize_blob(response, result):
data, meta = result['data'], result['meta']
response.headers['Content-Type'] = meta['content_type']
response.headers['Content-MD5'] = meta['checksum']
response.headers['Content-MD5'] = meta['md5']
response.headers['X-Openstack-Glare-Content-SHA1'] = meta['sha1']
response.headers['X-Openstack-Glare-Content-SHA256'] = meta['sha256']
response.headers['Content-Length'] = str(meta['size'])
response.app_iter = iter(data)
@staticmethod
def _serialize_location(response, result):
data, meta = result['data'], result['meta']
response.headers['Content-MD5'] = meta['checksum']
response.headers['Content-MD5'] = meta['md5']
response.headers['X-Openstack-Glare-Content-SHA1'] = meta['sha1']
response.headers['X-Openstack-Glare-Content-SHA256'] = meta['sha256']
response.location = data['url']
response.content_type = 'application/json'
response.status = http_client.MOVED_PERMANENTLY

View File

@ -60,13 +60,15 @@ def save_blob_to_store(blob_id, blob, context, max_size,
:param blob: blob file iterator
:param context: user context
    :param verifier: signature verifier
:return: tuple of values: (location_uri, size, checksum, metadata)
:return: tuple of values: (location_uri, size, checksums, metadata)
"""
(location, size, checksum, metadata) = backend.add_to_backend(
CONF, blob_id,
utils.LimitingReader(utils.CooperativeReader(blob), max_size),
0, store_type, context, verifier)
return location, size, checksum
data = utils.LimitingReader(utils.CooperativeReader(blob), max_size)
(location, size, md5checksum, metadata) = backend.add_to_backend(
CONF, blob_id, data, 0, store_type, context, verifier)
checksums = {"md5": md5checksum,
"sha1": data.sha1.hexdigest(),
"sha256": data.sha256.hexdigest()}
return location, size, checksums
@utils.error_handler(error_map)

View File

@ -29,6 +29,7 @@ except ImportError:
from eventlet.green import socket
import functools
import hashlib
import os
import re
import uuid
@ -213,6 +214,8 @@ class LimitingReader(object):
self.data = data
self.limit = limit
self.bytes_read = 0
self.sha1 = hashlib.sha1()
self.sha256 = hashlib.sha256()
def __iter__(self):
for chunk in self.data:
@ -224,7 +227,11 @@ class LimitingReader(object):
def read(self, i):
result = self.data.read(i)
self.bytes_read += len(result)
len_result = len(result)
self.bytes_read += len_result
if len_result:
self.sha1.update(result)
self.sha256.update(result)
if self.bytes_read > self.limit:
raise exception.RequestEntityTooLarge()
return result

View File

@ -99,7 +99,9 @@ def upgrade():
sa.Column('artifact_id', sa.String(36),
sa.ForeignKey('glare_artifacts.id'), nullable=False),
sa.Column('size', sa.BigInteger()),
sa.Column('checksum', sa.String(32)),
sa.Column('md5', sa.String(32)),
sa.Column('sha1', sa.String(40)),
sa.Column('sha256', sa.String(64)),
sa.Column('name', sa.String(255), nullable=False),
sa.Column('status', sa.String(32), nullable=False),
sa.Column('external', sa.Boolean()),

View File

@ -550,7 +550,7 @@ def _do_properties(artifact, new_properties):
def _update_blob_values(blob, values):
for elem in ('size', 'checksum', 'url', 'external', 'status',
for elem in ('size', 'md5', 'sha1', 'sha256', 'url', 'external', 'status',
'content_type'):
setattr(blob, elem, values[elem])
return blob

View File

@ -81,7 +81,9 @@ def _parse_blob_value(blob):
"url": blob.url,
"status": blob.status,
"external": blob.external,
"checksum": blob.checksum,
"md5": blob.md5,
"sha1": blob.sha1,
"sha256": blob.sha256,
"size": blob.size,
"content_type": blob.content_type
}
@ -222,7 +224,9 @@ class ArtifactBlob(BASE, ArtifactBase):
nullable=False)
name = Column(String(255), nullable=False)
size = Column(BigInteger().with_variant(Integer, "sqlite"))
checksum = Column(String(32))
md5 = Column(String(32))
sha1 = Column(String(40))
sha256 = Column(String(64))
external = Column(Boolean)
url = Column(Text)
status = Column(String(32), nullable=False)

View File

@ -819,20 +819,21 @@ class BaseArtifact(base.VersionedObject):
"upload passed for blob %(blob)s. "
"Start blob uploading to backend.",
{'artifact': af.id, 'blob': field_name})
blob = {'url': None, 'size': None, 'checksum': None,
'status': glare_fields.BlobFieldType.SAVING, 'external': False,
'content_type': content_type}
blob = {'url': None, 'size': None, 'md5': None, 'sha1': None,
'sha256': None, 'status': glare_fields.BlobFieldType.SAVING,
'external': False, 'content_type': content_type}
setattr(af, field_name, blob)
cls.db_api.update(
context, af.id, {field_name: getattr(af, field_name)})
blob_id = getattr(af, field_name)['id']
try:
location_uri, size, checksum = store_api.save_blob_to_store(
location_uri, size, checksums = store_api.save_blob_to_store(
blob_id, fd, context, cls._get_max_blob_size(field_name))
blob.update({'url': location_uri,
'status': glare_fields.BlobFieldType.ACTIVE,
'size': size, 'checksum': checksum})
'size': size})
blob.update(checksums)
setattr(af, field_name, blob)
af_upd = cls.db_api.update(
context, af.id, {field_name: getattr(af, field_name)})
@ -864,7 +865,9 @@ class BaseArtifact(base.VersionedObject):
if blob is None or blob['status'] != glare_fields.BlobFieldType.ACTIVE:
msg = _("%s is not ready for download") % field_name
raise exception.BadRequest(message=msg)
meta = {'checksum': blob.get('checksum'),
meta = {'md5': blob.get('md5'),
'sha1': blob.get('sha1'),
'sha256': blob.get('sha256'),
'external': blob.get('external')}
if blob['external']:
data = {'url': blob['url']}
@ -894,20 +897,21 @@ class BaseArtifact(base.VersionedObject):
"upload passed for blob dict %(blob)s with key %(key)s. "
"Start blob uploading to backend.",
{'artifact': af.id, 'blob': field_name, 'key': blob_key})
blob = {'url': None, 'size': None, 'checksum': None,
'status': glare_fields.BlobFieldType.SAVING, 'external': False,
'content_type': content_type}
blob = {'url': None, 'size': None, 'md5': None, 'sha1': None,
'sha256': None, 'status': glare_fields.BlobFieldType.SAVING,
'external': False, 'content_type': content_type}
blob_dict_attr = getattr(af, field_name)
blob_dict_attr[blob_key] = blob
cls.db_api.update(
context, af.id, {field_name: blob_dict_attr})
blob_id = getattr(af, field_name)[blob_key]['id']
try:
location_uri, size, checksum = store_api.save_blob_to_store(
location_uri, size, checksums = store_api.save_blob_to_store(
blob_id, fd, context, cls._get_max_blob_size(field_name))
blob.update({'url': location_uri,
'status': glare_fields.BlobFieldType.ACTIVE,
'size': size, 'checksum': checksum})
'size': size})
blob.update(checksums)
af_values = cls.db_api.update(
context, af.id, {field_name: blob_dict_attr})
LOG.info(_LI("Successfully finished blob upload for artifact "
@ -948,8 +952,11 @@ class BaseArtifact(base.VersionedObject):
"is not ready for download") % (blob_key, field_name)
LOG.error(msg)
raise exception.BadRequest(message=msg)
meta = {'checksum': blob.get('checksum'),
meta = {'md5': blob.get('md5'),
'sha1': blob.get('sha1'),
'sha256': blob.get('sha256'),
'external': blob.get('external')}
if blob['external']:
data = {'url': blob['url']}
else:
@ -973,17 +980,20 @@ class BaseArtifact(base.VersionedObject):
"passed for blob %(blob)s. Start location check for artifact"
".", {'artifact': af.id, 'blob': field_name})
blob = {'url': location, 'size': None, 'checksum': None,
'status': glare_fields.BlobFieldType.ACTIVE, 'external': True,
'content_type': None}
blob = {'url': location, 'size': None, 'md5': None, 'sha1': None,
'sha256': None, 'status': glare_fields.BlobFieldType.ACTIVE,
'external': True, 'content_type': None}
if blob_meta.get('checksum') is None:
msg = (_("Incorrect blob metadata %(meta)s. Checksum is required "
md5 = blob_meta.pop("md5", None)
if md5 is None:
msg = (_("Incorrect blob metadata %(meta)s. MD5 must be specified "
"for external location in artifact blob %(field_name)."),
{"meta": str(blob_meta), "field_name": field_name})
raise exception.BadRequest(msg)
else:
blob['checksum'] = blob_meta['checksum']
blob["md5"] = md5
blob["sha1"] = blob_meta.pop("sha1", None)
blob["sha256"] = blob_meta.pop("sha256", None)
setattr(af, field_name, blob)
updated_af = cls.db_api.update(
@ -999,19 +1009,22 @@ class BaseArtifact(base.VersionedObject):
blob_key, location, blob_meta):
cls._validate_upload_allowed(context, af, field_name, blob_key)
blob = {'url': location, 'size': None, 'checksum': None,
'status': glare_fields.BlobFieldType.ACTIVE, 'external': True,
'content_type': None}
blob = {'url': location, 'size': None, 'md5': None, 'sha1': None,
'sha256': None, 'status': glare_fields.BlobFieldType.ACTIVE,
'external': True, 'content_type': None}
if blob_meta.get('checksum') is None:
msg = (_("Incorrect blob metadata %(meta)s. Checksum is required "
md5 = blob_meta.pop("md5", None)
if md5 is None:
msg = (_("Incorrect blob metadata %(meta)s. MD5 must be specified "
"for external location in artifact blob "
"%(field_name)[%(blob_key)s]."),
{"meta": str(blob_meta), "field_name": field_name,
"blob_key": str(blob_key)})
raise exception.BadRequest(msg)
else:
blob['checksum'] = blob_meta['checksum']
blob["md5"] = md5
blob["sha1"] = blob_meta.pop("sha1", None)
blob["sha256"] = blob_meta.pop("sha256", None)
blob_dict_attr = getattr(af, field_name)
blob_dict_attr[blob_key] = blob
@ -1103,14 +1116,16 @@ class BaseArtifact(base.VersionedObject):
'type': ['object', 'null'],
'properties': {
'size': {'type': ['number', 'null']},
'checksum': {'type': ['string', 'null']},
'md5': {'type': ['string', 'null']},
'sha1': {'type': ['string', 'null']},
'sha256': {'type': ['string', 'null']},
'external': {'type': 'boolean'},
'status': {'type': 'string',
'enum': list(
glare_fields.BlobFieldType.BLOB_STATUS)},
'content_type': {'type': 'string'},
},
'required': ['size', 'checksum', 'external', 'status',
'required': ['size', 'md5', 'sha1', 'sha256', 'external', 'status',
'content_type'],
'additionalProperties': False
}

View File

@ -63,15 +63,17 @@ class BlobFieldType(fields.FieldType):
'url': {'type': ['string', 'null'], 'format': 'uri',
'max_length': 255},
'size': {'type': ['number', 'null']},
'checksum': {'type': ['string', 'null']},
'md5': {'type': ['string', 'null']},
'sha1': {'type': ['string', 'null']},
'sha256': {'type': ['string', 'null']},
'external': {'type': 'boolean'},
'id': {'type': 'string'},
'status': {'type': 'string',
'enum': list(BLOB_STATUS)},
'content_type': {'type': ['string', 'null']},
},
'required': ['url', 'size', 'checksum', 'external', 'status',
'id', 'content_type']
'required': ['url', 'size', 'md5', 'sha1', 'sha256', 'external',
'status', 'id', 'content_type']
}
@staticmethod

View File

@ -13,6 +13,7 @@
# License for the specific language governing permissions and limitations
# under the License.
import hashlib
import uuid
from oslo_serialization import jsonutils
@ -904,7 +905,7 @@ class TestBlobs(TestArtifact):
self.delete(url)
def test_blob_download(self):
data = 'data'
data = 'some_arbitrary_testing_data'
art = self.create_artifact(data={'name': 'test_af',
'version': '0.0.1'})
url = '/sample_artifact/%s' % art['id']
@ -922,6 +923,12 @@ class TestBlobs(TestArtifact):
art = self.put(url=url + '/blob', data=data, status=200,
headers=headers)
self.assertEqual('active', art['blob']['status'])
md5 = hashlib.md5(data.encode('UTF-8')).hexdigest()
sha1 = hashlib.sha1(data.encode('UTF-8')).hexdigest()
sha256 = hashlib.sha256(data.encode('UTF-8')).hexdigest()
self.assertEqual(md5, art['blob']['md5'])
self.assertEqual(sha1, art['blob']['sha1'])
self.assertEqual(sha256, art['blob']['sha256'])
blob_data = self.get(url=url + '/blob')
self.assertEqual(data, blob_data)
@ -951,7 +958,7 @@ class TestBlobs(TestArtifact):
url = '/sample_artifact/%s' % art['id']
body = jsonutils.dumps(
{'url': 'https://www.apache.org/licenses/LICENSE-2.0.txt',
'checksum': "fake"})
'md5': "fake", 'sha1': "fake_sha", "sha256": "fake_sha256"})
headers = {'Content-Type':
'application/vnd+openstack.glare-custom-location+json'}
self.put(url=url + '/blob', data=body,
@ -966,7 +973,9 @@ class TestBlobs(TestArtifact):
# Get the artifact, blob property should have status 'active'
art = self.get(url=url, status=200)
self.assertEqual('active', art['blob']['status'])
self.assertIsNotNone(art['blob']['checksum'])
self.assertEqual('fake', art['blob']['md5'])
self.assertEqual('fake_sha', art['blob']['sha1'])
self.assertEqual('fake_sha256', art['blob']['sha256'])
self.assertIsNone(art['blob']['size'])
self.assertIsNone(art['blob']['content_type'])
self.assertEqual('https://www.apache.org/licenses/LICENSE-2.0.txt',
@ -981,7 +990,7 @@ class TestBlobs(TestArtifact):
# Get the artifact, blob property should have status 'active'
art = self.get(url=url, status=200)
self.assertEqual('active', art['dict_of_blobs']['blob']['status'])
self.assertIsNotNone(art['dict_of_blobs']['blob']['checksum'])
self.assertIsNotNone(art['dict_of_blobs']['blob']['md5'])
self.assertIsNone(art['dict_of_blobs']['blob']['size'])
self.assertIsNone(art['dict_of_blobs']['blob']['content_type'])
self.assertEqual('https://www.apache.org/licenses/LICENSE-2.0.txt',
@ -1112,7 +1121,7 @@ class TestArtifactOps(TestArtifact):
self.create_artifact(data={"name": "test_af", "version": "0.0.2",
"blob": {
'url': None, 'size': None,
'checksum': None, 'status': 'saving',
'md5': None, 'status': 'saving',
'external': False}}, status=400)
# check that anonymous user cannot create artifact
self.set_user("anonymous")
@ -1432,7 +1441,7 @@ class TestUpdate(TestArtifact):
"op": "replace",
"path": "/blob",
"value": {"name": "test_af", "version": "0.0.2",
"blob": {'url': None, 'size': None, 'checksum': None,
"blob": {'url': None, 'size': None, 'md5': None,
'status': 'saving', 'external': False}}}]
self.patch(url, blob_attr, 400)
blob_attr[0]["path"] = "/dict_of_blobs/-"

View File

@ -63,8 +63,9 @@ fixture_base_props = {
u'icon': {u'additionalProperties': False,
u'description': u'Artifact icon.',
u'filter_ops': [],
u'properties': {u'checksum': {u'type': [u'string',
u'null']},
u'properties': {u'md5': {u'type': [u'string', u'null']},
u'sha1': {u'type': [u'string', u'null']},
u'sha256': {u'type': [u'string', u'null']},
u'content_type': {u'type': u'string'},
u'external': {u'type': u'boolean'},
u'size': {u'type': [u'number',
@ -74,7 +75,7 @@ fixture_base_props = {
u'pending_delete'],
u'type': u'string'}},
u'required': [u'size',
u'checksum',
u'md5', u'sha1', u'sha256',
u'external',
u'status',
u'content_type'],
@ -248,9 +249,10 @@ fixtures = {
u'description': u'I am Blob',
u'filter_ops': [],
u'mutable': True,
u'properties': {u'checksum': {
u'type': [u'string',
u'null']},
u'properties': {
u'md5': {u'type': [u'string', u'null']},
u'sha1': {u'type': [u'string', u'null']},
u'sha256': {u'type': [u'string', u'null']},
u'content_type': {
u'type': u'string'},
u'external': {
@ -265,7 +267,7 @@ fixtures = {
u'pending_delete'],
u'type': u'string'}},
u'required': [u'size',
u'checksum',
u'md5', u'sha1', u'sha256',
u'external',
u'status',
u'content_type'],
@ -297,9 +299,10 @@ fixtures = {
u'dict_of_blobs': {
u'additionalProperties': {
u'additionalProperties': False,
u'properties': {u'checksum': {
u'type': [u'string',
u'null']},
u'properties': {
u'md5': {u'type': [u'string', u'null']},
u'sha1': {u'type': [u'string', u'null']},
u'sha256': {u'type': [u'string', u'null']},
u'content_type': {
u'type': u'string'},
u'external': {
@ -315,7 +318,7 @@ fixtures = {
u'pending_delete'],
u'type': u'string'}},
u'required': [u'size',
u'checksum',
u'md5', u'sha1', u'sha256',
u'external',
u'status',
u'content_type'],
@ -450,9 +453,10 @@ fixtures = {
u'small_blob': {u'additionalProperties': False,
u'filter_ops': [],
u'mutable': True,
u'properties': {u'checksum': {
u'type': [u'string',
u'null']},
u'properties': {
u'md5': {u'type': [u'string', u'null']},
u'sha1': {u'type': [u'string', u'null']},
u'sha256': {u'type': [u'string', u'null']},
u'content_type': {
u'type': u'string'},
u'external': {
@ -468,7 +472,7 @@ fixtures = {
u'pending_delete'],
u'type': u'string'}},
u'required': [u'size',
u'checksum',
u'md5', u'sha1', u'sha256',
u'external',
u'status',
u'content_type'],
@ -546,19 +550,21 @@ fixtures = {
u'additionalProperties': False,
u'description': u'TOSCA template body.',
u'filter_ops': [],
u'properties': {u'checksum': {u'type': [u'string',
u'null']},
u'content_type': {
u'type': u'string'},
u'external': {u'type': u'boolean'},
u'size': {u'type': [u'number',
u'null']},
u'status': {u'enum': [u'saving',
u'active',
u'pending_delete'],
u'type': u'string'}},
u'properties': {
u'md5': {u'type': [u'string', u'null']},
u'sha1': {u'type': [u'string', u'null']},
u'sha256': {u'type': [u'string', u'null']},
u'content_type': {
u'type': u'string'},
u'external': {u'type': u'boolean'},
u'size': {u'type': [u'number',
u'null']},
u'status': {u'enum': [u'saving',
u'active',
u'pending_delete'],
u'type': u'string'}},
u'required': [u'size',
u'checksum',
u'md5', u'sha1', u'sha256',
u'external',
u'status',
u'content_type'],
@ -645,8 +651,9 @@ fixtures = {
u'additionalProperties': False,
u'description': u'Murano Package binary.',
u'filter_ops': [],
u'properties': {u'checksum': {u'type': [u'string',
u'null']},
u'properties': {u'md5': {u'type': [u'string', u'null']},
u'sha1': {u'type': [u'string', u'null']},
u'sha256': {u'type': [u'string', u'null']},
u'content_type': {u'type': u'string'},
u'external': {u'type': u'boolean'},
u'size': {u'type': [u'number',
@ -656,7 +663,7 @@ fixtures = {
u'pending_delete'],
u'type': u'string'}},
u'required': [u'size',
u'checksum',
u'md5', u'sha1', u'sha256',
u'external',
u'status',
u'content_type'],
@ -738,8 +745,9 @@ fixtures = {
u'description': u'Image binary.',
u'filter_ops': [],
u'properties': {
u'checksum': {u'type': [u'string',
u'null']},
u'md5': {u'type': [u'string', u'null']},
u'sha1': {u'type': [u'string', u'null']},
u'sha256': {u'type': [u'string', u'null']},
u'content_type': {u'type': u'string'},
u'external': {u'type': u'boolean'},
u'size': {u'type': [u'number',
@ -749,7 +757,7 @@ fixtures = {
u'pending_delete'],
u'type': u'string'}},
u'required': [u'size',
u'checksum',
u'md5', u'sha1', u'sha256',
u'external',
u'status',
u'content_type'],
@ -877,8 +885,9 @@ fixtures = {
u'additionalProperties':
{u'additionalProperties': False,
u'properties': {
u'checksum': {u'type': [u'string',
u'null']},
u'md5': {u'type': [u'string', u'null']},
u'sha1': {u'type': [u'string', u'null']},
u'sha256': {u'type': [u'string', u'null']},
u'content_type': {
u'type': u'string'},
u'external': {u'type': u'boolean'},
@ -889,7 +898,7 @@ fixtures = {
u'pending_delete'],
u'type': u'string'}},
u'required': [u'size',
u'checksum',
u'md5', u'sha1', u'sha256',
u'external',
u'status',
u'content_type'],
@ -907,19 +916,21 @@ fixtures = {
u'additionalProperties': False,
u'description': u'Heat template body.',
u'filter_ops': [],
u'properties': {u'checksum': {u'type': [u'string',
u'null']},
u'content_type': {
u'type': u'string'},
u'external': {u'type': u'boolean'},
u'size': {u'type': [u'number',
u'null']},
u'status': {u'enum': [u'saving',
u'active',
u'pending_delete'],
u'type': u'string'}},
u'properties': {
u'md5': {u'type': [u'string', u'null']},
u'sha1': {u'type': [u'string', u'null']},
u'sha256': {u'type': [u'string', u'null']},
u'content_type': {
u'type': u'string'},
u'external': {u'type': u'boolean'},
u'size': {u'type': [u'number',
u'null']},
u'status': {u'enum': [u'saving',
u'active',
u'pending_delete'],
u'type': u'string'}},
u'required': [u'size',
u'checksum',
u'md5', u'sha1', u'sha256',
u'external',
u'status',
u'content_type'],
@ -937,8 +948,9 @@ fixtures = {
u'additionalProperties': False,
u'description': u'Heat Environment text body.',
u'filter_ops': [],
u'properties': {u'checksum': {u'type': [u'string',
u'null']},
u'properties': {u'md5': {u'type': [u'string', u'null']},
u'sha1': {u'type': [u'string', u'null']},
u'sha256': {u'type': [u'string', u'null']},
u'content_type': {u'type': u'string'},
u'external': {u'type': u'boolean'},
u'size': {u'type': [u'number',
@ -948,7 +960,7 @@ fixtures = {
u'pending_delete'],
u'type': u'string'}},
u'required': [u'size',
u'checksum',
u'md5', u'sha1', u'sha256',
u'external',
u'status',
u'content_type'],

View File

@ -190,7 +190,9 @@ class GlareMigrationsCheckers(object):
blobs_columns = ['id',
'artifact_id',
'size',
'checksum',
'md5',
'sha1',
'sha256',
'name',
'key_name',
'external',