Verify for duplicate location+metadata instances

Glance allows users to have duplicated locations for the same image. This could lead to some confusion and a bad user experience. This patch introduces such a check in the insert method and makes both extend and append use it to add new locations. The patch also adds a new exception - DuplicateLocation - and handles it in v2 of the API. When `DuplicateLocation` is raised, a BadRequest is returned to the client. A new test for this functionality was added as well. Change-Id: I32400d467408e3d56bbbdbb3379357f2ee812e56 Closes-bug: 1251244
This commit is contained in:
parent
ccb82b776a
commit
b3849a9cc9
|
@ -60,6 +60,8 @@ class ImagesController(object):
|
|||
image = image_factory.new_image(extra_properties=extra_properties,
|
||||
tags=tags, **image)
|
||||
image_repo.add(image)
|
||||
except exception.DuplicateLocation as dup:
|
||||
raise webob.exc.HTTPBadRequest(explanation=unicode(dup))
|
||||
except exception.Forbidden as e:
|
||||
raise webob.exc.HTTPForbidden(explanation=unicode(e))
|
||||
except exception.ImagePropertyLimitExceeded as e:
|
||||
|
@ -227,7 +229,7 @@ class ImagesController(object):
|
|||
image.locations = value
|
||||
if image.status == 'queued':
|
||||
image.status = 'active'
|
||||
except exception.BadStoreUri as bse:
|
||||
except (exception.BadStoreUri, exception.DuplicateLocation) as bse:
|
||||
raise webob.exc.HTTPBadRequest(explanation=unicode(bse))
|
||||
except ValueError as ve: # update image status failed.
|
||||
raise webob.exc.HTTPBadRequest(explanation=unicode(ve))
|
||||
|
@ -242,7 +244,7 @@ class ImagesController(object):
|
|||
image.locations.insert(pos, value)
|
||||
if image.status == 'queued':
|
||||
image.status = 'active'
|
||||
except exception.BadStoreUri as bse:
|
||||
except (exception.BadStoreUri, exception.DuplicateLocation) as bse:
|
||||
raise webob.exc.HTTPBadRequest(explanation=unicode(bse))
|
||||
except ValueError as ve: # update image status failed.
|
||||
raise webob.exc.HTTPBadRequest(explanation=unicode(ve))
|
||||
|
|
|
@ -317,3 +317,7 @@ class InvalidTaskType(TaskException, Invalid):
|
|||
class InvalidTaskStatusTransition(TaskException, Invalid):
    # Raised when a task is asked to move between two statuses that the
    # task state machine does not allow.
    message = _("Status transition from %(cur_status)s to"
                " %(new_status)s is not allowed")
|
||||
|
||||
|
||||
class DuplicateLocation(Duplicate):
    # Raised when a location being added to an image is already present
    # (dict equality, i.e. same url and metadata). The v2 API maps this
    # to an HTTP 400 BadRequest.
    message = _("The location %(location)s already exists")
|
||||
|
|
|
@ -0,0 +1,62 @@
|
|||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright 2013 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import sqlalchemy
|
||||
from sqlalchemy import func
|
||||
from sqlalchemy import orm
|
||||
from sqlalchemy import Table
|
||||
|
||||
|
||||
def upgrade(migrate_engine):
    """Remove duplicated (image_id, value, meta_data) rows from
    image_locations, keeping exactly one live row per group.

    The previous version deleted only a single row per duplicate group,
    which left duplicates behind whenever a group contained more than
    two identical rows; this version deletes count - 1 rows per group.
    """
    meta = sqlalchemy.schema.MetaData(migrate_engine)
    image_locations = Table('image_locations', meta, autoload=True)

    session = orm.sessionmaker(bind=migrate_engine)()

    # NOTE(flaper87): Lets group by
    # image_id, location and metadata.
    grp = [image_locations.c.image_id,
           image_locations.c.value,
           image_locations.c.meta_data]

    # NOTE(flaper87): Get all duplicated rows, carrying the group size
    # along so we know how many copies must go.
    # (== False is intentional: it builds a SQL expression, `is False`
    # would not.)
    qry = (session.query(*(grp + [func.count()]))
           .filter(image_locations.c.deleted == False)
           .group_by(*grp)
           .having(func.count() > 1))

    for row in qry:
        # NOTE(flaper87): Not the fastest way to do it.
        # This is the best way to do it since sqlalchemy
        # has a bug around delete + limit.
        # Delete one row at a time until a single live row remains
        # for this (image_id, value, meta_data) group.
        for _ in range(row[3] - 1):
            s = (sqlalchemy.sql.select([image_locations.c.id])
                 .where(image_locations.c.image_id == row[0])
                 .where(image_locations.c.value == row[1])
                 .where(image_locations.c.meta_data == row[2])
                 .where(image_locations.c.deleted == False)
                 .limit(1).execute())
            stmt = (image_locations.delete()
                    .where(image_locations.c.id == s.first()[0]))
            stmt.execute()

    session.close()
|
||||
|
||||
|
||||
def downgrade(migrate_engine):
    """Intentional no-op.

    The upgrade deletes duplicated image_locations rows; that data is
    gone, so there is nothing meaningful to restore on downgrade.
    """
    pass
|
|
@ -434,8 +434,13 @@ class ImageFactoryProxy(glance.domain.proxy.ImageFactory):
|
|||
proxy_kwargs=proxy_kwargs)
|
||||
|
||||
def new_image(self, **kwargs):
    """Create a new image, validating and de-duplicating its locations.

    :param kwargs: forwarded to the wrapped factory; the optional
        'locations' entry is a list of location dicts with a 'url' key.
    :raises exception.DuplicateLocation: if the same location dict
        (url + metadata) appears more than once in 'locations'.
    """
    locations = kwargs.get('locations', [])
    for l in locations:
        _check_image_location(self.context, self.store_api, l)

        # A location listed more than once in the request is a user
        # error; reject the whole request instead of storing duplicates.
        if locations.count(l) > 1:
            raise exception.DuplicateLocation(location=l['url'])

    return super(ImageFactoryProxy, self).new_image(**kwargs)
|
||||
|
||||
|
||||
|
@ -454,24 +459,27 @@ class StoreLocations(collections.MutableSequence):
|
|||
self.value = list(value)
|
||||
|
||||
def append(self, location):
    """Add *location* at the end of the list.

    Delegates to insert() so store validation and the duplicate
    location check live in a single place.
    """
    # NOTE(flaper87): Insert this
    # location at the very end of
    # the value list.
    self.insert(len(self.value), location)
|
||||
|
||||
def extend(self, other):
    """Append every location in *other* to this list.

    :param other: another StoreLocations instance or any iterable of
        location dicts.
    :raises exception.DuplicateLocation: via append() -> insert() when
        a location is already present.
    """
    if isinstance(other, StoreLocations):
        locations = other.value
    else:
        locations = list(other)

    # Route each element through append() so every addition gets the
    # same validation and duplicate detection as a single insert.
    for location in locations:
        self.append(location)
|
||||
|
||||
def insert(self, i, location):
    """Insert *location* at index *i*.

    Central validation point: append(), extend() and __iadd__() all
    funnel through this method.

    :raises exception.DuplicateLocation: if an equal location dict is
        already in the list.
    """
    _check_image_location(self.image_proxy.context,
                          self.image_proxy.store_api, location)

    if location in self.value:
        raise exception.DuplicateLocation(location=location['url'])

    self.value.insert(i, location)
|
||||
|
||||
def pop(self, i=-1):
|
||||
|
@ -540,15 +548,7 @@ class StoreLocations(collections.MutableSequence):
|
|||
self.value.__delitem__(i)
|
||||
|
||||
def __iadd__(self, other):
    """In-place add: delegate to extend() so location validation and
    the duplicate-location check run for every element."""
    self.extend(other)
    return self
|
||||
|
||||
def __contains__(self, location):
|
||||
|
@ -594,6 +594,10 @@ def _locations_proxy(target, attr):
|
|||
for location in value:
|
||||
_check_image_location(self.context, self.store_api,
|
||||
location)
|
||||
|
||||
if value.count(location) > 1:
|
||||
raise exception.DuplicateLocation(location=location['url'])
|
||||
|
||||
return setattr(getattr(self, target), attr, list(value))
|
||||
|
||||
def del_attr(self):
|
||||
|
|
|
@ -631,7 +631,6 @@ class TestMigrations(utils.BaseTestCase):
|
|||
self.assertEqual(len(rows), 1)
|
||||
|
||||
row = rows[0]
|
||||
print(repr(dict(row)))
|
||||
self.assertTrue(uuidutils.is_uuid_like(row['id']))
|
||||
|
||||
uuids[name] = row['id']
|
||||
|
@ -1044,3 +1043,50 @@ class TestMigrations(utils.BaseTestCase):
|
|||
def _post_downgrade_030(self, engine):
|
||||
self.assertRaises(sqlalchemy.exc.NoSuchTableError,
|
||||
get_table, engine, 'tasks')
|
||||
|
||||
def _pre_upgrade_031(self, engine):
    """Seed one image with duplicated locations before migration 031.

    Inserts five image_locations rows for a single image; two pairs of
    them are exact (value, meta_data) duplicates, so _check_031 can
    verify the migration collapses each pair to a single row.

    :returns: the id of the seeded image.
    """
    images = get_table(engine, 'images')
    now = datetime.datetime.now()
    image_id = 'fake_031_id'
    temp = dict(deleted=False,
                created_at=now,
                updated_at=now,
                status='active',
                is_public=True,
                min_disk=0,
                min_ram=0,
                id=image_id)
    images.insert().values(temp).execute()

    locations_table = get_table(engine, 'image_locations')
    # NOTE(review): 'data' appears unused below — confirm before removal.
    data = image_id
    # (value, meta_data) pairs; rows 2-3 and 4-5 are duplicates.
    locations = [
        ('file://ab', '{"a": "yo yo"}'),
        ('file://ab', '{}'),
        ('file://ab', '{}'),
        ('file://ab1', '{"a": "that one, please"}'),
        ('file://ab1', '{"a": "that one, please"}'),
    ]
    temp = dict(deleted=False,
                created_at=now,
                updated_at=now,
                image_id=image_id)

    for location, metadata in locations:
        temp.update(value=location, meta_data=metadata)
        locations_table.insert().values(temp).execute()
    return image_id
|
||||
|
||||
def _check_031(self, engine, image_id):
    """After migration 031 the duplicates seeded by _pre_upgrade_031
    must be collapsed: exactly one row per distinct (value, meta_data).
    """
    locations_table = get_table(engine, 'image_locations')
    result = locations_table.select()\
        .where(locations_table.c.image_id == image_id)\
        .execute().fetchall()

    locations = set([(x['value'], x['meta_data']) for x in result])
    # The three distinct pairs that should survive the migration.
    actual_locations = set([
        ('file://ab', '{"a": "yo yo"}'),
        ('file://ab', '{}'),
        ('file://ab1', '{"a": "that one, please"}'),
    ])
    # Empty symmetric difference <=> the two sets are identical.
    self.assertFalse(actual_locations.symmetric_difference(locations))
|
||||
|
|
|
@ -1261,6 +1261,34 @@ class TestImagesController(base.IsolatedUnitTest):
|
|||
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
|
||||
request, UUID2, changes)
|
||||
|
||||
def test_update_add_duplicate_locations(self):
    """Adding the same location to an image twice must yield HTTP 400."""
    new_location = {'url': '%s/fake_location' % BASE_URI, 'metadata': {}}
    request = unit_test_utils.get_fake_request()
    changes = [{'op': 'add', 'path': ['locations', '-'],
                'value': new_location}]
    # First add succeeds and lands at the end of the locations list.
    output = self.controller.update(request, UUID1, changes)
    self.assertEqual(output.image_id, UUID1)
    self.assertEqual(len(output.locations), 2)
    self.assertEqual(new_location, output.locations[1])

    # Replaying the identical 'add' would duplicate the location, so
    # the controller must reject it with BadRequest.
    self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
                      request, UUID1, changes)
|
||||
|
||||
def test_update_replace_duplicate_locations(self):
    """Replacing locations with a list containing duplicates must 400."""
    request = unit_test_utils.get_fake_request()
    # Clearing the locations is fine and sends the image back to 'queued'.
    changes = [{'op': 'replace', 'path': ['locations'], 'value': []}]
    output = self.controller.update(request, UUID1, changes)
    self.assertEqual(output.image_id, UUID1)
    self.assertEqual(len(output.locations), 0)
    self.assertEqual(output.status, 'queued')

    # A replacement list with the same location twice is rejected.
    new_location = {'url': '%s/fake_location' % BASE_URI, 'metadata': {}}
    changes = [{'op': 'replace', 'path': ['locations'],
                'value': [new_location, new_location]}]

    self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
                      request, UUID1, changes)
|
||||
|
||||
def test_update_remove_base_property(self):
|
||||
self.db.image_update(None, UUID1, {'properties': {'foo': 'bar'}})
|
||||
request = unit_test_utils.get_fake_request()
|
||||
|
|
Loading…
Reference in New Issue