diff --git a/glance/db/sqlalchemy/migrate_repo/versions/031_remove_duplicated_locations.py b/glance/db/sqlalchemy/migrate_repo/versions/031_remove_duplicated_locations.py
index e48492d38c..5d2a1c1626 100644
--- a/glance/db/sqlalchemy/migrate_repo/versions/031_remove_duplicated_locations.py
+++ b/glance/db/sqlalchemy/migrate_repo/versions/031_remove_duplicated_locations.py
@@ -18,6 +18,7 @@
 import sqlalchemy
 from sqlalchemy import func
 from sqlalchemy import orm
+from sqlalchemy import sql
 from sqlalchemy import Table
 
 
@@ -25,35 +26,60 @@ def upgrade(migrate_engine):
     meta = sqlalchemy.schema.MetaData(migrate_engine)
     image_locations = Table('image_locations', meta, autoload=True)
 
-    session = orm.sessionmaker(bind=migrate_engine)()
+    if migrate_engine.name == "ibm_db_sa":
+        il = orm.aliased(image_locations)
+        # NOTE(wenchma): Get all duplicated rows.
+        qry = (sql.select([il.c.id])
+               .where(il.c.id > (sql.select([func.min(image_locations.c.id)])
+                      .where(image_locations.c.image_id == il.c.image_id)
+                      .where(image_locations.c.value == il.c.value)
+                      .where(image_locations.c.meta_data == il.c.meta_data)
+                      .where(image_locations.c.deleted == False)))
+               .where((sql.select([func.count(image_locations.c.id)])
+                      .where(image_locations.c.image_id == il.c.image_id)
+                      .where(image_locations.c.value == il.c.value)
+                      .where(image_locations.c.meta_data == il.c.meta_data)
+                      .where(image_locations.c.deleted == False)) > 1)
+               .where(il.c.deleted == False)
+               .execute()
+               )
 
-    # NOTE(flaper87): Lets group by
-    # image_id, location and metadata.
-    grp = [image_locations.c.image_id,
-           image_locations.c.value,
-           image_locations.c.meta_data]
+        for row in qry:
+            stmt = (image_locations.delete()
+                    .where(image_locations.c.id == row[0])
+                    .where(image_locations.c.deleted == False))
+            stmt.execute()
 
-    # NOTE(flaper87): Get all duplicated rows
-    qry = (session.query(*grp)
-           .filter(image_locations.c.deleted == False)
-           .group_by(*grp)
-           .having(func.count() > 1))
+    else:
+        session = orm.sessionmaker(bind=migrate_engine)()
 
-    for row in qry:
-        # NOTE(flaper87): Not the fastest way to do it.
-        # This is the best way to do it since sqlalchemy
-        # has a bug around delete + limit.
-        s = (sqlalchemy.sql.select([image_locations.c.id])
-             .where(image_locations.c.image_id == row[0])
-             .where(image_locations.c.value == row[1])
-             .where(image_locations.c.meta_data == row[2])
-             .where(image_locations.c.deleted == False)
-             .limit(1).execute())
-        stmt = (image_locations.delete()
-                .where(image_locations.c.id == s.first()[0]))
-        stmt.execute()
+        # NOTE(flaper87): Lets group by
+        # image_id, location and metadata.
+        grp = [image_locations.c.image_id,
+               image_locations.c.value,
+               image_locations.c.meta_data]
 
-    session.close()
+        # NOTE(flaper87): Get all duplicated rows
+        qry = (session.query(*grp)
+               .filter(image_locations.c.deleted == False)
+               .group_by(*grp)
+               .having(func.count() > 1))
+
+        for row in qry:
+            # NOTE(flaper87): Not the fastest way to do it.
+            # This is the best way to do it since sqlalchemy
+            # has a bug around delete + limit.
+            s = (sql.select([image_locations.c.id])
+                 .where(image_locations.c.image_id == row[0])
+                 .where(image_locations.c.value == row[1])
+                 .where(image_locations.c.meta_data == row[2])
+                 .where(image_locations.c.deleted == False)
+                 .limit(1).execute())
+            stmt = (image_locations.delete()
+                    .where(image_locations.c.id == s.first()[0]))
+            stmt.execute()
+
+        session.close()
 
 
 def downgrade(migrate_engine):
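
For reference (not part of the patch): on ibm_db_sa the upgrade deduplicates
image_locations by keeping the lowest id per (image_id, value, meta_data)
group and deleting the rest through a correlated subquery, instead of the
ORM group_by/having query used for the other backends, which also avoids the
DELETE + LIMIT workaround mentioned in the flaper87 comments. A minimal
sketch of that dedup pattern follows; it uses an in-memory SQLite table with
a reduced, hypothetical column set purely for illustration, not the real
Glance schema or the migration's SQLAlchemy API.

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript("""
        CREATE TABLE image_locations (
            id INTEGER PRIMARY KEY,
            image_id TEXT,
            value TEXT,
            meta_data TEXT,
            deleted INTEGER DEFAULT 0
        );
        INSERT INTO image_locations (image_id, value, meta_data) VALUES
            ('img-1', 'file:///a', '{}'),
            ('img-1', 'file:///a', '{}'),
            ('img-2', 'file:///b', '{}');
    """)

    # Delete every non-deleted row that is not the lowest-id member of its
    # (image_id, value, meta_data) group -- the same effect as the patch's
    # DB2-specific select-then-delete loop.
    conn.execute("""
        DELETE FROM image_locations
         WHERE deleted = 0
           AND id > (SELECT MIN(i2.id)
                       FROM image_locations i2
                      WHERE i2.image_id = image_locations.image_id
                        AND i2.value = image_locations.value
                        AND i2.meta_data = image_locations.meta_data
                        AND i2.deleted = 0)
    """)

    # One row per duplicate group survives:
    # [(1, 'img-1', 'file:///a'), (3, 'img-2', 'file:///b')]
    print(conn.execute(
        "SELECT id, image_id, value FROM image_locations").fetchall())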