From 4d43ab797dd46c4183fa685ac6b9501caa1115d4 Mon Sep 17 00:00:00 2001 From: Wen Cheng Ma Date: Tue, 3 Dec 2013 14:05:33 +0800 Subject: [PATCH] Fix 031 migration failure on DB2 The 031 migration script failed on DB2 because DB2 doesn't support GROUP BY with a CLOB data type. Add a condition to detect a DB2 connection; if detected, run an alternative SQL query that DB2 supports. Fixes bug #1256816 Change-Id: I7e3a8cd89edd91c167c619365ab863ec53787f8f --- .../031_remove_duplicated_locations.py | 76 +++++++++++++------ 1 file changed, 51 insertions(+), 25 deletions(-) diff --git a/glance/db/sqlalchemy/migrate_repo/versions/031_remove_duplicated_locations.py b/glance/db/sqlalchemy/migrate_repo/versions/031_remove_duplicated_locations.py index e48492d38c..5d2a1c1626 100644 --- a/glance/db/sqlalchemy/migrate_repo/versions/031_remove_duplicated_locations.py +++ b/glance/db/sqlalchemy/migrate_repo/versions/031_remove_duplicated_locations.py @@ -18,6 +18,7 @@ import sqlalchemy from sqlalchemy import func from sqlalchemy import orm +from sqlalchemy import sql from sqlalchemy import Table @@ -25,35 +26,60 @@ def upgrade(migrate_engine): meta = sqlalchemy.schema.MetaData(migrate_engine) image_locations = Table('image_locations', meta, autoload=True) - session = orm.sessionmaker(bind=migrate_engine)() + if migrate_engine.name == "ibm_db_sa": + il = orm.aliased(image_locations) + # NOTE(wenchma): Get all duplicated rows.
+ qry = (sql.select([il.c.id]) + .where(il.c.id > (sql.select([func.min(image_locations.c.id)]) + .where(image_locations.c.image_id == il.c.image_id) + .where(image_locations.c.value == il.c.value) + .where(image_locations.c.meta_data == il.c.meta_data) + .where(image_locations.c.deleted == False))) + .where((sql.select([func.count(image_locations.c.id)]) + .where(image_locations.c.image_id == il.c.image_id) + .where(image_locations.c.value == il.c.value) + .where(image_locations.c.meta_data == il.c.meta_data) + .where(image_locations.c.deleted == False)) > 1) + .where(il.c.deleted == False) + .execute() + ) - # NOTE(flaper87): Lets group by - # image_id, location and metadata. - grp = [image_locations.c.image_id, - image_locations.c.value, - image_locations.c.meta_data] + for row in qry: + stmt = (image_locations.delete() + .where(image_locations.c.id == row[0]) + .where(image_locations.c.deleted == False)) + stmt.execute() - # NOTE(flaper87): Get all duplicated rows - qry = (session.query(*grp) - .filter(image_locations.c.deleted == False) - .group_by(*grp) - .having(func.count() > 1)) + else: + session = orm.sessionmaker(bind=migrate_engine)() - for row in qry: - # NOTE(flaper87): Not the fastest way to do it. - # This is the best way to do it since sqlalchemy - # has a bug around delete + limit. - s = (sqlalchemy.sql.select([image_locations.c.id]) - .where(image_locations.c.image_id == row[0]) - .where(image_locations.c.value == row[1]) - .where(image_locations.c.meta_data == row[2]) - .where(image_locations.c.deleted == False) - .limit(1).execute()) - stmt = (image_locations.delete() - .where(image_locations.c.id == s.first()[0])) - stmt.execute() + # NOTE(flaper87): Lets group by + # image_id, location and metadata. 
+ grp = [image_locations.c.image_id, + image_locations.c.value, + image_locations.c.meta_data] - session.close() + # NOTE(flaper87): Get all duplicated rows + qry = (session.query(*grp) + .filter(image_locations.c.deleted == False) + .group_by(*grp) + .having(func.count() > 1)) + + for row in qry: + # NOTE(flaper87): Not the fastest way to do it. + # This is the best way to do it since sqlalchemy + # has a bug around delete + limit. + s = (sql.select([image_locations.c.id]) + .where(image_locations.c.image_id == row[0]) + .where(image_locations.c.value == row[1]) + .where(image_locations.c.meta_data == row[2]) + .where(image_locations.c.deleted == False) + .limit(1).execute()) + stmt = (image_locations.delete() + .where(image_locations.c.id == s.first()[0])) + stmt.execute() + + session.close() def downgrade(migrate_engine):