Drop the downgrade functions from migration scripts

Downgrading an OpenStack deployment is no longer supported, so drop the
downgrade() function from each of the database migration scripts.

Change-Id: I785a19c312488eca531ffc2c4f4401562a283324
Closes-Bug: #1434103
Authored by liu-sheng on 2015-08-11 09:17:38 +08:00, committed by liusheng
parent 9815d941f4
commit c00f2c0723
44 changed files with 13 additions and 847 deletions
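For context, a minimal sketch of the shape these scripts take after this change: an upgrade() step with no downgrade() counterpart. This is illustrative only — the table and column names are made up, not taken from Ceilometer — and it assumes sqlalchemy-migrate's changeset extensions (Table.create_column and friends) are loaded by the migration runner, as they are for the scripts below.

from sqlalchemy import Column, MetaData, String, Table


def upgrade(migrate_engine):
    # Bind the schema metadata to the live engine and reflect the
    # existing table definition.
    meta = MetaData(bind=migrate_engine)
    example = Table('example', meta, autoload=True)
    # Add the new column; Table.create_column() comes from
    # sqlalchemy-migrate's changeset extension.
    note = Column('note', String(255))
    example.create_column(note)

# No downgrade() is defined; rolling back now means restoring a
# pre-upgrade database backup rather than reversing the schema change.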

@ -93,11 +93,3 @@ def upgrade(migrate_engine):
tables = [meter, project, resource, user, source, sourceassoc]
for i in sorted(tables, key=lambda table: table.fullname):
i.create()
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
for name in ['source', 'sourceassoc', 'project',
'user', 'resource', 'meter']:
t = Table(name, meta, autoload=True)
t.drop()

@ -21,10 +21,3 @@ def upgrade(migrate_engine):
meter = Table('meter', meta, autoload=True)
duration = Column('counter_duration', Integer)
meter.drop_column(duration)
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
meter = Table('meter', meta, autoload=True)
duration = Column('counter_duration', Integer)
meter.create_column(duration)

@ -27,19 +27,3 @@ def upgrade(migrate_engine):
migrate_engine.execute(
"ALTER DATABASE %s DEFAULT CHARACTER SET utf8" %
migrate_engine.url.database)
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
if migrate_engine.name == "mysql":
tables = ['meter', 'user', 'resource', 'project', 'source',
'sourceassoc']
migrate_engine.execute("SET foreign_key_checks = 0")
for table in tables:
migrate_engine.execute(
"ALTER TABLE %s CONVERT TO CHARACTER SET latin1" % table)
migrate_engine.execute("SET foreign_key_checks = 1")
migrate_engine.execute(
"ALTER DATABASE %s DEFAULT CHARACTER SET latin1" %
migrate_engine.url.database)

@ -21,10 +21,3 @@ def upgrade(migrate_engine):
meter = Table('meter', meta, autoload=True)
unit = Column('counter_unit', String(255))
meter.create_column(unit)
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
meter = Table('meter', meta, autoload=True)
unit = Column('counter_unit', String(255))
meter.drop_column(unit)

@ -12,7 +12,6 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import timeutils
from sqlalchemy import MetaData, Table, Column, DateTime
@ -23,13 +22,3 @@ def upgrade(migrate_engine):
resource.drop_column(timestamp)
received_timestamp = Column('received_timestamp', DateTime)
resource.drop_column(received_timestamp)
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
resource = Table('resource', meta, autoload=True)
timestamp = Column('timestamp', DateTime)
resource.create_column(timestamp)
received_timestamp = Column('received_timestamp', DateTime,
default=timeutils.utcnow)
resource.create_column(received_timestamp)

@ -15,7 +15,6 @@
# under the License.
from sqlalchemy import Float
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import Table
@ -24,9 +23,3 @@ def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
meter = Table('meter', meta, autoload=True)
meter.c.counter_volume.alter(type=Float(53))
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
meter = Table('meter', meta, autoload=True)
meter.c.counter_volume.alter(type=Integer)

@ -44,9 +44,3 @@ def upgrade(migrate_engine):
mysql_engine='InnoDB',
mysql_charset='utf8')
alarm.create()
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
alarm = Table('alarm', meta, autoload=True)
alarm.drop()

@ -58,10 +58,3 @@ def upgrade(migrate_engine):
mysql_charset='utf8',
)
trait.create()
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
for name in ['trait', 'event', 'unique_name']:
t = Table(name, meta, autoload=True)
t.drop()

@ -22,11 +22,3 @@ def upgrade(migrate_engine):
name.c.key.alter(type=VARCHAR(length=255))
trait = Table('trait', meta, autoload=True)
trait.c.t_string.alter(type=VARCHAR(length=255))
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
name = Table('unique_name', meta, autoload=True)
name.c.key.alter(type=VARCHAR(length=32))
trait = Table('trait', meta, autoload=True)
trait.c.t_string.alter(type=VARCHAR(length=32))

@ -21,11 +21,3 @@ def upgrade(migrate_engine):
index = sa.Index('idx_meter_rid_cname', meter.c.resource_id,
meter.c.counter_name)
index.create(bind=migrate_engine)
def downgrade(migrate_engine):
meta = sa.MetaData(bind=migrate_engine)
meter = sa.Table('meter', meta, autoload=True)
index = sa.Index('idx_meter_rid_cname', meter.c.resource_id,
meter.c.counter_name)
index.drop(bind=migrate_engine)

@ -35,14 +35,3 @@ def upgrade(migrate_engine):
for index_name, column in indexes:
index = Index(index_name, table.c[column])
index.drop()
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
load_tables = dict((table_name, Table(table_name, meta, autoload=True))
for table_name in INDEXES.keys())
for table_name, indexes in INDEXES.items():
table = load_tables[table_name]
for index_name, column in indexes:
index = Index(index_name, table.c[column])
index.create()

@ -56,22 +56,3 @@ def upgrade(migrate_engine):
params['name'] = "_".join(('fk', table_name, column))
fkey = ForeignKeyConstraint(**params)
fkey.create()
def downgrade(migrate_engine):
if migrate_engine.name == 'sqlite':
return
meta = MetaData(bind=migrate_engine)
load_tables = dict((table_name, Table(table_name, meta, autoload=True))
for table_name in TABLES)
for table_name, indexes in INDEXES.items():
table = load_tables[table_name]
for column, ref_table_name, ref_column_name in indexes:
ref_table = load_tables[ref_table_name]
params = {'columns': [table.c[column]],
'refcolumns': [ref_table.c[ref_column_name]]}
if migrate_engine.name == 'mysql':
params['name'] = "_".join(('fk', table_name, column))
with migrate_engine.begin():
fkey = ForeignKeyConstraint(**params)
fkey.drop()

@ -21,10 +21,3 @@ def upgrade(migrate_engine):
meta.bind = migrate_engine
alarm = Table('alarm', meta, autoload=True)
alarm.c.counter_name.alter(name='meter_name')
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
alarm = Table('alarm', meta, autoload=True)
alarm.c.meter_name.alter(name='counter_name')

@ -42,14 +42,3 @@ def upgrade(migrate_engine):
execute())
# Leave the Trait, makes the rollback easier and won't really hurt anyone.
def downgrade(migrate_engine):
meta = sqlalchemy.MetaData(bind=migrate_engine)
event = sqlalchemy.Table('event', meta, autoload=True)
message_id = sqlalchemy.Column('message_id', sqlalchemy.String(50))
cons = UniqueConstraint('message_id', table=event)
cons.drop()
index = sqlalchemy.Index('idx_event_message_id', event.c.message_id)
index.drop(bind=migrate_engine)
event.drop_column(message_id)

@ -61,10 +61,3 @@ def upgrade(migrate_engine):
refcolumns=[user.c.id])]
for fkey in fkeys:
fkey.create(engine=migrate_engine)
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
alarm_history = Table('alarm_history', meta, autoload=True)
alarm_history.drop()

@ -16,7 +16,7 @@
import json
from sqlalchemy import MetaData, Table, Column, Index
from sqlalchemy import String, Float, Integer, Text
from sqlalchemy import String, Text
def upgrade(migrate_engine):
@ -58,49 +58,3 @@ def upgrade(migrate_engine):
table.c.evaluation_periods.drop()
table.c.period.drop()
table.c.matching_metadata.drop()
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
table = Table('alarm', meta, autoload=True)
columns = [
Column('meter_name', String(255)),
Column('comparison_operator', String(2)),
Column('threshold', Float),
Column('statistic', String(255)),
Column('evaluation_periods', Integer),
Column('period', Integer),
Column('matching_metadata', Text())
]
for c in columns:
c.create(table)
for row in table.select().execute().fetchall():
if row.type != 'threshold':
# note: type insupported in previous version
table.delete().where(table.c.id == row.id).execute()
else:
rule = json.loads(row.rule)
values = {'comparison_operator': rule['comparison_operator'],
'threshold': float(rule['threshold']),
'statistic': rule['statistic'],
'evaluation_periods': int(rule['evaluation_periods']),
'period': int(rule['period']),
'meter_name': int(rule['mater_name']),
'matching_metadata': {}}
# note: op are ignored because previous format don't support it
for q in rule['query']:
values['matching_metadata'][q['field']] = q['value']
values['matching_metadata'] = json.dumps(
values['matching_metadata'])
table.update().where(table.c.id == row.id
).values(**values).execute()
index = Index('ix_alarm_counter_name', table.c.meter_name)
index.create(bind=migrate_engine)
table.c.type.drop()
table.c.rule.drop()

@ -52,11 +52,3 @@ def upgrade(migrate_engine):
_convert_data_type(meter, _col, sa.DateTime(),
models.PreciseTimestamp(),
pk_attr='id', index=True)
def downgrade(migrate_engine):
if migrate_engine.name == 'mysql':
meta = sa.MetaData(bind=migrate_engine)
meter = sa.Table('meter', meta, autoload=True)
_convert_data_type(meter, _col, models.PreciseTimestamp(),
sa.DateTime(), pk_attr='id', index=True)

@ -16,7 +16,6 @@
# under the License.
from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import Text
@ -25,9 +24,3 @@ def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
resource = Table('resource', meta, autoload=True)
resource.c.resource_metadata.alter(type=Text)
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
resource = Table('resource', meta, autoload=True)
resource.c.resource_metadata.alter(type=String(5000))

@ -16,7 +16,6 @@
# under the License.
from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import Text
@ -25,9 +24,3 @@ def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
alm_hist = Table('alarm_history', meta, autoload=True)
alm_hist.c.detail.alter(type=Text)
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
alm_hist = Table('alarm_history', meta, autoload=True)
alm_hist.c.detail.alter(type=String(255))

@ -66,10 +66,3 @@ def upgrade(migrate_engine):
ins = meta_tables['metadata_float'].insert()
if ins is not None:
ins.values(id=meter_id, meta_key=key, value=v).execute()
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
for t in tables:
table = Table(t[0], meta, autoload=True)
table.drop()

@ -75,45 +75,3 @@ def upgrade(migrate_engine):
fkey.create()
event.c.unique_name_id.drop()
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
event_type = Table('event_type', meta, autoload=True)
event = Table('event', meta, autoload=True)
unique_name = Table('unique_name', meta, autoload=True)
# Re-insert the event type table records into the old
# unique_name table.
conn = migrate_engine.connect()
sql = ("INSERT INTO unique_name "
"SELECT event_type.id, event_type.desc FROM event_type")
conn.execute(sql)
conn.close()
# Drop the foreign key constraint to event_type, drop the
# event_type table, rename the event.event_type column to
# event.unique_name, and re-add the old foreign
# key constraint
params = {'columns': [event.c.event_type_id],
'refcolumns': [event_type.c.id]}
if migrate_engine.name == 'mysql':
params['name'] = "_".join(('fk', 'event_type', 'id'))
fkey = ForeignKeyConstraint(**params)
fkey.drop()
event_type.drop()
Column('unique_name_id', Integer).create(event)
# Move data from event_type_id column to unique_name_id column
query = select([event.c.id, event.c.event_type_id])
for key, value in migration.paged(query):
(event.update().where(event.c.id == key).
values({"unique_name_id": value}).execute())
event.c.event_type_id.drop()
params = {'columns': [event.c.unique_name_id],
'refcolumns': [unique_name.c.id]}
if migrate_engine.name == 'mysql':
params['name'] = 'event_ibfk_1'
fkey = ForeignKeyConstraint(**params)
fkey.create()

@ -1,20 +0,0 @@
ALTER TABLE event RENAME TO event_orig;
INSERT INTO unique_name
SELECT et.id, et.desc
FROM event_type et;
CREATE TABLE event (
id INTEGER PRIMARY KEY ASC,
generated FLOAT NOT NULL,
message_id VARCHAR(50) UNIQUE,
unique_name_id INTEGER NOT NULL,
FOREIGN KEY (unique_name_id) REFERENCES unique_name (id)
);
INSERT INTO event
SELECT id, generated, message_id, event_type_id
FROM event_orig;
DROP TABLE event_orig;
DROP TABLE event_type;

@ -16,7 +16,6 @@
# under the License.
from sqlalchemy import BigInteger
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import Table
@ -25,9 +24,3 @@ def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
resource = Table('metadata_int', meta, autoload=True)
resource.c.value.alter(type=BigInteger)
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
resource = Table('metadata_int', meta, autoload=True)
resource.c.value.alter(type=Integer)

@ -84,68 +84,3 @@ def upgrade(migrate_engine):
# Finally, drop the unique_name table - we don't need it
# anymore.
unique_name.drop()
def downgrade(migrate_engine):
meta = MetaData(migrate_engine)
unique_name = Table(
'unique_name', meta,
Column('id', Integer, primary_key=True),
Column('key', String(255), unique=True),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
trait_type = Table('trait_type', meta, autoload=True)
trait = Table('trait', meta, autoload=True)
# Create the UniqueName table, drop the foreign key constraint
# to trait_type, drop the trait_type table, rename the
# trait.trait_type column to traitname, re-add the dtype to
# the trait table, and re-add the old foreign key constraint
unique_name.create(migrate_engine)
conn = migrate_engine.connect()
sql = ("INSERT INTO unique_name "
"SELECT trait_type.id, trait_type.desc "
"FROM trait_type")
conn.execute(sql)
conn.close()
params = {'columns': [trait.c.trait_type_id],
'refcolumns': [trait_type.c.id]}
if migrate_engine.name == 'mysql':
params['name'] = "_".join(('fk', 'trait_type', 'id'))
fkey = ForeignKeyConstraint(**params)
fkey.drop()
# Re-create the old columns in trait
Column("name_id", Integer).create(trait)
Column("t_type", Integer).create(trait)
# copy data from trait_type.data_type into trait.t_type
query = select([trait_type.c.id, trait_type.c.data_type])
for key, value in migration.paged(query):
(trait.update().where(trait.c.trait_type_id == key).
values({"t_type": value}).execute())
# Move data from name_id column into trait_type_id column
query = select([trait.c.id, trait.c.trait_type_id])
for key, value in migration.paged(query):
(trait.update().where(trait.c.id == key).
values({"name_id": value}).execute())
# Add a foreign key to the unique_name table
params = {'columns': [trait.c.name_id],
'refcolumns': [unique_name.c.id]}
if migrate_engine.name == 'mysql':
params['name'] = 'trait_ibfk_1'
fkey = ForeignKeyConstraint(**params)
fkey.create()
trait.c.trait_type_id.drop()
# Drop the trait_type table. It isn't needed anymore
trait_type.drop()

@ -1,29 +0,0 @@
ALTER TABLE trait RENAME TO trait_orig;
INSERT INTO unique_name
SELECT id, 'desc'
FROM trait_type;
CREATE TABLE trait (
id INTEGER PRIMARY KEY ASC,
t_string VARCHAR(255),
t_int INTEGER,
t_float FLOAT,
t_datetime FLOAT,
t_type INTEGER NOT NULL,
name_id INTEGER NOT NULL,
event_id INTEGER NOT NULL,
FOREIGN KEY (name_id) REFERENCES unique_name (id)
FOREIGN KEY (event_id) REFERENCES event (id)
);
INSERT INTO trait
SELECT t.id, t.t_string, t.t_int, t.t_float, t.t_datetime
tt.data_type, t.trait_type_id, t.event_id
FROM trait_orig t
INNER JOIN trait_type tt
ON tt.id = t.trait_type_id
DROP TABLE trait_orig;
DROP TABLE trait_type;

@ -54,14 +54,3 @@ def upgrade(migrate_engine):
_convert_data_type(trait, 't_datetime', sa.Float(),
models.PreciseTimestamp(),
pk_attr='id', index=True)
def downgrade(migrate_engine):
if migrate_engine.name == 'mysql':
meta = sa.MetaData(bind=migrate_engine)
event = sa.Table('event', meta, autoload=True)
_convert_data_type(event, 'generated', models.PreciseTimestamp(),
sa.Float(), pk_attr='id', index=True)
trait = sa.Table('trait', meta, autoload=True)
_convert_data_type(trait, 't_datetime', models.PreciseTimestamp(),
sa.Float(), pk_attr='id', index=True)

@ -56,13 +56,3 @@ def upgrade(migrate_engine):
_convert_data_type(table, col_name, sa.DateTime(),
models.PreciseTimestamp(),
pk_attr=pk_attr)
def downgrade(migrate_engine):
if migrate_engine.name == 'mysql':
meta = sa.MetaData(bind=migrate_engine)
for table_name, col_name, pk_attr in to_convert:
table = sa.Table(table_name, meta, autoload=True)
_convert_data_type(table, col_name, models.PreciseTimestamp(),
sa.DateTime(),
pk_attr=pk_attr)

@ -22,11 +22,3 @@ def upgrade(migrate_engine):
metadata_float.c.value.alter(type=Float(53))
trait = Table('trait', meta, autoload=True)
trait.c.t_float.alter(type=Float(53))
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
metadata_float = Table('metadata_float', meta, autoload=True)
metadata_float.c.value.alter(type=Float())
trait = Table('trait', meta, autoload=True)
trait.c.t_string.alter(type=Float())

@ -15,7 +15,6 @@
from migrate import ForeignKeyConstraint
from sqlalchemy import MetaData, Table
from sqlalchemy.sql.expression import select
TABLES = ['user', 'project', 'alarm']
@ -41,26 +40,3 @@ def upgrade(migrate_engine):
params['name'] = "_".join(('fk', table_name, column))
fkey = ForeignKeyConstraint(**params)
fkey.drop()
def downgrade(migrate_engine):
if migrate_engine.name == 'sqlite':
return
meta = MetaData(bind=migrate_engine)
load_tables = dict((table_name, Table(table_name, meta, autoload=True))
for table_name in TABLES)
for table_name, indexes in INDEXES.items():
table = load_tables[table_name]
for column, ref_table_name, ref_column_name in indexes:
ref_table = load_tables[ref_table_name]
subq = select([getattr(ref_table.c, ref_column_name)])
sql_del = table.delete().where(
~ getattr(table.c, column).in_(subq))
migrate_engine.execute(sql_del)
params = {'columns': [table.c[column]],
'refcolumns': [ref_table.c[ref_column_name]]}
if migrate_engine.name == 'mysql':
params['name'] = "_".join(('fk', table_name, column))
fkey = ForeignKeyConstraint(**params)
fkey.create()

@ -73,7 +73,7 @@ def index_cleanup(meta, table_name, uniq_name, columns,
sa.Index(uniq_name, *cols).drop()
def change_uniq(meta, downgrade=False):
def change_uniq(meta):
uniq_name = 'uniq_sourceassoc0meter_id0user_id'
columns = ('meter_id', 'user_id')
@ -94,12 +94,9 @@ def change_uniq(meta, downgrade=False):
'refcolumns': [user.c.id],
'name': 'fk_sourceassoc_user_id'}
migrate.ForeignKeyConstraint(**params).drop()
if downgrade:
migrate.UniqueConstraint(*columns, table=sourceassoc,
name=uniq_name).drop()
else:
migrate.UniqueConstraint(*columns, table=sourceassoc,
name=uniq_name).create()
migrate.UniqueConstraint(*columns, table=sourceassoc,
name=uniq_name).create()
if meta.bind.engine.name == 'mysql':
params = {'columns': [sourceassoc.c.meter_id],
'refcolumns': [meter.c.id],
@ -139,22 +136,3 @@ def upgrade(migrate_engine):
change_uniq(meta)
delete_alembic(meta)
def downgrade(migrate_engine):
meta = sa.MetaData(bind=migrate_engine)
change_uniq(meta, downgrade=True)
for (engine_names, table_name, uniq_name,
columns, create, uniq, limited) in INDEXES:
if migrate_engine.name in engine_names:
index_cleanup(meta, table_name, uniq_name,
columns, not create, uniq, limited)
meter = sa.Table('meter', meta, autoload=True)
meter.c.resource_metadata.alter(type=sa.String(5000))
alarm = sa.Table('alarm', meta, autoload=True)
repeat_act = sa.Column('repeat_actions', sa.Boolean)
alarm.drop_column(repeat_act)

@ -22,9 +22,3 @@ def upgrade(migrate_engine):
c = sqlalchemy.Column('recorded_at', models.PreciseTimestamp(),
default=timeutils.utcnow)
meter.create_column(c)
def downgrade(migrate_engine):
meta = sqlalchemy.MetaData(bind=migrate_engine)
meter = sqlalchemy.Table('meter', meta, autoload=True)
meter.drop_column('recorded_at')

@ -15,7 +15,7 @@ import migrate
import sqlalchemy as sa
def _handle_meter_indices(meta, downgrade=False):
def _handle_meter_indices(meta):
if meta.bind.engine.name == 'sqlite':
return
@ -51,17 +51,17 @@ def _handle_meter_indices(meta, downgrade=False):
for fk in fk_params:
params = fk[0]
if meta.bind.engine.name == 'mysql':
params['name'] = fk[2] if downgrade else fk[1]
params['name'] = fk[1]
migrate.ForeignKeyConstraint(**params).drop()
for meter_ix, sample_ix in indices:
meter_ix.create() if downgrade else meter_ix.drop()
sample_ix.drop() if downgrade else sample_ix.create()
meter_ix.drop()
sample_ix.create()
for fk in fk_params:
params = fk[0]
if meta.bind.engine.name == 'mysql':
params['name'] = fk[1] if downgrade else fk[2]
params['name'] = fk[2]
migrate.ForeignKeyConstraint(**params).create()
@ -108,17 +108,3 @@ def upgrade(migrate_engine):
# re-bind metadata to pick up alter name change
meta = sa.MetaData(bind=migrate_engine)
_alter_sourceassoc(meta, 'sample', 'idx_ss', True)
def downgrade(migrate_engine):
meta = sa.MetaData(bind=migrate_engine)
sample = sa.Table('sample', meta, autoload=True)
sample.rename('meter')
_handle_meter_indices(meta, True)
_alter_sourceassoc(meta, 'sample', 'idx_ss')
sourceassoc = sa.Table('sourceassoc', meta, autoload=True)
sourceassoc.c.sample_id.alter(name='meter_id')
meta = sa.MetaData(bind=migrate_engine)
_alter_sourceassoc(meta, 'meter', 'idx_sm', True)

@ -17,7 +17,7 @@ import migrate
import sqlalchemy as sa
def handle_rid_index(meta, downgrade=False):
def handle_rid_index(meta):
if meta.bind.engine.name == 'sqlite':
return
@ -33,7 +33,7 @@ def handle_rid_index(meta, downgrade=False):
index = sa.Index('idx_sample_rid_cname', sample.c.resource_id,
sample.c.counter_name)
index.create() if downgrade else index.drop()
index.drop()
if meta.bind.engine.name == 'mysql':
migrate.ForeignKeyConstraint(**params).create()
@ -85,31 +85,3 @@ def upgrade(migrate_engine):
sample.c.counter_type.drop()
sample.c.counter_unit.drop()
sample.c.counter_volume.alter(name='volume')
def downgrade(migrate_engine):
meta = sa.MetaData(bind=migrate_engine)
sample = sa.Table('sample', meta, autoload=True)
sample.c.volume.alter(name='counter_volume')
sa.Column('counter_name', sa.String(255)).create(sample)
sa.Column('counter_type', sa.String(255)).create(sample)
sa.Column('counter_unit', sa.String(255)).create(sample)
meter = sa.Table('meter', meta, autoload=True)
for row in sa.select([meter]).execute():
(sample.update().
where(sample.c.meter_id == row['id']).
values({sample.c.counter_name: row['name'],
sample.c.counter_type: row['type'],
sample.c.counter_unit: row['unit']}).execute())
params = {'columns': [sample.c.meter_id],
'refcolumns': [meter.c.id]}
if migrate_engine.name == 'mysql':
params['name'] = 'fk_sample_meter_id'
if migrate_engine.name != 'sqlite':
migrate.ForeignKeyConstraint(**params).drop()
handle_rid_index(meta, True)
sample.c.meter_id.drop()
meter.drop()

@ -21,10 +21,3 @@ def upgrade(migrate_engine):
alarm = Table('alarm', meta, autoload=True)
time_constraints = Column('time_constraints', Text())
alarm.create_column(time_constraints)
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
alarm = Table('alarm', meta, autoload=True)
time_constraints = Column('time_constraints', Text())
alarm.drop_column(time_constraints)

@ -19,9 +19,3 @@ def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
users = Table('alarm', meta, autoload=True)
users.c.id.alter(name='alarm_id')
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
users = Table('alarm', meta, autoload=True)
users.c.alarm_id.alter(name='id')

@ -31,7 +31,3 @@ def upgrade(migrate_engine):
drop(checkfirst=True))
except sa.exc.NoSuchTableError:
pass
def downgrade(migrate_engine):
pass

@ -13,7 +13,6 @@
from migrate import ForeignKeyConstraint, UniqueConstraint
import sqlalchemy as sa
from sqlalchemy.sql.expression import select, Alias, not_, and_, exists
TABLES_DROP = ['user', 'project']
TABLES = ['user', 'project', 'sourceassoc', 'sample',
@ -83,93 +82,3 @@ def upgrade(migrate_engine):
for table_name in TABLES_DROP:
sa.Table(table_name, meta, autoload=True).drop()
def downgrade(migrate_engine):
meta = sa.MetaData(bind=migrate_engine)
user = sa.Table(
'user', meta,
sa.Column('id', sa.String(255), primary_key=True),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
project = sa.Table(
'project', meta,
sa.Column('id', sa.String(255), primary_key=True),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
tables = [project, user]
for i in sorted(tables):
i.create()
load_tables = dict((table_name, sa.Table(table_name, meta, autoload=True))
for table_name in TABLES)
# Restore the sourceassoc columns and constraints
sourceassoc = load_tables['sourceassoc']
user_id = sa.Column('user_id', sa.String(255))
project_id = sa.Column('project_id', sa.String(255))
sourceassoc.create_column(user_id)
sourceassoc.create_column(project_id)
if migrate_engine.name != 'sqlite':
params = {}
if migrate_engine.name == "mysql":
params = {'name': 'uniq_sourceassoc0sample_id0user_id'}
uc = UniqueConstraint('sample_id', 'user_id',
table=sourceassoc, **params)
uc.create()
params = {}
if migrate_engine.name == "mysql":
params = {'name': 'uniq_sourceassoc0sample_id'}
uc = UniqueConstraint('sample_id', table=sourceassoc, **params)
uc.drop()
idx = sa.Index('idx_su', sourceassoc.c.source_id,
sourceassoc.c.user_id)
idx.create(bind=migrate_engine)
idx = sa.Index('idx_sp', sourceassoc.c.source_id,
sourceassoc.c.project_id)
idx.create(bind=migrate_engine)
# Restore the user/project columns and constraints in all tables
for table_name, indexes in INDEXES.items():
table = load_tables[table_name]
for column, ref_table_name, ref_column_name in indexes:
ref_table = load_tables[ref_table_name]
c = getattr(Alias(table).c, column)
except_q = exists([getattr(ref_table.c, ref_column_name)])
q = select([c]).where(and_(c != sa.null(), not_(except_q)))
q = q.distinct()
# NOTE(sileht): workaround for
# https://bitbucket.org/zzzeek/sqlalchemy/
# issue/3044/insert-from-select-union_all
q.select = lambda: q
sql_ins = ref_table.insert().from_select(
[getattr(ref_table.c, ref_column_name)], q)
try:
migrate_engine.execute(sql_ins)
except TypeError:
# from select is empty
pass
if migrate_engine.name != 'sqlite':
params = {'columns': [table.c[column]],
'refcolumns': [ref_table.c[ref_column_name]]}
if (migrate_engine.name == "mysql" and
table_name != 'alarm_history'):
params['name'] = "_".join(('fk', table_name, column))
elif (migrate_engine.name == "postgresql" and
table_name == "sample"):
# The fk contains the old table name
params['name'] = "_".join(('meter', column, 'fkey'))
fkey = ForeignKeyConstraint(**params)
fkey.create()

@ -10,8 +10,7 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate import ForeignKeyConstraint, UniqueConstraint
from migrate import ForeignKeyConstraint
import sqlalchemy as sa
from ceilometer.storage.sqlalchemy import migration
@ -67,99 +66,3 @@ def upgrade(migrate_engine):
# drop tables
for table_name in DROP_TABLES:
sa.Table(table_name, meta, autoload=True).drop()
def downgrade(migrate_engine):
meta = sa.MetaData(bind=migrate_engine)
sample = sa.Table('sample', meta, autoload=True)
resource = sa.Table(
'resource', meta,
sa.Column('id', sa.String(255), primary_key=True),
sa.Column('resource_metadata', sa.Text),
sa.Column('user_id', sa.String(255)),
sa.Column('project_id', sa.String(255)),
sa.Index('ix_resource_project_id', 'project_id'),
sa.Index('ix_resource_user_id', 'user_id'),
sa.Index('resource_user_id_project_id_key', 'user_id', 'project_id'),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
resource.create()
source = sa.Table(
'source', meta,
sa.Column('id', sa.String(255), primary_key=True),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
source.create()
sourceassoc = sa.Table(
'sourceassoc', meta,
sa.Column('sample_id', sa.Integer),
sa.Column('resource_id', sa.String(255)),
sa.Column('source_id', sa.String(255)),
sa.Index('idx_sr', 'source_id', 'resource_id'),
sa.Index('idx_ss', 'source_id', 'sample_id'),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
sourceassoc.create()
params = {}
if migrate_engine.name == "mysql":
params = {'name': 'uniq_sourceassoc0sample_id'}
uc = UniqueConstraint('sample_id', table=sourceassoc, **params)
uc.create()
# reload source/resource tables.
# NOTE(gordc): fine to skip non-id attributes in table since
# they're constantly updated and not used by api
for table, col in [(source, 'source_id'), (resource, 'resource_id')]:
q = sa.select([sample.c[col]]).distinct()
# NOTE(sileht): workaround for
# https://bitbucket.org/zzzeek/sqlalchemy/
# issue/3044/insert-from-select-union_all
q.select = lambda: q
sql_ins = table.insert().from_select([table.c.id], q)
try:
migrate_engine.execute(sql_ins)
except TypeError:
# from select is empty
pass
# reload sourceassoc tables
for ref_col, col in [('id', 'sample_id'), ('resource_id', 'resource_id')]:
q = sa.select([sample.c.source_id, sample.c[ref_col]]).distinct()
q.select = lambda: q
sql_ins = sourceassoc.insert().from_select([sourceassoc.c.source_id,
sourceassoc.c[col]], q)
try:
migrate_engine.execute(sql_ins)
except TypeError:
# from select is empty
pass
sample.c.source_id.drop()
load_tables = dict((table_name, sa.Table(table_name, meta,
autoload=True))
for table_name in TABLES)
# add foreign keys
if migrate_engine.name != 'sqlite':
for table_name, indexes in INDEXES.items():
table = load_tables[table_name]
for column, ref_table_name, ref_column_name in indexes:
ref_table = load_tables[ref_table_name]
params = {'columns': [table.c[column]],
'refcolumns': [ref_table.c[ref_column_name]]}
fk_table_name = table_name
if migrate_engine.name == "mysql":
params['name'] = "_".join(('fk', fk_table_name, column))
elif (migrate_engine.name == "postgresql" and
table_name == 'sample'):
# fk was not renamed in script 030
params['name'] = "_".join(('meter', column, 'fkey'))
fkey = ForeignKeyConstraint(**params)
fkey.create()

@ -42,13 +42,3 @@ def upgrade(migrate_engine):
if index.name in ['fk_sample_meter_id', 'fk_sample_resource_id']:
index.drop()
sa.Index('ix_sample_meter_id', sample.c.meter_id).create()
def downgrade(migrate_engine):
if migrate_engine.name == 'sqlite':
return
meta = sa.MetaData(bind=migrate_engine)
sample = sa.Table('sample', meta, autoload=True)
with ForeignKeyHandle(meta):
sa.Index('ix_sample_meter_id', sample.c.meter_id).drop()

@ -129,43 +129,3 @@ def upgrade(migrate_engine):
_migrate_meta_tables(meta, sample.c.id, sample.c.resource_id,
'resource.internal_id')
def downgrade(migrate_engine):
meta = sa.MetaData(bind=migrate_engine)
sample = sa.Table('sample', meta, autoload=True)
_migrate_meta_tables(meta, sample.c.resource_id, sample.c.id,
'sample.id')
sa.Column('user_id', sa.String(255)).create(sample)
sa.Column('project_id', sa.String(255)).create(sample)
sa.Column('source_id', sa.String(255)).create(sample)
sa.Column('resource_id_new', sa.String(255)).create(sample)
sa.Column('resource_metadata', sa.Text).create(sample)
resource = sa.Table('resource', meta, autoload=True)
for row in sa.select([resource]).execute():
(sample.update().
where(sample.c.resource_id == row['internal_id']).
values({sample.c.resource_id_new: row['resource_id'],
sample.c.user_id: row['user_id'],
sample.c.project_id: row['project_id'],
sample.c.source_id: row['source_id'],
sample.c.resource_metadata: row['resource_metadata']})
.execute())
if migrate_engine.name != 'sqlite':
params = {'columns': [sample.c.resource_id],
'refcolumns': [resource.c.internal_id]}
if migrate_engine.name == 'mysql':
params['name'] = 'fk_sample_resource_internal_id'
migrate.ForeignKeyConstraint(**params).drop()
sa.Index('ix_sample_meter_id_resource_id',
sample.c.meter_id, sample.c.resource_id).drop()
sa.Index('ix_sample_resource_id', sample.c.resource_id).drop()
sa.Index('ix_sample_user_id', sample.c.user_id).create()
sa.Index('ix_sample_project_id', sample.c.project_id).create()
resource.drop()
sample.c.resource_id.drop()
sample.c.resource_id_new.alter(name='resource_id')

@ -54,14 +54,3 @@ def upgrade(migrate_engine):
_convert_data_type(trait, 't_datetime', sa.Float(),
models.PreciseTimestamp(),
pk_attr='id', index=True)
def downgrade(migrate_engine):
if migrate_engine.name == 'postgresql':
meta = sa.MetaData(bind=migrate_engine)
event = sa.Table('event', meta, autoload=True)
_convert_data_type(event, 'generated', models.PreciseTimestamp(),
sa.Float(), pk_attr='id', index=True)
trait = sa.Table('trait', meta, autoload=True)
_convert_data_type(trait, 't_datetime', models.PreciseTimestamp(),
sa.Float(), pk_attr='id', index=True)

@ -22,10 +22,3 @@ def upgrade(migrate_engine):
alarm = Table('alarm', meta, autoload=True)
severity = Column('severity', String(50))
alarm.create_column(severity)
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
alarm = Table('alarm', meta, autoload=True)
severity = Column('severity', String(50))
alarm.drop_column(severity)

@ -52,56 +52,3 @@ def upgrade(migrate_engine):
['event_id', 'key', 'value'], query).execute()
trait.drop()
trait_type.drop()
def downgrade(migrate_engine):
meta = sa.MetaData(bind=migrate_engine)
event = sa.Table('event', meta, autoload=True)
trait_type = sa.Table(
'trait_type', meta,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('desc', sa.String(255)),
sa.Column('data_type', sa.Integer),
sa.UniqueConstraint('desc', 'data_type', name='tt_unique'),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
trait_type.create()
trait = sa.Table(
'trait', meta,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('trait_type_id', sa.Integer, sa.ForeignKey(trait_type.c.id)),
sa.Column('event_id', sa.Integer, sa.ForeignKey(event.c.id)),
sa.Column('t_string', sa.String(255), nullable=True, default=None),
sa.Column('t_float', sa.Float(53), nullable=True, default=None),
sa.Column('t_int', sa.Integer, nullable=True, default=None),
sa.Column('t_datetime', models.PreciseTimestamp(), nullable=True,
default=None),
sa.Index('ix_trait_t_int', 't_int'),
sa.Index('ix_trait_t_string', 't_string'),
sa.Index('ix_trait_t_datetime', 't_datetime'),
sa.Index('ix_trait_t_float', 't_float'),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
trait.create()
for t_name, __, __, col_name, type_id in tables:
table = sa.Table(t_name, meta, autoload=True)
trait_type.insert().from_select([trait_type.c.desc,
trait_type.c.data_type],
sa.select([table.c.key,
type_id])
.distinct()).execute()
trait.insert().from_select([trait.c['event_id'],
trait.c['trait_type_id'],
trait.c[col_name]],
sa.select([table.c.event_id,
trait_type.c.id,
table.c.value])
.select_from(
table.join(
trait_type,
table.c.key == trait_type.c.desc))
).execute()
table.drop()

@ -19,10 +19,3 @@ def upgrade(migrate_engine):
event = sa.Table('event', meta, autoload=True)
raw = sa.Column('raw', sa.Text)
event.create_column(raw)
def downgrade(migrate_engine):
meta = sa.MetaData(bind=migrate_engine)
event = sa.Table('event', meta, autoload=True)
raw = sa.Column('raw', sa.Text)
event.drop_column(raw)