Adds migrations for the database

This commit is contained in:
sulochan acharya 2016-06-03 09:06:58 +01:00
parent aa4eea0e06
commit 292c4aa75d
9 changed files with 358 additions and 184 deletions

craton/cmd/dbsync.py (new file, 80 lines)

@@ -0,0 +1,80 @@
import sys

from oslo_config import cfg

from craton.inventory.db.sqlalchemy import migration

CONF = cfg.CONF


class DBCommand(object):

    def upgrade(self):
        migration.upgrade(CONF.command.revision)

    def revision(self):
        migration.revision(CONF.command.message, CONF.command.autogenerate)

    def stamp(self):
        migration.stamp(CONF.command.revision)

    def version(self):
        print(migration.version())

    def create_schema(self):
        migration.create_schema()


def add_command_parsers(subparsers):
    command_object = DBCommand()

    parser = subparsers.add_parser(
        'upgrade',
        help=("Upgrade the database schema to the latest version. "
              "Optionally, use --revision to specify an alembic revision "
              "string to upgrade to."))
    parser.set_defaults(func=command_object.upgrade)
    parser.add_argument('--revision', nargs='?')

    parser = subparsers.add_parser('stamp')
    parser.add_argument('--revision', nargs='?')
    parser.set_defaults(func=command_object.stamp)

    parser = subparsers.add_parser(
        'revision',
        help=("Create a new alembic revision. "
              "Use --message to set the message string."))
    parser.add_argument('-m', '--message')
    parser.add_argument('--autogenerate', action='store_true')
    parser.set_defaults(func=command_object.revision)

    parser = subparsers.add_parser(
        'version',
        help=("Print the current version information and exit."))
    parser.set_defaults(func=command_object.version)

    parser = subparsers.add_parser(
        'create_schema',
        help=("Create the database schema."))
    parser.set_defaults(func=command_object.create_schema)


command_opt = cfg.SubCommandOpt('command',
                                title='Command',
                                help=('Available commands'),
                                handler=add_command_parsers)

CONF.register_cli_opt(command_opt)


def main():
    # This is a hack to work with the previous usage of ironic-dbsync;
    # please change it to "craton-inventory-dbsync upgrade".
    valid_commands = set([
        'upgrade', 'revision',
        'version', 'stamp', 'create_schema',
    ])
    if not set(sys.argv) & valid_commands:
        sys.argv.append('upgrade')
    CONF.command.func()
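The dispatch in main() relies on oslo.config's SubCommandOpt: add_command_parsers receives the argparse sub-parsers, and whatever a sub-parser attaches via set_defaults()/add_argument() becomes available as attributes of CONF.command once the CLI options have been parsed (oslo.config requires CONF to be called, e.g. CONF(sys.argv[1:]) or a project service helper, before CONF.command can be read). A minimal standalone sketch of that pattern, not part of this commit (the 'greet' sub-command and --who flag are invented for illustration):

    from oslo_config import cfg

    CONF = cfg.ConfigOpts()  # private instance so the sketch stays self-contained


    def add_parsers(subparsers):
        # Defaults and arguments attached here surface as CONF.command.<attr>.
        parser = subparsers.add_parser('greet')
        parser.add_argument('--who', default='world')
        parser.set_defaults(func=lambda: print('hello', CONF.command.who))


    CONF.register_cli_opt(cfg.SubCommandOpt('command', handler=add_parsers))

    CONF(['greet', '--who', 'craton'])  # parse the CLI; args passed explicitly here
    CONF.command.func()                 # prints: hello craton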

@@ -2,7 +2,7 @@
 [alembic]
 # path to migration scripts
-script_location = alembic
+script_location = %(here)s/alembic
 # template used to generate migration files
 # file_template = %%(rev)s_%%(slug)s
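The %(here)s token is ConfigParser interpolation that alembic's Config seeds with the directory containing alembic.ini, so script_location now resolves relative to the ini file rather than the process's working directory. A small sketch of observing the expansion (the ini path below is an assumption, not taken from this commit):

    import os

    from alembic.config import Config

    # Assumed: this sketch sits next to the alembic.ini shipped with the package.
    ini_path = os.path.join(os.path.dirname(__file__), 'alembic.ini')
    cfg = Config(ini_path)

    # %(here)s has been expanded to the absolute directory holding alembic.ini.
    print(cfg.get_main_option('script_location'))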

@@ -1 +1,11 @@
Generic single-database configuration.

Please see https://alembic.readthedocs.org/en/latest/index.html for general documentation.

To create an alembic migration, use:
$ craton-inventory-dbsync --config-file=craton.conf revision --message "revision description" --autogenerate

To stamp the database with the most recent migration version, without actually running migrations:
$ craton-inventory-dbsync --config-file=craton.conf stamp head

An upgrade can be performed with either of:
$ craton-inventory-dbsync --config-file=craton.conf upgrade
$ craton-inventory-dbsync --config-file=craton.conf upgrade head

@@ -1,6 +1,5 @@
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
from craton.inventory.db.sqlalchemy import api as sa_api
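Only the import block of env.py is visible in this hunk; the new sa_api import suggests the online-migration path now takes its engine from the project's SQLAlchemy API instead of building one with engine_from_config. A hedged sketch of that common pattern, under the assumption that the api module exposes a get_engine() helper and that env.py defines the usual target_metadata variable (neither is shown in this commit):

    def run_migrations_online():
        # Reuse the application's engine rather than engine_from_config().
        engine = sa_api.get_engine()
        with engine.connect() as connection:
            context.configure(connection=connection,
                              target_metadata=target_metadata)
            with context.begin_transaction():
                context.run_migrations()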

@@ -1,180 +0,0 @@
"""craton_inventory_init

Revision ID: 002c69b5bab1
Revises:
Create Date: 2016-05-24 10:39:39.471049

"""

# revision identifiers, used by Alembic.
revision = '002c69b5bab1'
down_revision = None
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa


def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('access_secrets',
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('cert', sa.Text(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('labels',
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('label', sa.String(length=255), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('label')
    )
    op.create_table('projects',
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('label_variables',
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('parent_id', sa.Integer(), nullable=False),
    sa.Column('key', sa.String(length=255), nullable=False),
    sa.Column('value', sqlalchemy_utils.types.json.JSONType(), nullable=True),
    sa.ForeignKeyConstraint(['parent_id'], ['labels.id'], ),
    sa.PrimaryKeyConstraint('parent_id', 'key')
    )
    op.create_table('regions',
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('project_id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=True),
    sa.Column('note', sa.Text(), nullable=True),
    sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('project_id', 'name')
    )
    op.create_index(op.f('ix_regions_project_id'), 'regions', ['project_id'], unique=False)
    op.create_table('users',
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('project_id', sa.Integer(), nullable=False),
    sa.Column('username', sa.String(length=255), nullable=True),
    sa.Column('api_key', sa.String(length=36), nullable=True),
    sa.Column('is_admin', sa.Boolean(), nullable=True),
    sa.Column('roles', sqlalchemy_utils.types.json.JSONType(), nullable=True),
    sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('username', 'project_id', name='uq_user0username0project')
    )
    op.create_index(op.f('ix_users_project_id'), 'users', ['project_id'], unique=False)
    op.create_table('cells',
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('region_id', sa.Integer(), nullable=False),
    sa.Column('project_id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=True),
    sa.Column('note', sa.Text(), nullable=True),
    sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
    sa.ForeignKeyConstraint(['region_id'], ['regions.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('region_id', 'name')
    )
    op.create_index(op.f('ix_cells_project_id'), 'cells', ['project_id'], unique=False)
    op.create_index(op.f('ix_cells_region_id'), 'cells', ['region_id'], unique=False)
    op.create_table('region_variables',
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('parent_id', sa.Integer(), nullable=False),
    sa.Column('key', sa.String(length=255), nullable=False),
    sa.Column('value', sqlalchemy_utils.types.json.JSONType(), nullable=True),
    sa.ForeignKeyConstraint(['parent_id'], ['regions.id'], ),
    sa.PrimaryKeyConstraint('parent_id', 'key')
    )
    op.create_table('cell_variables',
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('parent_id', sa.Integer(), nullable=False),
    sa.Column('key', sa.String(length=255), nullable=False),
    sa.Column('value', sqlalchemy_utils.types.json.JSONType(), nullable=True),
    sa.ForeignKeyConstraint(['parent_id'], ['cells.id'], ),
    sa.PrimaryKeyConstraint('parent_id', 'key')
    )
    op.create_table('devices',
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('type', sa.String(length=50), nullable=True),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('region_id', sa.Integer(), nullable=False),
    sa.Column('cell_id', sa.Integer(), nullable=True),
    sa.Column('project_id', sa.Integer(), nullable=False),
    sa.Column('active', sa.Boolean(), nullable=True),
    sa.Column('note', sa.Text(), nullable=True),
    sa.ForeignKeyConstraint(['cell_id'], ['cells.id'], ),
    sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
    sa.ForeignKeyConstraint(['region_id'], ['regions.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('region_id', 'name')
    )
    op.create_index(op.f('ix_devices_cell_id'), 'devices', ['cell_id'], unique=False)
    op.create_index(op.f('ix_devices_project_id'), 'devices', ['project_id'], unique=False)
    op.create_index(op.f('ix_devices_region_id'), 'devices', ['region_id'], unique=False)
    op.create_table('device_labels',
    sa.Column('device_id', sa.Integer(), nullable=False),
    sa.Column('label_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['device_id'], ['devices.id'], ),
    sa.ForeignKeyConstraint(['label_id'], ['labels.id'], ),
    sa.PrimaryKeyConstraint('device_id', 'label_id')
    )
    op.create_table('device_variables',
    sa.Column('created_at', sa.DateTime(), nullable=True),
    sa.Column('updated_at', sa.DateTime(), nullable=True),
    sa.Column('parent_id', sa.Integer(), nullable=False),
    sa.Column('key', sa.String(length=255), nullable=False),
    sa.Column('value', sqlalchemy_utils.types.json.JSONType(), nullable=True),
    sa.ForeignKeyConstraint(['parent_id'], ['devices.id'], ),
    sa.PrimaryKeyConstraint('parent_id', 'key')
    )
    op.create_table('hosts',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('access_secret_id', sa.Integer(), nullable=True),
    sa.Column('ip_address', sqlalchemy_utils.types.ip_address.IPAddressType(length=50), nullable=False),
    sa.ForeignKeyConstraint(['access_secret_id'], ['access_secrets.id'], ),
    sa.ForeignKeyConstraint(['id'], ['devices.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###


def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('hosts')
    op.drop_table('device_variables')
    op.drop_table('device_labels')
    op.drop_index(op.f('ix_devices_region_id'), table_name='devices')
    op.drop_index(op.f('ix_devices_project_id'), table_name='devices')
    op.drop_index(op.f('ix_devices_cell_id'), table_name='devices')
    op.drop_table('devices')
    op.drop_table('cell_variables')
    op.drop_table('region_variables')
    op.drop_index(op.f('ix_cells_region_id'), table_name='cells')
    op.drop_index(op.f('ix_cells_project_id'), table_name='cells')
    op.drop_table('cells')
    op.drop_index(op.f('ix_users_project_id'), table_name='users')
    op.drop_table('users')
    op.drop_index(op.f('ix_regions_project_id'), table_name='regions')
    op.drop_table('regions')
    op.drop_table('label_variables')
    op.drop_table('projects')
    op.drop_table('labels')
    op.drop_table('access_secrets')
    ### end Alembic commands ###

@@ -0,0 +1,212 @@
"""craton_inventory_init

Revision ID: ffdc1a500db1
Revises:
Create Date: 2016-06-03 09:52:55.302936

"""

# revision identifiers, used by Alembic.
revision = 'ffdc1a500db1'
down_revision = None
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils

from craton.inventory.db.sqlalchemy import types


def upgrade():
    # commands auto generated by Alembic - please adjust!
    op.create_table(
        'access_secrets',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('cert', sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table(
        'labels',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('label', sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('label')
    )
    op.create_table(
        'projects',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table(
        'label_variables',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('parent_id', sa.Integer(), nullable=False),
        sa.Column('key', sa.String(length=255), nullable=False),
        sa.Column('value', sqlalchemy_utils.types.json.JSONType(),
                  nullable=True),
        sa.ForeignKeyConstraint(['parent_id'], ['labels.id'], ),
        sa.PrimaryKeyConstraint('parent_id', 'key')
    )
    op.create_table(
        'regions',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('project_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('note', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('project_id', 'name',
                            name='uq_region0projectid0name')
    )
    op.create_index(op.f('ix_regions_project_id'),
                    'regions', ['project_id'], unique=False)
    op.create_table(
        'users',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('project_id', sa.Integer(), nullable=False),
        sa.Column('username', sa.String(length=255), nullable=True),
        sa.Column('api_key', sa.String(length=36), nullable=True),
        sa.Column('is_admin', sa.Boolean(), nullable=True),
        sa.Column('roles', sqlalchemy_utils.types.json.JSONType(),
                  nullable=True),
        sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('username', 'project_id',
                            name='uq_user0username0project')
    )
    op.create_index(op.f('ix_users_project_id'), 'users', ['project_id'],
                    unique=False)
    op.create_table(
        'cells',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('region_id', sa.Integer(), nullable=False),
        sa.Column('project_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('note', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
        sa.ForeignKeyConstraint(['region_id'], ['regions.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('region_id', 'name', name='uq_cell0regionid0name')
    )
    op.create_index(op.f('ix_cells_project_id'), 'cells', ['project_id'],
                    unique=False)
    op.create_index(op.f('ix_cells_region_id'), 'cells', ['region_id'],
                    unique=False)
    op.create_table(
        'region_variables',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('parent_id', sa.Integer(), nullable=False),
        sa.Column('key', sa.String(length=255), nullable=False),
        sa.Column('value', sqlalchemy_utils.types.json.JSONType(),
                  nullable=True),
        sa.ForeignKeyConstraint(['parent_id'], ['regions.id'], ),
        sa.PrimaryKeyConstraint('parent_id', 'key')
    )
    op.create_table(
        'cell_variables',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('parent_id', sa.Integer(), nullable=False),
        sa.Column('key', sa.String(length=255), nullable=False),
        sa.Column('value', sqlalchemy_utils.types.json.JSONType(),
                  nullable=True),
        sa.ForeignKeyConstraint(['parent_id'], ['cells.id'], ),
        sa.PrimaryKeyConstraint('parent_id', 'key')
    )
    op.create_table(
        'devices',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('type', sa.String(length=50), nullable=True),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('region_id', sa.Integer(), nullable=False),
        sa.Column('cell_id', sa.Integer(), nullable=True),
        sa.Column('project_id', sa.Integer(), nullable=False),
        sa.Column('ip_address', types.IPAddressType(length=64),
                  nullable=False),
        sa.Column('active', sa.Boolean(), nullable=True),
        sa.Column('note', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['cell_id'], ['cells.id'], ),
        sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
        sa.ForeignKeyConstraint(['region_id'], ['regions.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('region_id', 'name',
                            name='uq_device0regionid0name')
    )
    op.create_index(op.f('ix_devices_cell_id'), 'devices', ['cell_id'],
                    unique=False)
    op.create_index(op.f('ix_devices_project_id'), 'devices', ['project_id'],
                    unique=False)
    op.create_index(op.f('ix_devices_region_id'), 'devices', ['region_id'],
                    unique=False)
    op.create_table(
        'device_labels',
        sa.Column('device_id', sa.Integer(), nullable=False),
        sa.Column('label_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['device_id'], ['devices.id'], ),
        sa.ForeignKeyConstraint(['label_id'], ['labels.id'], ),
        sa.PrimaryKeyConstraint('device_id', 'label_id')
    )
    op.create_table(
        'device_variables',
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('parent_id', sa.Integer(), nullable=False),
        sa.Column('key', sa.String(length=255), nullable=False),
        sa.Column('value', sqlalchemy_utils.types.json.JSONType(),
                  nullable=True),
        sa.ForeignKeyConstraint(['parent_id'], ['devices.id'], ),
        sa.PrimaryKeyConstraint('parent_id', 'key')
    )
    op.create_table(
        'hosts',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('access_secret_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['access_secret_id'], ['access_secrets.id'], ),
        sa.ForeignKeyConstraint(['id'], ['devices.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # end Alembic commands


def downgrade():
    # commands auto generated by Alembic - please adjust!
    op.drop_table('hosts')
    op.drop_table('device_variables')
    op.drop_table('device_labels')
    op.drop_index(op.f('ix_devices_region_id'), table_name='devices')
    op.drop_index(op.f('ix_devices_project_id'), table_name='devices')
    op.drop_index(op.f('ix_devices_cell_id'), table_name='devices')
    op.drop_table('devices')
    op.drop_table('cell_variables')
    op.drop_table('region_variables')
    op.drop_index(op.f('ix_cells_region_id'), table_name='cells')
    op.drop_index(op.f('ix_cells_project_id'), table_name='cells')
    op.drop_table('cells')
    op.drop_index(op.f('ix_users_project_id'), table_name='users')
    op.drop_table('users')
    op.drop_index(op.f('ix_regions_project_id'), table_name='regions')
    op.drop_table('regions')
    op.drop_table('label_variables')
    op.drop_table('projects')
    op.drop_table('labels')
    op.drop_table('access_secrets')
    # end Alembic commands

@@ -0,0 +1,52 @@
import os

import alembic
from alembic import config as alembic_config
import alembic.migration as alembic_migration
from oslo_db.sqlalchemy import enginefacade


def _alembic_config():
    path = os.path.join(os.path.dirname(__file__), 'alembic.ini')
    config = alembic_config.Config(path)
    return config


def version(config=None, engine=None):
    """Return the current database version."""
    if engine is None:
        engine = enginefacade.get_legacy_facade().get_engine()
    with engine.connect() as conn:
        context = alembic_migration.MigrationContext.configure(conn)
        return context.get_current_revision()


def upgrade(revision, config=None):
    """Upgrade the database to the given revision.

    :param revision: Desired database revision; defaults to 'head'.
    """
    revision = revision or 'head'
    config = config or _alembic_config()
    alembic.command.upgrade(config, revision)


def stamp(revision, config=None):
    """Stamp the database with the provided revision; run no migrations.

    :param revision: An existing revision from the repository, or 'head' to
                     stamp the database with the most recent revision.
    """
    config = config or _alembic_config()
    return alembic.command.stamp(config, revision=revision)


def revision(message=None, autogenerate=False, config=None):
    """Create a template for a new migration.

    :param message: Text used for the migration title.
    :param autogenerate: If True, generate a diff based on the current
                         database state.
    """
    config = config or _alembic_config()
    return alembic.command.revision(config, message=message,
                                    autogenerate=autogenerate)
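Note that dbsync.py's create_schema sub-command calls migration.create_schema(), which does not appear in the hunk above. A minimal hedged sketch of what such a helper could look like, assuming the declarative Base is exposed by craton.inventory.db.sqlalchemy.models (a module name not shown in this commit):

    # Hypothetical sketch only; not part of this commit.
    from oslo_db.sqlalchemy import enginefacade

    from craton.inventory.db.sqlalchemy import models  # assumed module path


    def create_schema(engine=None):
        """Create all tables directly from the models, bypassing migrations."""
        if engine is None:
            engine = enginefacade.get_legacy_facade().get_engine()
        models.Base.metadata.create_all(engine)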

@@ -26,6 +26,7 @@ packages =
 [entry_points]
 console_scripts =
     craton-worker = craton.cmd.worker:main
+    craton-inventory-dbsync = craton.cmd.dbsync:main
 craton.workflow =
     testflow = craton.workflow.testflow:TestFlow

@@ -58,6 +58,6 @@ commands = oslo_debug_helper {posargs}
 # E123, E125 skipped as they are invalid PEP-8.
 show-source = True
-ignore = E123,E125
+ignore = E123,E125,E402
 builtins = _
 exclude=.venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,build