Initial support of database migrations

syncdb command now uses alembic upgrade head

Added initial migration for Fuel 4.0

Implements: blueprint nailgun-db-migration

Change-Id: Ia340fa64b15e40fc2bb87125f746f5bf1b0530f8
This commit is contained in:
Nikolay Markov 2014-01-16 19:12:38 +04:00
parent 3a2c7bc6c6
commit 5249e3fcfc
10 changed files with 1007 additions and 17 deletions

View File

@ -87,6 +87,37 @@ def load_db_parsers(subparsers):
)
def load_alembic_parsers(migrate_parser):
    """Attach the alembic sub-commands to the ``migrate`` parser.

    The selected sub-command is stored on the parsed namespace under
    ``alembic_command``.
    """
    commands = migrate_parser.add_subparsers(
        dest="alembic_command",
        help='alembic command'
    )
    # Read-only inspection commands take no extra arguments.
    for read_only in ('current', 'history', 'branches'):
        commands.add_parser(read_only)
    # upgrade/downgrade accept either an explicit revision or a
    # relative --delta, optionally emitting SQL instead of executing.
    for direction in ('upgrade', 'downgrade'):
        move_parser = commands.add_parser(direction)
        move_parser.add_argument('--delta', type=int)
        move_parser.add_argument('--sql', action='store_true')
        move_parser.add_argument('revision', nargs='?')
    stamp_parser = commands.add_parser('stamp')
    stamp_parser.add_argument('--sql', action='store_true')
    stamp_parser.add_argument('revision')
    revision_parser = commands.add_parser('revision')
    revision_parser.add_argument('-m', '--message')
    revision_parser.add_argument('--autogenerate', action='store_true')
    revision_parser.add_argument('--sql', action='store_true')
def load_db_migrate_parsers(subparsers):
    """Register the top-level ``migrate`` command and its alembic verbs."""
    db_migrate = subparsers.add_parser(
        'migrate', help='dealing with DB migration'
    )
    load_alembic_parsers(db_migrate)
def load_test_parsers(subparsers):
subparsers.add_parser(
'test', help='run unit tests'
@ -138,11 +169,11 @@ def action_loaddefault(params):
def action_syncdb(params):
    """CLI handler: bring the database schema up to date.

    :param params: parsed command-line arguments (unused here).
    """
    from nailgun.db import syncdb
    from nailgun.db.migration import do_upgrade_head
    from nailgun.logger import logger
    logger.info("Syncing database...")
    # NOTE(review): both a metadata-based sync and an alembic
    # "upgrade head" are invoked here; presumably one supersedes the
    # other after the migration switch -- confirm against the intended
    # syncdb workflow.
    syncdb()
    do_upgrade_head()
    logger.info("Done")
@ -155,6 +186,11 @@ def action_dropdb(params):
logger.info("Done")
def action_migrate(params):
    """CLI handler: forward the parsed ``migrate`` arguments to alembic."""
    # Imported lazily so that loading this module does not pull in the
    # database layer.
    from nailgun.db import migration
    migration.action_migrate_alembic(params)
def action_test(params):
from nailgun.logger import logger
from nailgun.unit_test import TestRunner
@ -166,7 +202,6 @@ def action_test(params):
def action_dump_settings(params):
    """CLI handler: write the current nailgun settings dump to stdout."""
    # Lazy import keeps settings machinery out of module import time.
    from nailgun.settings import settings
    dumped = settings.dump()
    sys.stdout.write(dumped)
@ -209,6 +244,7 @@ if __name__ == "__main__":
load_run_parsers(subparsers)
load_db_parsers(subparsers)
load_db_migrate_parsers(subparsers)
load_test_parsers(subparsers)
load_shell_parsers(subparsers)
load_settings_parsers(subparsers)

View File

@ -0,0 +1,95 @@
# -*- coding: utf-8 -*-
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from alembic import command as alembic_command
from alembic import config as alembic_config
from alembic import util as alembic_util
from nailgun.db.sqlalchemy import db_str
ALEMBIC_CONFIG = alembic_config.Config(
os.path.join(os.path.dirname(__file__), 'alembic.ini')
)
ALEMBIC_CONFIG.set_main_option(
'script_location',
'nailgun.db.migration:alembic_migrations'
)
ALEMBIC_CONFIG.set_main_option(
'sqlalchemy.url',
db_str
)
def do_alembic_command(cmd, *args, **kwargs):
    """Invoke an alembic command by name against ALEMBIC_CONFIG.

    Alembic's own CommandError is converted into an error message and
    process exit via ``alembic_util.err``.
    """
    command_fn = getattr(alembic_command, cmd)
    try:
        command_fn(ALEMBIC_CONFIG, *args, **kwargs)
    except alembic_util.CommandError as exc:
        alembic_util.err(str(exc))
def do_stamp(cmd):
    """Run ``alembic stamp`` with revision/--sql taken from the CLI args."""
    cli = ALEMBIC_CONFIG.params
    do_alembic_command(cmd, cli.revision, sql=cli.sql)
def do_revision(cmd):
    """Run ``alembic revision`` with message/autogenerate/sql from the CLI."""
    cli = ALEMBIC_CONFIG.params
    do_alembic_command(
        cmd,
        message=cli.message,
        autogenerate=cli.autogenerate,
        sql=cli.sql
    )
def do_upgrade_downgrade(cmd):
    """Run ``alembic upgrade`` or ``alembic downgrade``.

    :param cmd: the sub-command name, 'upgrade' or 'downgrade'.
    :raises SystemExit: when neither a revision nor a --delta was given.
    """
    params = ALEMBIC_CONFIG.params
    if not params.revision and not params.delta:
        raise SystemExit('You must provide a revision or relative delta')
    if params.delta:
        # Relative move: "+N" for upgrade, "-N" for downgrade.  The
        # direction must come from ``cmd`` -- argparse stores the
        # sub-command under ``alembic_command`` (see
        # load_alembic_parsers), so the previous ``params.name`` lookup
        # raised AttributeError whenever --delta was used.
        sign = '+' if cmd == 'upgrade' else '-'
        revision = sign + str(params.delta)
    else:
        revision = params.revision
    do_alembic_command(cmd, revision, sql=params.sql)
def do_upgrade_head():
    """Migrate the schema to the newest revision (``alembic upgrade head``)."""
    do_alembic_command('upgrade', 'head')
def action_migrate_alembic(params):
    """Dispatch a parsed ``migrate`` CLI invocation to its alembic handler.

    :param params: argparse namespace produced by load_alembic_parsers;
        ``params.alembic_command`` selects the handler.
    """
    # Stash the parsed CLI arguments on the config object so the
    # per-command handlers can reach them (alembic's command API only
    # passes the config through).  No ``global`` statement is needed:
    # we mutate an attribute, we never rebind the module-level name.
    ALEMBIC_CONFIG.params = params
    actions = {
        'current': do_alembic_command,
        'history': do_alembic_command,
        'branches': do_alembic_command,
        'upgrade': do_upgrade_downgrade,
        'downgrade': do_upgrade_downgrade,
        'stamp': do_stamp,
        'revision': do_revision
    }
    actions[params.alembic_command](params.alembic_command)

View File

@ -0,0 +1,54 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = alembic_migrations
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# max length of characters to apply to the
# "slug" field
#truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
sqlalchemy.url = driver://user:pass@localhost/dbname
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@ -0,0 +1 @@
Generic single-database configuration.

View File

@ -0,0 +1,73 @@
from __future__ import with_statement
from logging.config import fileConfig
from alembic import context
from nailgun.db import engine
from nailgun.db.sqlalchemy.models.base import Base
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Only the database URL is handed to the alembic context -- no Engine
    or DBAPI is required.  Calls to context.execute() emit the given
    string to the script output instead of executing it.
    """
    context.configure(url=engine.url)
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    A real Engine connection is created and associated with the
    alembic context, so DDL executes against the live database.
    """
    conn = engine.connect()
    context.configure(
        connection=conn,
        target_metadata=target_metadata
    )
    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # Always return the connection, even if a migration fails.
        conn.close()
# env.py is executed by alembic itself; dispatch on the mode alembic
# was invoked in (--sql produces offline mode).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@ -0,0 +1,22 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision}
Create Date: ${create_date}
"""
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

View File

@ -0,0 +1,137 @@
"""Changes before merge
Revision ID: 4f21f21e2672
Revises: 3540e7a3ba1e
Create Date: 2014-02-12 18:16:55.630914
"""
# revision identifiers, used by Alembic.
revision = '4f21f21e2672'
down_revision = '3540e7a3ba1e'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from nailgun.db.sqlalchemy.models.fields import JSON
def upgrade():
    """Apply the pre-merge cleanup: drop obsolete tables, relax
    network_groups.netmask, and add network_groups.meta."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('global_parameters')
    # netmask becomes optional from this revision on.
    op.alter_column(
        'network_groups',
        'netmask',
        existing_type=sa.VARCHAR(length=25),
        nullable=True
    )
    op.drop_table('plugins')
    op.drop_table('allowed_networks')
    op.add_column(
        'network_groups',
        sa.Column('meta', JSON(), nullable=True)
    )
    ### end Alembic commands ###
def downgrade():
    """Reverse the upgrade: re-create the dropped tables and restore
    the NOT NULL constraint on network_groups.netmask.

    NOTE(review): the server_default values reference Postgres
    sequences ('..._id_seq'::regclass), so this downgrade assumes a
    PostgreSQL backend -- confirm before running elsewhere.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        'network_groups',
        'netmask',
        existing_type=sa.VARCHAR(length=25),
        nullable=False
    )
    op.create_table(
        'global_parameters',
        sa.Column(
            'id',
            sa.INTEGER(),
            server_default="nextval('global_parameters_id_seq'::regclass)",
            nullable=False
        ),
        sa.Column(
            'parameters',
            sa.TEXT(),
            autoincrement=False,
            nullable=True
        ),
        sa.PrimaryKeyConstraint(
            'id',
            name=u'global_parameters_pkey'
        )
    )
    op.drop_column('network_groups', 'meta')
    # allowed_networks links NICs to network groups (both FKs CASCADE).
    op.create_table(
        'allowed_networks',
        sa.Column(
            'id',
            sa.INTEGER(),
            server_default="nextval('allowed_networks_id_seq'::regclass)",
            nullable=False
        ),
        sa.Column(
            'network_id',
            sa.INTEGER(),
            autoincrement=False,
            nullable=False
        ),
        sa.Column(
            'interface_id',
            sa.INTEGER(),
            autoincrement=False,
            nullable=False
        ),
        sa.ForeignKeyConstraint(
            ['interface_id'],
            [u'node_nic_interfaces.id'],
            name=u'allowed_networks_interface_id_fkey',
            ondelete=u'CASCADE'
        ),
        sa.ForeignKeyConstraint(
            ['network_id'],
            [u'network_groups.id'],
            name=u'allowed_networks_network_id_fkey',
            ondelete=u'CASCADE'
        ),
        sa.PrimaryKeyConstraint(
            'id',
            name=u'allowed_networks_pkey'
        )
    )
    op.create_table(
        'plugins',
        sa.Column(
            'id',
            sa.INTEGER(),
            server_default="nextval('plugins_id_seq'::regclass)",
            nullable=False
        ),
        sa.Column(
            'type',
            postgresql.ENUM(u'nailgun', u'fuel', name='plugin_type'),
            autoincrement=False,
            nullable=False
        ),
        sa.Column(
            'name',
            sa.VARCHAR(length=128),
            autoincrement=False,
            nullable=False
        ),
        sa.Column(
            'state',
            sa.VARCHAR(length=128),
            autoincrement=False,
            nullable=False
        ),
        sa.Column(
            'version',
            sa.VARCHAR(length=128),
            autoincrement=False,
            nullable=False
        ),
        sa.PrimaryKeyConstraint('id', name=u'plugins_pkey')
    )
    ### end Alembic commands ###

View File

@ -0,0 +1,569 @@
"""Initial revision for 4.0
Revision ID: 3540e7a3ba1e
Revises: None
Create Date: 2014-01-22 17:29:27.717938
"""
# revision identifiers, used by Alembic.
revision = '3540e7a3ba1e'
down_revision = None
from alembic import op
import sqlalchemy as sa
from nailgun.db.sqlalchemy.models.fields import JSON
from nailgun.db.sqlalchemy.models.fields import LowercaseString
def upgrade():
    """Create the initial Fuel 4.0 database schema.

    Tables are created parents-first so the foreign keys declared
    below resolve (releases/clusters/nodes before their dependents).
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'plugins',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column(
            'type',
            sa.Enum('nailgun', 'fuel', name='plugin_type'),
            nullable=False
        ),
        sa.Column('name', sa.String(length=128), nullable=False),
        sa.Column('state', sa.String(length=128), nullable=False),
        sa.Column('version', sa.String(length=128), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    op.create_table(
        'global_parameters',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('parameters', JSON(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table(
        'red_hat_accounts',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('username', sa.String(length=100), nullable=False),
        sa.Column('password', sa.String(length=100), nullable=False),
        sa.Column(
            'license_type',
            sa.Enum('rhsm', 'rhn', name='license_type'),
            nullable=False
        ),
        sa.Column('satellite', sa.String(length=250), nullable=True),
        sa.Column(
            'activation_key',
            sa.String(length=300),
            nullable=True
        ),
        sa.PrimaryKeyConstraint('id')
    )
    # Parent of roles, clusters and network_groups.
    op.create_table(
        'releases',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.Unicode(length=100), nullable=False),
        sa.Column('version', sa.String(length=30), nullable=False),
        sa.Column('description', sa.Unicode(), nullable=True),
        sa.Column(
            'operating_system',
            sa.String(length=50),
            nullable=False
        ),
        sa.Column(
            'state',
            sa.Enum(
                'not_available',
                'downloading',
                'error',
                'available',
                name='release_state'
            ),
            nullable=False
        ),
        sa.Column('networks_metadata', JSON(), nullable=True),
        sa.Column('attributes_metadata', JSON(), nullable=True),
        sa.Column('volumes_metadata', JSON(), nullable=True),
        sa.Column('modes_metadata', JSON(), nullable=True),
        sa.Column('roles_metadata', JSON(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name', 'version')
    )
    op.create_table(
        'capacity_log',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('report', JSON(), nullable=True),
        sa.Column('datetime', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table(
        'roles',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('release_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=50), nullable=False),
        sa.ForeignKeyConstraint(
            ['release_id'],
            ['releases.id'],
            ondelete='CASCADE'
        ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name', 'release_id')
    )
    # Parent of nodes, tasks, network_groups, attributes, etc.
    op.create_table(
        'clusters',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column(
            'mode',
            sa.Enum(
                'multinode',
                'ha_full',
                'ha_compact',
                name='cluster_mode'
            ),
            nullable=False
        ),
        sa.Column(
            'status',
            sa.Enum(
                'new',
                'deployment',
                'operational',
                'error',
                'remove',
                name='cluster_status'
            ),
            nullable=False
        ),
        sa.Column(
            'net_provider',
            sa.Enum(
                'nova_network',
                'neutron',
                name='net_provider'
            ),
            nullable=False
        ),
        sa.Column(
            'net_l23_provider',
            sa.Enum(
                'ovs',
                name='net_l23_provider'
            ),
            nullable=False
        ),
        sa.Column(
            'net_segment_type',
            sa.Enum(
                'none',
                'vlan',
                'gre',
                name='net_segment_type'
            ),
            nullable=False
        ),
        sa.Column(
            'net_manager',
            sa.Enum(
                'FlatDHCPManager',
                'VlanManager',
                name='cluster_net_manager'
            ),
            nullable=False
        ),
        sa.Column(
            'grouping',
            sa.Enum(
                'roles',
                'hardware',
                'both',
                name='cluster_grouping'
            ),
            nullable=False
        ),
        sa.Column('name', sa.Unicode(length=50), nullable=False),
        sa.Column('release_id', sa.Integer(), nullable=False),
        sa.Column('dns_nameservers', JSON(), nullable=True),
        sa.Column('replaced_deployment_info', JSON(), nullable=True),
        sa.Column('replaced_provisioning_info', JSON(), nullable=True),
        sa.Column('is_customized', sa.Boolean(), nullable=True),
        sa.ForeignKeyConstraint(['release_id'], ['releases.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    op.create_table(
        'network_groups',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column(
            'name',
            sa.Enum(
                'fuelweb_admin',
                'storage',
                'management',
                'public',
                'floating',
                'fixed',
                'private',
                name='network_group_name'
            ),
            nullable=False
        ),
        sa.Column('release', sa.Integer(), nullable=True),
        sa.Column('cluster_id', sa.Integer(), nullable=True),
        sa.Column('network_size', sa.Integer(), nullable=True),
        sa.Column('amount', sa.Integer(), nullable=True),
        sa.Column('vlan_start', sa.Integer(), nullable=True),
        sa.Column('cidr', sa.String(length=25), nullable=True),
        sa.Column('gateway', sa.String(length=25), nullable=True),
        sa.Column('netmask', sa.String(length=25), nullable=False),
        sa.ForeignKeyConstraint(['cluster_id'], ['clusters.id'], ),
        sa.ForeignKeyConstraint(['release'], ['releases.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table(
        'nodes',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('cluster_id', sa.Integer(), nullable=True),
        sa.Column('name', sa.Unicode(length=100), nullable=True),
        sa.Column(
            'status',
            sa.Enum(
                'ready',
                'discover',
                'provisioning',
                'provisioned',
                'deploying',
                'error',
                name='node_status'
            ),
            nullable=False
        ),
        sa.Column('meta', JSON(), nullable=True),
        sa.Column('mac', LowercaseString(length=17), nullable=False),
        sa.Column('ip', sa.String(length=15), nullable=True),
        sa.Column('fqdn', sa.String(length=255), nullable=True),
        sa.Column('manufacturer', sa.Unicode(length=50), nullable=True),
        sa.Column('platform_name', sa.String(length=150), nullable=True),
        sa.Column('progress', sa.Integer(), nullable=True),
        sa.Column('os_platform', sa.String(length=150), nullable=True),
        sa.Column('pending_addition', sa.Boolean(), nullable=True),
        sa.Column('pending_deletion', sa.Boolean(), nullable=True),
        sa.Column(
            'error_type',
            sa.Enum(
                'deploy',
                'provision',
                'deletion',
                name='node_error_type'
            ),
            nullable=True
        ),
        sa.Column('error_msg', sa.String(length=255), nullable=True),
        sa.Column('timestamp', sa.DateTime(), nullable=False),
        sa.Column('online', sa.Boolean(), nullable=True),
        sa.ForeignKeyConstraint(['cluster_id'], ['clusters.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('mac')
    )
    # tasks is self-referential via parent_id.
    op.create_table(
        'tasks',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('cluster_id', sa.Integer(), nullable=True),
        sa.Column('uuid', sa.String(length=36), nullable=False),
        sa.Column(
            'name',
            sa.Enum(
                'super',
                'deploy',
                'deployment',
                'provision',
                'node_deletion',
                'cluster_deletion',
                'check_before_deployment',
                'check_networks',
                'verify_networks',
                'check_dhcp',
                'verify_network_connectivity',
                'install_plugin',
                'update_plugin',
                'delete_plugin',
                'redhat_setup',
                'redhat_check_credentials',
                'redhat_check_licenses',
                'redhat_download_release',
                'redhat_update_cobbler_profile',
                'dump',
                'capacity_log',
                name='task_name'
            ),
            nullable=False
        ),
        sa.Column('message', sa.Text(), nullable=True),
        sa.Column(
            'status',
            sa.Enum(
                'ready',
                'running',
                'error',
                name='task_status'
            ),
            nullable=False
        ),
        sa.Column('progress', sa.Integer(), nullable=True),
        sa.Column('cache', JSON(), nullable=True),
        sa.Column('result', JSON(), nullable=True),
        sa.Column('parent_id', sa.Integer(), nullable=True),
        sa.Column('weight', sa.Float(), nullable=True),
        sa.ForeignKeyConstraint(['cluster_id'], ['clusters.id'], ),
        sa.ForeignKeyConstraint(['parent_id'], ['tasks.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table(
        'neutron_configs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('cluster_id', sa.Integer(), nullable=True),
        sa.Column('parameters', JSON(), nullable=True),
        sa.Column('L2', JSON(), nullable=True),
        sa.Column('L3', JSON(), nullable=True),
        sa.Column('predefined_networks', JSON(), nullable=True),
        sa.Column(
            'segmentation_type',
            sa.Enum(
                'vlan',
                'gre',
                name='segmentation_type'
            ),
            nullable=False
        ),
        sa.ForeignKeyConstraint(
            ['cluster_id'],
            ['clusters.id'],
            ondelete='CASCADE'
        ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table(
        'attributes',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('cluster_id', sa.Integer(), nullable=True),
        sa.Column('editable', JSON(), nullable=True),
        sa.Column('generated', JSON(), nullable=True),
        sa.ForeignKeyConstraint(['cluster_id'], ['clusters.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table(
        'node_nic_interfaces',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('node_id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=128), nullable=False),
        sa.Column('mac', LowercaseString(length=17), nullable=False),
        sa.Column('max_speed', sa.Integer(), nullable=True),
        sa.Column('current_speed', sa.Integer(), nullable=True),
        sa.Column('ip_addr', sa.String(length=25), nullable=True),
        sa.Column('netmask', sa.String(length=25), nullable=True),
        sa.Column('state', sa.String(length=25), nullable=True),
        sa.ForeignKeyConstraint(
            ['node_id'],
            ['nodes.id'],
            ondelete='CASCADE'
        ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table(
        'pending_node_roles',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('role', sa.Integer(), nullable=True),
        sa.Column('node', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['node'], ['nodes.id'], ),
        sa.ForeignKeyConstraint(
            ['role'],
            ['roles.id'],
            ondelete='CASCADE'
        ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table(
        'ip_addrs',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('network', sa.Integer(), nullable=True),
        sa.Column('node', sa.Integer(), nullable=True),
        sa.Column('ip_addr', sa.String(length=25), nullable=False),
        sa.ForeignKeyConstraint(
            ['network'],
            ['network_groups.id'],
            ondelete='CASCADE'
        ),
        sa.ForeignKeyConstraint(
            ['node'],
            ['nodes.id'],
            ondelete='CASCADE'
        ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table(
        'node_roles',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('role', sa.Integer(), nullable=True),
        sa.Column('node', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['node'], ['nodes.id'], ),
        sa.ForeignKeyConstraint(
            ['role'],
            ['roles.id'],
            ondelete='CASCADE'
        ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table(
        'node_attributes',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('node_id', sa.Integer(), nullable=True),
        sa.Column('volumes', JSON(), nullable=True),
        sa.Column('interfaces', JSON(), nullable=True),
        sa.ForeignKeyConstraint(
            ['node_id'],
            ['nodes.id'],
        ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table(
        'ip_addr_ranges',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('network_group_id', sa.Integer(), nullable=True),
        sa.Column('first', sa.String(length=25), nullable=False),
        sa.Column('last', sa.String(length=25), nullable=False),
        sa.ForeignKeyConstraint(
            ['network_group_id'],
            ['network_groups.id'],
        ),
        sa.PrimaryKeyConstraint('id')
    )
    # Notifications keep their row when the referent disappears
    # (ondelete='SET NULL' on all three FKs).
    op.create_table(
        'notifications',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('cluster_id', sa.Integer(), nullable=True),
        sa.Column('node_id', sa.Integer(), nullable=True),
        sa.Column('task_id', sa.Integer(), nullable=True),
        sa.Column(
            'topic',
            sa.Enum(
                'discover',
                'done',
                'error',
                'warning',
                name='notif_topic'
            ),
            nullable=False
        ),
        sa.Column('message', sa.Text(), nullable=True),
        sa.Column(
            'status',
            sa.Enum(
                'read',
                'unread',
                name='notif_status'
            ),
            nullable=False
        ),
        sa.Column('datetime', sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ['cluster_id'],
            ['clusters.id'],
            ondelete='SET NULL'
        ),
        sa.ForeignKeyConstraint(
            ['node_id'],
            ['nodes.id'],
            ondelete='SET NULL'
        ),
        sa.ForeignKeyConstraint(
            ['task_id'],
            ['tasks.id'],
            ondelete='SET NULL'
        ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table(
        'cluster_changes',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('cluster_id', sa.Integer(), nullable=True),
        sa.Column('node_id', sa.Integer(), nullable=True),
        sa.Column(
            'name',
            sa.Enum(
                'networks',
                'attributes',
                'disks',
                name='possible_changes'
            ),
            nullable=False
        ),
        sa.ForeignKeyConstraint(
            ['cluster_id'],
            ['clusters.id'],
        ),
        sa.ForeignKeyConstraint(
            ['node_id'],
            ['nodes.id'],
            ondelete='CASCADE'
        ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table(
        'net_assignments',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('network_id', sa.Integer(), nullable=False),
        sa.Column('interface_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['interface_id'],
            ['node_nic_interfaces.id'],
            ondelete='CASCADE'
        ),
        sa.ForeignKeyConstraint(
            ['network_id'],
            ['network_groups.id'],
            ondelete='CASCADE'
        ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table(
        'allowed_networks',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('network_id', sa.Integer(), nullable=False),
        sa.Column('interface_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['interface_id'],
            ['node_nic_interfaces.id'],
            ondelete='CASCADE'
        ),
        sa.ForeignKeyConstraint(
            ['network_id'],
            ['network_groups.id'],
            ondelete='CASCADE'
        ),
        sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###
def downgrade():
    """Drop the entire initial schema.

    Tables are dropped children-first (reverse of upgrade) so no
    foreign-key reference is left dangling mid-way.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('allowed_networks')
    op.drop_table('net_assignments')
    op.drop_table('cluster_changes')
    op.drop_table('notifications')
    op.drop_table('ip_addr_ranges')
    op.drop_table('node_attributes')
    op.drop_table('node_roles')
    op.drop_table('ip_addrs')
    op.drop_table('pending_node_roles')
    op.drop_table('node_nic_interfaces')
    op.drop_table('attributes')
    op.drop_table('neutron_configs')
    op.drop_table('tasks')
    op.drop_table('nodes')
    op.drop_table('network_groups')
    op.drop_table('clusters')
    op.drop_table('roles')
    op.drop_table('capacity_log')
    op.drop_table('releases')
    op.drop_table('red_hat_accounts')
    op.drop_table('global_parameters')
    op.drop_table('plugins')
    ### end Alembic commands ###

View File

@ -2,13 +2,13 @@
base=nailgun
module=db
module=db.sqlalchemy
module=excutils
module=fileutils
module=gettextutils
module=importutils
module=lockutils
module=fileutils
module=excutils
module=log
module=jsonutils
module=timeutils
module=local
module=lockutils
module=log
module=test
module=timeutils

View File

@ -1,23 +1,26 @@
Babel==1.3
Jinja2==2.7
Paste==1.7.5.1
PyYAML==3.10
SQLAlchemy==0.7.8
alembic==0.6.2
amqplib==1.0.2
anyjson==0.3.1
argparse==1.2.1
Babel==1.3
decorator==3.4.0
fysom==1.0.11
iso8601==0.1.8
Jinja2==2.7
jsonschema==2.0.0
kombu==2.1.8
Mako==0.9.1
MarkupSafe==0.18
netaddr==0.7.10
netifaces==0.8
oslo.config==1.2.1
pycrypto==2.6
Paste==1.7.5.1
psycopg2==2.4.6
pycrypto==2.6
PyYAML==3.10
Shotgun==0.1.0
simplejson==2.6.2
SQLAlchemy==0.7.8
web.py==0.37
wsgilog==0.3
wsgiref==0.1.2
fysom==1.0.11
jsonschema==2.0.0
Shotgun==0.1.0