US573: Use alembic instead of sqlalchemy migrate

Change-Id: I547e618f1095d3c1febcd20f89129c8601db479f
Romain Ziba 2015-06-25 17:25:07 +02:00
parent 24a211e711
commit df2faae0e1
38 changed files with 1139 additions and 319 deletions

cerberus/cmd/dbsync.py (new file)

@ -0,0 +1,110 @@
# -*- encoding: utf-8 -*-
#
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Run storage database migration.
"""
import sys
from oslo.config import cfg
from cerberus.db import migration
from cerberus import service
CONF = cfg.CONF
class DBCommand(object):
def upgrade(self):
migration.upgrade(CONF.command.revision)
def downgrade(self):
migration.downgrade(CONF.command.revision)
def revision(self):
migration.revision(CONF.command.message, CONF.command.autogenerate)
def stamp(self):
migration.stamp(CONF.command.revision)
def version(self):
print(migration.version())
def create_schema(self):
migration.create_schema()
def add_command_parsers(subparsers):
command_object = DBCommand()
parser = subparsers.add_parser('upgrade',
help="Upgrade the database schema to the latest version. "
"Optionally, use --revision to specify an alembic revision "
"string to upgrade to.")
parser.set_defaults(func=command_object.upgrade)
parser.add_argument('--revision', nargs='?')
parser = subparsers.add_parser('downgrade',
help="Downgrade the database schema to the oldest revision. "
"While optional, one should generally use --revision to "
"specify the alembic revision string to downgrade to.")
parser.set_defaults(func=command_object.downgrade)
parser.add_argument('--revision', nargs='?')
parser = subparsers.add_parser('stamp')
parser.add_argument('--revision', nargs='?')
parser.set_defaults(func=command_object.stamp)
parser = subparsers.add_parser('revision',
help="Create a new alembic revision. "
"Use --message to set the message string.")
parser.add_argument('-m', '--message')
parser.add_argument('--autogenerate', action='store_true')
parser.set_defaults(func=command_object.revision)
parser = subparsers.add_parser('version',
help="Print the current version information and exit.")
parser.set_defaults(func=command_object.version)
parser = subparsers.add_parser('create_schema',
help="Create the database schema.")
parser.set_defaults(func=command_object.create_schema)
command_opt = cfg.SubCommandOpt('command',
title='Command',
help='Available commands',
handler=add_command_parsers)
CONF.register_cli_opt(command_opt)
def main():
    # This is a hack to keep the previous bare `cerberus-dbsync` usage working;
    # please switch to `cerberus-dbsync upgrade` explicitly.
valid_commands = set([
'upgrade', 'downgrade', 'revision',
'version', 'stamp', 'create_schema',
])
if not set(sys.argv) & valid_commands:
sys.argv.append('upgrade')
service.prepare_service(sys.argv)
CONF.command.func()
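For reference, a minimal standalone sketch (hypothetical module, mirroring the pattern above) of how oslo.config's SubCommandOpt dispatch works: the handler registers argparse sub-parsers, and once CONF() has parsed argv, the selected sub-command's options and callback are exposed under CONF.command.

from oslo.config import cfg

CONF = cfg.CONF

def do_upgrade():
    # CONF.command holds the parsed sub-command namespace.
    print(CONF.command.revision)

def add_command_parsers(subparsers):
    parser = subparsers.add_parser('upgrade')
    parser.add_argument('--revision', nargs='?')
    parser.set_defaults(func=do_upgrade)

CONF.register_cli_opt(cfg.SubCommandOpt('command',
                                        handler=add_command_parsers))
CONF(['upgrade', '--revision', 'head'], project='demo')
CONF.command.func()   # prints 'head'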

cerberus/common/utils.py (new file)

@ -0,0 +1,176 @@
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# Copyright (c) 2012 NTT DOCOMO, INC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utilities and helper functions."""
import netaddr
import re
import six
import uuid
from oslo.config import cfg
from cerberus.common import exception
from cerberus.openstack.common.gettextutils import _ # noqa
from cerberus.openstack.common import log as logging
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class LazyPluggable(object):
"""A pluggable backend loaded lazily based on some value."""
def __init__(self, pivot, config_group=None, **backends):
self.__backends = backends
self.__pivot = pivot
self.__backend = None
self.__config_group = config_group
def __get_backend(self):
if not self.__backend:
if self.__config_group is None:
backend_name = CONF[self.__pivot]
else:
backend_name = CONF[self.__config_group][self.__pivot]
if backend_name not in self.__backends:
msg = _('Invalid backend: %s') % backend_name
raise exception.CerberusException(msg)
backend = self.__backends[backend_name]
if isinstance(backend, tuple):
name = backend[0]
fromlist = backend[1]
else:
name = backend
fromlist = backend
self.__backend = __import__(name, None, None, fromlist)
return self.__backend
def __getattr__(self, key):
backend = self.__get_backend()
return getattr(backend, key)
def is_valid_ipv4(address):
"""Verify that address represents a valid IPv4 address."""
try:
return netaddr.valid_ipv4(address)
except Exception:
return False
def is_valid_ipv6(address):
try:
return netaddr.valid_ipv6(address)
except Exception:
return False
def is_valid_ipv6_cidr(address):
try:
str(netaddr.IPNetwork(address, version=6).cidr)
return True
except Exception:
return False
def get_shortened_ipv6(address):
addr = netaddr.IPAddress(address, version=6)
return str(addr.ipv6())
def get_shortened_ipv6_cidr(address):
net = netaddr.IPNetwork(address, version=6)
return str(net.cidr)
def is_valid_cidr(address):
"""Check if the provided ipv4 or ipv6 address is a valid CIDR address."""
try:
# Validate the correct CIDR Address
netaddr.IPNetwork(address)
except netaddr.core.AddrFormatError:
return False
except UnboundLocalError:
# NOTE(MotoKen): work around bug in netaddr 0.7.5 (see detail in
# https://github.com/drkjam/netaddr/issues/2)
return False
    # The validation above only partially verifies the /xx part;
    # verify it fully here.
ip_segment = address.split('/')
if (len(ip_segment) <= 1 or
ip_segment[1] == ''):
return False
return True
def get_ip_version(network):
"""Returns the IP version of a network (IPv4 or IPv6).
:raises: AddrFormatError if invalid network.
"""
if netaddr.IPNetwork(network).version == 6:
return "IPv6"
elif netaddr.IPNetwork(network).version == 4:
return "IPv4"
def convert_to_list_dict(lst, label):
"""Convert a value or list into a list of dicts."""
if not lst:
return None
if not isinstance(lst, list):
lst = [lst]
return [{label: x} for x in lst]
def sanitize_hostname(hostname):
"""Return a hostname which conforms to RFC-952 and RFC-1123 specs."""
if isinstance(hostname, six.text_type):
hostname = hostname.encode('latin-1', 'ignore')
hostname = re.sub('[ _]', '-', hostname)
    hostname = re.sub(r'[^\w.-]+', '', hostname)
hostname = hostname.lower()
hostname = hostname.strip('.-')
return hostname
def generate_uuid():
return str(uuid.uuid4())
def is_uuid_like(val):
"""Returns validation of a value as a UUID.
For our purposes, a UUID is a canonical form string:
aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa
"""
try:
return str(uuid.UUID(val)) == val
except (TypeError, ValueError, AttributeError):
return False
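LazyPluggable defers the backend import until the first attribute access; cerberus.db.migration (added below in this commit) uses it to select its sqlalchemy implementation. A small usage sketch, assuming only this utils module and a configured [database]/backend option:

from cerberus.common import utils

# Nothing is imported yet; only the dotted path is recorded.
IMPL = utils.LazyPluggable(pivot='backend',
                           config_group='database',
                           sqlalchemy='cerberus.db.sqlalchemy.migration')

# The first attribute access reads CONF['database']['backend'] (expected to
# be 'sqlalchemy'), __import__()s the mapped module, and then delegates:
# IMPL.version()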


@ -16,21 +16,27 @@
from oslo.config import cfg
from cerberus.db.sqlalchemy import models
from cerberus.openstack.common.db import api as db_api
CONF = cfg.CONF
CONF.import_opt('backend', 'cerberus.openstack.common.db.options',
group='database')
_BACKEND_MAPPING = {'sqlalchemy': 'cerberus.db.sqlalchemy.api'}
IMPL = db_api.DBAPI(CONF.database.backend, backend_mapping=_BACKEND_MAPPING,
lazy=True)
''' JUNO:
IMPL = db_api.DBAPI.from_config(cfg.CONF,
backend_mapping=_BACKEND_MAPPING,
lazy=True)
'''
def setup_db():
engine = get_engine()
models.register_models(engine)
def drop_db():
engine = get_engine()
models.unregister_models(engine)
def get_instance():
@ -46,11 +52,6 @@ def get_session():
return IMPL.get_session()
def db_sync(engine, version=None):
"""Migrate the database to `version` or the most recent version."""
return IMPL.db_sync(engine, version=version)
def security_report_create(values):
"""Create an instance from the values dictionary."""
return IMPL.security_report_create(values)
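The new setup_db()/drop_db() helpers added above let the unit tests build a throw-away schema directly from the models instead of running migrations. A sketch of the intended flow (mirroring WithDbTestCase further down in this commit):

from oslo.config import cfg
from cerberus.db import api as db_api

# Point the backend at an in-memory sqlite database for the test run.
cfg.CONF.set_override('connection', 'sqlite://', group='database')

db_api.setup_db()    # models.register_models(): create_all() for each model
# ... exercise the DB API ...
db_api.drop_db()     # models.unregister_models(): drop_all() for each model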

cerberus/db/migration.py (new file)

@ -0,0 +1,55 @@
#
# Copyright (c) 2015 EUROGICIEL
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Database setup and migration commands."""
from oslo.config import cfg
from cerberus.common import utils
CONF = cfg.CONF
CONF.import_opt('backend',
'cerberus.openstack.common.db.options',
group='database')
IMPL = utils.LazyPluggable(
pivot='backend',
config_group='database',
sqlalchemy='cerberus.db.sqlalchemy.migration')
INIT_VERSION = 0
def upgrade(version=None):
"""Migrate the database to `version` or the most recent version."""
return IMPL.upgrade(version)
def downgrade(version=None):
return IMPL.downgrade(version)
def version():
return IMPL.version()
def stamp(version):
return IMPL.stamp(version)
def revision(message, autogenerate):
return IMPL.revision(message, autogenerate)
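This facade keeps callers such as cerberus-dbsync unaware of the alembic backend. Programmatic use looks roughly like this (revision ids taken from the migrations added below):

from cerberus.db import migration

migration.upgrade('head')         # same effect as `cerberus-dbsync upgrade`
print(migration.version())        # e.g. '479e56a9ae3b' once fully upgraded
migration.downgrade('2dd6320a2745')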


@ -0,0 +1,54 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = %(here)s/alembic
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# max length of characters to apply to the
# "slug" field
#truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
#sqlalchemy.url = driver://user:pass@localhost/dbname
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S


@ -0,0 +1,16 @@
Please see https://alembic.readthedocs.org/en/latest/index.html for general Alembic documentation.
To create a new alembic migration:
$ cerberus-dbsync revision --message "description of change" --autogenerate
To stamp the db with the most recent migration version, without actually running migrations:
$ cerberus-dbsync stamp --revision head
Upgrades can be performed with:
$ cerberus-dbsync (bare invocation, kept for backward compatibility)
$ cerberus-dbsync upgrade
$ cerberus-dbsync upgrade --revision head
To downgrade the db:
$ cerberus-dbsync downgrade
$ cerberus-dbsync downgrade --revision base


@ -0,0 +1,54 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from logging import config as log_config
from alembic import context
from cerberus.db.sqlalchemy import api as sqla_api
from cerberus.db.sqlalchemy import models
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
log_config.fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
target_metadata = models.Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
engine = sqla_api.get_engine()
with engine.connect() as connection:
context.configure(connection=connection,
target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
run_migrations_online()
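This env.py only wires up online migrations, so it always needs a live engine. A standard Alembic env.py also supports offline (--sql) mode; a sketch of what that would look like here, not part of this commit:

def run_migrations_offline():
    """Run migrations in 'offline' mode: emit SQL without connecting."""
    context.configure(url=str(sqla_api.get_engine().url),
                      target_metadata=target_metadata)
    with context.begin_transaction():
        context.run_migrations()

# if context.is_offline_mode():
#     run_migrations_offline()
# else:
#     run_migrations_online()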


@ -0,0 +1,22 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision}
Create Date: ${create_date}
"""
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}


@ -0,0 +1,116 @@
#
# Copyright (c) 2014 EUROGICIEL
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""initial_migration
Revision ID: 2dd6320a2745
Revises: None
Create Date: 2015-06-25 10:45:10.853595
"""
# revision identifiers, used by Alembic.
revision = '2dd6320a2745'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'plugin_info',
sa.Column('id', sa.Integer, primary_key=True, nullable=False),
sa.Column('uuid', sa.Text),
sa.Column('name', sa.Text),
sa.Column('version', sa.Text),
sa.Column('provider', sa.Text),
sa.Column('type', sa.Text),
sa.Column('description', sa.Text),
sa.Column('tool_name', sa.Text),
sa.Column('created_at', sa.DateTime),
sa.Column('updated_at', sa.DateTime),
sa.Column('deleted_at', sa.DateTime),
sa.Column('deleted', sa.Integer),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
)
op.create_table(
'security_report',
sa.Column('id', sa.Integer, primary_key=True, nullable=False),
sa.Column('plugin_id', sa.Text),
sa.Column('report_id', sa.VARCHAR(255), unique=True),
sa.Column('component_id', sa.Text),
sa.Column('component_type', sa.Text),
sa.Column('component_name', sa.Text),
sa.Column('project_id', sa.Text),
sa.Column('ticket_id', sa.Text),
sa.Column('title', sa.Text),
sa.Column('description', sa.Text),
sa.Column('security_rating', sa.Float),
sa.Column('vulnerabilities', sa.Text),
sa.Column('vulnerabilities_number', sa.Integer),
sa.Column('last_report_date', sa.DateTime),
sa.Column('created_at', sa.DateTime),
sa.Column('updated_at', sa.DateTime),
sa.Column('deleted_at', sa.DateTime),
sa.Column('deleted', sa.Integer),
        mysql_engine='InnoDB',
        mysql_charset='utf8'
)
op.create_table(
'security_alarm',
sa.Column('id', sa.Integer, primary_key=True, nullable=False),
sa.Column('plugin_id', sa.Text),
sa.Column('alarm_id', sa.VARCHAR(255), unique=True),
sa.Column('component_id', sa.Text),
sa.Column('project_id', sa.Text),
sa.Column('ticket_id', sa.Text),
sa.Column('timestamp', sa.DateTime),
sa.Column('summary', sa.Text),
sa.Column('severity', sa.Text),
sa.Column('status', sa.Text),
sa.Column('description', sa.Text),
sa.Column('created_at', sa.DateTime),
sa.Column('updated_at', sa.DateTime),
sa.Column('deleted_at', sa.DateTime),
sa.Column('deleted', sa.Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
op.create_table(
'task',
sa.Column('id', sa.Integer, primary_key=True, nullable=False),
sa.Column('type', sa.Text),
sa.Column('plugin_id', sa.Text),
sa.Column('uuid', sa.Text),
sa.Column('name', sa.Text),
sa.Column('method', sa.Text),
sa.Column('running', sa.Boolean),
sa.Column('period', sa.Integer),
sa.Column('created_at', sa.DateTime),
sa.Column('updated_at', sa.DateTime),
sa.Column('deleted_at', sa.DateTime),
sa.Column('deleted', sa.Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
def downgrade():
raise NotImplementedError(('Downgrade from initial migration is'
' unsupported.'))
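Downgrading past the initial schema is refused above; if it were supported, the inverse would simply drop the four tables, e.g. (hypothetical):

def downgrade():
    for name in ('task', 'security_alarm', 'security_report', 'plugin_info'):
        op.drop_table(name)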


@ -0,0 +1,332 @@
#
# Copyright (c) 2014 EUROGICIEL
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""text_to_varchar
Revision ID: 4426f811d4d9
Revises: 2dd6320a2745
Create Date: 2015-06-25 10:47:00.485303
"""
# revision identifiers, used by Alembic.
revision = '4426f811d4d9'
down_revision = '2dd6320a2745'
from alembic import op
import sqlalchemy as sa
def upgrade():
# In table plugin_info
op.alter_column(
table_name='plugin_info',
column_name='uuid',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='plugin_info',
column_name='name',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='plugin_info',
column_name='version',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='plugin_info',
column_name='provider',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='plugin_info',
column_name='type',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='plugin_info',
column_name='description',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='plugin_info',
column_name='tool_name',
type_=sa.VARCHAR(255)
)
# In table security_report, except column vulnerabilities
op.alter_column(
table_name='security_report',
column_name='plugin_id',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='security_report',
column_name='component_id',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='security_report',
column_name='component_type',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='security_report',
column_name='component_name',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='security_report',
column_name='project_id',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='security_report',
column_name='ticket_id',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='security_report',
column_name='title',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='security_report',
column_name='description',
type_=sa.VARCHAR(255)
)
# In table security_alarm
op.alter_column(
table_name='security_alarm',
column_name='plugin_id',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='security_alarm',
column_name='component_id',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='security_alarm',
column_name='project_id',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='security_alarm',
column_name='ticket_id',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='security_alarm',
column_name='summary',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='security_alarm',
column_name='severity',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='security_alarm',
column_name='status',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='security_alarm',
column_name='description',
type_=sa.VARCHAR(255)
)
# In table task
op.alter_column(
table_name='task',
column_name='type',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='task',
column_name='plugin_id',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='task',
column_name='uuid',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='task',
column_name='name',
type_=sa.VARCHAR(255)
)
op.alter_column(
table_name='task',
column_name='method',
type_=sa.VARCHAR(255)
)
def downgrade():
# In table plugin_info
op.alter_column(
table_name='plugin_info',
column_name='uuid',
type_=sa.TEXT
)
op.alter_column(
table_name='plugin_info',
column_name='name',
type_=sa.TEXT
)
op.alter_column(
table_name='plugin_info',
column_name='version',
type_=sa.TEXT
)
op.alter_column(
table_name='plugin_info',
column_name='provider',
type_=sa.TEXT
)
op.alter_column(
table_name='plugin_info',
column_name='type',
type_=sa.TEXT
)
op.alter_column(
table_name='plugin_info',
column_name='description',
type_=sa.TEXT
)
op.alter_column(
table_name='plugin_info',
column_name='tool_name',
type_=sa.TEXT
)
# In table security_report, except column vulnerabilities (still Text)
# and report_id (already varchar)
op.alter_column(
table_name='security_report',
column_name='plugin_id',
type_=sa.TEXT
)
op.alter_column(
table_name='security_report',
column_name='component_id',
type_=sa.TEXT
)
op.alter_column(
table_name='security_report',
column_name='component_type',
type_=sa.TEXT
)
op.alter_column(
table_name='security_report',
column_name='component_name',
type_=sa.TEXT
)
op.alter_column(
table_name='security_report',
column_name='project_id',
type_=sa.TEXT
)
op.alter_column(
table_name='security_report',
column_name='ticket_id',
type_=sa.TEXT
)
op.alter_column(
table_name='security_report',
column_name='title',
type_=sa.TEXT
)
op.alter_column(
table_name='security_report',
column_name='description',
type_=sa.TEXT
)
# In table security_alarm, except alarm_id (already varchar)
op.alter_column(
table_name='security_alarm',
column_name='plugin_id',
type_=sa.TEXT
)
op.alter_column(
table_name='security_alarm',
column_name='component_id',
type_=sa.TEXT
)
op.alter_column(
table_name='security_alarm',
column_name='project_id',
type_=sa.TEXT
)
op.alter_column(
table_name='security_alarm',
column_name='ticket_id',
type_=sa.TEXT
)
op.alter_column(
table_name='security_alarm',
column_name='summary',
type_=sa.TEXT
)
op.alter_column(
table_name='security_alarm',
column_name='severity',
type_=sa.TEXT
)
op.alter_column(
table_name='security_alarm',
column_name='status',
type_=sa.TEXT
)
op.alter_column(
table_name='security_alarm',
column_name='description',
type_=sa.TEXT
)
# In table task
op.alter_column(
table_name='task',
column_name='type',
type_=sa.TEXT
)
op.alter_column(
table_name='task',
column_name='plugin_id',
type_=sa.TEXT
)
op.alter_column(
table_name='task',
column_name='uuid',
type_=sa.TEXT
)
op.alter_column(
table_name='task',
column_name='name',
type_=sa.TEXT
)
op.alter_column(
table_name='task',
column_name='method',
type_=sa.TEXT
)
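The migration spells every column out; an equivalent, more compact formulation (a hypothetical refactoring, not what this commit does) would loop over a table-to-columns map. Note that on MySQL, Alembic generally wants existing_type passed to alter_column so it can emit a complete MODIFY COLUMN:

_VARCHAR_COLUMNS = {
    'plugin_info': ('uuid', 'name', 'version', 'provider', 'type',
                    'description', 'tool_name'),
    'security_report': ('plugin_id', 'component_id', 'component_type',
                        'component_name', 'project_id', 'ticket_id',
                        'title', 'description'),
    'security_alarm': ('plugin_id', 'component_id', 'project_id', 'ticket_id',
                       'summary', 'severity', 'status', 'description'),
    'task': ('type', 'plugin_id', 'uuid', 'name', 'method'),
}

def upgrade():
    for table, columns in _VARCHAR_COLUMNS.items():
        for column in columns:
            op.alter_column(table_name=table, column_name=column,
                            existing_type=sa.Text, type_=sa.VARCHAR(255))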


@ -0,0 +1,50 @@
#
# Copyright (c) 2014 EUROGICIEL
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""alter_security_report_add_uuid
Revision ID: 479e56a9ae3b
Revises: 4426f811d4d9
Create Date: 2015-06-25 10:48:06.260041
"""
# revision identifiers, used by Alembic.
revision = '479e56a9ae3b'
down_revision = '4426f811d4d9'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('security_report',
sa.Column('uuid', sa.VARCHAR(255), unique=True))
op.drop_constraint('report_id', 'security_report', type_='unique')
op.create_unique_constraint('unique_uuid',
'security_report',
['uuid'])
op.create_unique_constraint('unique_report_id_plugin_id',
'security_report',
['report_id', 'plugin_id'])
def downgrade():
op.drop_column('security_report', 'uuid')
op.drop_constraint('unique_report_id_plugin_id',
'security_report',
type_='unique')
op.create_unique_constraint('report_id', 'security_report', ['report_id'])


@ -20,7 +20,6 @@ import threading
from oslo.config import cfg
from cerberus.common import exception
from cerberus.db.sqlalchemy import migration
from cerberus.db.sqlalchemy import models
from cerberus.openstack.common.db import exception as db_exc
from cerberus.openstack.common.db.sqlalchemy import session as db_session
@ -73,16 +72,6 @@ def model_query(model, *args, **kwargs):
return query
def db_sync(engine, version=None):
"""Migrate the database to `version` or the most recent version."""
return migration.db_sync(engine, version=version)
def db_version(engine):
"""Display the current database version."""
return migration.db_version(engine)
def _security_report_create(values):
try:
security_report_ref = models.SecurityReport()


@ -1,4 +0,0 @@
This is a database migration repository.
More information at
http://code.google.com/p/sqlalchemy-migrate/


@ -1,5 +0,0 @@
#!/usr/bin/env python
from migrate.versioning.shell import main
if __name__ == '__main__':
main(debug='False')


@ -1,25 +0,0 @@
[db_settings]
# Used to identify which repository this database is versioned under.
# You can use the name of your project.
repository_id=cerberus
# The name of the database table used to track the schema version.
# This name shouldn't already be used by your project.
# If this is changed once a database is under version control, you'll need to
# change the table name in each database too.
version_table=migrate_version
# When committing a change script, Migrate will attempt to generate the
# sql for all supported databases; normally, if one of them fails - probably
# because you don't have that database installed - it is ignored and the
# commit continues, perhaps ending successfully.
# Databases in this list MUST compile successfully during a commit, or the
# entire commit will fail. List the databases your application will actually
# be using to ensure your updates to that database work properly.
# This must be a list; example: ['postgres','sqlite']
required_dbs=[]
# When creating new change scripts, Migrate will stamp the new script with
# a version number. By default this is latest_version + 1. You can set this
# to 'true' to tell Migrate to use the UTC timestamp instead.
use_timestamp_numbering=False


@ -1,124 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy
def upgrade(migrate_engine):
meta = sqlalchemy.MetaData()
meta.bind = migrate_engine
plugin_info = sqlalchemy.Table(
'plugin_info', meta,
sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True,
nullable=False),
sqlalchemy.Column('uuid', sqlalchemy.Text),
sqlalchemy.Column('name', sqlalchemy.Text),
sqlalchemy.Column('version', sqlalchemy.Text),
sqlalchemy.Column('provider', sqlalchemy.Text),
sqlalchemy.Column('type', sqlalchemy.Text),
sqlalchemy.Column('description', sqlalchemy.Text),
sqlalchemy.Column('tool_name', sqlalchemy.Text),
sqlalchemy.Column('created_at', sqlalchemy.DateTime),
sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
sqlalchemy.Column('deleted_at', sqlalchemy.DateTime),
sqlalchemy.Column('deleted', sqlalchemy.Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
security_report = sqlalchemy.Table(
'security_report', meta,
sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True,
nullable=False),
sqlalchemy.Column('plugin_id', sqlalchemy.Text),
sqlalchemy.Column('report_id', sqlalchemy.VARCHAR(255), unique=True),
sqlalchemy.Column('component_id', sqlalchemy.Text),
sqlalchemy.Column('component_type', sqlalchemy.Text),
sqlalchemy.Column('component_name', sqlalchemy.Text),
sqlalchemy.Column('project_id', sqlalchemy.Text),
sqlalchemy.Column('title', sqlalchemy.Text),
sqlalchemy.Column('description', sqlalchemy.Text),
sqlalchemy.Column('security_rating', sqlalchemy.Float),
sqlalchemy.Column('vulnerabilities', sqlalchemy.Text),
sqlalchemy.Column('vulnerabilities_number', sqlalchemy.Integer),
sqlalchemy.Column('last_report_date', sqlalchemy.DateTime),
sqlalchemy.Column('created_at', sqlalchemy.DateTime),
sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
sqlalchemy.Column('deleted_at', sqlalchemy.DateTime),
sqlalchemy.Column('deleted', sqlalchemy.Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
security_alarm = sqlalchemy.Table(
'security_alarm', meta,
sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True,
nullable=False),
sqlalchemy.Column('plugin_id', sqlalchemy.Text),
sqlalchemy.Column('alarm_id', sqlalchemy.VARCHAR(255), unique=True),
sqlalchemy.Column('component_id', sqlalchemy.Text),
sqlalchemy.Column('project_id', sqlalchemy.Text),
sqlalchemy.Column('ticket_id', sqlalchemy.Text),
sqlalchemy.Column('timestamp', sqlalchemy.DateTime),
sqlalchemy.Column('summary', sqlalchemy.Text),
sqlalchemy.Column('severity', sqlalchemy.Text),
sqlalchemy.Column('status', sqlalchemy.Text),
sqlalchemy.Column('description', sqlalchemy.Text),
sqlalchemy.Column('created_at', sqlalchemy.DateTime),
sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
sqlalchemy.Column('deleted_at', sqlalchemy.DateTime),
sqlalchemy.Column('deleted', sqlalchemy.Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
task = sqlalchemy.Table(
'task', meta,
sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True,
nullable=False),
sqlalchemy.Column('type', sqlalchemy.Text),
sqlalchemy.Column('plugin_id', sqlalchemy.Text),
sqlalchemy.Column('uuid', sqlalchemy.Text),
sqlalchemy.Column('name', sqlalchemy.Text),
sqlalchemy.Column('method', sqlalchemy.Text),
sqlalchemy.Column('running', sqlalchemy.Boolean),
sqlalchemy.Column('period', sqlalchemy.Integer),
sqlalchemy.Column('created_at', sqlalchemy.DateTime),
sqlalchemy.Column('updated_at', sqlalchemy.DateTime),
sqlalchemy.Column('deleted_at', sqlalchemy.DateTime),
sqlalchemy.Column('deleted', sqlalchemy.Integer),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
tables = (
security_report,
plugin_info,
security_alarm,
task
)
for index, table in enumerate(tables):
try:
table.create()
except Exception:
# If an error occurs, drop all tables created so far to return
# to the previously existing state.
meta.drop_all(tables=tables[:index])
raise
def downgrade(migrate_engine):
raise NotImplementedError('Database downgrade not supported - '
'would drop all tables')


@ -1,30 +0,0 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def upgrade(migrate_engine):
if migrate_engine.name == "mysql":
security_report = 'security_report'
sql = "ALTER TABLE %s ADD uuid VARCHAR(255)" \
" NOT NULL AFTER id," \
"ADD UNIQUE(uuid);" % security_report
sql += "ALTER TABLE %s DROP INDEX report_id;" % security_report
sql += "ALTER TABLE %s ADD UNIQUE (report_id, plugin_id);" \
% security_report
migrate_engine.execute(sql)
def downgrade(migrate_engine):
raise NotImplementedError('Database downgrade not supported - '
'would drop all tables')


@ -13,26 +13,78 @@
import os
from cerberus.openstack.common.db.sqlalchemy import migration as oslo_migration
import alembic
from alembic import config as alembic_config
import alembic.migration as alembic_migration
from cerberus.db.sqlalchemy import api as sqla_api
INIT_VERSION = 0
def db_sync(engine, version=None):
path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'migrate_repo')
return oslo_migration.db_sync(engine, path, version,
init_version=INIT_VERSION)
def _alembic_config():
path = os.path.join(os.path.dirname(__file__), 'alembic.ini')
config = alembic_config.Config(path)
return config
def db_version(engine):
path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'migrate_repo')
return oslo_migration.db_version(engine, path, INIT_VERSION)
def version(config=None):
"""Current database version.
:returns: Database version
:rtype: string
"""
engine = sqla_api.get_engine()
with engine.connect() as conn:
context = alembic_migration.MigrationContext.configure(conn)
return context.get_current_revision()
def db_version_control(engine, version=None):
path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'migrate_repo')
return oslo_migration.db_version_control(engine, path, version)
def upgrade(revision, config=None):
    """Upgrade the database to the given revision.
    :param revision: Desired database revision; defaults to 'head'
    :type revision: string
    """
    revision = revision or 'head'
    config = config or _alembic_config()
    alembic.command.upgrade(config, revision)
def downgrade(revision, config=None):
    """Downgrade the database to the given revision.
    :param revision: Desired database revision; defaults to 'base'
    :type revision: string
    """
revision = revision or 'base'
config = config or _alembic_config()
return alembic.command.downgrade(config, revision)
def stamp(revision, config=None):
    """Stamp the database with the provided revision.
    Does not run any migrations.
:param revision: Should match one from repository or head - to stamp
database with most recent revision
:type revision: string
"""
config = config or _alembic_config()
return alembic.command.stamp(config, revision=revision)
def revision(message=None, autogenerate=False, config=None):
"""Creates template for migration.
:param message: Text that will be used for migration title
:type message: string
:param autogenerate: If True - generates diff based on current database
state
:type autogenerate: bool
"""
config = config or _alembic_config()
return alembic.command.revision(config, message=message,
autogenerate=autogenerate)
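One practical consequence of swapping backends: a database that was already deployed under sqlalchemy-migrate has the tables but no alembic version table. A plausible transition path (an assumption, not spelled out in this commit) is to stamp such a database at the initial alembic revision before upgrading:

from cerberus.db.sqlalchemy import migration

migration.stamp('2dd6320a2745')   # record the existing schema as the baseline
migration.upgrade('head')         # then apply the newer alembic revisions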


@ -28,7 +28,6 @@ from cerberus.openstack.common.db.sqlalchemy import models
CONF = cfg.CONF
BASE = declarative_base()
class CerberusBase(models.SoftDeleteMixin,
@ -45,8 +44,10 @@ class CerberusBase(models.SoftDeleteMixin,
super(CerberusBase, self).save(session=session)
Base = declarative_base(cls=CerberusBase)
class PluginInfo(BASE, CerberusBase):
class PluginInfo(Base, CerberusBase):
"""Plugin info"""
__tablename__ = 'plugin_info'
@ -73,7 +74,7 @@ class PluginInfoJsonSerializer(serialize.JsonSerializer):
__object_class__ = PluginInfo
class SecurityReport(BASE, CerberusBase):
class SecurityReport(Base, CerberusBase):
"""Security Report"""
__tablename__ = 'security_report'
@ -111,7 +112,7 @@ class SecurityReportJsonSerializer(serialize.JsonSerializer):
__object_class__ = SecurityReport
class SecurityAlarm(BASE, CerberusBase):
class SecurityAlarm(Base, CerberusBase):
    """Security alarm, e.g. one coming from a Security Information and
    Event Manager.
"""
@ -145,7 +146,7 @@ class SecurityAlarmJsonSerializer(serialize.JsonSerializer):
__object_class__ = SecurityAlarm
class Task(BASE, CerberusBase):
class Task(Base, CerberusBase):
"""Tasks for security purposes (e.g: daily scans...)
"""
__tablename__ = 'task'
@ -171,3 +172,17 @@ class TaskJsonSerializer(serialize.JsonSerializer):
__attribute_serializer__ = dict(created_at='date', deleted_at='date',
acknowledged_at='date')
__object_class__ = Task
def register_models(engine):
"""Creates database tables for all models with the given engine."""
models = (PluginInfo, SecurityReport, SecurityAlarm, Task)
for model in models:
model.metadata.create_all(engine)
def unregister_models(engine):
"""Drops database tables for all models with the given engine."""
models = (PluginInfo, SecurityReport, SecurityAlarm, Task)
for model in models:
model.metadata.drop_all(engine)


@ -135,7 +135,7 @@ class FixedIntervalLoopingCallEncoder(json.JSONEncoder):
state = 'running'
else:
state = 'stopped'
return {'id': obj.kw.get('task_id', None),
return {'id': str(obj.kw.get('task_id', None)),
'name': obj.kw.get('task_name', None),
'period': obj.kw.get('task_period', None),
'type': obj.kw.get('task_type', None),
@ -148,7 +148,7 @@ class ThreadEncoder(json.JSONEncoder):
def default(self, obj):
if not isinstance(obj, threadgroup.Thread):
return super(ThreadEncoder, self).default(obj)
return {'id': obj.kw.get('task_id', None),
return {'id': str(obj.kw.get('task_id', None)),
'name': obj.kw.get('task_name', None),
'type': obj.kw.get('task_type', None),
'plugin_id': obj.kw.get('plugin_id', None),


@ -16,12 +16,11 @@
import os
import socket
import sys
from oslo.config import cfg
from oslo.messaging import rpc
from stevedore import named
from cerberus.common import config
from cerberus.openstack.common.gettextutils import _ # noqa
from cerberus.openstack.common import log
from cerberus import utils
@ -127,8 +126,8 @@ def get_workers(name):
return workers
def prepare_service(argv=None):
rpc.set_defaults(control_exchange='cerberus')
def prepare_service(argv=[]):
config.parse_args(argv)
cfg.set_defaults(log.log_opts,
default_log_levels=['amqplib=WARN',
'qpid.messaging=INFO',
@ -136,9 +135,7 @@ def prepare_service(argv=None):
'keystoneclient=INFO',
'stevedore=INFO',
'eventlet.wsgi.server=WARN',
'iso8601=WARN'
'iso8601=WARN',
'paramiko=WARN',
])
if argv is None:
argv = sys.argv
cfg.CONF(argv[1:], project='cerberus')
log.setup('cerberus')


@ -25,10 +25,10 @@ from cerberus.tests.unit import base
PATH_PREFIX = '/v1'
class TestApiBase(base.TestBase):
class TestApiCase(base.TestCase):
def setUp(self):
super(TestApiBase, self).setUp()
super(TestApiCase, self).setUp()
self.app = self._make_app()
self.dbapi = dbapi.get_instance()
cfg.CONF.set_override("auth_version",


@ -30,7 +30,7 @@ PLUGIN_ID_2 = 2
PLUGIN_NAME_2 = 'toolyx'
class TestPlugins(base.TestApiBase):
class TestPlugins(base.TestApiCase):
def setUp(self):
super(TestPlugins, self).setUp()


@ -27,7 +27,7 @@ SECURITY_ALARM_ID = 'abc123'
SECURITY_ALARM_ID_2 = 'xyz789'
class TestSecurityReports(base.TestApiBase):
class TestSecurityReports(base.TestApiCase):
def setUp(self):
super(TestSecurityReports, self).setUp()


@ -27,7 +27,7 @@ SECURITY_REPORT_ID = 'abc123'
SECURITY_REPORT_ID_2 = 'xyz789'
class TestSecurityReports(base.TestApiBase):
class TestSecurityReports(base.TestApiCase):
def setUp(self):
super(TestSecurityReports, self).setUp()


@ -39,7 +39,7 @@ class MockTask(object):
self.method = method
class TestTasks(base.TestApiBase):
class TestTasks(base.TestApiCase):
def setUp(self):
super(TestTasks, self).setUp()


@ -18,21 +18,19 @@ import os
from oslo.config import cfg
from oslotest import base
from cerberus.db import api as db_api
from cerberus.tests.unit import config_fixture
from cerberus.tests.unit import policy_fixture
from cerberus.tests.unit import utils
CONF = cfg.CONF
class TestBase(base.BaseTestCase):
class TestCase(base.BaseTestCase):
"""Test case base class for all unit tests."""
def setUp(self):
super(TestBase, self).setUp()
utils.setup_dummy_db()
self.addCleanup(utils.reset_dummy_db)
super(TestCase, self).setUp()
self.useFixture(config_fixture.ConfigFixture(CONF))
self.policy = self.useFixture(policy_fixture.PolicyFixture())
@ -52,5 +50,18 @@ class TestBase(base.BaseTestCase):
return root
class TestBaseFaulty(TestBase):
class WithDbTestCase(TestCase):
def override_config(self, name, override, group=None):
CONF.set_override(name, override, group)
self.addCleanup(CONF.clear_override, name, group)
def setUp(self):
super(WithDbTestCase, self).setUp()
self.override_config('connection', "sqlite://", group='database')
db_api.setup_db()
self.addCleanup(db_api.drop_db)
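A minimal sketch of how a DB-backed unit test would use the new WithDbTestCase above (hypothetical test, not part of this commit):

from cerberus.db import api as db_api
from cerberus.tests.unit import base

class TestSecurityReportDb(base.WithDbTestCase):

    def test_create_and_read_back(self):
        report = db_api.security_report_create({'title': 'Title',
                                                'plugin_id': 'plugin-1',
                                                'component_id': 'comp-1'})
        self.assertEqual('Title', report.title)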
class TestCaseFaulty(TestCase):
"""This test ensures we aren't letting any exceptions go unhandled."""


@ -23,7 +23,7 @@ from cerberus.tests.unit import base
cfg.CONF.import_group('service_credentials', 'cerberus.service')
class TestKeystoneClient(base.TestBase):
class TestKeystoneClient(base.TestCase):
def setUp(self):
super(TestKeystoneClient, self).setUp()


@ -23,7 +23,7 @@ from cerberus.tests.unit import base
cfg.CONF.import_group('service_credentials', 'cerberus.service')
class TestNeutronClient(base.TestBase):
class TestNeutronClient(base.TestCase):
def setUp(self):
super(TestNeutronClient, self).setUp()


@ -23,7 +23,7 @@ from cerberus.tests.unit import base
cfg.CONF.import_group('service_credentials', 'cerberus.service')
class TestNovaClient(base.TestBase):
class TestNovaClient(base.TestCase):
@staticmethod
def fake_servers_list(*args, **kwargs):


@ -19,23 +19,24 @@ Tests for `db api` module.
"""
import mock
from oslo.config import fixture as fixture_config
from cerberus.db.sqlalchemy import api
from cerberus.db.sqlalchemy import api as db_api
from cerberus.openstack.common.db.sqlalchemy import models as db_models
from cerberus.tests.unit import base
class DbApiTestCase(base.TestBase):
class DbApiTestCase(base.WithDbTestCase):
def setUp(self):
super(DbApiTestCase, self).setUp()
def test_security_report_create(self):
self.CONF = self.useFixture(fixture_config.Config()).conf
self.CONF([], project='cerberus')
db_models.ModelBase.save = mock.MagicMock()
report = api.security_report_create({'title': 'TitleSecurityReport',
'plugin_id': '123456789',
'description': 'The first',
'component_id': '1234'})
report = db_api.security_report_create(
{'title': 'TitleSecurityReport',
'plugin_id': '123456789',
'description': 'The first',
'component_id': '1234'})
self.assertEqual('TitleSecurityReport', report.title)
self.assertEqual('123456789', report.plugin_id)
@ -43,18 +44,15 @@ class DbApiTestCase(base.TestBase):
self.assertEqual('1234', report.component_id)
def test_plugin_info_create(self):
self.CONF = self.useFixture(fixture_config.Config()).conf
self.CONF([], project='cerberus')
pi = api.plugin_info_create({'name': 'NameOfPlugin',
'uuid': '0000-aaaa-1111-bbbb'})
pi = db_api.plugin_info_create(
{'name': 'NameOfPlugin',
'uuid': '0000-aaaa-1111-bbbb'})
self.assertTrue(pi.id >= 0)
def test_plugin_info_get(self):
self.CONF = self.useFixture(fixture_config.Config()).conf
self.CONF([], project='cerberus')
db_api.plugin_info_create(
{'name': 'NameOfPluginToGet',
'uuid': '3333-aaaa-1111-bbbb'})
api.plugin_info_create({'name': 'NameOfPluginToGet',
'uuid': '3333-aaaa-1111-bbbb'})
pi = api.plugin_info_get('NameOfPluginToGet')
pi = db_api.plugin_info_get('NameOfPluginToGet')
self.assertEqual('NameOfPluginToGet', pi.name)


@ -73,7 +73,7 @@ class EntryPoint(object):
"FooPkg-1.2-py2.4.egg")
class TestCerberusManager(base.TestBase):
class TestCerberusManager(base.WithDbTestCase):
def setUp(self):
super(TestCerberusManager, self).setUp()
@ -339,7 +339,7 @@ class TestCerberusManager(base.TestBase):
self.assertTrue(int(json.loads(task).get('id')) == recurrent_task_id)
task_2 = self.manager.get_task({'some': 'context'}, 2)
self.assertTrue(json.loads(task_2).get('name') == unique_task_name)
self.assertTrue(json.loads(task_2).get('id') == unique_task_id)
self.assertTrue(int(json.loads(task_2).get('id')) == unique_task_id)
def test_stop_unique_task(self):
task_id = 1
@ -436,7 +436,7 @@ class TestCerberusManager(base.TestBase):
assert(self.manager.tg.timers[0]._running is True)
class FaultyTestCerberusManager(base.TestBaseFaulty):
class FaultyTestCerberusManager(base.TestCaseFaulty):
def setUp(self):
super(FaultyTestCerberusManager, self).setUp()


@ -27,7 +27,7 @@ from cerberus.tests.unit import base
EXP_RESOURCE_TYPE = uuid.uuid4().hex
class NotificationsTestCase(base.TestBase):
class NotificationsTestCase(base.TestCase):
def setUp(self):
super(NotificationsTestCase, self).setUp()
fixture = self.useFixture(moxstubout.MoxStubout())


@ -1,42 +0,0 @@
#
# Copyright (c) 2014 EUROGICIEL
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sqlalchemy
from cerberus.db import api as db_api
from cerberus.openstack.common.db import options
get_engine = db_api.get_engine
def setup_dummy_db():
options.cfg.set_defaults(options.database_opts, sqlite_synchronous=False)
options.set_defaults("sqlite://", sqlite_db='heat.db')
engine = get_engine()
db_api.db_sync(engine)
engine.connect()
def reset_dummy_db():
engine = get_engine()
meta = sqlalchemy.MetaData()
meta.reflect(bind=engine)
for table in reversed(meta.sorted_tables):
if table.name == 'migrate_version':
continue
engine.execute(table.delete())


@ -2,6 +2,7 @@
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
pbr>=0.6,!=0.7,<1.0
alembic>=0.7.2
Babel>=1.3
eventlet>=0.15.1
greenlet>=0.3.2


@ -51,6 +51,7 @@ console_scripts =
cerberus-api = cerberus.cmd.api:main
cerberus-agent = cerberus.cmd.agent:main
dbcreate = cerberus.cmd.db_create:main
cerberus-dbsync = cerberus.cmd.dbsync:main
cerberus.plugins =
testplugin = cerberus.plugins.test_plugin:TestPlugin
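For reference, the cerberus-dbsync wrapper that setuptools generates from the console_scripts entry above is roughly equivalent to this sketch:

import sys

from cerberus.cmd.dbsync import main

if __name__ == '__main__':
    sys.exit(main())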