Introduces Alembic database migration tool

This PR introduces the Alembic database migration tool. Trove currently
uses sqlalchemy-migrate, which does not work with SQLAlchemy 2.0.

Story: 2010922
Task: 48782
Change-Id: Idd63f470a2b941720314b6356fe28cd8e394427e
This commit is contained in:
Hirotaka Wakabayashi 2024-05-08 12:25:36 +00:00
parent 78088c5717
commit 033798e29a
7 changed files with 1077 additions and 2 deletions

View File

@ -0,0 +1,117 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = %(here)s/migrations
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = sqlite:///trove.db
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@ -13,10 +13,15 @@
# License for the specific language governing permissions and limitations
# under the License.
from pathlib import Path
import sqlalchemy.exc
from trove.common import exception
from trove.db.sqlalchemy import migration
from alembic import command as alembic_command
from alembic import config as alembic_config
from trove.db.sqlalchemy import session
@ -128,12 +133,41 @@ def clean_db():
session.clean_db()
def _configure_alembic(options):
alembic_ini = Path(__file__).joinpath('alembic.ini')
if alembic_ini.exists():
# alembic configuration
config = alembic_config.Config(alembic_ini)
# override the database configuration from the file
config.set_main_option('sqlalchemy.url',
options['database']['connection'])
# override the logger configuration from the file
# https://stackoverflow.com/a/42691781/613428
config.attributes['configure_logger'] = False
return config
# return None if no alembic.ini exists
return None
def db_sync(options, version=None, repo_path=None):
    """Sync the database schema up to the latest revision.

    Uses alembic when an ``alembic.ini`` is found next to this module;
    otherwise falls back to the legacy sqlalchemy-migrate path.

    :param options: trove options mapping (provides the database URL).
    :param version: target version for the legacy migrate path only.
    :param repo_path: migrate repository path for the legacy path only.
    """
    config = _configure_alembic(options)
    if config:
        # Create the alembic version table if it does not exist already
        alembic_command.ensure_version(config, False)
        # Upgrade to a later version using alembic
        alembic_command.upgrade(config, 'head')
    else:
        # No alembic.ini: legacy sqlalchemy-migrate deployment.
        # (Must only run here -- calling it unconditionally would run
        # the legacy migrations even when alembic manages the schema.)
        migration.db_sync(options, version, repo_path)
def db_upgrade(options, version=None, repo_path=None):
    """Upgrade the database schema to the latest revision.

    Uses alembic when an ``alembic.ini`` is found next to this module;
    otherwise falls back to the legacy sqlalchemy-migrate path.

    :param options: trove options mapping (provides the database URL).
    :param version: target version for the legacy migrate path only.
    :param repo_path: migrate repository path for the legacy path only.
    """
    config = _configure_alembic(options)
    if config:
        # Upgrade to a later version using alembic
        alembic_command.upgrade(config, 'head')
    else:
        # No alembic.ini: legacy sqlalchemy-migrate deployment.
        # (Must only run here -- calling it unconditionally would run
        # the legacy migrations even when alembic manages the schema.)
        migration.upgrade(options, version, repo_path)
def db_reset(options, *plugins):

View File

@ -0,0 +1 @@
Generic single-database configuration.

View File

@ -0,0 +1,101 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically unless we're told not to.
# (trove's db helper sets attributes['configure_logger'] = False so
# alembic does not clobber the application's logging configuration)
if config.attributes.get('configure_logger', True):
    fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# NOTE: autogenerate is not used here; migrations are hand-written.
target_metadata = None
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the context with just a database URL rather than an
    Engine, so no DBAPI needs to be importable. Calls to
    context.execute() emit the given string to the script output.
    """
    offline_opts = {
        "url": config.get_main_option("sqlalchemy.url"),
        "target_metadata": target_metadata,
        "literal_binds": True,
        "dialect_opts": {"paramstyle": "named"},
    }
    context.configure(**offline_opts)

    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Creates an Engine and associates a live connection with the
    context. Modified from the alembic default so that a Connection
    can be injected from the outside (config.attributes['connection']),
    allowing an engine to be shared when unit testing so in-memory
    database testing actually works.
    https://alembic.sqlalchemy.org/en/latest/cookbook.html#connection-sharing
    """
    engine_or_conn = config.attributes.get('connection', None)

    # Only build an Engine when no Connection was supplied externally.
    if engine_or_conn is None:
        engine_or_conn = engine_from_config(
            config.get_section(config.config_ini_section),
            prefix="sqlalchemy.",
            poolclass=pool.NullPool,
        )

    # If engine_or_conn is already a Connection, connect() yields a
    # *branched connection*, so this works for both cases.
    with engine_or_conn.connect() as connection:
        context.configure(connection=connection,
                          target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
# Entry point: alembic imports env.py and this top-level dispatch picks
# the offline (SQL script emission) or online (live connection) path.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@ -0,0 +1,38 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View File

@ -0,0 +1,86 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add Datastore Version Registry Extension
Revision ID: 5c68b4fb3cd1
Revises: 906cffda7b29
Create Date: 2024-04-30 13:59:10.690895
"""
from typing import Sequence, Union
from alembic import op
from sqlalchemy.sql import table, column
from sqlalchemy import text
from sqlalchemy import String, Text
from trove.common.constants import REGISTRY_EXT_DEFAULTS
# revision identifiers, used by Alembic.
revision: str = '5c68b4fb3cd1'
down_revision: Union[str, None] = '906cffda7b29'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
repl_namespaces = {
"mariadb": "trove.guestagent.strategies.replication.mariadb_gtid",
"mongodb":
"trove.guestagent.strategies.replication.experimental.mongo_impl",
"mysql": "trove.guestagent.strategies.replication.mysql_gtid",
"percona": "trove.guestagent.strategies.replication.mysql_gtid",
"postgresql": "trove.guestagent.strategies.replication.postgresql",
"pxc": "trove.guestagent.strategies.replication.mysql_gtid",
"redis": "trove.guestagent.strategies.replication.experimental.redis_sync",
}
repl_strategies = {
"mariadb": "MariaDBGTIDReplication",
"mongodb": "Replication",
"mysql": "MysqlGTIDReplication",
"percona": "MysqlGTIDReplication",
"postgresql": "PostgresqlReplicationStreaming",
"pxc": "MysqlGTIDReplication",
"redis": "RedisSyncReplication",
}
def upgrade() -> None:
    """Backfill registry_ext / repl_strategy for existing rows.

    For every row in datastore_versions, derives the registry extension
    and replication strategy from the row's ``manager``, writes them
    back, then makes both columns NOT NULL.
    """
    # 1. select id and manager from datastore_versions table
    connection = op.get_bind()
    # Lightweight table construct for the UPDATE statements; must
    # declare every column referenced below ("id" in the WHERE clause
    # and the two columns being written). Built once, outside the loop.
    ds_versions_table = table(
        "datastore_versions",
        column("id", String),
        column("registry_ext", Text),
        column("repl_strategy", Text),
    )
    for dsv_id, dsv_manager in connection.execute(
            text("select id, manager from datastore_versions")):
        registry_ext = REGISTRY_EXT_DEFAULTS.get(dsv_manager, '')
        repl_strategy = "%(repl_namespace)s.%(repl_strategy)s" % {
            'repl_namespace': repl_namespaces.get(dsv_manager, ''),
            'repl_strategy': repl_strategies.get(dsv_manager, '')
        }
        op.execute(
            ds_versions_table.update()
            .where(ds_versions_table.c.id == dsv_id)
            .values({"registry_ext": registry_ext,
                     "repl_strategy": repl_strategy})
        )
    # 2. now that every row has values, tighten the columns to NOT NULL
    op.alter_column("datastore_versions", "registry_ext", nullable=False,
                    existing_type=Text)
    op.alter_column("datastore_versions", "repl_strategy", nullable=False,
                    existing_type=Text)
def downgrade() -> None:
    """No-op: the registry_ext/repl_strategy backfill is not reverted."""
    pass

View File

@ -0,0 +1,698 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""init trove db
Revision ID: 906cffda7b29
Revises:
Create Date: 2024-04-30 13:58:12.700444
"""
from typing import Sequence, Union
from alembic import op
from sqlalchemy import Boolean, Column, DateTime, Float, ForeignKey, Integer, \
String, Text, UniqueConstraint, CheckConstraint
from sqlalchemy.sql import table, column
from trove.common import cfg
CONF = cfg.CONF
# revision identifiers, used by Alembic.
revision: str = '906cffda7b29'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade():
    """Create the full trove baseline schema in a single revision.

    Collapses the legacy sqlalchemy-migrate scripts 001 through 048 into
    one alembic migration. The bare string literals below mark which
    legacy script each section came from; foreign keys between tables
    are added at the end, after all tables exist.
    """
    bind = op.get_bind()
    """ merges the schemas up to 048 are treated asis.
    """
    """001_base_schema.py
    """
    op.create_table(
        'instances',
        Column('id', String(36), primary_key=True, nullable=False),
        Column('created', DateTime()),
        Column('updated', DateTime()),
        Column('name', String(255)),
        Column('hostname', String(255)),
        Column('compute_instance_id', String(36)),
        Column('task_id', Integer()),
        Column('task_description', String(255)),
        Column('task_start_time', DateTime()),
        Column('volume_id', String(36)),
        Column('flavor_id', String(255)),
        Column('volume_size', Integer()),
        Column('tenant_id', String(36)),
        Column('server_status', String(64)),
        Column('deleted', Boolean()),
        Column('deleted_at', DateTime()),
        Column('datastore_version_id', String(36)),
        Column('configuration_id', String(36)),
        Column('slave_of_id', String(36)),
        Column('cluster_id', String(36)),
        Column('shard_id', String(36)),
        Column('type', String(64)),
        Column('region_id', String(255)),
        Column('encrypted_key', String(255)),
        Column('access', Text()),
    )
    op.create_index('datastore_version_id', 'instances',
                    ['datastore_version_id'])
    op.create_index('configuration_id', 'instances', ['configuration_id'])
    op.create_index('instances_tenant_id', 'instances', ['tenant_id'])
    op.create_index('instances_deleted', 'instances', ['deleted'])
    op.create_index('slave_of_id', 'instances', ['slave_of_id'])
    op.create_index('instances_cluster_id', 'instances', ['cluster_id'])
    """002_service_images.py
    """
    op.create_table(
        'service_images',
        Column('id', String(36), primary_key=True, nullable=False),
        Column('service_name', String(255)),
        Column('image_id', String(255))
    )
    """003_service_statuses.py
    """
    op.create_table(
        'service_statuses',
        Column('id', String(36), primary_key=True, nullable=False),
        Column('instance_id', String(36), nullable=False),
        Column('status_id', Integer(), nullable=False),
        Column('status_description', String(64), nullable=False),
        Column('updated_at', DateTime()),
    )
    op.create_index('service_statuses_instance_id', 'service_statuses',
                    ['instance_id'])
    """004_root_enabled.py
    """
    op.create_table(
        'root_enabled_history',
        Column('id', String(36), primary_key=True, nullable=False),
        Column('user', String(length=255)),
        Column('created', DateTime()),
    )
    """005_heartbeat.py
    """
    op.create_table(
        'agent_heartbeats',
        Column('id', String(36), primary_key=True, nullable=False),
        Column('instance_id', String(36), nullable=False, index=True,
               unique=True),
        Column('guest_agent_version', String(255), index=True),
        Column('deleted', Boolean(), index=True),
        CheckConstraint("deleted in (0,1)", name="CONSTRAINT_1"),
        Column('deleted_at', DateTime()),
        Column('updated_at', DateTime(), nullable=False)
    )
    """006_dns_records.py
    """
    op.create_table(
        'dns_records',
        Column('name', String(length=255), primary_key=True),
        Column('record_id', String(length=64))
    )
    """007_add_volume_flavor.py
    """
    # added the columns to instances table
    """008_add_instance_fields.py
    """
    # added the columns to instances table
    """009_add_deleted_flag_to_instances.py
    """
    # added the columns to instances table
    """010_add_usage.py
    """
    op.create_table(
        'usage_events',
        Column('id', String(36), primary_key=True, nullable=False),
        Column('instance_name', String(36)),
        Column('tenant_id', String(36)),
        Column('nova_instance_id', String(36)),
        Column('instance_size', Integer()),
        Column('nova_volume_id', String(36)),
        Column('volume_size', Integer()),
        Column('end_time', DateTime()),
        Column('updated', DateTime()),
    )
    """011_quota.py
    """
    op.create_table(
        'quotas',
        Column('id', String(36),
               primary_key=True, nullable=False),
        Column('created', DateTime()),
        Column('updated', DateTime()),
        Column('tenant_id', String(36)),
        Column('resource', String(length=255), nullable=False),
        Column('hard_limit', Integer()),
        UniqueConstraint('tenant_id', 'resource')
    )
    op.create_table(
        'quota_usages',
        Column('id', String(36),
               primary_key=True, nullable=False),
        Column('created', DateTime()),
        Column('updated', DateTime()),
        Column('tenant_id', String(36)),
        Column('in_use', Integer(), default=0),
        Column('reserved', Integer(), default=0),
        Column('resource', String(length=255), nullable=False),
        UniqueConstraint('tenant_id', 'resource')
    )
    op.create_table(
        'reservations',
        Column('created', DateTime()),
        Column('updated', DateTime()),
        Column('id', String(36),
               primary_key=True, nullable=False),
        Column('usage_id', String(36)),
        Column('delta', Integer(), nullable=False),
        Column('status', String(length=36))
    )
    """012_backup.py
    """
    op.create_table(
        'backups',
        Column('id', String(36), primary_key=True, nullable=False),
        Column('name', String(255), nullable=False),
        Column('description', String(512)),
        Column('location', String(1024)),
        Column('backup_type', String(32)),
        Column('size', Float()),
        Column('tenant_id', String(36)),
        Column('state', String(32), nullable=False),
        Column('instance_id', String(36)),
        Column('checksum', String(32)),
        Column('backup_timestamp', DateTime()),
        Column('deleted', Boolean(), default=0),
        CheckConstraint("deleted in (0,1)", name="CONSTRAINT_1"),
        Column('created', DateTime()),
        Column('updated', DateTime()),
        Column('deleted_at', DateTime()),
        # 022_add_backup_parent_id.py
        Column('parent_id', String(36)),
        # 029_add_backup_datastore.py
        Column('datastore_version_id', String(36)),
    )
    op.create_index('backups_instance_id', 'backups', ['instance_id'])
    op.create_index('backups_deleted', 'backups', ['deleted'])
    op.create_index('datastore_version_id', 'backups',
                    ['datastore_version_id'])
    """013_add_security_group_artifacts.py
    """
    op.create_table(
        'security_groups',
        Column('id', String(length=36), primary_key=True, nullable=False),
        Column('name', String(length=255)),
        Column('description', String(length=255)),
        Column('user', String(length=255)),
        Column('tenant_id', String(length=255)),
        Column('created', DateTime()),
        Column('updated', DateTime()),
        Column('deleted', Boolean(), default=0),
        CheckConstraint("deleted in (0,1)", name="CONSTRAINT_1"),
        Column('deleted_at', DateTime()),
    )
    op.create_table(
        'security_group_instance_associations',
        Column('id', String(length=36), primary_key=True, nullable=False),
        Column('security_group_id', String(length=36),
               ForeignKey('security_groups.id', ondelete='CASCADE',
                          onupdate='CASCADE')),
        Column('instance_id', String(length=36),
               ForeignKey('instances.id', ondelete='CASCADE',
                          onupdate='CASCADE')),
        Column('created', DateTime()),
        Column('updated', DateTime()),
        Column('deleted', Boolean(), default=0),
        CheckConstraint("deleted in (0,1)", name="CONSTRAINT_1"),
        Column('deleted_at', DateTime())
    )
    op.create_table(
        'security_group_rules',
        Column('id', String(length=36), primary_key=True, nullable=False),
        Column('group_id', String(length=36),
               ForeignKey('security_groups.id', ondelete='CASCADE',
                          onupdate='CASCADE')),
        Column('parent_group_id', String(length=36),
               ForeignKey('security_groups.id', ondelete='CASCADE',
                          onupdate='CASCADE')),
        Column('protocol', String(length=255)),
        Column('from_port', Integer()),
        Column('to_port', Integer()),
        Column('cidr', String(length=255)),
        Column('created', DateTime()),
        Column('updated', DateTime()),
        Column('deleted', Boolean(), default=0),
        CheckConstraint("deleted in (0,1)", name="CONSTRAINT_1"),
        Column('deleted_at', DateTime()),
    )
    """014_update_instance_flavor_id.py
    """
    # updated the instances table schema
    """015_add_service_type.py
    """
    # updated the instances table schema
    # NOTE(hiwkby)
    # service_type was deleted in 016_add_datastore_type.py
    """016_add_datastore_type.py
    """
    op.create_table(
        'datastores',
        Column('id', String(36), primary_key=True, nullable=False),
        Column('name', String(255), unique=True),
        # NOTE(hiwkby) manager was dropped in 017_update_datastores.py
        # Column('manager', String(255), nullable=False),
        Column('default_version_id', String(36)),
    )
    op.create_table(
        'datastore_versions',
        Column('id', String(36), primary_key=True, nullable=False),
        Column('datastore_id', String(36), ForeignKey('datastores.id')),
        # NOTE(hiwkby)
        # unique=false in 018_datastore_versions_fix.py
        Column('name', String(255), unique=False),
        Column('image_id', String(36)),
        Column('packages', String(511)),
        Column('active', Boolean(), nullable=False),
        CheckConstraint("active in (0,1)", name="CONSTRAINT_1"),
        # manager added in 017_update_datastores.py
        Column('manager', String(255)),
        # image_tags added in 047_image_tag_in_datastore_version.py
        Column('image_tags', String(255)),
        # version added in 048_add_version_to_datastore_version.py
        Column('version', String(255)),
        # registry_ext and repl_strategy added in
        # 049_add_registry_ext_to_datastore_version.py
        Column('registry_ext', Text()),
        Column('repl_strategy', Text()),
        UniqueConstraint('datastore_id', 'name', 'version',
                         name='ds_versions')
    )
    """017_update_datastores.py
    """
    # updated the datastores and datastore_versions table schema.
    """018_datastore_versions_fix.py
    """
    # updated the datastore_versions table schema
    """019_datastore_fix.py
    """
    # updated the datastore_versions table schema
    """020_configurations.py
    """
    op.create_table(
        'configurations',
        Column('id', String(36), primary_key=True, nullable=False),
        Column('name', String(64), nullable=False),
        Column('description', String(256)),
        Column('tenant_id', String(36), nullable=False),
        Column('datastore_version_id', String(36), nullable=False),
        Column('deleted', Boolean(), nullable=False, default=False),
        CheckConstraint("deleted in (0,1)", name="CONSTRAINT_1"),
        Column('deleted_at', DateTime()),
        # NOTE(hiwkby)
        # created added in 031_add_timestamps_to_configurations.py
        Column('created', DateTime()),
        Column('updated', DateTime()),
    )
    op.create_table(
        'configuration_parameters',
        Column('configuration_id', String(36), ForeignKey('configurations.id'),
               nullable=False, primary_key=True),
        Column('configuration_key', String(128),
               nullable=False, primary_key=True),
        Column('configuration_value', String(128)),
        Column('deleted', Boolean(), nullable=False, default=False),
        CheckConstraint("deleted in (0,1)", name="CONSTRAINT_1"),
        Column('deleted_at', DateTime()),
    )
    """021_conductor_last_seen.py
    """
    op.create_table(
        'conductor_lastseen',
        Column('instance_id', String(36), primary_key=True, nullable=False),
        Column('method_name', String(36), primary_key=True, nullable=False),
        Column('sent', Float(precision=32))
    )
    """022_add_backup_parent_id.py
    """
    # updated the backups table schema
    """023_add_instance_indexes.py
    """
    # updated the instances table schema
    """024_add_backup_indexes.py
    """
    # updated the backups table schema
    """025_add_service_statuses_indexes.py
    """
    # updated the service_statuses table schema
    """026_datastore_versions_unique_fix.py
    """
    # updated the datastore_versions table schema
    """027_add_datastore_capabilities.py
    """
    op.create_table(
        'capabilities',
        Column('id', String(36), primary_key=True, nullable=False),
        Column('name', String(255), unique=True),
        Column('description', String(255), nullable=False),
        Column('enabled', Boolean()),
        CheckConstraint("enabled in (0,1)", name="CONSTRAINT_1"),
    )
    op.create_table(
        'capability_overrides',
        Column('id', String(36), primary_key=True, nullable=False),
        Column('datastore_version_id', String(36),
               ForeignKey('datastore_versions.id')),
        Column('capability_id', String(36), ForeignKey('capabilities.id')),
        Column('enabled', Boolean()),
        CheckConstraint("enabled in (0,1)", name="CONSTRAINT_1"),
        UniqueConstraint('datastore_version_id', 'capability_id',
                         name='idx_datastore_capabilities_enabled')
    )
    """028_recreate_agent_heartbeat.py
    """
    # updated the datastore_versions table schema
    """029_add_backup_datastore.py
    """
    # batch_alter_table is used so the FK also works on SQLite.
    with op.batch_alter_table('backups') as batch_op:
        batch_op.create_foreign_key(
            'backups_ibfk_1',
            'datastore_versions',
            ['datastore_version_id'],
            ['id']
        )
    """030_add_master_slave.py
    """
    # updated the instances table schema
    """031_add_timestamps_to_configurations.py
    """
    # updated the configurations table schema
    """032_clusters.py
    """
    op.create_table(
        'clusters',
        Column('id', String(36), primary_key=True, nullable=False),
        Column('created', DateTime(), nullable=False),
        Column('updated', DateTime(), nullable=False),
        Column('name', String(255), nullable=False),
        Column('task_id', Integer(), nullable=False),
        Column('tenant_id', String(36), nullable=False),
        Column('datastore_version_id', String(36), nullable=False),
        Column('deleted', Boolean()),
        CheckConstraint("deleted in (0,1)", name="CONSTRAINT_1"),
        Column('deleted_at', DateTime()),
        Column('configuration_id', String(36)),
    )
    with op.batch_alter_table('clusters') as batch_op:
        batch_op.create_foreign_key(
            'clusters_ibfk_1',
            'datastore_versions',
            ['datastore_version_id'],
            ['id']
        )
    with op.batch_alter_table('clusters') as batch_op:
        batch_op.create_foreign_key(
            'clusters_ibfk_2',
            'configurations',
            ['configuration_id'],
            ['id']
        )
    op.create_index('datastore_version_id',
                    'clusters', ['datastore_version_id'])
    op.create_index('clusters_deleted', 'clusters', ['deleted'])
    op.create_index('clusters_tenant_id', 'clusters', ['tenant_id'])
    op.create_index('configuration_id', 'clusters', ['configuration_id'])
    """033_datastore_parameters.py
    """
    op.create_table(
        'datastore_configuration_parameters',
        Column('id', String(36), primary_key=True, nullable=False),
        Column('name', String(128), primary_key=True, nullable=False),
        Column('datastore_version_id', String(36),
               ForeignKey("datastore_versions.id"),
               primary_key=True, nullable=False),
        Column('restart_required', Boolean(), nullable=False, default=False),
        CheckConstraint("restart_required in (0,1)", name="CONSTRAINT_1"),
        Column('max_size', String(40)),
        Column('min_size', String(40)),
        Column('data_type', String(128), nullable=False),
        UniqueConstraint(
            'datastore_version_id', 'name',
            name='UQ_datastore_configuration_parameters_datastore_version_id_name')  # noqa
    )
    """034_change_task_description.py
    """
    # updated the configurations table schema
    """035_flavor_id_int_to_string.py
    """
    # updated the configurations table schema
    """036_add_datastore_version_metadata.py
    """
    op.create_table(
        'datastore_version_metadata',
        Column('id', String(36), primary_key=True, nullable=False),
        Column(
            'datastore_version_id',
            String(36),
            ForeignKey('datastore_versions.id', ondelete='CASCADE'),
        ),
        Column('key', String(128), nullable=False),
        Column('value', String(128)),
        Column('created', DateTime(), nullable=False),
        Column('deleted', Boolean(), nullable=False, default=False),
        CheckConstraint("deleted in (0,1)", name="CONSTRAINT_1"),
        Column('deleted_at', DateTime()),
        Column('updated_at', DateTime()),
        UniqueConstraint(
            'datastore_version_id', 'key', 'value',
            name='UQ_datastore_version_metadata_datastore_version_id_key_value')  # noqa
    )
    """037_modules.py
    """
    # SQLite cannot enforce these NOT NULL columns with defaults here,
    # so they are made nullable on that backend only.
    is_nullable = True if bind.engine.name == "sqlite" else False
    op.create_table(
        'modules',
        Column('id', String(length=64), primary_key=True, nullable=False),
        Column('name', String(length=255), nullable=False),
        Column('type', String(length=255), nullable=False),
        Column('contents', Text(length=65535), nullable=False),
        Column('description', String(length=255)),
        Column('tenant_id', String(length=64)),
        Column('datastore_id', String(length=64)),
        Column('datastore_version_id', String(length=64)),
        Column('auto_apply', Boolean(), default=0, nullable=False),
        CheckConstraint("auto_apply in (0,1)", name="CONSTRAINT_1"),
        Column('visible', Boolean(), default=1, nullable=False),
        CheckConstraint("visible in (0,1)", name="CONSTRAINT_2"),
        Column('live_update', Boolean(), default=0, nullable=False),
        CheckConstraint("live_update in (0,1)", name="CONSTRAINT_3"),
        Column('md5', String(length=32), nullable=False),
        Column('created', DateTime(), nullable=False),
        Column('updated', DateTime(), nullable=False),
        Column('deleted', Boolean(), default=0, nullable=False),
        CheckConstraint("deleted in (0,1)", name="CONSTRAINT_4"),
        Column('deleted_at', DateTime()),
        UniqueConstraint(
            'type', 'tenant_id', 'datastore_id', 'datastore_version_id',
            'name', 'deleted_at',
            name='UQ_type_tenant_datastore_datastore_version_name'),
        # NOTE(hiwkby)
        # the following columns added in 040_module_priority.py
        Column('priority_apply', Boolean(), nullable=is_nullable, default=0),
        Column('apply_order', Integer(), nullable=is_nullable, default=5),
        Column('is_admin', Boolean(), nullable=is_nullable, default=0),
    )
    op.create_table(
        'instance_modules',
        Column('id', String(length=64), primary_key=True, nullable=False),
        Column('instance_id', String(length=64),
               ForeignKey('instances.id', ondelete="CASCADE",
                          onupdate="CASCADE"), nullable=False),
        Column('module_id', String(length=64),
               ForeignKey('modules.id', ondelete="CASCADE",
                          onupdate="CASCADE"), nullable=False),
        Column('md5', String(length=32), nullable=False),
        Column('created', DateTime(), nullable=False),
        Column('updated', DateTime(), nullable=False),
        Column('deleted', Boolean(), default=0, nullable=False),
        CheckConstraint("deleted in (0,1)", name="CONSTRAINT_1"),
        Column('deleted_at', DateTime()),
    )
    """038_instance_faults.py
    """
    op.create_table(
        'instance_faults',
        Column('id', String(length=64), primary_key=True, nullable=False),
        Column('instance_id', String(length=64),
               ForeignKey('instances.id', ondelete="CASCADE",
                          onupdate="CASCADE"), nullable=False),
        Column('message', String(length=255), nullable=False),
        Column('details', Text(length=65535), nullable=False),
        Column('created', DateTime(), nullable=False),
        Column('updated', DateTime(), nullable=False),
        Column('deleted', Boolean(), default=0, nullable=False),
        CheckConstraint("deleted in (0,1)", name="CONSTRAINT_1"),
        Column('deleted_at', DateTime()),
    )
    """039_region.py
    """
    # Backfill region_id on all instance rows from the configured region.
    instances = table("instances", column("region_id", String))
    op.execute(
        instances.update()
        .values({"region_id": CONF.service_credentials.region_name})
    )
    """040_module_priority.py
    """
    # updated the modules table schema
    """041_instance_keys.py
    """
    # updated the instances table schema
    """042_add_cluster_configuration_id.py
    """
    # updated the clusters table schema
    """043_instance_ds_version_nullable.py
    """
    # updated the instances table schema
    """044_remove_datastore_configuration_parameters_deleted.py
    """
    # updated the datastore_configuration_parameters table schema
    """045_add_backup_strategy.py
    """
    op.create_table(
        'backup_strategy',
        Column('id', String(36), primary_key=True, nullable=False),
        Column('tenant_id', String(36), nullable=False),
        Column('instance_id', String(36), nullable=False, default=''),
        Column('backend', String(255), nullable=False),
        Column('swift_container', String(255)),
        Column('created', DateTime()),
        UniqueConstraint(
            'tenant_id', 'instance_id',
            name='UQ_backup_strategy_tenant_id_instance_id'),
    )
    op.create_index('backup_strategy_tenant_id_instance_id',
                    'backup_strategy', ['tenant_id', 'instance_id'])
    """046_add_access_to_instance.py
    """
    # updated the instances table schema
    """047_image_tag_in_datastore_version.py
    """
    # updated the datastore_versions table schema
    """048_add_version_to_datastore_version.py
    """
    # updated the datastore_versions table schema
    # Adds foreign keys after creating tables
    # datastore_version_id = Column('datastore_version_id', String(36))
    # op.add_column('instances', datastore_version_id)
    with op.batch_alter_table('instances') as batch_op:
        batch_op.create_foreign_key(
            'instances_ibfk_1',
            'datastore_versions',
            ['datastore_version_id'],
            ['id']
        )
    # configuration_id = Column('configuration_id', String(36))
    # op.add_column('instances', configuration_id)
    with op.batch_alter_table('instances') as batch_op:
        batch_op.create_foreign_key(
            'instances_ibfk_2',
            'configurations',
            ['configuration_id'],
            ['id']
        )
    # op.add_column('instances', Column('slave_of_id', String(36)))
    with op.batch_alter_table('instances') as batch_op:
        batch_op.create_foreign_key(
            'instances_ibfk_3',
            'instances',
            ['slave_of_id'],
            ['id']
        )
    # op.add_column('instances', Column('cluster_id', String(36)))
    with op.batch_alter_table('instances') as batch_op:
        batch_op.create_foreign_key(
            'instances_ibfk_4',
            'clusters',
            ['cluster_id'],
            ['id']
        )
def downgrade() -> None:
    """No-op: the baseline schema revision cannot be downgraded."""
    pass