Merge "Attach Manila shares via virtiofs (db)"

Zuul 2024-01-18 21:51:57 +00:00 committed by Gerrit Code Review
commit a8cdffefb6
5 changed files with 458 additions and 3 deletions

@@ -4903,3 +4903,81 @@ def console_auth_token_destroy_expired_by_host(context, host):
filter_by(host=host).\
filter(models.ConsoleAuthToken.expires <= timeutils.utcnow_ts()).\
delete()
####################
@require_context
@pick_context_manager_reader
def share_mapping_get_all(context):
"""Get all share_mapping."""
return context.session.query(models.ShareMapping).all()
@require_context
@pick_context_manager_reader
def share_mapping_get_by_share_id(context, share_id):
"""Get share_mapping records for a specific share."""
return context.session.query(models.ShareMapping).\
filter_by(share_id=share_id).all()
@require_context
@pick_context_manager_reader
def share_mapping_get_by_instance_uuid(context, instance_uuid):
"""Get share_mapping records for a specific instance."""
return context.session.query(models.ShareMapping).\
filter_by(instance_uuid=instance_uuid).all()
@require_context
@pick_context_manager_reader
def share_mapping_get_by_instance_uuid_and_share_id(
context, instance_uuid, share_id):
"""Get share_mapping record for a specific instance and share_id."""
return context.session.query(models.ShareMapping).\
filter_by(instance_uuid=instance_uuid, share_id=share_id).first()
@require_context
@pick_context_manager_writer
def share_mapping_delete_by_instance_uuid_and_share_id(
context, instance_uuid, share_id):
"""Delete share_mapping record for a specific instance and share_id."""
context.session.query(models.ShareMapping).\
filter_by(instance_uuid=instance_uuid, share_id=share_id).delete()
@require_context
@pick_context_manager_writer
def share_mapping_update(
context, uuid, instance_uuid, share_id, status, tag, export_location,
share_proto
):
"""Update share_mapping for a share
Creates new record if needed.
"""
share_mapping = share_mapping_get_by_instance_uuid_and_share_id(
context, instance_uuid, share_id)
if share_mapping:
share_mapping.status = status
share_mapping.tag = tag
share_mapping.export_location = export_location
share_mapping.share_proto = share_proto
share_mapping.save(context.session)
context.session.refresh(share_mapping)
else:
share_mapping = models.ShareMapping()
share_mapping.uuid = uuid
share_mapping.instance_uuid = instance_uuid
share_mapping.share_id = share_id
share_mapping.status = status
share_mapping.tag = tag
share_mapping.export_location = export_location
share_mapping.share_proto = share_proto
share_mapping.save(context.session)
return share_mapping
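
Taken together, these helpers form a small upsert/lookup/delete API around the new share_mapping table. The sketch below is not part of this change; it shows how the helpers are meant to be combined, assuming the nova.db.main.api import path exercised by the tests later in this commit, and all identifiers are placeholder values.

# Sketch, not in the commit: exercising the share_mapping DB API helpers
# with placeholder identifiers (the 'fake-*' values mean nothing).
from nova import context as nova_context
from nova.db.main import api as db

ctxt = nova_context.get_admin_context()

# Upsert: creates the record on the first call, updates it afterwards.
db.share_mapping_update(
    ctxt,
    uuid='fake-uuid1',
    instance_uuid='fake-instance-uuid1',
    share_id='fake-share-id1',
    status='attached',
    tag='fake-tag1',              # tag used to expose the share to the guest
    export_location='fake-export-location1',
    share_proto='NFS',
)

# Lookups by share, by instance, or by the (instance, share) pair.
db.share_mapping_get_by_share_id(ctxt, 'fake-share-id1')
db.share_mapping_get_by_instance_uuid(ctxt, 'fake-instance-uuid1')
db.share_mapping_get_by_instance_uuid_and_share_id(
    ctxt, 'fake-instance-uuid1', 'fake-share-id1')

# Remove the mapping once the share is detached from the instance.
db.share_mapping_delete_by_instance_uuid_and_share_id(
    ctxt, 'fake-instance-uuid1', 'fake-share-id1')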

@@ -0,0 +1,64 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""create_share_mapping_table
Revision ID: 13863f4e1612
Revises: 960aac0e09ea
Create Date: 2022-02-17 18:34:09.050246
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '13863f4e1612'
down_revision = '1acf2c98e646'
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
'share_mapping',
sa.Column('created_at', sa.DateTime),
sa.Column('updated_at', sa.DateTime),
sa.Column(
"id",
sa.BigInteger().with_variant(sa.Integer, "sqlite"),
primary_key=True,
autoincrement=True,
nullable=False,
),
sa.Column('uuid', sa.String(36)),
sa.Column(
'instance_uuid', sa.String(length=36),
sa.ForeignKey(
'instances.uuid',
name='share_mapping_instance_uuid_fkey')),
sa.Column('share_id', sa.String(length=36)),
sa.Column('status', sa.String(length=32)),
sa.Column('tag', sa.String(48)),
sa.Column('export_location', sa.Text),
sa.Column('share_proto', sa.String(32)),
sa.Index('share_idx', 'share_id'),
sa.Index(
'share_mapping_instance_uuid_share_id_idx',
'instance_uuid', 'share_id'),
mysql_engine='InnoDB',
mysql_charset='utf8'
)
def downgrade():
pass
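
For local experimentation (not part of this change), the revision can be applied through Alembic's programmatic API; in a real deployment it is applied by the usual nova-manage db sync run. A minimal sketch, assuming an alembic.ini that points at the Nova main database:

# Sketch, not in the commit: apply the new revision with Alembic.
from alembic import command
from alembic.config import Config

cfg = Config('alembic.ini')              # hypothetical local Alembic config
command.upgrade(cfg, '13863f4e1612')     # creates the share_mapping table
# command.upgrade(cfg, 'head')           # or migrate to the latest revision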

@@ -754,6 +754,40 @@ class BlockDeviceMapping(BASE, NovaBase, models.SoftDeleteMixin):
encryption_options = sa.Column(sa.String(4096))
class ShareMapping(BASE, NovaBase):
"""Represents share / instance mapping."""
__tablename__ = "share_mapping"
__table_args__ = (
sa.Index('share_idx', 'share_id'),
sa.Index('share_mapping_instance_uuid_share_id_idx',
'instance_uuid', 'share_id'),
)
# sqlite> create table my_table(id bigint primary key AUTOINCREMENT,
# name text);
# Parse error: AUTOINCREMENT is only allowed on an INTEGER PRIMARY KEY
# Use BigInteger variant for sqlite to allow unit tests. Other databases
# should support BigInteger and autoincrement.
id = sa.Column(
sa.BigInteger().with_variant(sa.Integer, "sqlite"),
primary_key=True,
autoincrement=True,
nullable=False,
)
uuid = sa.Column(sa.String(36))
instance_uuid = sa.Column(sa.String(36), sa.ForeignKey('instances.uuid'))
instance = orm.relationship(
"Instance",
foreign_keys=instance_uuid,
primaryjoin='and_(ShareMapping.instance_uuid == Instance.uuid,'
'Instance.deleted == 0)'
)
share_id = sa.Column(sa.String(36))
status = sa.Column(sa.String(32))
tag = sa.Column(sa.String(48))
export_location = sa.Column(sa.Text)
share_proto = sa.Column(sa.String(32))
# TODO(stephenfin): Remove once we drop the security_groups field from the
# Instance table. Until then, this is tied to the SecurityGroup table
class SecurityGroupInstanceAssociation(BASE, NovaBase, models.SoftDeleteMixin):
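
The comment above ShareMapping.id deserves a standalone illustration: SQLite honours AUTOINCREMENT only on an INTEGER PRIMARY KEY, so both the model and the migration swap BigInteger for Integer on that backend via with_variant. A minimal sketch of the same pattern (not part of this change), using a throwaway table and an in-memory SQLite engine:

# Sketch, not in the commit: the BigInteger/with_variant pattern used above,
# demonstrated on a throwaway table.
import sqlalchemy as sa

metadata = sa.MetaData()
demo = sa.Table(
    'demo_big_id', metadata,
    sa.Column(
        'id',
        # BigInteger on most backends; plain Integer on SQLite so that
        # autoincrement keeps working there (INTEGER PRIMARY KEY only).
        sa.BigInteger().with_variant(sa.Integer, 'sqlite'),
        primary_key=True,
        autoincrement=True,
        nullable=False,
    ),
    sa.Column('name', sa.Text),
)

engine = sa.create_engine('sqlite://')   # in-memory database for the demo
metadata.create_all(engine)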

@@ -4171,6 +4171,222 @@ class VolumeUsageDBApiTestCase(test.TestCase):
self.assertEqual(vol_usage[key], value, key)
class ShareMappingDBApiTestCase(test.TestCase):
def setUp(self):
super(ShareMappingDBApiTestCase, self).setUp()
self.user_id = 'fake'
self.project_id = 'fake'
self.context = context.RequestContext(self.user_id, self.project_id)
def _compare(self, share_mapping, expected):
for key, value in expected.items():
self.assertEqual(share_mapping[key], value)
@staticmethod
def create_test_data(ctxt):
expected_share_mappings = {
'1': {
'uuid': 'fake-uuid1',
'instance_uuid': 'fake-instance-uuid1',
'share_id': '1',
'status': None,
'tag': 'fake-tag1',
'export_location': 'fake-export_location1',
'share_proto': 'NFS'
},
'2': {
'uuid': 'fake-uuid2',
'instance_uuid': 'fake-instance-uuid2',
'share_id': '2',
'status': 'attached',
'tag': 'fake-tag2',
'export_location': 'fake-export_location2',
'share_proto': 'NFS'
}
}
db.share_mapping_update(
ctxt,
uuid='fake-uuid1',
instance_uuid='fake-instance-uuid1',
share_id='1',
status=None,
tag='fake-tag1',
export_location='fake-export_location1',
share_proto='NFS'
)
db.share_mapping_update(
ctxt,
uuid='fake-uuid2',
instance_uuid='fake-instance-uuid2',
share_id='2',
status='attached',
tag='fake-tag2',
export_location='fake-export_location2',
share_proto='NFS'
)
return expected_share_mappings
def test_share_mapping_update(self):
ctxt = context.get_admin_context()
expected_share_mappings = self.create_test_data(ctxt)
share_mappings = db.share_mapping_get_all(ctxt)
self.assertEqual(len(share_mappings), 2)
for share in share_mappings:
self._compare(
share, expected_share_mappings[share.share_id])
# Making an update
db.share_mapping_update(
ctxt,
uuid='fake-uuid1',
instance_uuid='fake-instance-uuid1',
share_id='1',
status='attached',
tag='fake-tag1',
export_location='fake-export_location1',
share_proto='NFS'
)
db.share_mapping_update(
ctxt,
uuid='fake-uuid2',
instance_uuid='fake-instance-uuid2',
share_id='2',
status='attached',
tag='fake-tag2-updated',
export_location='fake-export_location2',
share_proto='NFS'
)
expected_share_mappings_after_update = {
'1': {
'uuid': 'fake-uuid1',
'instance_uuid': 'fake-instance-uuid1',
'share_id': '1',
'status': 'attached',
'tag': 'fake-tag1',
'export_location': 'fake-export_location1',
'share_proto': 'NFS'
},
'2': {
'uuid': 'fake-uuid2',
'instance_uuid': 'fake-instance-uuid2',
'share_id': '2',
'status': 'attached',
'tag': 'fake-tag2-updated',
'export_location': 'fake-export_location2',
'share_proto': 'NFS'
}
}
share_mappings = db.share_mapping_get_all(ctxt)
self.assertEqual(len(share_mappings), 2)
for share in share_mappings:
self._compare(
share, expected_share_mappings_after_update[share.share_id])
def test_share_mapping_get_by_share_id(self):
ctxt = context.get_admin_context()
expected_share_mappings = self.create_test_data(ctxt)
share_mappings = db.share_mapping_get_by_share_id(ctxt, '1')
self.assertEqual(len(share_mappings), 1)
self._compare(
share_mappings[0],
expected_share_mappings[share_mappings[0].share_id]
)
share_mappings = db.share_mapping_get_by_share_id(ctxt, '2')
self.assertEqual(len(share_mappings), 1)
self._compare(
share_mappings[0],
expected_share_mappings[share_mappings[0].share_id]
)
def test_share_mapping_get_by_instance_uuid(self):
ctxt = context.get_admin_context()
expected_share_mappings = self.create_test_data(ctxt)
share_mappings = (
db.share_mapping_get_by_instance_uuid(ctxt, 'fake-instance-uuid1')
)
self.assertEqual(len(share_mappings), 1)
self._compare(
share_mappings[0],
expected_share_mappings[share_mappings[0].share_id]
)
share_mappings = (
db.share_mapping_get_by_instance_uuid(ctxt, 'fake-instance-uuid2')
)
self.assertEqual(len(share_mappings), 1)
self._compare(
share_mappings[0],
expected_share_mappings[share_mappings[0].share_id]
)
def test_share_mapping_get_by_instance_uuid_and_share_id(self):
ctxt = context.get_admin_context()
expected_share_mappings = self.create_test_data(ctxt)
share_mappings = (
db.share_mapping_get_by_instance_uuid_and_share_id(
ctxt,
'fake-instance-uuid1',
'1')
)
self._compare(
share_mappings,
expected_share_mappings[share_mappings.share_id]
)
share_mappings = (
db.share_mapping_get_by_instance_uuid_and_share_id(
ctxt,
'fake-instance-uuid2',
'2')
)
self._compare(
share_mappings,
expected_share_mappings[share_mappings.share_id]
)
def test_share_mapping_get_by_instance_uuid_and_share_id_missing(self):
ctxt = context.get_admin_context()
share_mappings = (
db.share_mapping_get_by_instance_uuid_and_share_id(
ctxt,
'fake-instance-uuid1',
'3')
)
self.assertIsNone(share_mappings)
def test_share_mapping_delete_by_instance_uuid_and_share_id(self):
ctxt = context.get_admin_context()
expected_share_mappings = self.create_test_data(ctxt)
db.share_mapping_delete_by_instance_uuid_and_share_id(
ctxt,
'fake-instance-uuid1',
'1'
)
share_mappings = db.share_mapping_get_all(ctxt)
self.assertEqual(len(share_mappings), 1)
for share in share_mappings:
self._compare(
share, expected_share_mappings[share.share_id]
)
db.share_mapping_delete_by_instance_uuid_and_share_id(
ctxt,
'fake-instance-uuid2',
'2'
)
share_mappings = db.share_mapping_get_all(ctxt)
self.assertEqual(len(share_mappings), 0)
class TaskLogTestCase(test.TestCase):
def setUp(self):
@@ -5703,7 +5919,7 @@ class ArchiveTestCase(test.TestCase, ModelsObjectComparatorMixin):
if table_name in [
'tags', 'resource_providers', 'allocations',
'inventories', 'resource_provider_aggregates',
- 'console_auth_tokens',
+ 'console_auth_tokens', 'share_mapping',
]:
continue
@@ -5934,7 +6150,7 @@ class ArchiveTestCase(test.TestCase, ModelsObjectComparatorMixin):
""":returns: 0 on success, 1 if no uuid column, 2 if insert failed."""
# NOTE(cdent): migration 314 adds the resource_providers
# table with a uuid column that does not archive, so skip.
- skip_tables = ['resource_providers']
+ skip_tables = ['resource_providers', 'share_mapping']
if tablename in skip_tables:
return 1
main_table = sqlalchemyutils.get_table(self.engine, tablename)
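
An aside on why 'share_mapping' joins these archive skip lists: unlike most Nova tables, the new model does not use SoftDeleteMixin, so it has no deleted/deleted_at columns and its rows are hard-deleted rather than moved to a shadow table, which is presumably why the archive checks leave it out. A quick way to see the difference (not part of this change):

# Sketch, not in the commit: ShareMapping has no soft-delete columns,
# unlike soft-deleted models such as BlockDeviceMapping.
from nova.db.main import models

assert not hasattr(models.ShareMapping, 'deleted')      # hard-delete only
assert hasattr(models.BlockDeviceMapping, 'deleted')    # SoftDeleteMixin model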

@@ -39,6 +39,7 @@ from oslo_log import log as logging
from oslotest import base
import sqlalchemy as sa
import sqlalchemy.exc
from sqlalchemy import Table
from nova.db.main import models
from nova.db import migration
@@ -187,7 +188,7 @@ class NovaMigrationsWalk(
def assertIndexExists(self, connection, table_name, index):
self.assertTrue(
oslodbutils.index_exists(connection, table_name, index),
- 'Index %s on table %s should not exist' % (index, table_name),
+ 'Index %s on table %s should exist' % (index, table_name),
)
def assertIndexNotExists(self, connection, table_name, index):
@@ -202,6 +203,40 @@ class NovaMigrationsWalk(
'Column %s on table %s should exist' % (column, table_name),
)
def assertColumnNotExists(self, connection, table_name, column):
self.assertFalse(
oslodbutils.column_exists(connection, table_name, column),
'Column %s on table %s should not exist' % (column, table_name),
)
def assertForeignKeyExists(self, connection, table_name, column):
self.assertTrue(
oslodbutils.get_foreign_key_constraint_name(
connection, table_name, column),
'Foreign key %s on table %s should exist' % (column, table_name),
)
def assertForeignKeyNotExists(self, connection, table_name, column):
self.assertFalse(
oslodbutils.get_foreign_key_constraint_name(
connection, table_name, column),
'Foreign key %s on table %s should not exist' % (
column, table_name),
)
def assertTableExists(self, connection, table_name):
# Use assertIsInstance instead of assertTrue
# because the Table.exists method that returns a boolean is deprecated.
self.assertIsInstance(
oslodbutils.get_table(connection, table_name), Table,
'Table %s should exist' % (table_name),
)
def assertTableNotExists(self, connection, table_name):
self.assertRaises(sqlalchemy.exc.NoSuchTableError,
oslodbutils.get_table,
connection, table_name)
def _migrate_up(self, connection, revision):
if revision == self.init_version: # no tests for the initial revision
alembic_api.upgrade(self.config, revision)
@@ -301,6 +336,34 @@ class NovaMigrationsWalk(
prefix + 'migrations',
'migrations_dest_compute_id_deleted_idx')
def _pre_upgrade_13863f4e1612(self, connection):
self.assertTableNotExists(connection, 'share_mapping')
def _check_13863f4e1612(self, connection):
fields = ['id',
'instance_uuid',
'share_id',
'status',
'tag',
'export_location',
'share_proto']
self.assertTableExists(connection, 'share_mapping')
for field in fields:
self.assertColumnExists(connection, 'share_mapping', field)
self.assertIndexExists(
connection,
'share_mapping',
'share_idx'
)
self.assertIndexExists(
connection,
'share_mapping',
'share_mapping_instance_uuid_share_id_idx'
)
self.assertForeignKeyExists(
connection, 'share_mapping', 'instance_uuid')
def test_single_base_revision(self):
"""Ensure we only have a single base revision.