Fix ShareSnapshotInstance DB table

Remove the redundant "share_id" column and make it a proxy to
"share_instance_id", as it was prior to the merge of [1].
The column is redundant because it is used in only one place in manila,
and even there without real need.

[1] I81ba26437554d02a79e4b536d781617f46ce2f07

Change-Id: I4ee14cf89b8882941974741da311f4556e604116
Closes-Bug: #1697581
Valeriy Ponomaryov 2017-06-15 17:51:50 +03:00
parent d72396785f
commit 160a9a113e
10 changed files with 46 additions and 41 deletions
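
To make the commit message concrete, here is a minimal sketch of the model
change in plain SQLAlchemy declarative syntax. The class and table names are
placeholders, not manila's actual definitions (those appear in the models.py
hunk further down): the stored 'share_id' column is dropped and a read-only
property of the same name proxies to 'share_instance_id'.

    import sqlalchemy as sa
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()


    class ShareSnapshotInstanceSketch(Base):
        # Simplified stand-in; only the pieces relevant to this change.
        __tablename__ = 'share_snapshot_instances_sketch'

        id = sa.Column(sa.String(36), primary_key=True)
        share_instance_id = sa.Column(sa.String(36), nullable=False)
        # No 'share_id' column any more; callers that still read it
        # get the proxy below.

        @property
        def share_id(self):
            return self.share_instance_id

Note the proxy returns the share instance id under the 'share_id' name;
per the commit message, that restores the pre-[1] behaviour that share
drivers relied on.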

View File

@@ -40,7 +40,7 @@ class ShareGroupSnapshotViewBuilder(common.ViewBuilder):
'project_id': member.get('project_id'),
'share_group_snapshot_id': member.get(
'share_group_snapshot_id'),
'share_id': member.get('share_id'),
'share_id': member.get('share_instance', {}).get('share_id'),
# TODO(vponomaryov): add 'provider_location' key in Pike.
}
members_list.append(member_dict)
@@ -89,7 +89,7 @@ class ShareGroupSnapshotViewBuilder(common.ViewBuilder):
member_dict = {
'id': member.get('id'),
'size': member.get('size'),
'share_id': member.get('share_id'),
'share_id': member.get('share_instance', {}).get('share_id'),
}
members_list.append(member_dict)
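
Since a member no longer carries its own 'share_id', the view builder above
reads it through the nested share instance; the chained .get() calls simply
fall back to None when no 'share_instance' is attached. A tiny illustration
with a hypothetical member dict (the real members are dict-like DB objects
that also expose .get()):

    member = {'id': 'fake-member-id', 'size': 1}
    print(member.get('share_instance', {}).get('share_id'))
    # None -> no share instance loaded

    member['share_instance'] = {'share_id': 'fake-share-id'}
    print(member.get('share_instance', {}).get('share_id'))
    # 'fake-share-id'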

View File

@@ -37,7 +37,6 @@ def upgrade():
op.add_column(SSI_TABLE_NAME, sa.Column('project_id', sa.String(255)))
op.add_column(SSI_TABLE_NAME, sa.Column('size', sa.Integer))
op.add_column(SSI_TABLE_NAME, sa.Column('share_proto', sa.String(255)))
op.add_column(SSI_TABLE_NAME, sa.Column('share_id', sa.String(36)))
op.add_column(
SSI_TABLE_NAME, sa.Column('share_group_snapshot_id', sa.String(36)))
@@ -53,7 +52,6 @@ def upgrade():
ported_data.append({
"id": ssgm_record.id,
"share_group_snapshot_id": ssgm_record.share_group_snapshot_id,
"share_id": ssgm_record.share_id,
"share_instance_id": ssgm_record.share_instance_id,
"size": ssgm_record.size,
"status": ssgm_record.status,
@@ -108,25 +106,33 @@ def downgrade():
# have not null 'share_snapshot_group_id' to new table
connection = op.get_bind()
ssi_table = utils.load_table(SSI_TABLE_NAME, connection)
share_instances_table = utils.load_table("share_instances", connection)
ssgm_table = utils.load_table(SGSM_TABLE_NAME, connection)
ported_data = []
for ssi_record in connection.execute(ssi_table.select().where(
ssi_table.c.share_group_snapshot_id.isnot(None))):
for row in connection.execute(
ssi_table.join(
share_instances_table,
share_instances_table.c.id == ssi_table.c.share_instance_id
).select(use_labels=True).where(
ssi_table.c.share_group_snapshot_id.isnot(None),
)):
ported_data.append({
"id": ssi_record.id,
"share_group_snapshot_id": ssi_record.share_group_snapshot_id,
"share_id": ssi_record.share_id,
"share_instance_id": ssi_record.share_instance_id,
"size": ssi_record.size,
"status": ssi_record.status,
"share_proto": ssi_record.share_proto,
"user_id": ssi_record.user_id,
"project_id": ssi_record.project_id,
"provider_location": ssi_record.provider_location,
"created_at": ssi_record.created_at,
"updated_at": ssi_record.updated_at,
"deleted_at": ssi_record.deleted_at,
"deleted": ssi_record.deleted or "False",
"id": row.share_snapshot_instances_id,
"share_group_snapshot_id": (
row.share_snapshot_instances_share_group_snapshot_id),
"share_id": row.share_instances_share_id,
"share_instance_id": row.share_instances_id,
"size": row.share_snapshot_instances_size,
"status": row.share_snapshot_instances_status,
"share_proto": row.share_snapshot_instances_share_proto,
"user_id": row.share_snapshot_instances_user_id,
"project_id": row.share_snapshot_instances_project_id,
"provider_location": (
row.share_snapshot_instances_provider_location),
"created_at": row.share_snapshot_instances_created_at,
"updated_at": row.share_snapshot_instances_updated_at,
"deleted_at": row.share_snapshot_instances_deleted_at,
"deleted": row.share_snapshot_instances_deleted or "False",
})
# Copy share group snapshot members to new table
@@ -139,7 +145,7 @@ def downgrade():
# Remove redundant fields from 'share_snapshot_instance' table
for column_name in ('user_id', 'project_id', 'size', 'share_proto',
'share_id', 'share_group_snapshot_id'):
'share_group_snapshot_id'):
op.drop_column(SSI_TABLE_NAME, column_name)
# Add back FK for 'snapshot_id' field
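
The downgrade now has to recover 'share_id' by joining back to
'share_instances'. Because the select is built with use_labels=True, every
result column is keyed as <table>_<column>, which is why the loop reads
attributes such as row.share_snapshot_instances_id and
row.share_instances_share_id. A self-contained sketch of that labelling with
toy tables (SQLAlchemy 1.x style, as used by this migration; not manila code):

    import sqlalchemy as sa

    engine = sa.create_engine('sqlite://')
    metadata = sa.MetaData()

    snapshots = sa.Table(
        'share_snapshot_instances', metadata,
        sa.Column('id', sa.String(36), primary_key=True),
        sa.Column('share_instance_id', sa.String(36)),
    )
    instances = sa.Table(
        'share_instances', metadata,
        sa.Column('id', sa.String(36), primary_key=True),
        sa.Column('share_id', sa.String(36)),
    )
    metadata.create_all(engine)

    with engine.connect() as conn:
        conn.execute(instances.insert(), {'id': 'si-1', 'share_id': 's-1'})
        conn.execute(snapshots.insert(),
                     {'id': 'ssi-1', 'share_instance_id': 'si-1'})

        stmt = snapshots.join(
            instances, instances.c.id == snapshots.c.share_instance_id,
        ).select(use_labels=True)
        row = conn.execute(stmt).first()

        # use_labels=True prefixes each column with its table name:
        print(row.share_snapshot_instances_id)   # 'ssi-1'
        print(row.share_instances_share_id)      # 's-1'

Joining back through 'share_instances' is what lets the downgrade rebuild the
old denormalized column even though the table no longer keeps its own copy.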

View File

@@ -4020,8 +4020,12 @@ def count_share_group_snapshot_members_in_share(context, share_id,
return model_query(
context, models.ShareSnapshotInstance, session=session,
project_only=True, read_deleted="no",
).filter_by(
share_id=share_id,
).join(
models.ShareInstance,
models.ShareInstance.id == (
models.ShareSnapshotInstance.share_instance_id),
).filter(
models.ShareInstance.share_id == share_id,
).count()

View File

@@ -704,7 +704,7 @@ class ShareSnapshot(BASE, ManilaBase):
class ShareSnapshotInstance(BASE, ManilaBase):
"""Represents a snapshot of a share."""
__tablename__ = 'share_snapshot_instances'
_extra_keys = ['name', 'share_name']
_extra_keys = ['name', 'share_id', 'share_name']
@property
def name(self):
@@ -714,6 +714,12 @@ class ShareSnapshotInstance(BASE, ManilaBase):
def share_name(self):
return CONF.share_name_template % self.share_instance_id
@property
def share_id(self):
# NOTE(u_glide): This property required for compatibility
# with share drivers
return self.share_instance_id
id = Column(String(36), primary_key=True)
deleted = Column(String(36), default='False')
snapshot_id = Column(String(36), nullable=True)
@@ -724,7 +730,6 @@ class ShareSnapshotInstance(BASE, ManilaBase):
provider_location = Column(String(255))
share_proto = Column(String(255))
size = Column(Integer)
share_id = Column(String(36), nullable=True)
share_group_snapshot_id = Column(String(36), nullable=True)
user_id = Column(String(255))
project_id = Column(String(255))
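
Adding 'share_id' to _extra_keys is what keeps the key visible when the model
is turned into a dict: manila's model base (via oslo.db's ModelBase) iterates
_extra_keys alongside the mapped columns, so driver-facing dicts still contain
'share_id' even though the column itself is gone. A rough, hypothetical
stand-in to show the idea (FakeSnapshotInstance is not manila code):

    class FakeSnapshotInstance(object):
        # Hypothetical stand-in; manila gets this behaviour from its
        # model base rather than a hand-written to_dict().
        _extra_keys = ['share_id']

        def __init__(self, share_instance_id):
            self.share_instance_id = share_instance_id   # real column

        @property
        def share_id(self):
            return self.share_instance_id                # proxy, as above

        def to_dict(self):
            # Mapped columns plus everything listed in _extra_keys.
            keys = ['share_instance_id'] + self._extra_keys
            return {k: getattr(self, k) for k in keys}


    print(sorted(FakeSnapshotInstance('fake-instance-id').to_dict().items()))
    # [('share_id', 'fake-instance-id'),
    #  ('share_instance_id', 'fake-instance-id')]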

View File

@@ -3449,7 +3449,6 @@ class ShareManager(manager.SchedulerDependentManager):
for member in snap_ref['share_group_snapshot_members']:
member['share'] = self.db.share_instance_get(
context, member['share_instance_id'], with_share_data=True)
member['share_id'] = member['share_instance_id']
if 'share_group' in snap_ref:
parent_share_server_id = snap_ref['share_group'][
'share_server_id']
@@ -3605,7 +3604,6 @@ class ShareManager(manager.SchedulerDependentManager):
for member in snap_ref['share_group_snapshot_members']:
member['share'] = self.db.share_instance_get(
context, member['share_instance_id'], with_share_data=True)
member['share_id'] = member['share_instance_id']
status = constants.STATUS_AVAILABLE
now = timeutils.utcnow()
@@ -3704,7 +3702,6 @@ class ShareManager(manager.SchedulerDependentManager):
for member in snap_ref['share_group_snapshot_members']:
member['share'] = self.db.share_instance_get(
context, member['share_instance_id'], with_share_data=True)
member['share_id'] = member['share_instance_id']
snapshot_update = False

View File

@@ -161,9 +161,10 @@ class API(base.Base):
members = self.db.share_group_snapshot_members_get_all(
context, source_share_group_snapshot_id)
for member in members:
share = self.db.share_get(context, member['share_id'])
share_instance = self.db.share_instance_get(
context, member['share_instance_id'])
share_type = share_types.get_share_type(
context, share['share_type_id'])
context, share_instance['share_type_id'])
self.share_api.create(
context,
member['share_proto'],
@@ -295,7 +296,6 @@ class API(base.Base):
'status': constants.STATUS_CREATING,
'size': s['size'],
'share_proto': s['share_proto'],
'share_id': s['id'],
'share_instance_id': s.instance['id']
}
member = self.db.share_group_snapshot_member_create(

View File

@@ -2262,7 +2262,7 @@ class SquashSGSnapshotMembersAndSSIModelsChecks(BaseMigrationChecks):
share_group_snapshot_id = uuidutils.generate_uuid()
share_group_snapshot_member_id = uuidutils.generate_uuid()
keys = (
'user_id', 'project_id', 'size', 'share_proto', 'share_id',
'user_id', 'project_id', 'size', 'share_proto',
'share_group_snapshot_id',
)
@@ -2341,7 +2341,6 @@ class SquashSGSnapshotMembersAndSSIModelsChecks(BaseMigrationChecks):
'project_id': ('p' * 255),
'share_proto': ('s' * 255),
'size': 123456789,
'share_id': self.share_id,
'share_group_snapshot_id': self.share_group_snapshot_id,
}))

View File

@@ -905,9 +905,9 @@ class ShareGroupDatabaseAPITestCase(test.TestCase):
si2 = db_utils.create_share_instance(share_id=share2['id'])
sg_snap = db_utils.create_share_group_snapshot(sg['id'])
db_utils.create_share_group_snapshot_member(
sg_snap['id'], share_id=share['id'], share_instance_id=si['id'])
sg_snap['id'], share_instance_id=si['id'])
db_utils.create_share_group_snapshot_member(
sg_snap['id'], share_id=share2['id'], share_instance_id=si2['id'])
sg_snap['id'], share_instance_id=si2['id'])
count = db_api.count_share_group_snapshot_members_in_share(
self.ctxt, share['id'])
@@ -1084,7 +1084,6 @@ class ShareSnapshotDatabaseAPITestCase(test.TestCase):
self.assertTrue(hasattr(instance, 'share_id'))
self.assertIn('name', instance_dict)
self.assertIn('share_name', instance_dict)
self.assertIn('share_id', instance_dict)
@ddt.data(None, constants.STATUS_ERROR)
def test_share_snapshot_instance_get_all_with_filters_some(self, status):

View File

@@ -58,7 +58,6 @@ def create_share_group_snapshot_member(share_group_snapshot_id, **kwargs):
member = {
'share_proto': "NFS",
'size': 0,
'share_id': None,
'share_instance_id': None,
'user_id': 'fake',
'project_id': 'fake',

View File

@@ -874,7 +874,6 @@ class ShareGroupsAPITestCase(test.TestCase):
'status': constants.STATUS_CREATING,
'size': share['size'],
'share_proto': share['share_proto'],
'share_id': share['id'],
'share_instance_id': mock.ANY,
}
self.mock_object(
@@ -945,7 +944,6 @@ class ShareGroupsAPITestCase(test.TestCase):
'status': constants.STATUS_CREATING,
'size': share['size'],
'share_proto': share['share_proto'],
'share_id': share['id'],
'share_instance_id': mock.ANY,
}
expected_member_2_values = {
@@ -955,7 +953,6 @@ class ShareGroupsAPITestCase(test.TestCase):
'status': constants.STATUS_CREATING,
'size': share_2['size'],
'share_proto': share_2['share_proto'],
'share_id': share_2['id'],
'share_instance_id': mock.ANY,
}
self.mock_object(
@@ -1005,7 +1002,6 @@ class ShareGroupsAPITestCase(test.TestCase):
'status': constants.STATUS_CREATING,
'size': share['size'],
'share_proto': share['share_proto'],
'share_id': share['id'],
'share_instance_id': mock.ANY,
}
self.mock_object(