Add auto increase primary key for unified limit

What this patch did and why:

1. added an auto-increment primary key column `internal_id` to both
   the registered_limit and limit tables. Removed the primary key from
   the `id` column but added a unique index on it. This change can
   improve database performance.
2. dropped the foreign key in the limit table. The `project_id`
   column had a foreign key. Using foreign keys between different
   backends can lead to unexpected errors.
3. dropped the unique constraints and the foreign key constraint in
   both tables. Because `region_id` can be null, these constraints
   can't work reliably across all kinds of DBs. Instead, we'll check
   uniqueness and foreign references in code.

NOTE: For MySQL and PostgreSQL, we changed the tables in place. But
SQLite doesn't support adding a primary key column to an existing
table, so we recreated the tables instead.

Closes-bug: #1777893
Change-Id: Ibb408758466ff367f57bafbd4b8c9213499f8dc3
This commit is contained in:
wangxiyuan 2018-06-18 11:39:31 +08:00
parent 01afe79ac9
commit 209462c90d
8 changed files with 308 additions and 65 deletions

View File

@ -0,0 +1,63 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import migrate
import sqlalchemy as sql
def upgrade(migrate_engine):
    """Drop the remaining cross-table constraints on the unified limit tables.

    For MySQL and PostgreSQL: drop the foreign key from `limit.project_id`
    to `project.id` and the composite unique constraints on both the
    `limit` and `registered_limit` tables.  `region_id` may be NULL, so
    uniqueness and referential checks are enforced in code instead.

    For SQLite, which cannot alter constraints in place, earlier steps
    created (and populated) `limit_new` / `registered_limit_new` tables;
    here we drop the old tables and rename the new ones into place.
    """
    meta = sql.MetaData()
    meta.bind = migrate_engine

    limit_table = sql.Table('limit', meta, autoload=True)
    registered_limit_table = sql.Table('registered_limit', meta, autoload=True)

    if migrate_engine.name != 'sqlite':
        project_table = sql.Table('project', meta, autoload=True)
        # Constraint names are backend-generated, so discover them via
        # reflection rather than hard-coding them.
        inspector = sql.engine.reflection.Inspector.from_engine(migrate_engine)
        for fk in inspector.get_foreign_keys('limit'):
            fkey = migrate.ForeignKeyConstraint(
                [limit_table.c.project_id],
                [project_table.c.id],
                name=fk['name'])
            fkey.drop()
        for uc in inspector.get_unique_constraints('limit'):
            # Drop only the composite constraint; the unique constraint on
            # `id` added by the previous migration must remain.
            if set(uc['column_names']) == set(['project_id', 'service_id',
                                              'region_id', 'resource_name']):
                uc = migrate.UniqueConstraint(limit_table.c.project_id,
                                              limit_table.c.service_id,
                                              limit_table.c.region_id,
                                              limit_table.c.resource_name,
                                              name=uc['name'])
                uc.drop()
        for uc in inspector.get_unique_constraints('registered_limit'):
            if set(uc['column_names']) == set(['service_id', 'region_id',
                                              'resource_name']):
                uc = migrate.UniqueConstraint(
                    registered_limit_table.c.service_id,
                    registered_limit_table.c.region_id,
                    registered_limit_table.c.resource_name,
                    name=uc['name'])
                uc.drop()
    else:
        # SQLite: swap in the `*_new` tables built by the earlier steps.
        registered_limit_table_new = sql.Table('registered_limit_new', meta,
                                               autoload=True)
        limit_table_new = sql.Table('limit_new', meta, autoload=True)

        limit_table.drop()
        limit_table_new.rename('limit')
        registered_limit_table.drop()
        registered_limit_table_new.rename('registered_limit')

View File

@ -0,0 +1,37 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy as sql
def upgrade(migrate_engine):
    """Copy unified limit rows into the new SQLite tables.

    SQLite cannot add a primary key column to an existing table, so the
    expand step created `registered_limit_new` and `limit_new` with the
    auto-increment `internal_id` primary key.  This data-migration step
    copies every row from the old tables into the new ones; the contract
    step then drops the old tables and renames the new ones into place.
    Other backends alter their tables in place, so this is a no-op for
    them.
    """
    meta = sql.MetaData()
    meta.bind = migrate_engine

    # Bug fix: compare the engine *name*, not the engine object itself.
    # `migrate_engine == 'sqlite'` is always False, so no data was ever
    # copied and the contract step would swap in empty tables.
    if migrate_engine.name == 'sqlite':
        registered_limit_table = sql.Table('registered_limit', meta,
                                           autoload=True)
        registered_limit_table_new = sql.Table('registered_limit_new', meta,
                                               autoload=True)
        limit_table = sql.Table('limit', meta, autoload=True)
        limit_table_new = sql.Table('limit_new', meta, autoload=True)

        # `internal_id` is intentionally omitted from the column lists:
        # it is auto-generated by the new tables.
        registered_limit_table_new.insert().from_select(
            ['id', 'service_id', 'region_id', 'resource_name',
             'default_limit', 'description'],
            registered_limit_table.select()).execute()
        limit_table_new.insert().from_select(
            ['id', 'project_id', 'service_id', 'region_id', 'resource_name',
             'resource_limit', 'description'],
            limit_table.select()).execute()

View File

@ -0,0 +1,103 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import migrate
import sqlalchemy as sql
# Raw DDL is required here: sqlalchemy-migrate cannot add an
# auto-increment primary key column to an existing table, so we issue
# backend-specific ALTER TABLE statements for MySQL and PostgreSQL.
MYSQL_CREATE_ID_PRIMARY_KEY_COLUMN = """
ALTER TABLE `%s` ADD `internal_id` INT NOT NULL AUTO_INCREMENT PRIMARY KEY;
"""

POSTGRESQL_CREATE_ID_PRIMARY_KEY_COLUMN = """
ALTER TABLE "%s" ADD COLUMN "internal_id" SERIAL PRIMARY KEY;
"""


def upgrade(migrate_engine):
    """Replace the `id` primary key with an auto-increment `internal_id`.

    For both the registered_limit and limit tables on MySQL and
    PostgreSQL:

    1. drop the primary key on the `id` column.
    2. create an auto-increment `internal_id` column as the primary key.
    3. add a unique constraint on the `id` column.

    SQLite does not support adding a primary key to an existing table,
    so there we instead create `registered_limit_new` / `limit_new`
    tables that already contain `internal_id`; later steps copy the data
    over, drop the old tables, and rename the new ones into place.
    """
    meta = sql.MetaData()
    meta.bind = migrate_engine

    registered_limit_table = sql.Table('registered_limit', meta, autoload=True)
    limit_table = sql.Table('limit', meta, autoload=True)

    if migrate_engine.name != 'sqlite':
        # registered_limit: drop the PK on `id`, add the `internal_id`
        # PK column, then restore uniqueness of `id` as a constraint.
        pk = migrate.PrimaryKeyConstraint('id', table=registered_limit_table)
        pk.drop()
        if migrate_engine.name == 'mysql':
            migrate_engine.execute(
                MYSQL_CREATE_ID_PRIMARY_KEY_COLUMN % 'registered_limit')
        else:
            migrate_engine.execute(
                POSTGRESQL_CREATE_ID_PRIMARY_KEY_COLUMN % 'registered_limit')
        unique_constraint = migrate.UniqueConstraint(
            'id', table=registered_limit_table)
        unique_constraint.create()

        # Same transformation for the limit table.
        pk = migrate.PrimaryKeyConstraint('id', table=limit_table)
        pk.drop()
        if migrate_engine.name == 'mysql':
            migrate_engine.execute(
                MYSQL_CREATE_ID_PRIMARY_KEY_COLUMN % 'limit')
        else:
            migrate_engine.execute(
                POSTGRESQL_CREATE_ID_PRIMARY_KEY_COLUMN % 'limit')
        unique_constraint = migrate.UniqueConstraint('id', table=limit_table)
        unique_constraint.create()
    else:
        # SQLite: build the replacement tables up front; data is copied
        # and the tables are swapped by later migration steps.  Note the
        # new tables carry no composite unique or foreign key
        # constraints — those checks move into code.
        registered_limit_table_new = sql.Table(
            'registered_limit_new',
            meta,
            sql.Column('internal_id', sql.Integer, primary_key=True),
            sql.Column('id', sql.String(length=64), unique=True),
            sql.Column('service_id',
                       sql.String(64)),
            sql.Column('region_id',
                       sql.String(64),
                       nullable=True),
            sql.Column('resource_name', sql.String(255)),
            sql.Column('default_limit', sql.Integer, nullable=False),
            sql.Column('description', sql.Text),
            mysql_engine='InnoDB',
            mysql_charset='utf8')
        registered_limit_table_new.create(migrate_engine, checkfirst=True)

        limit_table_new = sql.Table(
            'limit_new',
            meta,
            sql.Column('internal_id', sql.Integer, primary_key=True),
            sql.Column('id', sql.String(length=64), unique=True),
            sql.Column('project_id', sql.String(64)),
            sql.Column('service_id', sql.String(64)),
            sql.Column('region_id', sql.String(64), nullable=True),
            sql.Column('resource_name', sql.String(255)),
            sql.Column('resource_limit', sql.Integer, nullable=False),
            sql.Column('description', sql.Text),
            mysql_engine='InnoDB',
            mysql_charset='utf8')
        limit_table_new.create(migrate_engine, checkfirst=True)

View File

@ -16,7 +16,6 @@
import copy
from oslo_db import exception as db_exception
import sqlalchemy
from keystone.common import driver_hints
from keystone.common import sql
@ -28,6 +27,7 @@ from keystone.limit.backends import base
class RegisteredLimitModel(sql.ModelBase, sql.ModelDictMixin):
__tablename__ = 'registered_limit'
attributes = [
'internal_id',
'id',
'service_id',
'region_id',
@ -36,7 +36,8 @@ class RegisteredLimitModel(sql.ModelBase, sql.ModelDictMixin):
'description'
]
id = sql.Column(sql.String(length=64), primary_key=True)
internal_id = sql.Column(sql.Integer, primary_key=True, nullable=False)
id = sql.Column(sql.String(length=64), nullable=False, unique=True)
service_id = sql.Column(sql.String(255),
sql.ForeignKey('service.id'))
region_id = sql.Column(sql.String(64),
@ -45,15 +46,16 @@ class RegisteredLimitModel(sql.ModelBase, sql.ModelDictMixin):
default_limit = sql.Column(sql.Integer, nullable=False)
description = sql.Column(sql.Text())
__table_args__ = (
sqlalchemy.UniqueConstraint('service_id',
'region_id',
'resource_name'),)
def to_dict(self):
ref = super(RegisteredLimitModel, self).to_dict()
ref.pop('internal_id')
return ref
class LimitModel(sql.ModelBase, sql.ModelDictMixin):
__tablename__ = 'limit'
attributes = [
'internal_id',
'id',
'project_id',
'service_id',
@ -63,59 +65,55 @@ class LimitModel(sql.ModelBase, sql.ModelDictMixin):
'description'
]
id = sql.Column(sql.String(length=64), primary_key=True)
project_id = sql.Column(sql.String(64),
sql.ForeignKey('project.id'))
internal_id = sql.Column(sql.Integer, primary_key=True, nullable=False)
id = sql.Column(sql.String(length=64), nullable=False, unique=True)
project_id = sql.Column(sql.String(64))
service_id = sql.Column(sql.String(255))
region_id = sql.Column(sql.String(64), nullable=True)
resource_name = sql.Column(sql.String(255))
resource_limit = sql.Column(sql.Integer, nullable=False)
description = sql.Column(sql.Text())
__table_args__ = (
sqlalchemy.ForeignKeyConstraint(['service_id',
'region_id',
'resource_name'],
['registered_limit.service_id',
'registered_limit.region_id',
'registered_limit.resource_name']),
sqlalchemy.UniqueConstraint('project_id',
'service_id',
'region_id',
'resource_name'),)
def to_dict(self):
ref = super(LimitModel, self).to_dict()
ref.pop('internal_id')
return ref
class UnifiedLimit(base.UnifiedLimitDriverBase):
def _check_unified_limit_without_region(self, unified_limit,
is_registered_limit=True):
def _check_unified_limit_unique(self, unified_limit,
is_registered_limit=True):
# Ensure the new created or updated unified limit won't break the
# current reference between registered limit and limit.
hints = driver_hints.Hints()
hints.add_filter('service_id', unified_limit['service_id'])
hints.add_filter('resource_name', unified_limit['resource_name'])
hints.add_filter('region_id', None)
if not is_registered_limit:
# For limit, we should ensure:
# 1. there is no duplicate entry.
# 2. there is a registered limit reference.
reference_hints = copy.deepcopy(hints)
hints.add_filter('project_id', unified_limit['project_id'])
hints.add_filter('region_id', unified_limit.get('region_id'))
if is_registered_limit:
# For registered limit, we should ensure that there is no duplicate
# entry.
with sql.session_for_read() as session:
unified_limits = session.query(LimitModel)
unified_limits = sql.filter_limit_query(LimitModel,
unified_limits = session.query(RegisteredLimitModel)
unified_limits = sql.filter_limit_query(RegisteredLimitModel,
unified_limits,
hints)
else:
# For limit, we should ensure:
# 1. there is a registered limit reference.
# 2. there is no duplicate entry.
reference_hints = copy.deepcopy(hints)
with sql.session_for_read() as session:
registered_limits = session.query(RegisteredLimitModel)
registered_limits = sql.filter_limit_query(
RegisteredLimitModel, registered_limits, reference_hints)
if not registered_limits.all():
raise exception.NoLimitReference
else:
# For registered limit, we should just ensure that there is no
# duplicate entry.
hints.add_filter('project_id', unified_limit['project_id'])
with sql.session_for_read() as session:
unified_limits = session.query(RegisteredLimitModel)
unified_limits = sql.filter_limit_query(RegisteredLimitModel,
unified_limits = session.query(LimitModel)
unified_limits = sql.filter_limit_query(LimitModel,
unified_limits,
hints)
if unified_limits.all():
@ -123,11 +121,13 @@ class UnifiedLimit(base.UnifiedLimitDriverBase):
limit_type = 'registered_limit' if is_registered_limit else 'limit'
raise exception.Conflict(type=limit_type, details=msg)
def _check_referenced_limit_without_region(self, registered_limit):
def _check_referenced_limit_reference(self, registered_limit):
# When updating or deleting a registered limit, we should ensure there
# is no reference limit.
hints = driver_hints.Hints()
hints.add_filter('service_id', registered_limit.service_id)
hints.add_filter('resource_name', registered_limit.resource_name)
hints.add_filter('region_id', None)
hints.add_filter('region_id', registered_limit.region_id)
with sql.session_for_read() as session:
limits = session.query(LimitModel)
limits = sql.filter_limit_query(LimitModel,
@ -141,8 +141,7 @@ class UnifiedLimit(base.UnifiedLimitDriverBase):
with sql.session_for_write() as session:
new_registered_limits = []
for registered_limit in registered_limits:
if registered_limit.get('region_id') is None:
self._check_unified_limit_without_region(registered_limit)
self._check_unified_limit_unique(registered_limit)
ref = RegisteredLimitModel.from_dict(registered_limit)
session.add(ref)
new_registered_limits.append(ref.to_dict())
@ -153,10 +152,13 @@ class UnifiedLimit(base.UnifiedLimitDriverBase):
try:
with sql.session_for_write() as session:
ref = self._get_registered_limit(session, registered_limit_id)
if not ref.region_id:
self._check_referenced_limit_without_region(ref)
self._check_referenced_limit_reference(ref)
old_dict = ref.to_dict()
old_dict.update(registered_limit)
if (registered_limit.get('service_id') or
registered_limit.get('region_id') or
registered_limit.get('resource_name')):
self._check_unified_limit_unique(old_dict)
new_registered_limit = RegisteredLimitModel.from_dict(
old_dict)
for attr in registered_limit:
@ -177,7 +179,9 @@ class UnifiedLimit(base.UnifiedLimitDriverBase):
return [s.to_dict() for s in registered_limits]
def _get_registered_limit(self, session, registered_limit_id):
ref = session.query(RegisteredLimitModel).get(registered_limit_id)
query = session.query(RegisteredLimitModel).filter_by(
id=registered_limit_id)
ref = query.first()
if ref is None:
raise exception.RegisteredLimitNotFound(id=registered_limit_id)
return ref
@ -192,8 +196,7 @@ class UnifiedLimit(base.UnifiedLimitDriverBase):
with sql.session_for_write() as session:
ref = self._get_registered_limit(session,
registered_limit_id)
if not ref.region_id:
self._check_referenced_limit_without_region(ref)
self._check_referenced_limit_reference(ref)
session.delete(ref)
except db_exception.DBReferenceError:
raise exception.RegisteredLimitError(id=registered_limit_id)
@ -204,9 +207,8 @@ class UnifiedLimit(base.UnifiedLimitDriverBase):
with sql.session_for_write() as session:
new_limits = []
for limit in limits:
if limit.get('region_id') is None:
self._check_unified_limit_without_region(
limit, is_registered_limit=False)
self._check_unified_limit_unique(limit,
is_registered_limit=False)
ref = LimitModel.from_dict(limit)
session.add(ref)
new_limits.append(ref.to_dict())
@ -235,7 +237,8 @@ class UnifiedLimit(base.UnifiedLimitDriverBase):
return [s.to_dict() for s in limits]
def _get_limit(self, session, limit_id):
ref = session.query(LimitModel).get(limit_id)
query = session.query(LimitModel).filter_by(id=limit_id)
ref = query.first()
if ref is None:
raise exception.LimitNotFound(id=limit_id)
return ref

View File

@ -16,7 +16,6 @@ from keystone.common import driver_hints
from keystone.common import provider_api
from keystone import exception
from keystone.tests import unit
from keystone.tests.unit import utils as test_utils
PROVIDERS = provider_api.ProviderAPIs
@ -198,7 +197,6 @@ class RegisteredLimitTests(object):
PROVIDERS.unified_limit_api.update_registered_limit,
registered_limit_1['id'], update_ref)
@test_utils.wip("Skipped until Bug 1744195 is resolved")
def test_update_registered_limit_when_reference_limit_exist(self):
registered_limit_1 = unit.new_registered_limit_ref(
service_id=self.service_one['id'],
@ -360,7 +358,6 @@ class RegisteredLimitTests(object):
PROVIDERS.unified_limit_api.delete_registered_limit,
uuid.uuid4().hex)
@test_utils.wip("Skipped until Bug 1744195 is resolved")
def test_delete_registered_limit_when_reference_limit_exist(self):
registered_limit_1 = unit.new_registered_limit_ref(
service_id=self.service_one['id'],
@ -465,12 +462,11 @@ class LimitTests(object):
PROVIDERS.unified_limit_api.create_limits,
[limit])
@test_utils.wip("Skipped until Bug 1744195 is resolved")
def test_create_limit_without_reference_registered_limit(self):
limit_1 = unit.new_limit_ref(
project_id=self.tenant_bar['id'],
service_id=self.service_one['id'],
region_id=self.region_one['id'],
region_id=self.region_two['id'],
resource_name='volume', resource_limit=10, id=uuid.uuid4().hex)
self.assertRaises(exception.NoLimitReference,
PROVIDERS.unified_limit_api.create_limits,

View File

@ -1193,5 +1193,9 @@ class SqlLimit(SqlTests, limit_tests.LimitTests):
service_id=self.service_one['id'],
region_id=self.region_two['id'],
resource_name='snapshot', default_limit=10, id=uuid.uuid4().hex)
registered_limit_3 = unit.new_registered_limit_ref(
service_id=self.service_one['id'],
region_id=self.region_two['id'],
resource_name='backup', default_limit=10, id=uuid.uuid4().hex)
PROVIDERS.unified_limit_api.create_registered_limits(
[registered_limit_1, registered_limit_2])
[registered_limit_1, registered_limit_2, registered_limit_3])

View File

@ -18,7 +18,6 @@ import uuid
from keystone.common import provider_api
from keystone.tests import unit
from keystone.tests.unit import test_v3
from keystone.tests.unit import utils as test_utils
PROVIDERS = provider_api.ProviderAPIs
@ -234,7 +233,6 @@ class RegisteredLimitsTestCase(test_v3.RestfulTestCase):
body={'registered_limit': input_limit},
expected_status=http_client.BAD_REQUEST)
@test_utils.wip("Skipped until Bug 1744195 is resolved")
def test_update_registered_limit_with_referenced_limit(self):
ref = unit.new_registered_limit_ref(service_id=self.service_id,
region_id=self.region_id,
@ -361,7 +359,6 @@ class RegisteredLimitsTestCase(test_v3.RestfulTestCase):
registered_limits = r.result['registered_limits']
self.assertEqual(len(registered_limits), 1)
@test_utils.wip("Skipped until Bug 1744195 is resolved")
def test_delete_registered_limit_with_referenced_limit(self):
ref = unit.new_registered_limit_ref(service_id=self.service_id,
region_id=self.region_id,
@ -412,12 +409,13 @@ class LimitsTestCase(test_v3.RestfulTestCase):
resource_name='volume')
ref2 = unit.new_registered_limit_ref(service_id=self.service_id2,
resource_name='snapshot')
r = self.post(
ref3 = unit.new_registered_limit_ref(service_id=self.service_id,
region_id=self.region_id,
resource_name='backup')
self.post(
'/registered_limits',
body={'registered_limits': [ref1, ref2]},
body={'registered_limits': [ref1, ref2, ref3]},
expected_status=http_client.CREATED)
self.registered_limit1 = r.result['registered_limits'][0]
self.registered_limit2 = r.result['registered_limits'][1]
def test_create_limit(self):
ref = unit.new_limit_ref(project_id=self.project_id,
@ -503,8 +501,7 @@ class LimitsTestCase(test_v3.RestfulTestCase):
self.assertEqual(1, len(limits))
ref2 = unit.new_limit_ref(project_id=self.project_id,
service_id=self.service_id,
region_id=self.region_id,
service_id=self.service_id2,
resource_name='snapshot')
ref3 = unit.new_limit_ref(project_id=self.project_id,
service_id=self.service_id,
@ -544,7 +541,6 @@ class LimitsTestCase(test_v3.RestfulTestCase):
body={'limits': [ref]},
expected_status=http_client.CONFLICT)
@test_utils.wip("Skipped until Bug 1744195 is resolved")
def test_create_limit_without_reference_registered_limit(self):
ref = unit.new_limit_ref(project_id=self.project_id,
service_id=self.service_id,

View File

@ -3075,6 +3075,47 @@ class FullMigration(SqlMigrateBase, unit.TestCase):
else:
raise ValueError('Too Many Passwords Found')
def test_migration_047_add_auto_increment_pk_column_to_unified_limit(self):
    # Before migration 047: both tables use `id` as the primary key and
    # `limit.project_id` carries a foreign key to project.
    self.expand(46)
    self.migrate(46)
    self.contract(46)
    registered_limit_table_name = 'registered_limit'
    limit_table_name = 'limit'
    self.assertTableColumns(
        registered_limit_table_name,
        ['id', 'service_id', 'region_id', 'resource_name', 'default_limit',
         'description']
    )
    self.assertTableColumns(
        limit_table_name,
        ['id', 'project_id', 'service_id', 'region_id', 'resource_name',
         'resource_limit', 'description']
    )
    self.assertTrue(self.does_pk_exist('registered_limit', 'id'))
    self.assertTrue(self.does_pk_exist('limit', 'id'))
    self.assertTrue(self.does_fk_exist('limit', 'project_id'))

    # After migration 047: both tables gain the auto-increment
    # `internal_id` primary key, `id` is no longer the PK, and the
    # foreign keys on `limit` are gone.
    self.expand(47)
    self.migrate(47)
    self.contract(47)
    self.assertTableColumns(
        registered_limit_table_name,
        ['id', 'service_id', 'region_id', 'resource_name', 'default_limit',
         'description', 'internal_id']
    )
    self.assertTableColumns(
        limit_table_name,
        ['id', 'project_id', 'service_id', 'region_id', 'resource_name',
         'resource_limit', 'description', 'internal_id']
    )
    self.assertFalse(self.does_pk_exist('registered_limit', 'id'))
    self.assertTrue(self.does_pk_exist('registered_limit', 'internal_id'))
    self.assertFalse(self.does_pk_exist('limit', 'id'))
    self.assertTrue(self.does_pk_exist('limit', 'internal_id'))
    limit_table = sqlalchemy.Table(limit_table_name,
                                   self.metadata, autoload=True)
    self.assertEqual(set([]), limit_table.foreign_keys)
class MySQLOpportunisticFullMigration(FullMigration):
FIXTURE = db_fixtures.MySQLOpportunisticFixture