Create indexes for foreign keys

Some database backends (for example PostgreSQL) do not automatically
create an index on a foreign key column. As a result, queries that join
or filter on those columns can be slow. This change adds the missing
indexes through a migration that only creates an index if an equivalent
one does not already exist.

In total, 26 foreign keys were identified as missing indexes. Two of
them are already covered by UniqueConstraints; for the remaining 24,
new indexes are created.
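
For illustration only (not part of this change), a minimal sketch of how
unindexed foreign key columns can be listed with SQLAlchemy's inspector.
The connection URL and the engine name are placeholders; note that a
column covered only by a UniqueConstraint still shows up in this listing,
which is why two of the 26 foreign keys did not need a new index:

# Illustration only: list foreign key columns that no index covers.
# The connection URL below is a placeholder, not a Cinder default.
from sqlalchemy import create_engine, inspect

engine = create_engine('postgresql://user:password@localhost/cinder')
inspector = inspect(engine)

for table_name in inspector.get_table_names():
    indexed_columns = {column
                       for index in inspector.get_indexes(table_name)
                       for column in index['column_names']}
    foreign_key_columns = {column
                           for fk in inspector.get_foreign_keys(table_name)
                           for column in fk['constrained_columns']}
    for column in foreign_key_columns - indexed_columns:
        print(table_name + '.' + column)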

Closes-Bug: #1666547
Change-Id: I1437c3a1aa13142ee7a7e3e7bf9ff867b9d72652
Mate Lakat 2017-02-23 11:05:47 +01:00
parent b441c6d151
commit c72f5ef8e3
2 changed files with 109 additions and 1 deletion


@@ -0,0 +1,65 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db.sqlalchemy import utils
from oslo_log import log as logging
from sqlalchemy import MetaData

LOG = logging.getLogger(__name__)


def ensure_index_exists(migrate_engine, table_name, column):
    index_name = table_name + '_' + column + '_idx'
    columns = [column]

    if utils.index_exists_on_columns(migrate_engine, table_name, columns):
        LOG.info(
            'Skipped adding %s because an equivalent index already exists.',
            index_name
        )
    else:
        utils.add_index(migrate_engine, table_name, index_name, columns)


def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    for table_name, column in INDEXES_TO_CREATE:
        ensure_index_exists(migrate_engine, table_name, column)


INDEXES_TO_CREATE = (
    ('attachment_specs', 'attachment_id'),
    ('cgsnapshots', 'consistencygroup_id'),
    ('group_snapshots', 'group_id'),
    ('group_type_specs', 'group_type_id'),
    ('group_volume_type_mapping', 'group_id'),
    ('group_volume_type_mapping', 'volume_type_id'),
    ('quality_of_service_specs', 'specs_id'),
    ('reservations', 'allocated_id'),
    ('reservations', 'usage_id'),
    ('snapshot_metadata', 'snapshot_id'),
    ('snapshots', 'cgsnapshot_id'),
    ('snapshots', 'group_snapshot_id'),
    ('snapshots', 'volume_id'),
    ('transfers', 'volume_id'),
    ('volume_admin_metadata', 'volume_id'),
    ('volume_attachment', 'volume_id'),
    ('volume_glance_metadata', 'snapshot_id'),
    ('volume_glance_metadata', 'volume_id'),
    ('volume_metadata', 'volume_id'),
    ('volume_type_extra_specs', 'volume_type_id'),
    ('volume_types', 'qos_specs_id'),
    ('volumes', 'consistencygroup_id'),
    ('volumes', 'group_id'),
    ('workers', 'service_id'),
)
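
For a single entry such as ('snapshots', 'volume_id'), the helper above
ends up creating an index named snapshots_volume_id_idx on that column.
A rough standalone equivalent written with plain SQLAlchemy rather than
the oslo.db helpers (a sketch assuming a recent SQLAlchemy and an
already connected engine, not the code the migration actually uses)
might look like this:

# Rough plain-SQLAlchemy equivalent of ensure_index_exists() above.
# Assumes a recent SQLAlchemy and an already connected engine; this is
# a sketch, not the helper the migration actually uses.
from sqlalchemy import Index, MetaData, Table, inspect


def create_fk_index(engine, table_name, column):
    index_name = table_name + '_' + column + '_idx'
    indexed_columns = {name
                       for index in inspect(engine).get_indexes(table_name)
                       for name in index['column_names']}
    if column in indexed_columns:
        return  # an equivalent index already exists, nothing to do
    table = Table(table_name, MetaData(), autoload_with=engine)
    Index(index_name, table.c[column]).create(bind=engine)  # CREATE INDEX

# e.g. create_fk_index(engine, 'snapshots', 'volume_id')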


@@ -29,6 +29,7 @@ from oslo_db.sqlalchemy import test_base
from oslo_db.sqlalchemy import test_migrations
from oslo_db.sqlalchemy import utils as db_utils
import sqlalchemy
from sqlalchemy.engine import reflection

from cinder.db import migration
import cinder.db.sqlalchemy.migrate_repo
@@ -1098,13 +1099,55 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
        self.assertIsInstance(workers.c.race_preventer.type,
                              self.INTEGER_TYPE)

    def get_table_names(self, engine):
        inspector = reflection.Inspector.from_engine(engine)
        return inspector.get_table_names()

    def get_foreign_key_columns(self, engine, table_name):
        foreign_keys = set()
        table = db_utils.get_table(engine, table_name)
        inspector = reflection.Inspector.from_engine(engine)
        for column_dict in inspector.get_columns(table_name):
            column_name = column_dict['name']
            column = getattr(table.c, column_name)
            if column.foreign_keys:
                foreign_keys.add(column_name)
        return foreign_keys

    def get_indexed_columns(self, engine, table_name):
        indexed_columns = set()
        for index in db_utils.get_indexes(engine, table_name):
            for column_name in index['column_names']:
                indexed_columns.add(column_name)
        return indexed_columns

    def assert_each_foreign_key_is_part_of_an_index(self):
        engine = self.migrate_engine

        non_indexed_foreign_keys = set()

        for table_name in self.get_table_names(engine):
            indexed_columns = self.get_indexed_columns(engine, table_name)
            foreign_key_columns = self.get_foreign_key_columns(
                engine, table_name
            )
            for column_name in foreign_key_columns - indexed_columns:
                non_indexed_foreign_keys.add(table_name + '.' + column_name)

        self.assertSetEqual(set(), non_indexed_foreign_keys)

    def test_walk_versions(self):
        self.walk_versions(False, False)
        self.assert_each_foreign_key_is_part_of_an_index()


class TestSqliteMigrations(test_base.DbTestCase,
                           MigrationsMixin):
    def assert_each_foreign_key_is_part_of_an_index(self):
        # Skip the test for SQLite because SQLite does not list
        # UniqueConstraints as indexes, which makes this test fail.
        # Given that SQLite is only for testing purposes, it is safe to
        # skip.
        pass


class TestMysqlMigrations(test_base.MySQLOpportunisticTestCase,