summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorHemanth Makkapati <hemanth.makkapati@rackspace.com>2016-11-14 17:18:34 -0600
committerHemanth Makkapati <hemanth.makkapati@rackspace.com>2017-02-01 16:08:17 -0600
commit95c7c1b753a87b97715ae7b4dd8283f78efaf0ff (patch)
treeb8b670958dd2dbf9466072dfdd587bd3e4667f7c
parent21d431013f6ad8a9f4a2afc34b66f67ff0d628eb (diff)
Refactor tests to use Alembic to run migrations
* Functional tests now use alembic instead of sqlalchemy-migrate to build and destroy test database. * All tests now use a file-based sqlite db as opposed to an in-memory database. Partially-Implements: blueprint alembic-migrations Change-Id: I77921366a05ba6f9841143af89c1f4059d8454c6 Depends-On: Ie8594ff339a13bf190aefa308f54e97ee20ecfa2
Notes
Notes (review): Code-Review+2: Nikhil Komawar <nik.komawar@gmail.com> Code-Review+2: Steve Lewis (stevelle) <steve.lewis@rackspace.com> Code-Review+2: Brian Rosmaita <brian.rosmaita@rackspace.com> Workflow+1: Brian Rosmaita <brian.rosmaita@rackspace.com> Verified+2: Jenkins Submitted-by: Jenkins Submitted-at: Thu, 02 Feb 2017 13:09:46 +0000 Reviewed-on: https://review.openstack.org/397409 Project: openstack/glance Branch: refs/heads/master
-rw-r--r--glance/db/migration.py13
-rw-r--r--glance/db/sqlalchemy/alembic_migrations/__init__.py15
-rw-r--r--glance/db/sqlalchemy/alembic_migrations/versions/__init__.py0
-rw-r--r--glance/tests/functional/db/migrations/__init__.py0
-rw-r--r--glance/tests/functional/db/migrations/test_mitaka01.py48
-rw-r--r--glance/tests/functional/db/migrations/test_mitaka02.py65
-rw-r--r--glance/tests/functional/db/migrations/test_ocata01.py142
-rw-r--r--glance/tests/functional/db/test_migrations.py173
-rw-r--r--glance/tests/integration/legacy_functional/base.py3
-rw-r--r--glance/tests/integration/v2/base.py3
-rw-r--r--glance/tests/unit/test_migrations.py1712
-rw-r--r--glance/tests/utils.py13
-rw-r--r--tox.ini9
13 files changed, 460 insertions, 1736 deletions
diff --git a/glance/db/migration.py b/glance/db/migration.py
index a35591d..e6f5179 100644
--- a/glance/db/migration.py
+++ b/glance/db/migration.py
@@ -26,8 +26,6 @@ from oslo_config import cfg
26from oslo_db import options as db_options 26from oslo_db import options as db_options
27from stevedore import driver 27from stevedore import driver
28 28
29from glance.db.sqlalchemy import api as db_api
30
31 29
32_IMPL = None 30_IMPL = None
33_LOCK = threading.Lock() 31_LOCK = threading.Lock()
@@ -53,14 +51,3 @@ MIGRATE_REPO_PATH = os.path.join(
53 'sqlalchemy', 51 'sqlalchemy',
54 'migrate_repo', 52 'migrate_repo',
55) 53)
56
57
58def db_sync(version=None, init_version=0, engine=None):
59 """Migrate the database to `version` or the most recent version."""
60
61 if engine is None:
62 engine = db_api.get_engine()
63 return get_backend().db_sync(engine=engine,
64 abs_path=MIGRATE_REPO_PATH,
65 version=version,
66 init_version=init_version)
diff --git a/glance/db/sqlalchemy/alembic_migrations/__init__.py b/glance/db/sqlalchemy/alembic_migrations/__init__.py
index b1a0499..32476db 100644
--- a/glance/db/sqlalchemy/alembic_migrations/__init__.py
+++ b/glance/db/sqlalchemy/alembic_migrations/__init__.py
@@ -20,19 +20,20 @@ from alembic import command as alembic_command
20from alembic import config as alembic_config 20from alembic import config as alembic_config
21from alembic import migration as alembic_migration 21from alembic import migration as alembic_migration
22from oslo_db import exception as db_exception 22from oslo_db import exception as db_exception
23from oslo_db.sqlalchemy import migration 23from oslo_db.sqlalchemy import migration as sqla_migration
24 24
25from glance.db import migration as db_migration 25from glance.db import migration as db_migration
26from glance.db.sqlalchemy import api as db_api 26from glance.db.sqlalchemy import api as db_api
27from glance.i18n import _ 27from glance.i18n import _
28 28
29 29
30def get_alembic_config(): 30def get_alembic_config(engine=None):
31 """Return a valid alembic config object""" 31 """Return a valid alembic config object"""
32 ini_path = os.path.join(os.path.dirname(__file__), 'alembic.ini') 32 ini_path = os.path.join(os.path.dirname(__file__), 'alembic.ini')
33 config = alembic_config.Config(os.path.abspath(ini_path)) 33 config = alembic_config.Config(os.path.abspath(ini_path))
34 dbconn = str(db_api.get_engine().url) 34 if engine is None:
35 config.set_main_option('sqlalchemy.url', dbconn) 35 engine = db_api.get_engine()
36 config.set_main_option('sqlalchemy.url', str(engine.url))
36 return config 37 return config
37 38
38 39
@@ -47,9 +48,9 @@ def get_current_alembic_heads():
47 48
48def get_current_legacy_head(): 49def get_current_legacy_head():
49 try: 50 try:
50 legacy_head = migration.db_version(db_api.get_engine(), 51 legacy_head = sqla_migration.db_version(db_api.get_engine(),
51 db_migration.MIGRATE_REPO_PATH, 52 db_migration.MIGRATE_REPO_PATH,
52 db_migration.INIT_VERSION) 53 db_migration.INIT_VERSION)
53 except db_exception.DbMigrationError: 54 except db_exception.DbMigrationError:
54 legacy_head = None 55 legacy_head = None
55 return legacy_head 56 return legacy_head
diff --git a/glance/db/sqlalchemy/alembic_migrations/versions/__init__.py b/glance/db/sqlalchemy/alembic_migrations/versions/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/glance/db/sqlalchemy/alembic_migrations/versions/__init__.py
diff --git a/glance/tests/functional/db/migrations/__init__.py b/glance/tests/functional/db/migrations/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/glance/tests/functional/db/migrations/__init__.py
diff --git a/glance/tests/functional/db/migrations/test_mitaka01.py b/glance/tests/functional/db/migrations/test_mitaka01.py
new file mode 100644
index 0000000..c222313
--- /dev/null
+++ b/glance/tests/functional/db/migrations/test_mitaka01.py
@@ -0,0 +1,48 @@
1# Licensed under the Apache License, Version 2.0 (the "License"); you may
2# not use this file except in compliance with the License. You may obtain
3# a copy of the License at
4#
5# http://www.apache.org/licenses/LICENSE-2.0
6#
7# Unless required by applicable law or agreed to in writing, software
8# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
9# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
10# License for the specific language governing permissions and limitations
11# under the License.
12
13from oslo_db.sqlalchemy import test_base
14import sqlalchemy
15
16from glance.tests.functional.db import test_migrations
17
18
19def get_indexes(table, engine):
20 inspector = sqlalchemy.inspect(engine)
21 return [idx['name'] for idx in inspector.get_indexes(table)]
22
23
24class TestMitaka01Mixin(test_migrations.AlembicMigrationsMixin):
25
26 def _pre_upgrade_mitaka01(self, engine):
27 indexes = get_indexes('images', engine)
28 self.assertNotIn('created_at_image_idx', indexes)
29 self.assertNotIn('updated_at_image_idx', indexes)
30
31 def _check_mitaka01(self, engine, data):
32 indexes = get_indexes('images', engine)
33 self.assertIn('created_at_image_idx', indexes)
34 self.assertIn('updated_at_image_idx', indexes)
35
36
37class TestMitaka01MySQL(TestMitaka01Mixin,
38 test_base.MySQLOpportunisticTestCase):
39 pass
40
41
42class TestMitaka01PostgresSQL(TestMitaka01Mixin,
43 test_base.PostgreSQLOpportunisticTestCase):
44 pass
45
46
47class TestMitaka01Sqlite(TestMitaka01Mixin, test_base.DbTestCase):
48 pass
diff --git a/glance/tests/functional/db/migrations/test_mitaka02.py b/glance/tests/functional/db/migrations/test_mitaka02.py
new file mode 100644
index 0000000..48eb858
--- /dev/null
+++ b/glance/tests/functional/db/migrations/test_mitaka02.py
@@ -0,0 +1,65 @@
1# Licensed under the Apache License, Version 2.0 (the "License"); you may
2# not use this file except in compliance with the License. You may obtain
3# a copy of the License at
4#
5# http://www.apache.org/licenses/LICENSE-2.0
6#
7# Unless required by applicable law or agreed to in writing, software
8# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
9# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
10# License for the specific language governing permissions and limitations
11# under the License.
12
13import datetime
14
15from oslo_db.sqlalchemy import test_base
16from oslo_db.sqlalchemy import utils as db_utils
17
18from glance.tests.functional.db import test_migrations
19
20
21class TestMitaka02Mixin(test_migrations.AlembicMigrationsMixin):
22
23 def _pre_upgrade_mitaka02(self, engine):
24 metadef_resource_types = db_utils.get_table(engine,
25 'metadef_resource_types')
26 now = datetime.datetime.now()
27 db_rec1 = dict(id='9580',
28 name='OS::Nova::Instance',
29 protected=False,
30 created_at=now,
31 updated_at=now,)
32 db_rec2 = dict(id='9581',
33 name='OS::Nova::Blah',
34 protected=False,
35 created_at=now,
36 updated_at=now,)
37 db_values = (db_rec1, db_rec2)
38 metadef_resource_types.insert().values(db_values).execute()
39
40 def _check_mitaka02(self, engine, data):
41 metadef_resource_types = db_utils.get_table(engine,
42 'metadef_resource_types')
43 result = (metadef_resource_types.select()
44 .where(metadef_resource_types.c.name == 'OS::Nova::Instance')
45 .execute().fetchall())
46 self.assertEqual(0, len(result))
47
48 result = (metadef_resource_types.select()
49 .where(metadef_resource_types.c.name == 'OS::Nova::Server')
50 .execute().fetchall())
51 self.assertEqual(1, len(result))
52
53
54class TestMitaka02MySQL(TestMitaka02Mixin,
55 test_base.MySQLOpportunisticTestCase):
56 pass
57
58
59class TestMitaka02PostgresSQL(TestMitaka02Mixin,
60 test_base.PostgreSQLOpportunisticTestCase):
61 pass
62
63
64class TestMitaka02Sqlite(TestMitaka02Mixin, test_base.DbTestCase):
65 pass
diff --git a/glance/tests/functional/db/migrations/test_ocata01.py b/glance/tests/functional/db/migrations/test_ocata01.py
new file mode 100644
index 0000000..323fee3
--- /dev/null
+++ b/glance/tests/functional/db/migrations/test_ocata01.py
@@ -0,0 +1,142 @@
1# Licensed under the Apache License, Version 2.0 (the "License"); you may
2# not use this file except in compliance with the License. You may obtain
3# a copy of the License at
4#
5# http://www.apache.org/licenses/LICENSE-2.0
6#
7# Unless required by applicable law or agreed to in writing, software
8# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
9# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
10# License for the specific language governing permissions and limitations
11# under the License.
12
13import datetime
14
15from oslo_db.sqlalchemy import test_base
16from oslo_db.sqlalchemy import utils as db_utils
17
18from glance.tests.functional.db import test_migrations
19
20
21class TestOcata01Mixin(test_migrations.AlembicMigrationsMixin):
22
23 def _pre_upgrade_ocata01(self, engine):
24 images = db_utils.get_table(engine, 'images')
25 now = datetime.datetime.now()
26 image_members = db_utils.get_table(engine, 'image_members')
27
28 # inserting a public image record
29 public_temp = dict(deleted=False,
30 created_at=now,
31 status='active',
32 is_public=True,
33 min_disk=0,
34 min_ram=0,
35 id='public_id')
36 images.insert().values(public_temp).execute()
37
38 # inserting a non-public image record for 'shared' visibility test
39 shared_temp = dict(deleted=False,
40 created_at=now,
41 status='active',
42 is_public=False,
43 min_disk=0,
44 min_ram=0,
45 id='shared_id')
46 images.insert().values(shared_temp).execute()
47
48 # inserting a non-public image records for 'private' visibility test
49 private_temp = dict(deleted=False,
50 created_at=now,
51 status='active',
52 is_public=False,
53 min_disk=0,
54 min_ram=0,
55 id='private_id_1')
56 images.insert().values(private_temp).execute()
57 private_temp = dict(deleted=False,
58 created_at=now,
59 status='active',
60 is_public=False,
61 min_disk=0,
62 min_ram=0,
63 id='private_id_2')
64 images.insert().values(private_temp).execute()
65
66 # adding an active as well as a deleted image member for checking
67 # 'shared' visibility
68 temp = dict(deleted=False,
69 created_at=now,
70 image_id='shared_id',
71 member='fake_member_452',
72 can_share=True,
73 id=45)
74 image_members.insert().values(temp).execute()
75
76 temp = dict(deleted=True,
77 created_at=now,
78 image_id='shared_id',
79 member='fake_member_453',
80 can_share=True,
81 id=453)
82 image_members.insert().values(temp).execute()
83
84 # adding an image member, but marking it deleted,
85 # for testing 'private' visibility
86 temp = dict(deleted=True,
87 created_at=now,
88 image_id='private_id_2',
89 member='fake_member_451',
90 can_share=True,
91 id=451)
92 image_members.insert().values(temp).execute()
93
94 # adding an active image member for the 'public' image,
95 # to test it remains public regardless.
96 temp = dict(deleted=False,
97 created_at=now,
98 image_id='public_id',
99 member='fake_member_450',
100 can_share=True,
101 id=450)
102 image_members.insert().values(temp).execute()
103
104 def _check_ocata01(self, engine, data):
105 # check that after migration, 'visibility' column is introduced
106 images = db_utils.get_table(engine, 'images')
107 self.assertIn('visibility', images.c)
108 self.assertNotIn('is_public', images.c)
109
110 # tests to identify the visibilities of images created above
111 rows = images.select().where(
112 images.c.id == 'public_id').execute().fetchall()
113 self.assertEqual(1, len(rows))
114 self.assertEqual('public', rows[0][16])
115
116 rows = images.select().where(
117 images.c.id == 'shared_id').execute().fetchall()
118 self.assertEqual(1, len(rows))
119 self.assertEqual('shared', rows[0][16])
120
121 rows = images.select().where(
122 images.c.id == 'private_id_1').execute().fetchall()
123 self.assertEqual(1, len(rows))
124 self.assertEqual('private', rows[0][16])
125
126 rows = images.select().where(
127 images.c.id == 'private_id_2').execute().fetchall()
128 self.assertEqual(1, len(rows))
129 self.assertEqual('private', rows[0][16])
130
131
132class TestOcata01MySQL(TestOcata01Mixin, test_base.MySQLOpportunisticTestCase):
133 pass
134
135
136class TestOcata01PostgresSQL(TestOcata01Mixin,
137 test_base.PostgreSQLOpportunisticTestCase):
138 pass
139
140
141class TestOcata01Sqlite(TestOcata01Mixin, test_base.DbTestCase):
142 pass
diff --git a/glance/tests/functional/db/test_migrations.py b/glance/tests/functional/db/test_migrations.py
new file mode 100644
index 0000000..2621ed3
--- /dev/null
+++ b/glance/tests/functional/db/test_migrations.py
@@ -0,0 +1,173 @@
1# Copyright 2016 Rackspace
2# Copyright 2016 Intel Corporation
3#
4# All Rights Reserved.
5#
6# Licensed under the Apache License, Version 2.0 (the "License"); you may
7# not use this file except in compliance with the License. You may obtain
8# a copy of the License at
9#
10# http://www.apache.org/licenses/LICENSE-2.0
11#
12# Unless required by applicable law or agreed to in writing, software
13# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
14# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
15# License for the specific language governing permissions and limitations
16# under the License.
17
18import os
19
20from alembic import command as alembic_command
21from alembic import script as alembic_script
22from oslo_db.sqlalchemy import test_base
23from oslo_db.sqlalchemy import test_migrations
24import sqlalchemy.types as types
25
26from glance.db.sqlalchemy import alembic_migrations
27from glance.db.sqlalchemy.alembic_migrations import versions
28from glance.db.sqlalchemy import models
29from glance.db.sqlalchemy import models_glare
30from glance.db.sqlalchemy import models_metadef
31import glance.tests.utils as test_utils
32
33
34class AlembicMigrationsMixin(object):
35
36 def _get_revisions(self, config):
37 scripts_dir = alembic_script.ScriptDirectory.from_config(config)
38 revisions = list(scripts_dir.walk_revisions(base='base', head='heads'))
39 revisions = list(reversed(revisions))
40 revisions = [rev.revision for rev in revisions]
41 return revisions
42
43 def _migrate_up(self, config, engine, revision, with_data=False):
44 if with_data:
45 data = None
46 pre_upgrade = getattr(self, '_pre_upgrade_%s' % revision, None)
47 if pre_upgrade:
48 data = pre_upgrade(engine)
49
50 alembic_command.upgrade(config, revision)
51
52 if with_data:
53 check = getattr(self, '_check_%s' % revision, None)
54 if check:
55 check(engine, data)
56
57 def test_walk_versions(self):
58 alembic_config = alembic_migrations.get_alembic_config(self.engine)
59 for revision in self._get_revisions(alembic_config):
60 self._migrate_up(alembic_config, self.engine, revision,
61 with_data=True)
62
63
64class TestMysqlMigrations(test_base.MySQLOpportunisticTestCase,
65 AlembicMigrationsMixin):
66
67 def test_mysql_innodb_tables(self):
68 test_utils.db_sync(engine=self.engine)
69
70 total = self.engine.execute(
71 "SELECT COUNT(*) "
72 "FROM information_schema.TABLES "
73 "WHERE TABLE_SCHEMA='%s'"
74 % self.engine.url.database)
75 self.assertGreater(total.scalar(), 0, "No tables found. Wrong schema?")
76
77 noninnodb = self.engine.execute(
78 "SELECT count(*) "
79 "FROM information_schema.TABLES "
80 "WHERE TABLE_SCHEMA='%s' "
81 "AND ENGINE!='InnoDB' "
82 "AND TABLE_NAME!='migrate_version'"
83 % self.engine.url.database)
84 count = noninnodb.scalar()
85 self.assertEqual(0, count, "%d non InnoDB tables created" % count)
86
87
88class TestPostgresqlMigrations(test_base.PostgreSQLOpportunisticTestCase,
89 AlembicMigrationsMixin):
90 pass
91
92
93class TestSqliteMigrations(test_base.DbTestCase, AlembicMigrationsMixin):
94 pass
95
96
97class TestMigrations(test_base.DbTestCase, test_utils.BaseTestCase):
98
99 def test_no_downgrade(self):
100 migrate_file = versions.__path__[0]
101 for parent, dirnames, filenames in os.walk(migrate_file):
102 for filename in filenames:
103 if filename.split('.')[1] == 'py':
104 model_name = filename.split('.')[0]
105 model = __import__(
106 'glance.db.sqlalchemy.alembic_migrations.versions.' +
107 model_name)
108 obj = getattr(getattr(getattr(getattr(getattr(
109 model, 'db'), 'sqlalchemy'), 'alembic_migrations'),
110 'versions'), model_name)
111 func = getattr(obj, 'downgrade', None)
112 self.assertIsNone(func)
113
114
115class ModelsMigrationSyncMixin(object):
116
117 def get_metadata(self):
118 for table in models_metadef.BASE_DICT.metadata.sorted_tables:
119 models.BASE.metadata._add_table(table.name, table.schema, table)
120 for table in models_glare.BASE.metadata.sorted_tables:
121 models.BASE.metadata._add_table(table.name, table.schema, table)
122 return models.BASE.metadata
123
124 def get_engine(self):
125 return self.engine
126
127 def db_sync(self, engine):
128 test_utils.db_sync(engine=engine)
129
130 # TODO(akamyshikova): remove this method as soon as comparison with Variant
131 # will be implemented in oslo.db or alembic
132 def compare_type(self, ctxt, insp_col, meta_col, insp_type, meta_type):
133 if isinstance(meta_type, types.Variant):
134 meta_orig_type = meta_col.type
135 insp_orig_type = insp_col.type
136 meta_col.type = meta_type.impl
137 insp_col.type = meta_type.impl
138
139 try:
140 return self.compare_type(ctxt, insp_col, meta_col, insp_type,
141 meta_type.impl)
142 finally:
143 meta_col.type = meta_orig_type
144 insp_col.type = insp_orig_type
145 else:
146 ret = super(ModelsMigrationSyncMixin, self).compare_type(
147 ctxt, insp_col, meta_col, insp_type, meta_type)
148 if ret is not None:
149 return ret
150 return ctxt.impl.compare_type(insp_col, meta_col)
151
152 def include_object(self, object_, name, type_, reflected, compare_to):
153 if name in ['migrate_version'] and type_ == 'table':
154 return False
155 return True
156
157
158class ModelsMigrationsSyncMysql(ModelsMigrationSyncMixin,
159 test_migrations.ModelsMigrationsSync,
160 test_base.MySQLOpportunisticTestCase):
161 pass
162
163
164class ModelsMigrationsSyncPostgres(ModelsMigrationSyncMixin,
165 test_migrations.ModelsMigrationsSync,
166 test_base.PostgreSQLOpportunisticTestCase):
167 pass
168
169
170class ModelsMigrationsSyncSqlite(ModelsMigrationSyncMixin,
171 test_migrations.ModelsMigrationsSync,
172 test_base.DbTestCase):
173 pass
diff --git a/glance/tests/integration/legacy_functional/base.py b/glance/tests/integration/legacy_functional/base.py
index b094d91..174e3e6 100644
--- a/glance/tests/integration/legacy_functional/base.py
+++ b/glance/tests/integration/legacy_functional/base.py
@@ -21,7 +21,6 @@ from oslo_db import options
21 21
22import glance.common.client 22import glance.common.client
23from glance.common import config 23from glance.common import config
24from glance.db import migration
25import glance.db.sqlalchemy.api 24import glance.db.sqlalchemy.api
26import glance.registry.client.v1.client 25import glance.registry.client.v1.client
27from glance import tests as glance_tests 26from glance import tests as glance_tests
@@ -171,7 +170,7 @@ class ApiTest(test_utils.BaseTestCase):
171 test_utils.execute('cp %s %s/tests.sqlite' 170 test_utils.execute('cp %s %s/tests.sqlite'
172 % (db_location, self.test_dir)) 171 % (db_location, self.test_dir))
173 else: 172 else:
174 migration.db_sync() 173 test_utils.db_sync()
175 174
176 # copy the clean db to a temp location so that it 175 # copy the clean db to a temp location so that it
177 # can be reused for future tests 176 # can be reused for future tests
diff --git a/glance/tests/integration/v2/base.py b/glance/tests/integration/v2/base.py
index 4e57feb..71cec78 100644
--- a/glance/tests/integration/v2/base.py
+++ b/glance/tests/integration/v2/base.py
@@ -24,7 +24,6 @@ from oslo_db import options
24 24
25import glance.common.client 25import glance.common.client
26from glance.common import config 26from glance.common import config
27from glance.db import migration
28import glance.db.sqlalchemy.api 27import glance.db.sqlalchemy.api
29import glance.registry.client.v1.client 28import glance.registry.client.v1.client
30from glance import tests as glance_tests 29from glance import tests as glance_tests
@@ -166,7 +165,7 @@ class ApiTest(test_utils.BaseTestCase):
166 test_utils.execute('cp %s %s/tests.sqlite' 165 test_utils.execute('cp %s %s/tests.sqlite'
167 % (db_location, self.test_dir)) 166 % (db_location, self.test_dir))
168 else: 167 else:
169 migration.db_sync() 168 test_utils.db_sync()
170 169
171 # copy the clean db to a temp location so that it 170 # copy the clean db to a temp location so that it
172 # can be reused for future tests 171 # can be reused for future tests
diff --git a/glance/tests/unit/test_migrations.py b/glance/tests/unit/test_migrations.py
deleted file mode 100644
index 7f7e23b..0000000
--- a/glance/tests/unit/test_migrations.py
+++ /dev/null
@@ -1,1712 +0,0 @@
1# Copyright 2010-2011 OpenStack Foundation
2# All Rights Reserved.
3# Copyright 2013 IBM Corp.
4#
5# Licensed under the Apache License, Version 2.0 (the "License"); you may
6# not use this file except in compliance with the License. You may obtain
7# a copy of the License at
8#
9# http://www.apache.org/licenses/LICENSE-2.0
10#
11# Unless required by applicable law or agreed to in writing, software
12# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
13# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
14# License for the specific language governing permissions and limitations
15# under the License.
16
17"""
18Tests for database migrations run a series of test cases to ensure that
19migrations work properly both upgrading and downgrading, and that no data loss
20occurs if possible.
21"""
22
23from __future__ import print_function
24
25import datetime
26import os
27import pickle
28import uuid
29
30from migrate.versioning import api as migration_api
31from migrate.versioning.repository import Repository
32from oslo_config import cfg
33from oslo_db.sqlalchemy import test_base
34from oslo_db.sqlalchemy import test_migrations
35from oslo_db.sqlalchemy import utils as db_utils
36from oslo_serialization import jsonutils
37from oslo_utils import uuidutils
38# NOTE(jokke): simplified transition to py3, behaves like py2 xrange
39from six.moves import range
40import sqlalchemy
41import sqlalchemy.types as types
42
43from glance.common import crypt
44from glance.common import exception
45from glance.common import timeutils
46from glance.db import migration
47from glance.db.sqlalchemy import migrate_repo
48from glance.db.sqlalchemy.migrate_repo.schema import from_migration_import
49from glance.db.sqlalchemy.migrate_repo import versions
50from glance.db.sqlalchemy import models
51from glance.db.sqlalchemy import models_glare
52from glance.db.sqlalchemy import models_metadef
53import glance.tests.utils as test_utils
54
55from glance.i18n import _
56
57CONF = cfg.CONF
58CONF.import_opt('metadata_encryption_key', 'glance.common.config')
59
60
61def index_exist(index, table, engine):
62 inspector = sqlalchemy.inspect(engine)
63 return index in [i['name'] for i in inspector.get_indexes(table)]
64
65
66def unique_constraint_exist(constraint, table, engine):
67 inspector = sqlalchemy.inspect(engine)
68 return constraint in [c['name'] for c in
69 inspector.get_unique_constraints(table)]
70
71
72class MigrationsMixin(test_migrations.WalkVersionsMixin):
73 @property
74 def INIT_VERSION(self):
75 return migration.INIT_VERSION
76
77 @property
78 def REPOSITORY(self):
79 migrate_file = migrate_repo.__file__
80 return Repository(os.path.abspath(os.path.dirname(migrate_file)))
81
82 @property
83 def migration_api(self):
84 return migration_api
85
86 @property
87 def migrate_engine(self):
88 return self.engine
89
90 def test_walk_versions(self):
91 # No more downgrades
92 self._walk_versions(False, False)
93
94 def _create_unversioned_001_db(self, engine):
95 # Create the initial version of the images table
96 meta = sqlalchemy.schema.MetaData()
97 meta.bind = engine
98 images_001 = sqlalchemy.Table('images', meta,
99 sqlalchemy.Column('id', models.Integer,
100 primary_key=True),
101 sqlalchemy.Column('name',
102 sqlalchemy.String(255)
103 ),
104 sqlalchemy.Column('type',
105 sqlalchemy.String(30)),
106 sqlalchemy.Column('size',
107 sqlalchemy.Integer),
108 sqlalchemy.Column('status',
109 sqlalchemy.String(30)),
110 sqlalchemy.Column('is_public',
111 sqlalchemy.Boolean,
112 default=False),
113 sqlalchemy.Column('location',
114 sqlalchemy.Text),
115 sqlalchemy.Column('created_at',
116 sqlalchemy.DateTime(),
117 nullable=False),
118 sqlalchemy.Column('updated_at',
119 sqlalchemy.DateTime()),
120 sqlalchemy.Column('deleted_at',
121 sqlalchemy.DateTime()),
122 sqlalchemy.Column('deleted',
123 sqlalchemy.Boolean(),
124 nullable=False,
125 default=False),
126 mysql_engine='InnoDB',
127 mysql_charset='utf8')
128 images_001.create()
129
130 def test_version_control_existing_db(self):
131 """
132 Creates a DB without version control information, places it
133 under version control and checks that it can be upgraded
134 without errors.
135 """
136 self._create_unversioned_001_db(self.migrate_engine)
137
138 old_version = migration.INIT_VERSION
139 # we must start from version 1
140 migration.INIT_VERSION = 1
141 self.addCleanup(setattr, migration, 'INIT_VERSION', old_version)
142
143 self._walk_versions(False, False)
144
145 def _pre_upgrade_003(self, engine):
146 now = datetime.datetime.now()
147 images = db_utils.get_table(engine, 'images')
148 data = {'deleted': False, 'created_at': now, 'updated_at': now,
149 'type': 'kernel', 'status': 'active', 'is_public': True}
150 images.insert().values(data).execute()
151 return data
152
153 def _check_003(self, engine, data):
154 images = db_utils.get_table(engine, 'images')
155 self.assertNotIn('type', images.c,
156 "'type' column found in images table columns! "
157 "images table columns reported by metadata: %s\n"
158 % images.c.keys())
159 images_prop = db_utils.get_table(engine, 'image_properties')
160 result = images_prop.select().execute()
161 types = []
162 for row in result:
163 if row['key'] == 'type':
164 types.append(row['value'])
165 self.assertIn(data['type'], types)
166
167 def _pre_upgrade_004(self, engine):
168 """Insert checksum data sample to check if migration goes fine with
169 data.
170 """
171 now = timeutils.utcnow()
172 images = db_utils.get_table(engine, 'images')
173 data = [
174 {
175 'deleted': False, 'created_at': now, 'updated_at': now,
176 'type': 'kernel', 'status': 'active', 'is_public': True,
177 }
178 ]
179 engine.execute(images.insert(), data)
180 return data
181
182 def _check_004(self, engine, data):
183 """Assure that checksum data is present on table"""
184 images = db_utils.get_table(engine, 'images')
185 self.assertIn('checksum', images.c)
186 self.assertEqual(32, images.c['checksum'].type.length)
187
188 def _pre_upgrade_005(self, engine):
189 now = timeutils.utcnow()
190 images = db_utils.get_table(engine, 'images')
191 data = [
192 {
193 'deleted': False, 'created_at': now, 'updated_at': now,
194 'type': 'kernel', 'status': 'active', 'is_public': True,
195 # Integer type signed size limit
196 'size': 2147483647
197 }
198 ]
199 engine.execute(images.insert(), data)
200 return data
201
202 def _check_005(self, engine, data):
203
204 images = db_utils.get_table(engine, 'images')
205 select = images.select().execute()
206
207 sizes = [row['size'] for row in select if row['size'] is not None]
208 migrated_data_sizes = [element['size'] for element in data]
209
210 for migrated in migrated_data_sizes:
211 self.assertIn(migrated, sizes)
212
213 def _pre_upgrade_006(self, engine):
214 now = timeutils.utcnow()
215 images = db_utils.get_table(engine, 'images')
216 image_data = [
217 {
218 'deleted': False, 'created_at': now, 'updated_at': now,
219 'type': 'kernel', 'status': 'active', 'is_public': True,
220 'id': 9999,
221 }
222 ]
223 engine.execute(images.insert(), image_data)
224
225 images_properties = db_utils.get_table(engine, 'image_properties')
226 properties_data = [
227 {
228 'id': 10, 'image_id': 9999, 'updated_at': now,
229 'created_at': now, 'deleted': False, 'key': 'image_name'
230 }
231 ]
232 engine.execute(images_properties.insert(), properties_data)
233 return properties_data
234
235 def _check_006(self, engine, data):
236 images_properties = db_utils.get_table(engine, 'image_properties')
237 select = images_properties.select().execute()
238
239 # load names from name collumn
240 image_names = [row['name'] for row in select]
241
242 # check names from data in image names from name column
243 for element in data:
244 self.assertIn(element['key'], image_names)
245
246 def _pre_upgrade_010(self, engine):
247 """Test rows in images with NULL updated_at get updated to equal
248 created_at.
249 """
250
251 initial_values = [
252 (datetime.datetime(1999, 1, 2, 4, 10, 20),
253 datetime.datetime(1999, 1, 2, 4, 10, 30)),
254 (datetime.datetime(1999, 2, 4, 6, 15, 25),
255 datetime.datetime(1999, 2, 4, 6, 15, 35)),
256 (datetime.datetime(1999, 3, 6, 8, 20, 30),
257 None),
258 (datetime.datetime(1999, 4, 8, 10, 25, 35),
259 None),
260 ]
261
262 images = db_utils.get_table(engine, 'images')
263 for created_at, updated_at in initial_values:
264 row = dict(deleted=False,
265 created_at=created_at,
266 updated_at=updated_at,
267 status='active',
268 is_public=True,
269 min_disk=0,
270 min_ram=0)
271 images.insert().values(row).execute()
272
273 return initial_values
274
275 def _check_010(self, engine, data):
276 values = {c: u for c, u in data}
277
278 images = db_utils.get_table(engine, 'images')
279 for row in images.select().execute():
280 if row['created_at'] in values:
281 # updated_at should be unchanged if not previous NULL, or
282 # set to created_at if previously NULL
283 updated_at = values.pop(row['created_at']) or row['created_at']
284 self.assertEqual(row['updated_at'], updated_at)
285
286 # No initial values should be remaining
287 self.assertEqual(0, len(values))
288
289 def _pre_upgrade_012(self, engine):
290 """Test rows in images have id changes from int to varchar(32) and
291 value changed from int to UUID. Also test image_members and
292 image_properties gets updated to point to new UUID keys.
293 """
294
295 images = db_utils.get_table(engine, 'images')
296 image_members = db_utils.get_table(engine, 'image_members')
297 image_properties = db_utils.get_table(engine, 'image_properties')
298
299 # Insert kernel, ramdisk and normal images
300 now = timeutils.utcnow()
301 data = {'created_at': now, 'updated_at': now,
302 'status': 'active', 'deleted': False,
303 'is_public': True, 'min_disk': 0, 'min_ram': 0}
304
305 test_data = {}
306 for name in ('kernel', 'ramdisk', 'normal'):
307 data['name'] = '%s migration 012 test' % name
308 result = images.insert().values(data).execute()
309 test_data[name] = result.inserted_primary_key[0]
310
311 # Insert image_members and image_properties rows
312 data = {'created_at': now, 'updated_at': now, 'deleted': False,
313 'image_id': test_data['normal'], 'member': 'foobar',
314 'can_share': False}
315 result = image_members.insert().values(data).execute()
316 test_data['member'] = result.inserted_primary_key[0]
317
318 data = {'created_at': now, 'updated_at': now, 'deleted': False,
319 'image_id': test_data['normal'], 'name': 'ramdisk_id',
320 'value': test_data['ramdisk']}
321 result = image_properties.insert().values(data).execute()
322 test_data['properties'] = [result.inserted_primary_key[0]]
323
324 data.update({'name': 'kernel_id', 'value': test_data['kernel']})
325 result = image_properties.insert().values(data).execute()
326 test_data['properties'].append(result.inserted_primary_key)
327
328 return test_data
329
    def _check_012(self, engine, test_data):
        """Verify migration 012 converted integer ids to UUID strings.

        Checks that each seeded image now has a uuid-like id, and that
        image_members.image_id plus the kernel_id/ramdisk_id property
        values were rewritten to the new UUIDs.
        """
        images = db_utils.get_table(engine, 'images')
        image_members = db_utils.get_table(engine, 'image_members')
        image_properties = db_utils.get_table(engine, 'image_properties')

        # Find kernel, ramdisk and normal images. Make sure id has been
        # changed to a uuid
        uuids = {}
        for name in ('kernel', 'ramdisk', 'normal'):
            image_name = '%s migration 012 test' % name
            rows = images.select().where(
                images.c.name == image_name).execute().fetchall()

            # Exactly one image per seeded name.
            self.assertEqual(1, len(rows))

            row = rows[0]
            self.assertTrue(uuidutils.is_uuid_like(row['id']))

            uuids[name] = row['id']

        # Find all image_members to ensure image_id has been updated
        results = image_members.select().where(
            image_members.c.image_id == uuids['normal']).execute().fetchall()
        self.assertEqual(1, len(results))

        # Find all image_properties to ensure image_id has been updated
        # as well as ensure kernel_id and ramdisk_id values have been
        # updated too
        results = image_properties.select().where(
            image_properties.c.image_id == uuids['normal']
        ).execute().fetchall()
        self.assertEqual(2, len(results))
        for row in results:
            self.assertIn(row['name'], ('kernel_id', 'ramdisk_id'))

            # Property values must point at the migrated image UUIDs.
            if row['name'] == 'kernel_id':
                self.assertEqual(row['value'], uuids['kernel'])
            if row['name'] == 'ramdisk_id':
                self.assertEqual(row['value'], uuids['ramdisk'])
369
370 def _assert_invalid_swift_uri_raises_bad_store_uri(self,
371 legacy_parse_uri_fn):
372 invalid_uri = ('swift://http://acct:usr:pass@example.com'
373 '/container/obj-id')
374 # URI cannot contain more than one occurrence of a scheme.
375 self.assertRaises(exception.BadStoreUri,
376 legacy_parse_uri_fn,
377 invalid_uri,
378 True)
379
380 invalid_scheme_uri = ('http://acct:usr:pass@example.com'
381 '/container/obj-id')
382 self.assertRaises(exception.BadStoreUri,
383 legacy_parse_uri_fn,
384 invalid_scheme_uri,
385 True)
386
387 invalid_account_missing_uri = 'swift+http://container/obj-id'
388 # Badly formed Swift URI: swift+http://container/obj-id
389 self.assertRaises(exception.BadStoreUri,
390 legacy_parse_uri_fn,
391 invalid_account_missing_uri,
392 True)
393
394 invalid_container_missing_uri = ('swift+http://'
395 'acct:usr:pass@example.com/obj-id')
396 # Badly formed Swift URI: swift+http://acct:usr:pass@example.com/obj-id
397 self.assertRaises(exception.BadStoreUri,
398 legacy_parse_uri_fn,
399 invalid_container_missing_uri,
400 True)
401
402 invalid_object_missing_uri = ('swift+http://'
403 'acct:usr:pass@example.com/container')
404 # Badly formed Swift URI:
405 # swift+http://acct:usr:pass@example.com/container
406 self.assertRaises(exception.BadStoreUri,
407 legacy_parse_uri_fn,
408 invalid_object_missing_uri,
409 True)
410
411 invalid_user_without_pass_uri = ('swift://acctusr@example.com'
412 '/container/obj-id')
413 # Badly formed credentials '%(creds)s' in Swift URI
414 self.assertRaises(exception.BadStoreUri,
415 legacy_parse_uri_fn,
416 invalid_user_without_pass_uri,
417 True)
418
419 # Badly formed credentials in Swift URI.
420 self.assertRaises(exception.BadStoreUri,
421 legacy_parse_uri_fn,
422 invalid_user_without_pass_uri,
423 False)
424
425 def test_legacy_parse_swift_uri_015(self):
426 (legacy_parse_uri,) = from_migration_import(
427 '015_quote_swift_credentials', ['legacy_parse_uri'])
428
429 uri = legacy_parse_uri(
430 'swift://acct:usr:pass@example.com/container/obj-id',
431 True)
432 self.assertTrue(uri, 'swift://acct%3Ausr:pass@example.com'
433 '/container/obj-id')
434
435 self._assert_invalid_swift_uri_raises_bad_store_uri(legacy_parse_uri)
436
437 def _pre_upgrade_015(self, engine):
438 images = db_utils.get_table(engine, 'images')
439 unquoted_locations = [
440 'swift://acct:usr:pass@example.com/container/obj-id',
441 'file://foo',
442 ]
443 now = datetime.datetime.now()
444 temp = dict(deleted=False,
445 created_at=now,
446 updated_at=now,
447 status='active',
448 is_public=True,
449 min_disk=0,
450 min_ram=0)
451 data = []
452 for i, location in enumerate(unquoted_locations):
453 temp.update(location=location, id=str(uuid.uuid4()))
454 data.append(temp)
455 images.insert().values(temp).execute()
456 return data
457
458 def _check_015(self, engine, data):
459 images = db_utils.get_table(engine, 'images')
460 quoted_locations = [
461 'swift://acct%3Ausr:pass@example.com/container/obj-id',
462 'file://foo',
463 ]
464 result = images.select().execute()
465 locations = list(map(lambda x: x['location'], result))
466 for loc in quoted_locations:
467 self.assertIn(loc, locations)
468
469 def _pre_upgrade_016(self, engine):
470 images = db_utils.get_table(engine, 'images')
471 now = datetime.datetime.now()
472 temp = dict(deleted=False,
473 created_at=now,
474 updated_at=now,
475 status='active',
476 is_public=True,
477 min_disk=0,
478 min_ram=0,
479 id='fake-image-id1')
480 images.insert().values(temp).execute()
481 image_members = db_utils.get_table(engine, 'image_members')
482 now = datetime.datetime.now()
483 data = {'deleted': False,
484 'created_at': now,
485 'member': 'fake-member',
486 'updated_at': now,
487 'can_share': False,
488 'image_id': 'fake-image-id1'}
489 image_members.insert().values(data).execute()
490 return data
491
492 def _check_016(self, engine, data):
493 image_members = db_utils.get_table(engine, 'image_members')
494 self.assertIn('status', image_members.c,
495 "'status' column found in image_members table "
496 "columns! image_members table columns: %s"
497 % image_members.c.keys())
498
    def test_legacy_parse_swift_uri_017(self):
        """Exercise legacy_parse_uri from migration 017 with encryption."""
        metadata_encryption_key = 'a' * 16
        CONF.set_override('metadata_encryption_key', metadata_encryption_key,
                          enforce_type=True)
        self.addCleanup(CONF.reset)
        (legacy_parse_uri, encrypt_location) = from_migration_import(
            '017_quote_encrypted_swift_credentials', ['legacy_parse_uri',
                                                      'encrypt_location'])

        uri = legacy_parse_uri('swift://acct:usr:pass@example.com'
                               '/container/obj-id', True)
        # NOTE(review): assertTrue's second argument is only the failure
        # message, so this merely checks that 'uri' is truthy; it never
        # compares against the encrypted location (which may not be
        # deterministic anyway). Consider decrypting and comparing instead.
        self.assertTrue(uri, encrypt_location(
            'swift://acct%3Ausr:pass@example.com/container/obj-id'))

        self._assert_invalid_swift_uri_raises_bad_store_uri(legacy_parse_uri)
514
515 def _pre_upgrade_017(self, engine):
516 metadata_encryption_key = 'a' * 16
517 CONF.set_override('metadata_encryption_key', metadata_encryption_key,
518 enforce_type=True)
519 self.addCleanup(CONF.reset)
520 images = db_utils.get_table(engine, 'images')
521 unquoted = 'swift://acct:usr:pass@example.com/container/obj-id'
522 encrypted_unquoted = crypt.urlsafe_encrypt(
523 metadata_encryption_key,
524 unquoted, 64)
525 data = []
526 now = datetime.datetime.now()
527 temp = dict(deleted=False,
528 created_at=now,
529 updated_at=now,
530 status='active',
531 is_public=True,
532 min_disk=0,
533 min_ram=0,
534 location=encrypted_unquoted,
535 id='fakeid1')
536 images.insert().values(temp).execute()
537
538 locations = [
539 'file://ab',
540 'file://abc',
541 'swift://acct3A%foobar:pass@example.com/container/obj-id2'
542 ]
543
544 now = datetime.datetime.now()
545 temp = dict(deleted=False,
546 created_at=now,
547 updated_at=now,
548 status='active',
549 is_public=True,
550 min_disk=0,
551 min_ram=0)
552 for i, location in enumerate(locations):
553 temp.update(location=location, id=str(uuid.uuid4()))
554 data.append(temp)
555 images.insert().values(temp).execute()
556 return data
557
    def _check_017(self, engine, data):
        """Migration 017 must encrypt the quoted swift location.

        Decrypt every stored location with the configured key; the quoted
        swift URI must be among the decrypted values, and the plain-text
        locations seeded by _pre_upgrade_017 must survive untouched.
        """
        metadata_encryption_key = 'a' * 16
        quoted = 'swift://acct%3Ausr:pass@example.com/container/obj-id'
        images = db_utils.get_table(engine, 'images')
        result = images.select().execute()
        locations = list(map(lambda x: x['location'], result))
        actual_location = []
        for location in locations:
            if location:
                try:
                    temp_loc = crypt.urlsafe_decrypt(metadata_encryption_key,
                                                     location)
                    actual_location.append(temp_loc)
                except TypeError:
                    # Not an encrypted value; keep the raw location.
                    actual_location.append(location)
                except ValueError:
                    # Decryption failed; keep the raw location.
                    actual_location.append(location)

        self.assertIn(quoted, actual_location)
        loc_list = ['file://ab',
                    'file://abc',
                    'swift://acct3A%foobar:pass@example.com/container/obj-id2']

        for location in loc_list:
            if location not in actual_location:
                self.fail(_("location: %s data lost") % location)
584
585 def _pre_upgrade_019(self, engine):
586 images = db_utils.get_table(engine, 'images')
587 now = datetime.datetime.now()
588 base_values = {
589 'deleted': False,
590 'created_at': now,
591 'updated_at': now,
592 'status': 'active',
593 'is_public': True,
594 'min_disk': 0,
595 'min_ram': 0,
596 }
597 data = [
598 {'id': 'fake-19-1', 'location': 'http://glance.example.com'},
599 # NOTE(bcwaldon): images with a location of None should
600 # not be migrated
601 {'id': 'fake-19-2', 'location': None},
602 ]
603 for image in data:
604 image.update(base_values)
605 images.insert().values(image).execute()
606 return data
607
608 def _check_019(self, engine, data):
609 image_locations = db_utils.get_table(engine, 'image_locations')
610 records = image_locations.select().execute().fetchall()
611 locations = {il.image_id: il.value for il in records}
612 self.assertEqual('http://glance.example.com',
613 locations.get('fake-19-1'))
614
615 def _check_020(self, engine, data):
616 images = db_utils.get_table(engine, 'images')
617 self.assertNotIn('location', images.c)
618
619 def _pre_upgrade_026(self, engine):
620 image_locations = db_utils.get_table(engine, 'image_locations')
621
622 now = datetime.datetime.now()
623 image_id = 'fake_id'
624 url = 'file:///some/place/onthe/fs'
625
626 images = db_utils.get_table(engine, 'images')
627 temp = dict(deleted=False,
628 created_at=now,
629 updated_at=now,
630 status='active',
631 is_public=True,
632 min_disk=0,
633 min_ram=0,
634 id=image_id)
635 images.insert().values(temp).execute()
636
637 temp = dict(deleted=False,
638 created_at=now,
639 updated_at=now,
640 image_id=image_id,
641 value=url)
642 image_locations.insert().values(temp).execute()
643 return image_id
644
645 def _check_026(self, engine, data):
646 image_locations = db_utils.get_table(engine, 'image_locations')
647 results = image_locations.select().where(
648 image_locations.c.image_id == data).execute()
649
650 r = list(results)
651 self.assertEqual(1, len(r))
652 self.assertEqual('file:///some/place/onthe/fs', r[0]['value'])
653 self.assertIn('meta_data', r[0])
654 x = pickle.loads(r[0]['meta_data'])
655 self.assertEqual({}, x)
656
657 def _check_027(self, engine, data):
658 table = "images"
659 index = "checksum_image_idx"
660 columns = ["checksum"]
661
662 meta = sqlalchemy.MetaData()
663 meta.bind = engine
664
665 new_table = sqlalchemy.Table(table, meta, autoload=True)
666
667 index_data = [(idx.name, idx.columns.keys())
668 for idx in new_table.indexes]
669
670 self.assertIn((index, columns), index_data)
671
672 def _check_028(self, engine, data):
673 owner_index = "owner_image_idx"
674 columns = ["owner"]
675
676 images_table = db_utils.get_table(engine, 'images')
677
678 index_data = [(idx.name, idx.columns.keys())
679 for idx in images_table.indexes
680 if idx.name == owner_index]
681
682 self.assertIn((owner_index, columns), index_data)
683
684 def _pre_upgrade_029(self, engine):
685 image_locations = db_utils.get_table(engine, 'image_locations')
686
687 meta_data = {'somelist': ['a', 'b', 'c'], 'avalue': 'hello',
688 'adict': {}}
689
690 now = datetime.datetime.now()
691 image_id = 'fake_029_id'
692 url = 'file:///some/place/onthe/fs029'
693
694 images = db_utils.get_table(engine, 'images')
695 temp = dict(deleted=False,
696 created_at=now,
697 updated_at=now,
698 status='active',
699 is_public=True,
700 min_disk=0,
701 min_ram=0,
702 id=image_id)
703 images.insert().values(temp).execute()
704
705 pickle_md = pickle.dumps(meta_data)
706 temp = dict(deleted=False,
707 created_at=now,
708 updated_at=now,
709 image_id=image_id,
710 value=url,
711 meta_data=pickle_md)
712 image_locations.insert().values(temp).execute()
713
714 return meta_data, image_id
715
716 def _check_029(self, engine, data):
717 meta_data = data[0]
718 image_id = data[1]
719 image_locations = db_utils.get_table(engine, 'image_locations')
720
721 records = image_locations.select().where(
722 image_locations.c.image_id == image_id).execute().fetchall()
723
724 for r in records:
725 d = jsonutils.loads(r['meta_data'])
726 self.assertEqual(d, meta_data)
727
728 def _check_030(self, engine, data):
729 table = "tasks"
730 index_type = ('ix_tasks_type', ['type'])
731 index_status = ('ix_tasks_status', ['status'])
732 index_owner = ('ix_tasks_owner', ['owner'])
733 index_deleted = ('ix_tasks_deleted', ['deleted'])
734 index_updated_at = ('ix_tasks_updated_at', ['updated_at'])
735
736 meta = sqlalchemy.MetaData()
737 meta.bind = engine
738
739 tasks_table = sqlalchemy.Table(table, meta, autoload=True)
740
741 index_data = [(idx.name, idx.columns.keys())
742 for idx in tasks_table.indexes]
743
744 self.assertIn(index_type, index_data)
745 self.assertIn(index_status, index_data)
746 self.assertIn(index_owner, index_data)
747 self.assertIn(index_deleted, index_data)
748 self.assertIn(index_updated_at, index_data)
749
750 expected = [u'id',
751 u'type',
752 u'status',
753 u'owner',
754 u'input',
755 u'result',
756 u'message',
757 u'expires_at',
758 u'created_at',
759 u'updated_at',
760 u'deleted_at',
761 u'deleted']
762
763 # NOTE(flwang): Skip the column type checking for now since Jenkins is
764 # using sqlalchemy.dialects.postgresql.base.TIMESTAMP instead of
765 # DATETIME which is using by mysql and sqlite.
766 col_data = [col.name for col in tasks_table.columns]
767 self.assertEqual(expected, col_data)
768
769 def _pre_upgrade_031(self, engine):
770 images = db_utils.get_table(engine, 'images')
771 now = datetime.datetime.now()
772 image_id = 'fake_031_id'
773 temp = dict(deleted=False,
774 created_at=now,
775 updated_at=now,
776 status='active',
777 is_public=True,
778 min_disk=0,
779 min_ram=0,
780 id=image_id)
781 images.insert().values(temp).execute()
782
783 locations_table = db_utils.get_table(engine, 'image_locations')
784 locations = [
785 ('file://ab', '{"a": "yo yo"}'),
786 ('file://ab', '{}'),
787 ('file://ab', '{}'),
788 ('file://ab1', '{"a": "that one, please"}'),
789 ('file://ab1', '{"a": "that one, please"}'),
790 ]
791 temp = dict(deleted=False,
792 created_at=now,
793 updated_at=now,
794 image_id=image_id)
795
796 for location, metadata in locations:
797 temp.update(value=location, meta_data=metadata)
798 locations_table.insert().values(temp).execute()
799 return image_id
800
801 def _check_031(self, engine, image_id):
802 locations_table = db_utils.get_table(engine, 'image_locations')
803 result = locations_table.select().where(
804 locations_table.c.image_id == image_id).execute().fetchall()
805
806 locations = set([(x['value'], x['meta_data']) for x in result])
807 actual_locations = set([
808 ('file://ab', '{"a": "yo yo"}'),
809 ('file://ab', '{}'),
810 ('file://ab1', '{"a": "that one, please"}'),
811 ])
812 self.assertFalse(actual_locations.symmetric_difference(locations))
813
814 def _pre_upgrade_032(self, engine):
815 self.assertRaises(sqlalchemy.exc.NoSuchTableError,
816 db_utils.get_table, engine, 'task_info')
817
818 tasks = db_utils.get_table(engine, 'tasks')
819 now = datetime.datetime.now()
820 base_values = {
821 'deleted': False,
822 'created_at': now,
823 'updated_at': now,
824 'status': 'active',
825 'owner': 'TENANT',
826 'type': 'import',
827 }
828 data = [
829 {
830 'id': 'task-1',
831 'input': 'some input',
832 'message': None,
833 'result': 'successful'
834 },
835 {
836 'id': 'task-2',
837 'input': None,
838 'message': None,
839 'result': None
840 },
841 ]
842 for task in data:
843 task.update(base_values)
844 tasks.insert().values(task).execute()
845 return data
846
847 def _check_032(self, engine, data):
848 task_info_table = db_utils.get_table(engine, 'task_info')
849
850 task_info_refs = task_info_table.select().execute().fetchall()
851
852 self.assertEqual(2, len(task_info_refs))
853
854 for x in range(len(task_info_refs)):
855 self.assertEqual(task_info_refs[x].task_id, data[x]['id'])
856 self.assertEqual(task_info_refs[x].input, data[x]['input'])
857 self.assertEqual(task_info_refs[x].result, data[x]['result'])
858 self.assertIsNone(task_info_refs[x].message)
859
860 tasks_table = db_utils.get_table(engine, 'tasks')
861 self.assertNotIn('input', tasks_table.c)
862 self.assertNotIn('result', tasks_table.c)
863 self.assertNotIn('message', tasks_table.c)
864
865 def _pre_upgrade_033(self, engine):
866 images = db_utils.get_table(engine, 'images')
867 image_locations = db_utils.get_table(engine, 'image_locations')
868
869 now = datetime.datetime.now()
870 image_id = 'fake_id_028_%d'
871 url = 'file:///some/place/onthe/fs_%d'
872 status_list = ['active', 'saving', 'queued', 'killed',
873 'pending_delete', 'deleted']
874 image_id_list = []
875
876 for (idx, status) in enumerate(status_list):
877 temp = dict(deleted=False,
878 created_at=now,
879 updated_at=now,
880 status=status,
881 is_public=True,
882 min_disk=0,
883 min_ram=0,
884 id=image_id % idx)
885 images.insert().values(temp).execute()
886
887 temp = dict(deleted=False,
888 created_at=now,
889 updated_at=now,
890 image_id=image_id % idx,
891 value=url % idx)
892 image_locations.insert().values(temp).execute()
893
894 image_id_list.append(image_id % idx)
895 return image_id_list
896
897 def _check_033(self, engine, data):
898 image_locations = db_utils.get_table(engine, 'image_locations')
899
900 self.assertIn('status', image_locations.c)
901 self.assertEqual(30, image_locations.c['status'].type.length)
902
903 status_list = ['active', 'active', 'active',
904 'deleted', 'pending_delete', 'deleted']
905
906 for (idx, image_id) in enumerate(data):
907 results = image_locations.select().where(
908 image_locations.c.image_id == image_id).execute()
909 r = list(results)
910 self.assertEqual(1, len(r))
911 self.assertIn('status', r[0])
912 self.assertEqual(status_list[idx], r[0]['status'])
913
914 def _pre_upgrade_034(self, engine):
915 images = db_utils.get_table(engine, 'images')
916
917 now = datetime.datetime.now()
918 image_id = 'fake_id_034'
919 temp = dict(deleted=False,
920 created_at=now,
921 status='active',
922 is_public=True,
923 min_disk=0,
924 min_ram=0,
925 id=image_id)
926 images.insert().values(temp).execute()
927
928 def _check_034(self, engine, data):
929 images = db_utils.get_table(engine, 'images')
930 self.assertIn('virtual_size', images.c)
931
932 result = (images.select()
933 .where(images.c.id == 'fake_id_034')
934 .execute().fetchone())
935 self.assertIsNone(result.virtual_size)
936
937 def _pre_upgrade_035(self, engine):
938 self.assertRaises(sqlalchemy.exc.NoSuchTableError,
939 db_utils.get_table, engine, 'metadef_namespaces')
940 self.assertRaises(sqlalchemy.exc.NoSuchTableError,
941 db_utils.get_table, engine, 'metadef_properties')
942 self.assertRaises(sqlalchemy.exc.NoSuchTableError,
943 db_utils.get_table, engine, 'metadef_objects')
944 self.assertRaises(sqlalchemy.exc.NoSuchTableError,
945 db_utils.get_table, engine, 'metadef_resource_types')
946 self.assertRaises(sqlalchemy.exc.NoSuchTableError,
947 db_utils.get_table, engine,
948 'metadef_namespace_resource_types')
949
950 def _check_035(self, engine, data):
951 meta = sqlalchemy.MetaData()
952 meta.bind = engine
953
954 # metadef_namespaces
955 table = sqlalchemy.Table("metadef_namespaces", meta, autoload=True)
956 index_namespace = ('ix_namespaces_namespace', ['namespace'])
957 index_data = [(idx.name, idx.columns.keys())
958 for idx in table.indexes]
959 self.assertIn(index_namespace, index_data)
960
961 expected_cols = [u'id',
962 u'namespace',
963 u'display_name',
964 u'description',
965 u'visibility',
966 u'protected',
967 u'owner',
968 u'created_at',
969 u'updated_at']
970 col_data = [col.name for col in table.columns]
971 self.assertEqual(expected_cols, col_data)
972
973 # metadef_objects
974 table = sqlalchemy.Table("metadef_objects", meta, autoload=True)
975 index_namespace_id_name = (
976 'ix_objects_namespace_id_name', ['namespace_id', 'name'])
977 index_data = [(idx.name, idx.columns.keys())
978 for idx in table.indexes]
979 self.assertIn(index_namespace_id_name, index_data)
980
981 expected_cols = [u'id',
982 u'namespace_id',
983 u'name',
984 u'description',
985 u'required',
986 u'schema',
987 u'created_at',
988 u'updated_at']
989 col_data = [col.name for col in table.columns]
990 self.assertEqual(expected_cols, col_data)
991
992 # metadef_properties
993 table = sqlalchemy.Table("metadef_properties", meta, autoload=True)
994 index_namespace_id_name = (
995 'ix_metadef_properties_namespace_id_name',
996 ['namespace_id', 'name'])
997 index_data = [(idx.name, idx.columns.keys())
998 for idx in table.indexes]
999 self.assertIn(index_namespace_id_name, index_data)
1000
1001 expected_cols = [u'id',
1002 u'namespace_id',
1003 u'name',
1004 u'schema',
1005 u'created_at',
1006 u'updated_at']
1007 col_data = [col.name for col in table.columns]
1008 self.assertEqual(expected_cols, col_data)
1009
1010 # metadef_resource_types
1011 table = sqlalchemy.Table(
1012 "metadef_resource_types", meta, autoload=True)
1013 index_resource_types_name = (
1014 'ix_metadef_resource_types_name', ['name'])
1015 index_data = [(idx.name, idx.columns.keys())
1016 for idx in table.indexes]
1017 self.assertIn(index_resource_types_name, index_data)
1018
1019 expected_cols = [u'id',
1020 u'name',
1021 u'protected',
1022 u'created_at',
1023 u'updated_at']
1024 col_data = [col.name for col in table.columns]
1025 self.assertEqual(expected_cols, col_data)
1026
1027 # metadef_namespace_resource_types
1028 table = sqlalchemy.Table(
1029 "metadef_namespace_resource_types", meta, autoload=True)
1030 index_ns_res_types_res_type_id_ns_id = (
1031 'ix_metadef_ns_res_types_res_type_id_ns_id',
1032 ['resource_type_id', 'namespace_id'])
1033 index_data = [(idx.name, idx.columns.keys())
1034 for idx in table.indexes]
1035 self.assertIn(index_ns_res_types_res_type_id_ns_id, index_data)
1036
1037 expected_cols = [u'resource_type_id',
1038 u'namespace_id',
1039 u'properties_target',
1040 u'prefix',
1041 u'created_at',
1042 u'updated_at']
1043 col_data = [col.name for col in table.columns]
1044 self.assertEqual(expected_cols, col_data)
1045
1046 def _pre_upgrade_036(self, engine):
1047 meta = sqlalchemy.MetaData()
1048 meta.bind = engine
1049
1050 # metadef_objects
1051 table = sqlalchemy.Table("metadef_objects", meta, autoload=True)
1052 expected_cols = [u'id',
1053 u'namespace_id',
1054 u'name',
1055 u'description',
1056 u'required',
1057 u'schema',
1058 u'created_at',
1059 u'updated_at']
1060 col_data = [col.name for col in table.columns]
1061 self.assertEqual(expected_cols, col_data)
1062
1063 # metadef_properties
1064 table = sqlalchemy.Table("metadef_properties", meta, autoload=True)
1065 expected_cols = [u'id',
1066 u'namespace_id',
1067 u'name',
1068 u'schema',
1069 u'created_at',
1070 u'updated_at']
1071 col_data = [col.name for col in table.columns]
1072 self.assertEqual(expected_cols, col_data)
1073
1074 def _check_036(self, engine, data):
1075 meta = sqlalchemy.MetaData()
1076 meta.bind = engine
1077
1078 # metadef_objects
1079 table = sqlalchemy.Table("metadef_objects", meta, autoload=True)
1080 expected_cols = [u'id',
1081 u'namespace_id',
1082 u'name',
1083 u'description',
1084 u'required',
1085 u'json_schema',
1086 u'created_at',
1087 u'updated_at']
1088 col_data = [col.name for col in table.columns]
1089 self.assertEqual(expected_cols, col_data)
1090
1091 # metadef_properties
1092 table = sqlalchemy.Table("metadef_properties", meta, autoload=True)
1093 expected_cols = [u'id',
1094 u'namespace_id',
1095 u'name',
1096 u'json_schema',
1097 u'created_at',
1098 u'updated_at']
1099 col_data = [col.name for col in table.columns]
1100 self.assertEqual(expected_cols, col_data)
1101
    def _check_037(self, engine, data):
        """Verify migration 037's constraint and nullability changes.

        On MySQL the redundant 'image_id' unique constraint must be
        replaced by 'ix_image_properties_image_id_name'.  The
        image_members.status and images.protected columns become NOT NULL
        and gain defaults ('pending' / False), which is checked by
        inserting rows that omit those columns.
        """
        if engine.name == 'mysql':
            self.assertFalse(unique_constraint_exist('image_id',
                                                     'image_properties',
                                                     engine))

            self.assertTrue(unique_constraint_exist(
                'ix_image_properties_image_id_name',
                'image_properties',
                engine))

        image_members = db_utils.get_table(engine, 'image_members')
        images = db_utils.get_table(engine, 'images')

        self.assertFalse(image_members.c.status.nullable)
        self.assertFalse(images.c.protected.nullable)

        now = datetime.datetime.now()
        # NOTE(review): the id says 035 although this is the 037 check —
        # looks like a copy-paste leftover; harmless because it is only
        # compared against itself below.
        temp = dict(
            deleted=False,
            created_at=now,
            status='active',
            is_public=True,
            min_disk=0,
            min_ram=0,
            id='fake_image_035'
        )
        images.insert().values(temp).execute()

        image = (images.select()
                 .where(images.c.id == 'fake_image_035')
                 .execute().fetchone())

        # 'protected' was omitted above, so the new default must be False.
        self.assertFalse(image['protected'])

        temp = dict(
            deleted=False,
            created_at=now,
            image_id='fake_image_035',
            member='fake_member',
            can_share=True,
            id=3
        )

        image_members.insert().values(temp).execute()

        image_member = (image_members.select()
                        .where(image_members.c.id == 3)
                        .execute().fetchone())

        # 'status' was omitted above, so the new default must be 'pending'.
        self.assertEqual('pending', image_member['status'])
1153
1154 def _pre_upgrade_038(self, engine):
1155 self.assertRaises(sqlalchemy.exc.NoSuchTableError,
1156 db_utils.get_table, engine, 'metadef_tags')
1157
1158 def _check_038(self, engine, data):
1159 meta = sqlalchemy.MetaData()
1160 meta.bind = engine
1161
1162 # metadef_tags
1163 table = sqlalchemy.Table("metadef_tags", meta, autoload=True)
1164 expected_cols = [u'id',
1165 u'namespace_id',
1166 u'name',
1167 u'created_at',
1168 u'updated_at']
1169 col_data = [col.name for col in table.columns]
1170 self.assertEqual(expected_cols, col_data)
1171
    def _check_039(self, engine, data):
        """Verify migration 039's metadef index/constraint cleanup.

        The old ad-hoc indexes and unique constraints must be gone from
        every metadef table, replaced by the consistently named
        ix_metadef_* indexes asserted below.
        """
        meta = sqlalchemy.MetaData()
        meta.bind = engine

        metadef_namespaces = sqlalchemy.Table('metadef_namespaces', meta,
                                              autoload=True)
        metadef_properties = sqlalchemy.Table('metadef_properties', meta,
                                              autoload=True)
        metadef_objects = sqlalchemy.Table('metadef_objects', meta,
                                           autoload=True)
        metadef_ns_res_types = sqlalchemy.Table(
            'metadef_namespace_resource_types',
            meta, autoload=True)
        metadef_resource_types = sqlalchemy.Table('metadef_resource_types',
                                                  meta, autoload=True)

        tables = [metadef_namespaces, metadef_properties, metadef_objects,
                  metadef_ns_res_types, metadef_resource_types]

        # None of the pre-039 index/constraint names may survive on any
        # of the metadef tables.
        for table in tables:
            for index_name in ['ix_namespaces_namespace',
                               'ix_objects_namespace_id_name',
                               'ix_metadef_properties_namespace_id_name']:
                self.assertFalse(index_exist(index_name, table.name, engine))
            for uc_name in ['resource_type_id', 'namespace', 'name',
                            'namespace_id',
                            'metadef_objects_namespace_id_name_key',
                            'metadef_properties_namespace_id_name_key']:
                self.assertFalse(unique_constraint_exist(uc_name, table.name,
                                                         engine))

        # The renamed ix_metadef_* indexes must exist instead.
        self.assertTrue(index_exist('ix_metadef_ns_res_types_namespace_id',
                                    metadef_ns_res_types.name, engine))

        self.assertTrue(index_exist('ix_metadef_namespaces_namespace',
                                    metadef_namespaces.name, engine))

        self.assertTrue(index_exist('ix_metadef_namespaces_owner',
                                    metadef_namespaces.name, engine))

        self.assertTrue(index_exist('ix_metadef_objects_name',
                                    metadef_objects.name, engine))

        self.assertTrue(index_exist('ix_metadef_objects_namespace_id',
                                    metadef_objects.name, engine))

        self.assertTrue(index_exist('ix_metadef_properties_name',
                                    metadef_properties.name, engine))

        self.assertTrue(index_exist('ix_metadef_properties_namespace_id',
                                    metadef_properties.name, engine))
1223
1224 def _check_040(self, engine, data):
1225 meta = sqlalchemy.MetaData()
1226 meta.bind = engine
1227 metadef_tags = sqlalchemy.Table('metadef_tags', meta, autoload=True)
1228
1229 if engine.name == 'mysql':
1230 self.assertFalse(index_exist('namespace_id',
1231 metadef_tags.name, engine))
1232
1233 def _pre_upgrade_041(self, engine):
1234 self.assertRaises(sqlalchemy.exc.NoSuchTableError,
1235 db_utils.get_table, engine,
1236 'artifacts')
1237 self.assertRaises(sqlalchemy.exc.NoSuchTableError,
1238 db_utils.get_table, engine,
1239 'artifact_tags')
1240 self.assertRaises(sqlalchemy.exc.NoSuchTableError,
1241 db_utils.get_table, engine,
1242 'artifact_properties')
1243 self.assertRaises(sqlalchemy.exc.NoSuchTableError,
1244 db_utils.get_table, engine,
1245 'artifact_blobs')
1246 self.assertRaises(sqlalchemy.exc.NoSuchTableError,
1247 db_utils.get_table, engine,
1248 'artifact_dependencies')
1249 self.assertRaises(sqlalchemy.exc.NoSuchTableError,
1250 db_utils.get_table, engine,
1251 'artifact_locations')
1252
    def _check_041(self, engine, data):
        """Verify migration 041 created all artifact tables.

        For each artifact table, assert_table checks the expected index
        names/columns and the exact column list.
        """
        # artifacts
        artifacts_indices = [('ix_artifact_name_and_version',
                              ['name', 'version_prefix', 'version_suffix']),
                             ('ix_artifact_type',
                              ['type_name',
                               'type_version_prefix',
                               'type_version_suffix']),
                             ('ix_artifact_state', ['state']),
                             ('ix_artifact_visibility', ['visibility']),
                             ('ix_artifact_owner', ['owner'])]
        artifacts_columns = ['id',
                             'name',
                             'type_name',
                             'type_version_prefix',
                             'type_version_suffix',
                             'type_version_meta',
                             'version_prefix',
                             'version_suffix',
                             'version_meta',
                             'description',
                             'visibility',
                             'state',
                             'owner',
                             'created_at',
                             'updated_at',
                             'deleted_at',
                             'published_at']
        self.assert_table(engine, 'artifacts', artifacts_indices,
                          artifacts_columns)

        # artifact_tags
        tags_indices = [('ix_artifact_tags_artifact_id', ['artifact_id']),
                        ('ix_artifact_tags_artifact_id_tag_value',
                         ['artifact_id',
                          'value'])]
        tags_columns = ['id',
                        'artifact_id',
                        'value',
                        'created_at',
                        'updated_at']
        self.assert_table(engine, 'artifact_tags', tags_indices, tags_columns)

        # artifact_properties
        prop_indices = [
            ('ix_artifact_properties_artifact_id', ['artifact_id']),
            ('ix_artifact_properties_name', ['name'])]
        prop_columns = ['id',
                        'artifact_id',
                        'name',
                        'string_value',
                        'int_value',
                        'numeric_value',
                        'bool_value',
                        'text_value',
                        'created_at',
                        'updated_at',
                        'position']
        self.assert_table(engine, 'artifact_properties', prop_indices,
                          prop_columns)

        # artifact_blobs
        blobs_indices = [
            ('ix_artifact_blobs_artifact_id', ['artifact_id']),
            ('ix_artifact_blobs_name', ['name'])]
        blobs_columns = ['id',
                         'artifact_id',
                         'size',
                         'checksum',
                         'name',
                         'item_key',
                         'position',
                         'created_at',
                         'updated_at']
        self.assert_table(engine, 'artifact_blobs', blobs_indices,
                          blobs_columns)

        # artifact_dependencies
        dependencies_indices = [
            ('ix_artifact_dependencies_source_id', ['artifact_source']),
            ('ix_artifact_dependencies_direct_dependencies',
             ['artifact_source', 'is_direct']),
            ('ix_artifact_dependencies_dest_id', ['artifact_dest']),
            ('ix_artifact_dependencies_origin_id', ['artifact_origin'])]
        dependencies_columns = ['id',
                                'artifact_source',
                                'artifact_dest',
                                'artifact_origin',
                                'is_direct',
                                'position',
                                'name',
                                'created_at',
                                'updated_at']
        self.assert_table(engine, 'artifact_dependencies',
                          dependencies_indices,
                          dependencies_columns)

        # artifact_blob_locations
        locations_indices = [
            ('ix_artifact_blob_locations_blob_id', ['blob_id'])]
        locations_columns = ['id',
                             'blob_id',
                             'value',
                             'created_at',
                             'updated_at',
                             'position',
                             'status']
        self.assert_table(engine, 'artifact_blob_locations', locations_indices,
                          locations_columns)
1356
1357 def _pre_upgrade_042(self, engine):
1358 meta = sqlalchemy.MetaData()
1359 meta.bind = engine
1360
1361 metadef_namespaces = sqlalchemy.Table('metadef_namespaces', meta,
1362 autoload=True)
1363 metadef_objects = sqlalchemy.Table('metadef_objects', meta,
1364 autoload=True)
1365 metadef_properties = sqlalchemy.Table('metadef_properties', meta,
1366 autoload=True)
1367 metadef_tags = sqlalchemy.Table('metadef_tags', meta, autoload=True)
1368 metadef_resource_types = sqlalchemy.Table('metadef_resource_types',
1369 meta, autoload=True)
1370 metadef_ns_res_types = sqlalchemy.Table(
1371 'metadef_namespace_resource_types',
1372 meta, autoload=True)
1373
1374 # These will be dropped and recreated as unique constraints.
1375 self.assertTrue(index_exist('ix_metadef_namespaces_namespace',
1376 metadef_namespaces.name, engine))
1377 self.assertTrue(index_exist('ix_metadef_objects_namespace_id',
1378 metadef_objects.name, engine))
1379 self.assertTrue(index_exist('ix_metadef_properties_namespace_id',
1380 metadef_properties.name, engine))
1381 self.assertTrue(index_exist('ix_metadef_tags_namespace_id',
1382 metadef_tags.name, engine))
1383 self.assertTrue(index_exist('ix_metadef_resource_types_name',
1384 metadef_resource_types.name, engine))
1385
1386 # This one will be dropped - not needed
1387 self.assertTrue(index_exist(
1388 'ix_metadef_ns_res_types_res_type_id_ns_id',
1389 metadef_ns_res_types.name, engine))
1390
1391 # The rest must remain
1392 self.assertTrue(index_exist('ix_metadef_namespaces_owner',
1393 metadef_namespaces.name, engine))
1394 self.assertTrue(index_exist('ix_metadef_objects_name',
1395 metadef_objects.name, engine))
1396 self.assertTrue(index_exist('ix_metadef_properties_name',
1397 metadef_properties.name, engine))
1398 self.assertTrue(index_exist('ix_metadef_tags_name',
1399 metadef_tags.name, engine))
1400 self.assertTrue(index_exist('ix_metadef_ns_res_types_namespace_id',
1401 metadef_ns_res_types.name, engine))
1402
1403 # To be created
1404 self.assertFalse(unique_constraint_exist
1405 ('uq_metadef_objects_namespace_id_name',
1406 metadef_objects.name, engine)
1407 )
1408 self.assertFalse(unique_constraint_exist
1409 ('uq_metadef_properties_namespace_id_name',
1410 metadef_properties.name, engine)
1411 )
1412 self.assertFalse(unique_constraint_exist
1413 ('uq_metadef_tags_namespace_id_name',
1414 metadef_tags.name, engine)
1415 )
1416 self.assertFalse(unique_constraint_exist
1417 ('uq_metadef_namespaces_namespace',
1418 metadef_namespaces.name, engine)
1419 )
1420 self.assertFalse(unique_constraint_exist
1421 ('uq_metadef_resource_types_name',
1422 metadef_resource_types.name, engine)
1423 )
1424
1425 def _check_042(self, engine, data):
1426 meta = sqlalchemy.MetaData()
1427 meta.bind = engine
1428
1429 metadef_namespaces = sqlalchemy.Table('metadef_namespaces', meta,
1430 autoload=True)
1431 metadef_objects = sqlalchemy.Table('metadef_objects', meta,
1432 autoload=True)
1433 metadef_properties = sqlalchemy.Table('metadef_properties', meta,
1434 autoload=True)
1435 metadef_tags = sqlalchemy.Table('metadef_tags', meta, autoload=True)
1436 metadef_resource_types = sqlalchemy.Table('metadef_resource_types',
1437 meta, autoload=True)
1438 metadef_ns_res_types = sqlalchemy.Table(
1439 'metadef_namespace_resource_types',
1440 meta, autoload=True)
1441
1442 # Dropped for unique constraints
1443 self.assertFalse(index_exist('ix_metadef_namespaces_namespace',
1444 metadef_namespaces.name, engine))
1445 self.assertFalse(index_exist('ix_metadef_objects_namespace_id',
1446 metadef_objects.name, engine))
1447 self.assertFalse(index_exist('ix_metadef_properties_namespace_id',
1448 metadef_properties.name, engine))
1449 self.assertFalse(index_exist('ix_metadef_tags_namespace_id',
1450 metadef_tags.name, engine))
1451 self.assertFalse(index_exist('ix_metadef_resource_types_name',
1452 metadef_resource_types.name, engine))
1453
1454 # Dropped - not needed because of the existing primary key
1455 self.assertFalse(index_exist(
1456 'ix_metadef_ns_res_types_res_type_id_ns_id',
1457 metadef_ns_res_types.name, engine))
1458
1459 # Still exist as before
1460 self.assertTrue(index_exist('ix_metadef_namespaces_owner',
1461 metadef_namespaces.name, engine))
1462 self.assertTrue(index_exist('ix_metadef_ns_res_types_namespace_id',
1463 metadef_ns_res_types.name, engine))
1464 self.assertTrue(index_exist('ix_metadef_objects_name',
1465 metadef_objects.name, engine))
1466 self.assertTrue(index_exist('ix_metadef_properties_name',
1467 metadef_properties.name, engine))
1468 self.assertTrue(index_exist('ix_metadef_tags_name',
1469 metadef_tags.name, engine))
1470
1471 self.assertTrue(unique_constraint_exist
1472 ('uq_metadef_namespaces_namespace',
1473 metadef_namespaces.name, engine)
1474 )
1475 self.assertTrue(unique_constraint_exist
1476 ('uq_metadef_objects_namespace_id_name',
1477 metadef_objects.name, engine)
1478 )
1479 self.assertTrue(unique_constraint_exist
1480 ('uq_metadef_properties_namespace_id_name',
1481 metadef_properties.name, engine)
1482 )
1483 self.assertTrue(unique_constraint_exist
1484 ('uq_metadef_tags_namespace_id_name',
1485 metadef_tags.name, engine)
1486 )
1487 self.assertTrue(unique_constraint_exist
1488 ('uq_metadef_resource_types_name',
1489 metadef_resource_types.name, engine)
1490 )
1491
1492 def _pre_upgrade_045(self, engine):
1493 images = db_utils.get_table(engine, 'images')
1494 now = datetime.datetime.now()
1495 image_members = db_utils.get_table(engine, 'image_members')
1496
1497 # inserting a public image record
1498 public_temp = dict(deleted=False,
1499 created_at=now,
1500 status='active',
1501 is_public=True,
1502 min_disk=0,
1503 min_ram=0,
1504 id='public_id')
1505 images.insert().values(public_temp).execute()
1506
1507 # inserting a non-public image record for 'shared' visibility test
1508 shared_temp = dict(deleted=False,
1509 created_at=now,
1510 status='active',
1511 is_public=False,
1512 min_disk=0,
1513 min_ram=0,
1514 id='shared_id')
1515 images.insert().values(shared_temp).execute()
1516
1517 # inserting a non-public image records for 'private' visbility test
1518 private_temp = dict(deleted=False,
1519 created_at=now,
1520 status='active',
1521 is_public=False,
1522 min_disk=0,
1523 min_ram=0,
1524 id='private_id_1')
1525 images.insert().values(private_temp).execute()
1526 private_temp = dict(deleted=False,
1527 created_at=now,
1528 status='active',
1529 is_public=False,
1530 min_disk=0,
1531 min_ram=0,
1532 id='private_id_2')
1533 images.insert().values(private_temp).execute()
1534
1535 # adding an image member for checking 'shared' visbility
1536 temp = dict(deleted=False,
1537 created_at=now,
1538 image_id='shared_id',
1539 member='fake_member_452',
1540 can_share=True,
1541 id=45)
1542 image_members.insert().values(temp).execute()
1543
1544 # adding an image member, but marking it deleted,
1545 # for testing 'private' visibility
1546 temp = dict(deleted=True,
1547 created_at=now,
1548 image_id='private_id_2',
1549 member='fake_member_451',
1550 can_share=True,
1551 id=451)
1552 image_members.insert().values(temp).execute()
1553
1554 # adding an active image member for the 'public' image,
1555 # to test it remains public regardless.
1556 temp = dict(deleted=False,
1557 created_at=now,
1558 image_id='public_id',
1559 member='fake_member_450',
1560 can_share=True,
1561 id=450)
1562 image_members.insert().values(temp).execute()
1563
1564 def _check_045(self, engine, data):
1565 # check that after migration, 'visbility' column is introduced
1566 images = db_utils.get_table(engine, 'images')
1567 self.assertIn('visibility', images.c)
1568 self.assertNotIn('is_public', images.c)
1569
1570 # tests to identify the visbilities of images created above
1571 rows = images.select().where(
1572 images.c.id == 'public_id').execute().fetchall()
1573 self.assertEqual(1, len(rows))
1574 self.assertEqual('public', rows[0][16])
1575
1576 rows = images.select().where(
1577 images.c.id == 'shared_id').execute().fetchall()
1578 self.assertEqual(1, len(rows))
1579 self.assertEqual('shared', rows[0][16])
1580
1581 rows = images.select().where(
1582 images.c.id == 'private_id_1').execute().fetchall()
1583 self.assertEqual(1, len(rows))
1584 self.assertEqual('private', rows[0][16])
1585
1586 rows = images.select().where(
1587 images.c.id == 'private_id_2').execute().fetchall()
1588 self.assertEqual(1, len(rows))
1589 self.assertEqual('private', rows[0][16])
1590
1591 def assert_table(self, engine, table_name, indices, columns):
1592 table = db_utils.get_table(engine, table_name)
1593 index_data = [(index.name, index.columns.keys()) for index in
1594 table.indexes]
1595 column_data = [column.name for column in table.columns]
1596 self.assertItemsEqual(columns, column_data)
1597 self.assertItemsEqual(indices, index_data)
1598
1599
class TestMigrations(test_base.DbTestCase, test_utils.BaseTestCase):
    """Static checks on the sqlalchemy-migrate migration scripts."""

    def test_no_downgrade(self):
        """Ensure no migration script defines a ``downgrade`` callable.

        Walks every ``*.py`` file in the migrate_repo versions package,
        imports it, and asserts it exposes no ``downgrade`` attribute.
        """
        migrate_file = versions.__path__[0]
        for parent, dirnames, filenames in os.walk(migrate_file):
            for filename in filenames:
                # os.path.splitext instead of filename.split('.')[1]:
                # the latter raises IndexError for extension-less files
                # and mis-classifies names containing multiple dots.
                model_name, ext = os.path.splitext(filename)
                if ext != '.py':
                    continue
                model = __import__(
                    'glance.db.sqlalchemy.migrate_repo.versions.' +
                    model_name)
                obj = getattr(getattr(getattr(getattr(getattr(
                    model, 'db'), 'sqlalchemy'), 'migrate_repo'),
                    'versions'), model_name)
                func = getattr(obj, 'downgrade', None)
                self.assertIsNone(func)
1616
1617
class TestMysqlMigrations(test_base.MySQLOpportunisticTestCase,
                          MigrationsMixin):
    """Run the shared migration tests against a real MySQL database."""

    def test_mysql_innodb_tables(self):
        """Every table created by a full db_sync must use InnoDB."""
        migration.db_sync(engine=self.migrate_engine)

        schema = self.migrate_engine.url.database

        table_count = self.migrate_engine.execute(
            "SELECT COUNT(*) "
            "FROM information_schema.TABLES "
            "WHERE TABLE_SCHEMA='%s'" % schema).scalar()
        self.assertGreater(table_count, 0,
                           "No tables found. Wrong schema?")

        # migrate_version is sqlalchemy-migrate's own tracking table and
        # is excluded from the InnoDB requirement.
        non_innodb = self.migrate_engine.execute(
            "SELECT count(*) "
            "FROM information_schema.TABLES "
            "WHERE TABLE_SCHEMA='%s' "
            "AND ENGINE!='InnoDB' "
            "AND TABLE_NAME!='migrate_version'" % schema).scalar()
        self.assertEqual(0, non_innodb,
                         "%d non InnoDB tables created" % non_innodb)
1640
1641
class TestPostgresqlMigrations(test_base.PostgreSQLOpportunisticTestCase,
                               MigrationsMixin):
    """Run the shared MigrationsMixin tests against PostgreSQL."""
    pass
1645
1646
class TestSqliteMigrations(test_base.DbTestCase,
                           MigrationsMixin):
    """Run the shared MigrationsMixin tests against SQLite."""

    def test_walk_versions(self):
        # No more downgrades
        # NOTE(review): both arguments are False; presumably
        # (snake_walk, downgrade) — confirm against the
        # MigrationsMixin._walk_versions signature (outside this view).
        self._walk_versions(False, False)
1652
1653
class ModelsMigrationSyncMixin(object):
    """Hooks for oslo.db's ModelsMigrationsSync schema-comparison test.

    Supplies the metadata, engine, and sync callbacks that oslo.db uses
    to compare the SQLAlchemy model definitions against the schema
    produced by running the migrations.
    """

    def get_metadata(self):
        """Return the models metadata with metadef and glare tables merged.

        The metadef and glare models live in separate declarative bases;
        fold their tables into the main BASE metadata so the comparison
        covers all of them in one pass.
        """
        for table in models_metadef.BASE_DICT.metadata.sorted_tables:
            models.BASE.metadata._add_table(table.name, table.schema, table)
        for table in models_glare.BASE.metadata.sorted_tables:
            models.BASE.metadata._add_table(table.name, table.schema, table)
        return models.BASE.metadata

    def get_engine(self):
        # Engine is provided by the oslo.db opportunistic test base class.
        return self.engine

    def db_sync(self, engine):
        """Migrate the given engine's database to the latest version."""
        migration.db_sync(engine=engine)

    # TODO(akamyshikova): remove this method as soon as comparison with Variant
    # will be implemented in oslo.db or alembic
    def compare_type(self, ctxt, insp_col, meta_col, insp_type, meta_type):
        """Compare column types, unwrapping SQLAlchemy Variant types.

        For a Variant metadata type, temporarily substitutes the Variant's
        underlying impl type on both columns and recurses; otherwise defers
        to the parent comparison, falling back to the dialect-level
        comparison when the parent is inconclusive (returns None).
        """
        if isinstance(meta_type, types.Variant):
            # Swap both columns' types for the Variant's impl, recurse,
            # then restore them in 'finally' so the comparison leaves no
            # lasting side effects on the shared metadata objects.
            meta_orig_type = meta_col.type
            insp_orig_type = insp_col.type
            meta_col.type = meta_type.impl
            insp_col.type = meta_type.impl

            try:
                return self.compare_type(ctxt, insp_col, meta_col, insp_type,
                                         meta_type.impl)
            finally:
                meta_col.type = meta_orig_type
                insp_col.type = insp_orig_type
        else:
            ret = super(ModelsMigrationSyncMixin, self).compare_type(
                ctxt, insp_col, meta_col, insp_type, meta_type)
            if ret is not None:
                return ret
            # Parent comparison was inconclusive: let the dialect decide.
            return ctxt.impl.compare_type(insp_col, meta_col)

    def include_object(self, object_, name, type_, reflected, compare_to):
        """Exclude sqlalchemy-migrate's tracking table from comparison."""
        if name in ['migrate_version'] and type_ == 'table':
            return False
        return True
1695
1696
class ModelsMigrationsSyncMysql(ModelsMigrationSyncMixin,
                                test_migrations.ModelsMigrationsSync,
                                test_base.MySQLOpportunisticTestCase):
    """Models-vs-migrations schema sync check against MySQL."""
    pass
1701
1702
class ModelsMigrationsSyncPostgres(ModelsMigrationSyncMixin,
                                   test_migrations.ModelsMigrationsSync,
                                   test_base.PostgreSQLOpportunisticTestCase):
    """Models-vs-migrations schema sync check against PostgreSQL."""
    pass
1707
1708
class ModelsMigrationsSyncSQLite(ModelsMigrationSyncMixin,
                                 test_migrations.ModelsMigrationsSync,
                                 test_base.DbTestCase):
    """Models-vs-migrations schema sync check against SQLite."""
    pass
diff --git a/glance/tests/utils.py b/glance/tests/utils.py
index 05c118a..48897ad 100644
--- a/glance/tests/utils.py
+++ b/glance/tests/utils.py
@@ -23,6 +23,7 @@ import shutil
23import socket 23import socket
24import subprocess 24import subprocess
25 25
26from alembic import command as alembic_command
26import fixtures 27import fixtures
27from oslo_config import cfg 28from oslo_config import cfg
28from oslo_config import fixture as cfg_fixture 29from oslo_config import fixture as cfg_fixture
@@ -42,6 +43,7 @@ from glance.common import timeutils
42from glance.common import utils 43from glance.common import utils
43from glance.common import wsgi 44from glance.common import wsgi
44from glance import context 45from glance import context
46from glance.db.sqlalchemy import alembic_migrations
45from glance.db.sqlalchemy import api as db_api 47from glance.db.sqlalchemy import api as db_api
46from glance.db.sqlalchemy import models as db_models 48from glance.db.sqlalchemy import models as db_models
47 49
@@ -670,3 +672,14 @@ class HttplibWsgiAdapter(object):
670 response = self.req.get_response(self.app) 672 response = self.req.get_response(self.app)
671 return FakeHTTPResponse(response.status_code, response.headers, 673 return FakeHTTPResponse(response.status_code, response.headers,
672 response.body) 674 response.body)
675
676
677def db_sync(version=None, engine=None):
678 """Migrate the database to `version` or the most recent version."""
679 if version is None:
680 version = 'heads'
681 if engine is None:
682 engine = db_api.get_engine()
683
684 alembic_config = alembic_migrations.get_alembic_config(engine=engine)
685 alembic_command.upgrade(alembic_config, version)
diff --git a/tox.ini b/tox.ini
index f396bd4..ebf2255 100644
--- a/tox.ini
+++ b/tox.ini
@@ -10,6 +10,15 @@ basepython =
10setenv = 10setenv =
11 VIRTUAL_ENV={envdir} 11 VIRTUAL_ENV={envdir}
12 PYTHONWARNINGS=default::DeprecationWarning 12 PYTHONWARNINGS=default::DeprecationWarning
13# NOTE(hemanthm): The environment variable "OS_TEST_DBAPI_ADMIN_CONNECTION"
14# must be set to force oslo.db tests to use a file-based sqlite database
15# instead of the default in-memory database, which doesn't work well with
16# alembic migrations. The file-based database pointed by the environment
17# variable itself is not used for testing. Neither is it ever created. Oslo.db
18# creates another file-based database for testing purposes and deletes it as a
19# part of its test clean-up. Think of setting this environment variable as a
20# clue for oslo.db to use file-based database.
21 OS_TEST_DBAPI_ADMIN_CONNECTION=sqlite:////tmp/placeholder-never-created-nor-used.db
13usedevelop = True 22usedevelop = True
14install_command = pip install -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt} {opts} {packages} 23install_command = pip install -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt} {opts} {packages}
15deps = -r{toxinidir}/test-requirements.txt 24deps = -r{toxinidir}/test-requirements.txt