Add versioning for fuel_health tests

The release version for which a particular test is suitable can now be
specified in the test set profile attribute and in tests' docstrings. For
now we assume that a test is available for releases that are equal to or
greater than the release specified for the test.


Change-Id: I568db380120e3a429bdd531f51a7301c49d41281
Implements: blueprint ostf-tests-versioning
Artem Roma 2015-01-22 16:33:04 +02:00
parent 6c046b69d2
commit 6c250763b4
19 changed files with 424 additions and 73 deletions
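
To make the rule from the commit message concrete, the snippet below is a minimal stand-alone sketch of the comparison that _compare_release_versions (added to nose_utils in this change) performs. The helper name is_available and the sample values are illustrative only, not part of the adapter code.

# A minimal sketch of the availability rule. Release strings follow the
# '<openstack_version>-<fuel_version>' form used in this change, e.g. '2014.2-6.1'.
from distutils import version

def is_available(cluster_release, test_release):
    # The test is available when both parts of the cluster release are
    # equal to or greater than the parts declared for the test.
    cl_os, cl_fuel = cluster_release.split('-')
    test_os, test_fuel = test_release.split('-')
    return (version.StrictVersion(cl_os) >= version.StrictVersion(test_os)
            and version.StrictVersion(cl_fuel) >= version.StrictVersion(test_fuel))

# is_available('2014.2-6.1', '2014.2-6.0')  -> True
# is_available('2014.2-6.0', '2014.2-6.1')  -> False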


@ -229,7 +229,8 @@ class CeilometerApiPlatformTests(ceilometermanager.CeilometerBaseTest):
3. Check keystone role notifications.
4. Check keystone group notifications.
Duration: 5 s.
Deployment tags: Ceilometer, 2014.2-6.0, 2014.2-6.1
Available since release: 2014.2-6.0
Deployment tags: Ceilometer
"""
tenant, user, role, group, trust = self.identity_helper()


@ -124,7 +124,8 @@ class VanillaTwoClusterTest(SaharaClusterTest):
7. Delete the cluster template
Duration: 3600 s.
Deployment tags: Sahara, 2014.2-6.1
Available since release: 2014.2-6.1
Deployment tags: Sahara
"""
fail_msg = 'Failed to create cluster template.'


@ -53,7 +53,8 @@ class NetworksTest(nmanager.SanityChecksTest):
2. Confirm that a response is received.
Duration: 20 s.
Deployment tags: neutron, 2014.2-6.0, 2014.2-6.1
Available since release: 2014.2-6.0
Deployment tags: neutron
"""
fail_msg = "Networks list is unavailable. "
networks = self.verify(20, self._list_networks, 1,


@ -71,7 +71,8 @@ class VanillaTwoTemplatesTest(SaharaTemplatesTest):
8. Delete the cluster template
Duration: 80 s.
Deployment tags: Sahara, 2014.2-6.1
Available since release: 2014.2-6.1
Deployment tags: Sahara
"""
fail_msg = 'Failed to create node group template.'
@ -133,7 +134,8 @@ class HDPTwoTemplatesTest(SaharaTemplatesTest):
8. Delete the cluster template
Duration: 80 s.
Deployment tags: Sahara, 2014.2-6.1
Available since release: 2014.2-6.1
Deployment tags: Sahara
"""
fail_msg = 'Failed to create node group template.'


@ -53,6 +53,7 @@ adapter_opts = [
cli_opts = [
cfg.BoolOpt('debug', default=False),
cfg.BoolOpt('clear-db', default=False),
cfg.BoolOpt('after-initialization-environment-hook', default=False),
cfg.StrOpt('debug_tests')
]


@ -28,7 +28,7 @@ LOG = logging.getLogger(__name__)
TEST_REPOSITORY = []
def clean_db(session):
def delete_db_data(session):
LOG.info('Starting clean db action.')
session.query(models.ClusterTestingPattern).delete()
session.query(models.ClusterState).delete()
@ -42,12 +42,14 @@ def cache_test_repository(session):
.options(joinedload('tests'))\
.all()
crucial_tests_attrs = ['name', 'deployment_tags']
crucial_tests_attrs = ['name', 'deployment_tags',
'available_since_release']
for test_set in test_repository:
data_elem = dict()
data_elem['test_set_id'] = test_set.id
data_elem['deployment_tags'] = test_set.deployment_tags
data_elem['available_since_release'] = test_set.available_since_release
data_elem['tests'] = []
for test in test_set.tests:
@ -59,26 +61,24 @@ def cache_test_repository(session):
def discovery_check(session, cluster, token=None):
cluster_deployment_args = _get_cluster_depl_tags(cluster, token=token)
cluster_attrs = _get_cluster_attrs(cluster, token=token)
cluster_data = {
'cluster_id': cluster,
'deployment_tags': cluster_deployment_args
'id': cluster,
'deployment_tags': cluster_attrs['deployment_tags'],
'release_version': cluster_attrs['release_version'],
}
cluster_state = session.query(models.ClusterState)\
.filter_by(id=cluster_data['cluster_id'])\
.filter_by(id=cluster_data['id'])\
.first()
if not cluster_state:
session.add(
models.ClusterState(
id=cluster_data['cluster_id'],
deployment_tags=list(cluster_data['deployment_tags'])
)
models.ClusterState(**cluster_data)
)
# flush data to db, cuz _add_cluster_testing_pattern
# flush data to db, because _add_cluster_testing_pattern
# is dependent on it
session.flush()
@ -100,7 +100,9 @@ def discovery_check(session, cluster, token=None):
session.merge(cluster_state)
def _get_cluster_depl_tags(cluster_id, token=None):
def _get_cluster_attrs(cluster_id, token=None):
cluster_attrs = {}
REQ_SES = requests.Session()
REQ_SES.trust_env = False
@ -130,13 +132,15 @@ def _get_cluster_depl_tags(cluster_id, token=None):
release_data = REQ_SES.get(release_url).json()
if 'version' in release_data:
cluster_attrs['release_version'] = release_data['version']
# info about deployment type and operating system
mode = 'ha' if 'ha' in response['mode'].lower() else response['mode']
deployment_tags.add(mode)
deployment_tags.add(release_data.get(
'operating_system', 'failed to get os'))
if 'version' in release_data:
deployment_tags.add(release_data['version'])
# networks manager
network_type = response.get('net_provider', 'nova_network')
deployment_tags.add(network_type)
@ -175,7 +179,11 @@ def _get_cluster_depl_tags(cluster_id, token=None):
if libvrt_data and libvrt_data.get('value'):
deployment_tags.add(libvrt_data['value'])
return set([tag.lower() for tag in deployment_tags])
cluster_attrs['deployment_tags'] = set(
[tag.lower() for tag in deployment_tags]
)
return cluster_attrs
def _add_cluster_testing_pattern(session, cluster_data):
@ -188,22 +196,14 @@ def _add_cluster_testing_pattern(session, cluster_data):
cache_test_repository(session)
for test_set in TEST_REPOSITORY:
if nose_utils.process_deployment_tags(
cluster_data['deployment_tags'],
test_set['deployment_tags']
):
testing_pattern = dict()
testing_pattern['cluster_id'] = cluster_data['cluster_id']
if nose_utils.tests_availability_cond(cluster_data, test_set):
testing_pattern = {}
testing_pattern['cluster_id'] = cluster_data['id']
testing_pattern['test_set_id'] = test_set['test_set_id']
testing_pattern['tests'] = []
for test in test_set['tests']:
if nose_utils.process_deployment_tags(
cluster_data['deployment_tags'],
test['deployment_tags']
):
if nose_utils.tests_availability_cond(cluster_data, test):
testing_pattern['tests'].append(test['name'])
to_database.append(


@ -14,11 +14,76 @@
import logging
from sqlalchemy import create_engine
from sqlalchemy.engine import reflection
from sqlalchemy import MetaData
from sqlalchemy import schema
from fuel_plugin.ostf_adapter.storage import alembic_cli
LOG = logging.getLogger(__name__)
def clear_db(dbpath):
"""Clean database (to prevent issue with changed
head revision script) and upgrade to head revision.
Expect 0 on success by nailgun
Exception is good enough signal that something goes wrong
"""
db_engine = create_engine(dbpath)
conn = db_engine.connect()
trans = conn.begin()
meta = MetaData()
meta.reflect(bind=db_engine)
inspector = reflection.Inspector.from_engine(db_engine)
tbs = []
all_fks = []
for table_name in inspector.get_table_names():
fks = []
for fk in inspector.get_foreign_keys(table_name):
if not fk['name']:
continue
fks.append(
schema.ForeignKeyConstraint((), (), name=fk['name'])
)
t = schema.Table(
table_name,
meta,
*fks,
extend_existing=True
)
tbs.append(t)
all_fks.extend(fks)
for fkc in all_fks:
conn.execute(schema.DropConstraint(fkc))
for table in tbs:
conn.execute(schema.DropTable(table))
custom_types = conn.execute(
"SELECT n.nspname as schema, t.typname as type "
"FROM pg_type t LEFT JOIN pg_catalog.pg_namespace n "
"ON n.oid = t.typnamespace "
"WHERE (t.typrelid = 0 OR (SELECT c.relkind = 'c' "
"FROM pg_catalog.pg_class c WHERE c.oid = t.typrelid)) "
"AND NOT EXISTS(SELECT 1 FROM pg_catalog.pg_type el "
"WHERE el.oid = t.typelem AND el.typarray = t.oid) "
"AND n.nspname NOT IN ('pg_catalog', 'information_schema')"
)
for tp in custom_types:
conn.execute("DROP TYPE {0}".format(tp[1]))
trans.commit()
alembic_cli.drop_migration_meta(db_engine)
conn.close()
db_engine.dispose()
return 0
def after_initialization_environment_hook():
"""Expect 0 on success by nailgun
Exception is good enough signal that something goes wrong


@ -81,13 +81,17 @@ class DiscoveryPlugin(plugins.Plugin):
test_id = test.id()
for test_set_id in self.test_sets.keys():
if self.test_belongs_to_testset(test_id, test_set_id):
data = dict()
test_kwargs = {
"title": "",
"description": "",
"duration": "",
"deployment_tags": [],
"available_since_release": "",
}
(data['title'], data['description'],
data['duration'], data['deployment_tags']) = \
nose_utils.get_description(test)
test_kwargs.update(nose_utils.get_description(test))
data.update(
test_kwargs.update(
{
'test_set_id': test_set_id,
'name': test_id
@ -95,7 +99,7 @@ class DiscoveryPlugin(plugins.Plugin):
)
try:
test_obj = models.Test(**data)
test_obj = models.Test(**test_kwargs)
self.session.merge(test_obj)
# flush tests data into db


@ -20,6 +20,8 @@ import os
import re
import traceback
from distutils import version
from nose import case
from nose.suite import ContextSuite
@ -69,6 +71,7 @@ def get_description(test_obj):
if isinstance(test_obj, case.Test):
docstring = test_obj.test._testMethodDoc
test_data = {}
if docstring:
deployment_tags_pattern = r'Deployment tags:.?(?P<tags>.+)?'
docstring, deployment_tags = _process_docstring(
@ -83,21 +86,31 @@ def get_description(test_obj):
deployment_tags = [
tag.strip().lower() for tag in deployment_tags.split(',')
]
else:
deployment_tags = []
test_data["deployment_tags"] = deployment_tags
rel_vers_pattern = "Available since release:.?(?P<rel_vers>.+)"
docstring, rel_vers = _process_docstring(
docstring,
rel_vers_pattern
)
if rel_vers:
test_data["available_since_release"] = rel_vers
duration_pattern = r'Duration:.?(?P<duration>.+)'
docstring, duration = _process_docstring(
docstring,
duration_pattern
)
if duration:
test_data["duration"] = duration
docstring = docstring.split('\n')
name = docstring.pop(0)
description = u'\n'.join(docstring) if docstring else u""
test_data["title"] = docstring.pop(0)
test_data["description"] = \
u'\n'.join(docstring) if docstring else u""
return name, description, duration, deployment_tags
return u"", u"", u"", []
return test_data
def modify_test_name_for_nose(test_path):
@ -163,7 +176,7 @@ def get_tests_to_update(test):
return tests
def process_deployment_tags(cluster_depl_tags, test_depl_tags):
def _process_deployment_tags(cluster_depl_tags, test_depl_tags):
"""Process alternative deployment tags for testsets and tests
and determines whether current test entity (testset or test)
is appropriate for cluster.
@ -179,3 +192,46 @@ def process_deployment_tags(cluster_depl_tags, test_depl_tags):
return True
return False
def _compare_release_versions(cluster_release_version, test_release_version):
cl_openstack_ver, cl_fuel_ver = cluster_release_version.split('-')
test_openstack_ver, test_fuel_ver = test_release_version.split('-')
cond = (
(version.StrictVersion(cl_openstack_ver) >=
version.StrictVersion(test_openstack_ver))
and
(version.StrictVersion(cl_fuel_ver) >=
version.StrictVersion(test_fuel_ver))
)
return cond
def tests_availability_cond(cluster_data, test_entity_data):
is_test_available = False
is_rel_ver_suitable = False
# if the 'available_since_release' attribute of the test entity
# is empty, the test entity is available for the cluster;
# otherwise run the release comparison logic
if not test_entity_data['available_since_release']:
is_rel_ver_suitable = True
else:
is_rel_ver_suitable = _compare_release_versions(
cluster_data['release_version'],
test_entity_data['available_since_release']
)
# if the release version of the test entity is suitable for the cluster,
# check the test entity's compatibility with the cluster
# by deployment tags
if is_rel_ver_suitable:
is_depl_tags_suitable = _process_deployment_tags(
cluster_data['deployment_tags'],
test_entity_data['deployment_tags']
)
if is_depl_tags_suitable:
is_test_available = True
return is_test_available
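
For illustration, here is a hedged usage sketch of tests_availability_cond as it is called from mixins.py; the dictionary shapes follow discovery_check above, but the concrete values (and the test name) are made up for the example.

# Illustrative data only; shapes mirror what discovery_check builds in mixins.py,
# and tests_availability_cond is the nose_utils helper shown above.
cluster_data = {
    'id': 1,
    'deployment_tags': set(['ha', 'ubuntu', 'nova_network']),
    'release_version': '2014.2-6.1',
}
test_entity_data = {
    'name': 'some.test.path',          # hypothetical test name
    'deployment_tags': ['nova_network'],
    'available_since_release': '2014.2-6.0',
}
# True: 2014.2-6.1 satisfies 'available since 2014.2-6.0' and the
# 'nova_network' deployment tag matches the cluster's tags.
tests_availability_cond(cluster_data, test_entity_data)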


@ -44,12 +44,18 @@ def main():
root = app.setup_app({})
# completely clean db (drop tables, constraints and types)
# plus drop the alembic_version table (needed if, for example, the head
# migration script was changed after it was applied)
if CONF.clear_db:
return nailgun_hooks.clear_db(CONF.adapter.dbpath)
if CONF.after_initialization_environment_hook:
return nailgun_hooks.after_initialization_environment_hook()
with engine.contexted_session(CONF.adapter.dbpath) as session:
# performing cleaning of expired data (if any) in db
mixins.clean_db(session)
mixins.delete_db_data(session)
log.info('Cleaned up database.')
# discover testsets and their tests
CORE_PATH = CONF.debug_tests or 'fuel_health'


@ -33,3 +33,7 @@ def do_apply_migrations():
# apply initial migration
command.upgrade(alembic_conf, 'head')
def drop_migration_meta(engine):
engine.execute("DROP TABLE IF EXISTS alembic_version")


@ -0,0 +1,46 @@
# -*- coding: utf-8 -*-
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""release_version
Revision ID: 495b4125ae83
Revises: 54904076d82d
Create Date: 2015-01-22 17:24:12.963260
"""
# revision identifiers, used by Alembic.
revision = '495b4125ae83'
down_revision = '54904076d82d'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('test_sets', sa.Column('available_since_release',
sa.String(64),
default=""))
op.add_column('tests', sa.Column('available_since_release',
sa.String(64),
default=""))
op.add_column('cluster_state', sa.Column('release_version', sa.String(64)))
def downgrade():
op.drop_column('test_sets', 'available_since_release')
op.drop_column('tests', 'available_since_release')
op.drop_column('cluster_state', 'release_version')


@ -48,6 +48,7 @@ class ClusterState(BASE):
id = sa.Column(sa.Integer, primary_key=True, autoincrement=False)
deployment_tags = sa.Column(ARRAY(sa.String(64)))
release_version = sa.Column(sa.String(64))
class ClusterTestingPattern(BASE):
@ -90,6 +91,8 @@ class TestSet(BASE):
# with current test set
exclusive_testsets = sa.Column(ARRAY(sa.String(128)))
available_since_release = sa.Column(sa.String(64), default="")
tests = relationship(
'Test',
backref='test_set',
@ -135,6 +138,7 @@ class Test(BASE):
time_taken = sa.Column(sa.Float())
meta = sa.Column(fields.JsonField())
deployment_tags = sa.Column(ARRAY(sa.String(64)))
available_since_release = sa.Column(sa.String(64), default="")
test_run_id = sa.Column(
sa.Integer(),


@ -0,0 +1,48 @@
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
__profile__ = {
"id": "test_versioning",
"driver": "nose",
"test_path": "fuel_plugin/tests/functional/dummy_tests/test_versioning.py",
"description": "Test suite that contains fake tests for versioning check",
"deployment_tags": ["releases_comparison"],
"test_runs_ordering_priority": 13,
"exclusive_testsets": [],
"available_since_release": "2015.2-6.0",
}
import unittest2
class TestVersioning(unittest2.TestCase):
def test_simple_fake_first(self):
"""This is simple fake test
for versioning checking.
It should be discovered for
releases == of >= 2015.2-6.0
Available since release: 2015.2-6.0
Deployment tags: releases_comparison
"""
self.assertTrue(True)
def test_simple_fake_second(self):
"""This is simple fake test
for versioning checking.
It should be discovered for
releases == of >= 2015.2-6.1
Available since release: 2015.2-6.1
Deployment tags: releases_comparison
"""
self.assertTrue(True)


@ -39,7 +39,8 @@ CLUSTERS = {
'mode': 'ha'
},
'release_data': {
'operating_system': 'rhel'
'operating_system': 'rhel',
'version': '2015.2-1.0',
},
'cluster_attributes': {
'editable': {
@ -54,7 +55,8 @@ CLUSTERS = {
'mode': 'multinode',
},
'release_data': {
'operating_system': 'ubuntu'
'operating_system': 'ubuntu',
'version': '2015.2-1.0',
},
'cluster_attributes': {
'editable': {
@ -69,7 +71,8 @@ CLUSTERS = {
'mode': 'ha'
},
'release_data': {
'operating_system': 'rhel'
'operating_system': 'rhel',
'version': '2015.2-1.0',
},
'cluster_attributes': {
'editable': {
@ -91,7 +94,8 @@ CLUSTERS = {
'mode': 'test_error'
},
'release_data': {
'operating_system': 'none'
'operating_system': 'none',
'version': '2015.2-1.0',
},
'cluster_attributes': {
'editable': {
@ -106,7 +110,24 @@ CLUSTERS = {
'mode': 'dependent_tests'
},
'release_data': {
'operating_system': 'none'
'operating_system': 'none',
'version': '2015.2-1.0',
},
'cluster_attributes': {
'editable': {
'additional_components': {},
'common': {}
}
}
},
6: {
'cluster_meta': {
'release_id': 6,
'mode': 'releases_comparison'
},
'release_data': {
'operating_system': '',
'version': '2015.2-6.0',
},
'cluster_attributes': {
'editable': {


@ -222,11 +222,13 @@ class TestTestRunsController(base.BaseWSGITest):
class TestClusterRedeployment(base.BaseWSGITest):
@mock.patch('fuel_plugin.ostf_adapter.mixins._get_cluster_depl_tags')
def test_cluster_redeployment_with_different_tags(self, m_get_depl_tags):
m_get_depl_tags.return_value = set(
['multinode', 'centos']
)
@mock.patch('fuel_plugin.ostf_adapter.mixins._get_cluster_attrs')
def test_cluster_redeployment_with_different_tags(self,
m_get_cluster_attrs):
m_get_cluster_attrs.return_value = {
'deployment_tags': set(['multinode', 'centos']),
'release_version': '2015.2-1.0'
}
cluster_id = self.expected['cluster']['id']
self.app.get('/v1/testsets/{0}'.format(cluster_id))
@ -260,10 +262,44 @@ class TestClusterRedeployment(base.BaseWSGITest):
# patch the request_to_nailgun function in order to emulate
# redeployment of cluster
m_get_depl_tags.return_value = set(
['multinode', 'ubuntu', 'nova_network']
)
m_get_cluster_attrs.return_value = {
'deployment_tags': set(['multinode', 'ubuntu', 'nova_network']),
'release_version': '2015.2-1.0'
}
self.app.get('/v1/testsets/{0}'.format(cluster_id))
self.assertTrue(self.is_background_working)
class TestVersioning(base.BaseWSGITest):
def test_discover_tests_with_versions(self):
cluster_id = 6
self.mock_api_for_cluster(cluster_id)
self.app.get('/v1/testsets/{0}'.format(cluster_id))
self.expected = {
'cluster': {
'id': 6,
'deployment_tags': set(['releases_comparison'])
},
'test_sets': ['general_test', 'stopped_test', 'test_versioning',
'environment_variables'],
'tests': [self.ext_id + test for test in [
'general_test.Dummy_test.test_fast_pass',
'general_test.Dummy_test.test_long_pass',
'general_test.Dummy_test.test_fast_fail',
'general_test.Dummy_test.test_fast_error',
'general_test.Dummy_test.test_fail_with_step',
'general_test.Dummy_test.test_skip',
'general_test.Dummy_test.test_skip_directly',
'stopped_test.dummy_tests_stopped.test_really_long',
'stopped_test.dummy_tests_stopped.test_one_no_so_long',
'stopped_test.dummy_tests_stopped.test_not_long_at_all',
('test_environment_variables.TestEnvVariables.'
'test_os_credentials_env_variables'),
'test_versioning.TestVersioning.test_simple_fake_first',
]]
}
self.assertTrue(self.is_background_working)


@ -54,10 +54,16 @@ class TestNoseDiscovery(base.BaseUnitTest):
if isinstance(el[0][0], models.Test)
]
def _find_needed_test(self, test_name):
return next(t for t in self.tests if t.name == test_name)
def _find_needed_test_set(self, test_set_id):
return next(t for t in self.test_sets if t.id == test_set_id)
def test_discovery(self):
expected = {
'test_sets_count': 9,
'tests_count': 27
'test_sets_count': 10,
'tests_count': 29
}
self.assertTrue(
@ -115,12 +121,9 @@ class TestNoseDiscovery(base.BaseUnitTest):
'deployment_tags': ['one_tag| another_tag', 'other_tag']
}
}
needed_testset = self._find_needed_test_set(expected['testset']['id'])
needed_testset = [testset for testset in self.test_sets
if testset.id == expected['testset']['id']][0]
needed_test = [test for test in self.tests
if test.name == expected['test']['name']][0]
needed_test = self._find_needed_test(expected['test']['name'])
self.assertEqual(
needed_testset.deployment_tags,
@ -157,3 +160,40 @@ class TestNoseDiscovery(base.BaseUnitTest):
nose_discovery.DiscoveryPlugin.test_belongs_to_testset(
test_id, test_set_id)
)
def test_release_version_attribute(self):
for test_entity in (self.tests, self.test_sets):
self.assertTrue(
all(
[hasattr(t, 'available_since_release')
for t in test_entity]
)
)
expected = {
'test_set': {
'id': 'test_versioning',
'available_since_release': '2015.2-6.0',
},
'tests': [
{'name': ('fuel_plugin.testing.fixture.dummy_tests.'
'test_versioning.TestVersioning.'
'test_simple_fake_first'),
'available_since_release': '2015.2-6.0', },
{'name': ('fuel_plugin.testing.fixture.dummy_tests.'
'test_versioning.TestVersioning.'
'test_simple_fake_second'),
'available_since_release': '2015.2-6.1', },
]
}
needed_test_set = self._find_needed_test_set(
expected['test_set']['id']
)
self.assertEqual(needed_test_set.available_since_release,
expected['test_set']['available_since_release'])
for test in expected['tests']:
needed_test = self._find_needed_test(test['name'])
self.assertEqual(needed_test.available_since_release,
test['available_since_release'])


@ -24,13 +24,15 @@ class TestDeplTagsGetter(base.BaseUnitTest):
def setUp(self):
config.init_config([])
def test_get_cluster_depl_tags(self):
def test_get_cluster_attrs(self):
expected = {
'cluster_id': 3,
'depl_tags': set(
['ha', 'rhel', 'additional_components',
'murano', 'nova_network', 'public_on_all_nodes']
)
'attrs': {
'deployment_tags': set(
['ha', 'rhel', 'additional_components',
'murano', 'nova_network', 'public_on_all_nodes']),
'release_version': '2015.2-1.0'
}
}
with requests_mock.Mocker() as m:
@ -41,6 +43,6 @@ class TestDeplTagsGetter(base.BaseUnitTest):
json=cluster['cluster_attributes'])
m.register_uri('GET', '/api/releases/3',
json=cluster['release_data'])
res = mixins._get_cluster_depl_tags(expected['cluster_id'])
res = mixins._get_cluster_attrs(expected['cluster_id'])
self.assertEqual(res, expected['depl_tags'])
self.assertEqual(res, expected['attrs'])


@ -213,6 +213,18 @@ function syncdb {
}
function cleardb {
local SERVER_SETTINGS=$1
local RUN_CLEARDB="\
ostf-server \
--debug
--clear-db
--config-file $SERVER_SETTINGS"
tox -evenv -- $RUN_CLEARDB > /dev/null
}
function run_integration_tests {
echo "Starting integration tests"
@ -229,6 +241,7 @@ function run_integration_tests {
create_ostf_conf $config $artifacts
cleardb $config
syncdb $config
# run tests