Fix pep8 for ostf_adapter

A PEP8 fix has been added for the ostf_adapter scripts. The fix is incomplete
(especially for E501 "Line is too long") because fixing those remaining
violations would be meaningless.

Fix unit tests

Several unit tests have been fixed in order to support the new discovery
behaviour — a new deployment tag that indicates the network manager used by
the deployment (nova-network or quantum).

Change-Id: Id7b39477e33719695bfcc99173825d75b304c5b3
This commit is contained in:
Artem Roma 2013-10-18 19:00:57 +03:00
parent 5d83cd88f9
commit fd89458b09
26 changed files with 458 additions and 274 deletions

5
fabfile.py vendored
View File

@ -59,7 +59,10 @@ def createmigration(comment):
def migrate(database='ostf'):
path = 'postgresql+psycopg2://ostf:ostf@localhost/{0}'.format(database)
local('ostf-server --after-initialization-environment-hook --dbpath {0}'.format(path))
local(
'ostf-server --after-initialization-environment-hook --dbpath {0}'
.format(path)
)
def auth(method='trust', os='ubuntu'):

View File

@ -56,7 +56,7 @@ class NoseDriver(object):
exit=False,
argv=['ostf_tests'] + argv_add)
self._named_threads.pop(int(test_run_id), None)
except Exception, e:
except Exception:
LOG.exception('Test run ID: %s', test_run_id)
finally:
models.TestRun.update_test_run(

View File

@ -14,7 +14,6 @@
import logging
import os
import pecan
from nose import plugins

View File

@ -17,7 +17,6 @@ import logging
import os
from nose import plugins
from nose.suite import ContextSuite
from pecan import conf
from fuel_plugin.ostf_adapter.nose_plugin import nose_utils

View File

@ -19,53 +19,75 @@ from fuel_plugin.ostf_adapter.storage.fields import JsonField, ListField
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('cluster_state',
sa.Column('id', sa.Integer(), autoincrement=False, nullable=False),
sa.Column('deployment_tags', postgresql.ARRAY(sa.String(length=64)), nullable=True),
sa.PrimaryKeyConstraint('id')
op.create_table(
'cluster_state',
sa.Column('id', sa.Integer(), autoincrement=False, nullable=False),
sa.Column('deployment_tags', postgresql.ARRAY(sa.String(length=64)),
nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('test_sets',
sa.Column('id', sa.String(length=128), nullable=False),
sa.Column('cluster_id', sa.Integer(), autoincrement=False, nullable=False),
sa.Column('description', sa.String(length=256), nullable=True),
sa.Column('test_path', sa.String(length=256), nullable=True),
sa.Column('driver', sa.String(length=128), nullable=True),
sa.Column('additional_arguments', ListField(), nullable=True),
sa.Column('cleanup_path', sa.String(length=128), nullable=True),
sa.Column('meta', JsonField(), nullable=True),
sa.Column('deployment_tags', postgresql.ARRAY(sa.String(length=64)), nullable=True),
sa.PrimaryKeyConstraint('id', 'cluster_id')
op.create_table(
'test_sets',
sa.Column('id', sa.String(length=128), nullable=False),
sa.Column('cluster_id', sa.Integer(), autoincrement=False,
nullable=False),
sa.Column('description', sa.String(length=256), nullable=True),
sa.Column('test_path', sa.String(length=256), nullable=True),
sa.Column('driver', sa.String(length=128), nullable=True),
sa.Column('additional_arguments', ListField(), nullable=True),
sa.Column('cleanup_path', sa.String(length=128), nullable=True),
sa.Column('meta', JsonField(), nullable=True),
sa.Column('deployment_tags', postgresql.ARRAY(sa.String(length=64)),
nullable=True),
sa.PrimaryKeyConstraint('id', 'cluster_id')
)
op.create_table('test_runs',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('cluster_id', sa.Integer(), nullable=False),
sa.Column('status', sa.Enum('running', 'finished', name='test_run_states'), nullable=False),
sa.Column('meta', JsonField(), nullable=True),
sa.Column('started_at', sa.DateTime(), nullable=True),
sa.Column('ended_at', sa.DateTime(), nullable=True),
sa.Column('test_set_id', sa.String(length=128), nullable=True),
sa.ForeignKeyConstraint(['test_set_id', 'cluster_id'], ['test_sets.id', 'test_sets.cluster_id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
op.create_table(
'test_runs',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('cluster_id', sa.Integer(), nullable=False),
sa.Column('status',
sa.Enum('running', 'finished', name='test_run_states'),
nullable=False),
sa.Column('meta', JsonField(), nullable=True),
sa.Column('started_at', sa.DateTime(), nullable=True),
sa.Column('ended_at', sa.DateTime(), nullable=True),
sa.Column('test_set_id', sa.String(length=128), nullable=True),
sa.ForeignKeyConstraint(['test_set_id', 'cluster_id'],
['test_sets.id', 'test_sets.cluster_id'],
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
op.create_table('tests',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=512), nullable=True),
sa.Column('title', sa.String(length=512), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('duration', sa.String(length=512), nullable=True),
sa.Column('message', sa.Text(), nullable=True),
sa.Column('traceback', sa.Text(), nullable=True),
sa.Column('status', sa.Enum('wait_running', 'running', 'failure', 'success', 'error', 'stopped', 'disabled', name='test_states'), nullable=True),
sa.Column('step', sa.Integer(), nullable=True),
sa.Column('time_taken', sa.Float(), nullable=True),
sa.Column('meta', JsonField(), nullable=True),
sa.Column('deployment_tags', postgresql.ARRAY(sa.String(length=64)), nullable=True),
sa.Column('cluster_id', sa.Integer(), nullable=False),
sa.Column('test_set_id', sa.String(length=128), nullable=True),
sa.Column('test_run_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['test_run_id'], ['test_runs.id'], ondelete='CASCADE'),
sa.ForeignKeyConstraint(['test_set_id', 'cluster_id'], ['test_sets.id', 'test_sets.cluster_id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
op.create_table(
'tests',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=512), nullable=True),
sa.Column('title', sa.String(length=512), nullable=True),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('duration', sa.String(length=512), nullable=True),
sa.Column('message', sa.Text(), nullable=True),
sa.Column('traceback', sa.Text(), nullable=True),
sa.Column('status',
sa.Enum('wait_running', 'running', 'failure',
'success', 'error', 'stopped',
'disabled', name='test_states'),
nullable=True),
sa.Column('step', sa.Integer(), nullable=True),
sa.Column('time_taken', sa.Float(), nullable=True),
sa.Column('meta', JsonField(), nullable=True),
sa.Column('deployment_tags', postgresql.ARRAY(sa.String(length=64)),
nullable=True),
sa.Column('cluster_id', sa.Integer(), nullable=False),
sa.Column('test_set_id', sa.String(length=128), nullable=True),
sa.Column('test_run_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['test_run_id'], ['test_runs.id'],
ondelete='CASCADE'),
sa.ForeignKeyConstraint(['test_set_id', 'cluster_id'],
['test_sets.id', 'test_sets.cluster_id'],
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###

View File

@ -14,8 +14,6 @@
import logging
from stevedore import extension
from pecan import hooks
from fuel_plugin.ostf_adapter.storage import engine

View File

@ -18,7 +18,7 @@ import sys
from requests import get
from docopt import docopt
from clint.textui import puts, colored, columns, indent
from clint.textui import puts, colored, columns
from blessings import Terminal
from fuel_plugin.ostf_client.client import TestingAdapterClient
@ -36,7 +36,8 @@ def main():
args = docopt(__doc__, version='0.1')
test_set = args['<test_set>']
cluster_id = args['--id'] or os.environ.get('OSTF_CLUSTER_ID') \
or get_cluster_id() or '1'
or get_cluster_id() or '1'
tests = args['--tests'] or []
timeout = args['--timeout']
quite = args['-q']
@ -95,10 +96,12 @@ def main():
finished_statuses = ['success', 'failure', 'stopped', 'error']
finished_tests = [item for item in current_tests
finished_tests = [
item for item in current_tests
if item['status'] in finished_statuses
and item
not in quite_polling_hook.__dict__['published_tests']]
not in quite_polling_hook.__dict__['published_tests']
]
for test in finished_tests:
print_results(test)
@ -117,9 +120,9 @@ def main():
try:
r = client.run_testset_with_timeout(test_set, cluster_id,
timeout, 2, polling_hook)
except AssertionError as e:
except AssertionError:
return 1
except KeyboardInterrupt as e:
except KeyboardInterrupt:
r = client.stop_testrun_last(test_set, cluster_id)
print t.move_left + t.move_left,
polling_hook(r)

View File

@ -121,15 +121,16 @@ class BaseAdapterTest(TestCase):
for test_id, test_data in tests.iteritems():
for data_key, data_value in test_data.iteritems():
if not response_tests[test_id][data_key] == data_value:
raise AssertionError(('excpected: test_set {0}, test_id {1} data_key {2}, data_value {3}...'
'got: {4}')
raise AssertionError(
('excpected: test_set {0}, test_id {1} '
'data_key {2}, data_value {3}...got: {4}')
.format(
test_set,
test_id,
data_key,
data_value,
response_tests[test_id][data_key]
)
)
)
@staticmethod

View File

@ -12,96 +12,105 @@
# License for the specific language governing permissions and limitations
# under the License.
CONFIG = {'compute-admin_password': 'nova',
'compute-admin_tenant_name': '',
'compute-admin_username': '',
'compute_allow_tenant_isolation': 'True',
'compute_allow_tenant_reuse': 'true',
'compute_block_migrate_supports_cinder_iscsi': 'false',
'compute_build_interval': '3',
'compute_build_timeout': '300',
'compute_catalog_type': 'compute',
'compute_change_password_available': 'False',
'compute_controller_node': '10.30.1.101',
'compute_controller_node_name': 'fuel-controller-01.localdomain.',
'compute_controller_node_ssh_password': 'r00tme',
'compute_controller_node_ssh_user': 'root',
'compute_create_image_enabled': 'true',
'compute_disk_config_enabled_override': 'true',
'compute_enabled_services': 'nova-cert, nova-consoleauth, nova-scheduler, nova-conductor, nova-cert, nova-consoleauth, nova-scheduler, nova-conductor, nova-cert, nova-consoleauth, nova-scheduler, nova-conductor, nova-compute',
'compute_fixed_network_name': 'private',
'compute_flavor_ref': '1',
'compute_flavor_ref_alt': '2',
'compute_image_alt_ssh_user': 'cirros',
'compute_image_ref': '53734a0d-60a8-4689-b7c8-3c14917a7197',
'compute_image_ref_alt': '53734a0d-60a8-4689-b7c8-3c14917a7197',
'compute_image_ssh_user': 'cirros',
'compute_ip_version_for_ssh': '4',
'compute_live_migration_available': 'False',
'compute_network_for_ssh': 'private',
'compute_resize_available': 'true',
'compute_run_ssh': 'false',
'compute_ssh_channel_timeout': '60',
'compute_ssh_timeout': '300',
'compute_ssh_user': 'cirros',
'compute_use_block_migration_for_live_migration': 'False',
'identity_admin_password': 'nova',
'identity_admin_tenant_name': 'admin',
'identity_admin_username': 'admin',
'identity_alt_password': 'nova',
'identity_alt_tenant_name': 'alt_demo',
'identity_alt_username': 'alt_demo',
'identity_catalog_type': 'identity',
'identity_disable_ssl_certificate_validation': 'False',
'identity_password': 'nova',
'identity_region': 'RegionOne',
'identity_strategy': 'keystone',
'identity_tenant_name': 'admin',
'identity_uri': 'http://172.18.164.70:5000/v2.0/',
'identity_url': 'http://172.18.164.70/',
'identity_username': 'admin',
'image_api_version': '1',
'image_catalog_type': 'image',
'image_http_image': 'http://download.cirros-cloud.net/0.3.1/cirros-0.3.1-x86_64-uec.tar.gz',
'network_api_version': '2.0',
'network_catalog_type': 'network',
'network_public_network_id': 'cdb94175-2002-449f-be41-6b8afce8de13',
'network_public_router_id': '2a6bf65b-01f7-4c91-840a-2b5f676e7016',
'network_quantum_available': 'true',
'network_tenant_network_cidr': '10.13.0.0/16',
'network_tenant_network_mask_bits': '28',
'network_tenant_networks_reachable': 'true',
'object-storage_catalog_type': 'object-store',
'object-storage_container_sync_interval': '5',
'object-storage_container_sync_timeout': '120',
'smoke_allow_tenant_isolation': 'True',
'smoke_allow_tenant_reuse': 'true',
'smoke_block_migrate_supports_cinder_iscsi': 'false',
'smoke_build_interval': '3',
'smoke_build_timeout': '300',
'smoke_catalog_type': 'compute',
'smoke_change_password_available': 'False',
'smoke_create_image_enabled': 'true',
'smoke_disk_config_enabled_override': 'true',
'smoke_fixed_network_name': 'net04',
'smoke_flavor_ref': '1',
'smoke_flavor_ref_alt': '2',
'smoke_image_alt_ssh_user': 'cirros',
'smoke_image_ref': '53734a0d-60a8-4689-b7c8-3c14917a7197',
'smoke_image_ref_alt': '53734a0d-60a8-4689-b7c8-3c14917a7197',
'smoke_image_ssh_user': 'cirros',
'smoke_ip_version_for_ssh': '4',
'smoke_live_migration_available': 'False',
'smoke_network_for_ssh': 'net04',
'smoke_resize_available': 'true',
'smoke_run_ssh': 'false',
'smoke_ssh_channel_timeout': '60',
'smoke_ssh_timeout': '320',
'smoke_ssh_user': 'cirros',
'smoke_use_block_migration_for_live_migration': 'False',
'volume_backend1_name': 'BACKEND_1',
'volume_backend2_name': 'BACKEND_2',
'volume_build_interval': '3',
'volume_build_timeout': '300',
'volume_catalog_type': 'volume',
'volume_multi_backend_enabled': 'false'}
CONFIG = {
'compute-admin_password': 'nova',
'compute-admin_tenant_name': '',
'compute-admin_username': '',
'compute_allow_tenant_isolation': 'True',
'compute_allow_tenant_reuse': 'true',
'compute_block_migrate_supports_cinder_iscsi': 'false',
'compute_build_interval': '3',
'compute_build_timeout': '300',
'compute_catalog_type': 'compute',
'compute_change_password_available': 'False',
'compute_controller_node': '10.30.1.101',
'compute_controller_node_name': 'fuel-controller-01.localdomain.',
'compute_controller_node_ssh_password': 'r00tme',
'compute_controller_node_ssh_user': 'root',
'compute_create_image_enabled': 'true',
'compute_disk_config_enabled_override': 'true',
'compute_enabled_services': (
'nova-cert, nova-consoleauth, nova-scheduler, '
'nova-conductor, nova-cert, nova-consoleauth, '
'nova-scheduler, nova-conductor, nova-cert, '
'nova-consoleauth, nova-scheduler, '
'nova-conductor, nova-compute'
),
'compute_fixed_network_name': 'private',
'compute_flavor_ref': '1',
'compute_flavor_ref_alt': '2',
'compute_image_alt_ssh_user': 'cirros',
'compute_image_ref': '53734a0d-60a8-4689-b7c8-3c14917a7197',
'compute_image_ref_alt': '53734a0d-60a8-4689-b7c8-3c14917a7197',
'compute_image_ssh_user': 'cirros',
'compute_ip_version_for_ssh': '4',
'compute_live_migration_available': 'False',
'compute_network_for_ssh': 'private',
'compute_resize_available': 'true',
'compute_run_ssh': 'false',
'compute_ssh_channel_timeout': '60',
'compute_ssh_timeout': '300',
'compute_ssh_user': 'cirros',
'compute_use_block_migration_for_live_migration': 'False',
'identity_admin_password': 'nova',
'identity_admin_tenant_name': 'admin',
'identity_admin_username': 'admin',
'identity_alt_password': 'nova',
'identity_alt_tenant_name': 'alt_demo',
'identity_alt_username': 'alt_demo',
'identity_catalog_type': 'identity',
'identity_disable_ssl_certificate_validation': 'False',
'identity_password': 'nova',
'identity_region': 'RegionOne',
'identity_strategy': 'keystone',
'identity_tenant_name': 'admin',
'identity_uri': 'http://172.18.164.70:5000/v2.0/',
'identity_url': 'http://172.18.164.70/',
'identity_username': 'admin',
'image_api_version': '1',
'image_catalog_type': 'image',
'image_http_image': ('http://download.cirros-cloud.net/'
'0.3.1/cirros-0.3.1-x86_64-uec.tar.gz'),
'network_api_version': '2.0',
'network_catalog_type': 'network',
'network_public_network_id': 'cdb94175-2002-449f-be41-6b8afce8de13',
'network_public_router_id': '2a6bf65b-01f7-4c91-840a-2b5f676e7016',
'network_quantum_available': 'true',
'network_tenant_network_cidr': '10.13.0.0/16',
'network_tenant_network_mask_bits': '28',
'network_tenant_networks_reachable': 'true',
'object-storage_catalog_type': 'object-store',
'object-storage_container_sync_interval': '5',
'object-storage_container_sync_timeout': '120',
'smoke_allow_tenant_isolation': 'True',
'smoke_allow_tenant_reuse': 'true',
'smoke_block_migrate_supports_cinder_iscsi': 'false',
'smoke_build_interval': '3',
'smoke_build_timeout': '300',
'smoke_catalog_type': 'compute',
'smoke_change_password_available': 'False',
'smoke_create_image_enabled': 'true',
'smoke_disk_config_enabled_override': 'true',
'smoke_fixed_network_name': 'net04',
'smoke_flavor_ref': '1',
'smoke_flavor_ref_alt': '2',
'smoke_image_alt_ssh_user': 'cirros',
'smoke_image_ref': '53734a0d-60a8-4689-b7c8-3c14917a7197',
'smoke_image_ref_alt': '53734a0d-60a8-4689-b7c8-3c14917a7197',
'smoke_image_ssh_user': 'cirros',
'smoke_ip_version_for_ssh': '4',
'smoke_live_migration_available': 'False',
'smoke_network_for_ssh': 'net04',
'smoke_resize_available': 'true',
'smoke_run_ssh': 'false',
'smoke_ssh_channel_timeout': '60',
'smoke_ssh_timeout': '320',
'smoke_ssh_user': 'cirros',
'smoke_use_block_migration_for_live_migration': 'False',
'volume_backend1_name': 'BACKEND_1',
'volume_backend2_name': 'BACKEND_2',
'volume_build_interval': '3',
'volume_build_timeout': '300',
'volume_catalog_type': 'volume',
'volume_multi_backend_enabled': 'false'
}

View File

@ -20,6 +20,7 @@ opts = [
cfg.StrOpt('quantum', default='fake')
]
class Config(TestCase):
def test_config(self):
file_path = env['OSTF_CONF_PATH']

View File

@ -15,7 +15,8 @@
__profile__ = {
"id": "ha_deployment_test",
"driver": "nose",
"test_path": "fuel_plugin/tests/functional/deployment_types_tests/ha_deployment_test.py",
"test_path": ("fuel_plugin/tests/functional/deployment_types_tests/"
"ha_deployment_test.py"),
"description": "Fake tests for HA deployment",
"deployment_tags": ["Ha"]
}

View File

@ -15,12 +15,12 @@
__profile__ = {
"id": "multinode_deployment_test",
"driver": "nose",
"test_path": "fuel_plugin/tests/functional/deployment_types_tests/multinode_deployment.py",
"test_path": ("fuel_plugin/tests/functional/deployment_types_tests/"
"multinode_deployment.py"),
"description": "Fake tests for multinode deployment on ubuntu",
"deployment_tags": ["multinode", "ubuntu"]
}
import time
import unittest

View File

@ -17,22 +17,42 @@ import json
import time
import pprint
def make_requests(claster_id, test_set):
body = [{'testset': test_set,
'metadata': {'config': {'identity_uri': 'hommeee'},
'cluster_id': claster_id}
body = [
{
'testset': test_set,
'metadata': {
'config': {'identity_uri': 'hommeee'},
'cluster_id': claster_id
}
]
}
]
headers = {'Content-Type': 'application/json'}
response = requests.post('http://172.18.164.37:8777/v1/testruns', data=json.dumps(body), headers=headers)
response = requests.post(
'http://172.18.164.37:8777/v1/testruns',
data=json.dumps(body),
headers=headers
)
pprint.pprint(response.json())
_id = response.json()[0]['id']
time.sleep(1)
body = [{'id': _id, 'status': 'stopped'}]
update = requests.put('http://172.18.164.37:8777/v1/testruns', data=json.dumps(body), headers=headers)
get_resp = requests.get('http://172.18.164.37:8777/v1/testruns/last/%s' % claster_id)
update = requests.put(
'http://172.18.164.37:8777/v1/testruns',
data=json.dumps(body),
headers=headers
)
get_resp = requests.get(
'http://172.18.164.37:8777/v1/testruns/last/%s' % claster_id
)
data = get_resp.json()
pprint.pprint(data)
if __name__ == '__main__':
make_requests(11, 'fuel_health')
make_requests(11, 'fuel_health')

View File

@ -14,17 +14,26 @@
import requests
import json
import time
import pprint
def make_requests(claster_id, test_set):
tests = ['functional.dummy_tests.general_test.Dummy_test.test_fast_pass',
'functional.dummy_tests.general_test.Dummy_test.test_fast_error']
body = [{'testset': test_set,
'tests': tests,
tests = [
'functional.dummy_tests.general_test.Dummy_test.test_fast_pass',
'functional.dummy_tests.general_test.Dummy_test.test_fast_error'
]
body = [
{
'testset': test_set,
'tests': tests,
'metadata': {
'cluster_id': claster_id}}]
'cluster_id': claster_id
}
}
]
headers = {'Content-Type': 'application/json'}
response = requests.post('http://127.0.0.1:8989/v1/testruns',
data=json.dumps(body), headers=headers)
@ -32,4 +41,4 @@ def make_requests(claster_id, test_set):
if __name__ == '__main__':
make_requests('101', 'plugin_general')
make_requests('101', 'plugin_general')

View File

@ -17,14 +17,20 @@ from gevent import monkey
monkey.patch_all()
import requests
import json
import time
import pprint
def make_requests(claster_id, test_set):
body = [{'testset': test_set,
'metadata': {'config': {},
'cluster_id': claster_id}}]
body = [
{
'testset': test_set,
'metadata': {
'config': {},
'cluster_id': claster_id
}
}
]
headers = {'Content-Type': 'application/json'}
response = requests.post('http://127.0.0.1:8989/v1/testruns',
data=json.dumps(body), headers=headers)
@ -32,4 +38,4 @@ def make_requests(claster_id, test_set):
if __name__ == '__main__':
make_requests('308', 'general_test')
make_requests('308', 'general_test')

View File

@ -14,21 +14,29 @@
import requests
import json
import time
import pprint
def make_requests(claster_id, test_set):
tests = ['fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_long_pass']
body = [{'id': claster_id,
'tests': tests,
'status': 'restarted',
}]
tests = [
('fuel_plugin.tests.functional.dummy_tests.general_test.'
'Dummy_test.test_long_pass')
]
body = [
{
'id': claster_id,
'tests': tests,
'status': 'restarted',
}
]
headers = {'Content-Type': 'application/json'}
response = requests.put('http://127.0.0.1:8989/v1/testruns',
data=json.dumps(body), headers=headers)
data=json.dumps(body), headers=headers)
pprint.pprint(response.json())
if __name__ == '__main__':
make_requests(370, 'plugin_general')
make_requests(370, 'plugin_general')

View File

@ -14,17 +14,22 @@
import requests
import json
import time
import pprint
def make_requests(claster_id, test_set):
body = [{'id': claster_id, 'status': 'stopped'}]
headers = {'Content-Type': 'application/json'}
update = requests.put(
'http://localhost:8989/v1/testruns',
data=json.dumps(body), headers=headers)
'http://localhost:8989/v1/testruns',
data=json.dumps(body),
headers=headers
)
data = update.json()
pprint.pprint(data)
if __name__ == '__main__':
make_requests(378, 'plugin_stopped')
make_requests(378, 'plugin_stopped')

View File

@ -33,13 +33,25 @@ class ScenarioTests(BaseAdapterTest):
from pprint import pprint
for i in range(1):
r = self.client.run_with_timeout(testset, tests, cluster_id, timeout)
r = self.client.run_with_timeout(
testset,
tests,
cluster_id,
timeout
)
pprint([item for item in r.test_sets[testset]['tests']])
if r.fuel_sanity['status'] == 'stopped':
running_tests = [test for test in r._tests
if r._tests[test]['status'] is 'stopped']
print "restarting: ", running_tests
result = self.client.restart_with_timeout(testset, running_tests, cluster_id, timeout)
result = self.client.restart_with_timeout(
testset,
running_tests,
cluster_id,
timeout
)
print 'Restart', result
def test_run_fuel_sanity(self):

View File

@ -179,8 +179,10 @@ class AdapterTests(BaseAdapterTest):
for testset in testsets:
r = self.client.start_testrun(testset, cluster_id)
msg = "Response {0} is not empty when you try to start testrun" \
" with testset and cluster_id that are already running".format(r)
msg = (
"Response {0} is not empty when you try to start testrun"
" with testset and cluster_id that are already running"
).format(r)
self.assertTrue(r.is_empty, msg)
@ -206,8 +208,10 @@ class AdapterTests(BaseAdapterTest):
"""Verify that you can run individual tests from given testset"""
testset = "general_test"
tests = [
'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_pass',
'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_fail'
('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fast_pass'),
('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fast_fail')
]
cluster_id = 1
@ -224,27 +228,32 @@ class AdapterTests(BaseAdapterTest):
{
'status': 'disabled',
'name': 'Fast fail with step',
'id': 'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fail_with_step',
'id': ('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fail_with_step'),
},
{
'status': 'disabled',
'name': 'And fast error',
'id': 'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_error',
'id': ('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fast_error'),
},
{
'status': 'wait_running',
'name': 'Fast fail',
'id': 'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_fail',
'id': ('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fast_fail'),
},
{
'status': 'wait_running',
'name': 'fast pass test',
'id': 'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_pass',
'id': ('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fast_pass'),
},
{
'status': 'disabled',
'name': 'Will sleep 5 sec',
'id': 'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_long_pass',
'id': ('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_long_pass'),
}
],
'cluster_id': '1',
@ -268,8 +277,10 @@ class AdapterTests(BaseAdapterTest):
"""Verify that you restart individual tests for given testrun"""
testset = "general_test"
tests = [
'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_pass',
'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_fail'
('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fast_pass'),
('fuel_plugin.tests.functional.dummy_tests.general_test.'
'Dummy_test.test_fast_fail')
]
cluster_id = 1
@ -288,27 +299,32 @@ class AdapterTests(BaseAdapterTest):
{
'status': 'failure',
'name': 'Fast fail with step',
'id': 'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fail_with_step',
'id': ('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fail_with_step'),
},
{
'status': 'error',
'name': 'And fast error',
'id': 'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_error',
'id': ('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fast_error'),
},
{
'status': 'wait_running',
'name': 'Fast fail',
'id': 'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_fail',
'id': ('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fast_fail'),
},
{
'status': 'wait_running',
'name': 'fast pass test',
'id': 'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_pass',
'id': ('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fast_pass'),
},
{
'status': 'success',
'name': 'Will sleep 5 sec',
'id': 'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_long_pass',
'id': ('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_long_pass'),
}
],
'cluster_id': '1',
@ -333,10 +349,15 @@ class AdapterTests(BaseAdapterTest):
ran and did not run during single test start"""
testset = "general_test"
tests = [
'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_pass',
'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_fail'
('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fast_pass'),
('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fast_fail')
]
disabled_test = [
('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fast_error')
]
disabled_test = ['fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_error', ]
cluster_id = 1
#make sure we have all needed data in db
@ -355,27 +376,32 @@ class AdapterTests(BaseAdapterTest):
{
'status': 'disabled',
'name': 'Fast fail with step',
'id': 'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fail_with_step',
'id': ('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fail_with_step'),
},
{
'status': 'wait_running',
'name': 'And fast error',
'id': 'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_error',
'id': ('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fast_error'),
},
{
'status': 'failure',
'name': 'Fast fail',
'id': 'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_fail',
'id': ('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fast_fail'),
},
{
'status': 'success',
'name': 'fast pass test',
'id': 'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_pass',
'id': ('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fast_pass'),
},
{
'status': 'disabled',
'name': 'Will sleep 5 sec',
'id': 'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_long_pass',
'id': ('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_long_pass'),
}
],
'cluster_id': '1',
@ -395,9 +421,12 @@ class AdapterTests(BaseAdapterTest):
def test_cant_restart_during_run(self):
testset = 'general_test'
tests = [
'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_pass',
'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_fail',
'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_pass'
('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fast_pass'),
('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fast_fail'),
('fuel_plugin.tests.functional.dummy_tests.'
'general_test.Dummy_test.test_fast_pass')
]
cluster_id = 1
@ -408,7 +437,8 @@ class AdapterTests(BaseAdapterTest):
time.sleep(2)
r = self.client.restart_tests_last(testset, tests, cluster_id)
msg = 'Response was not empty after trying to restart running testset:\n {0}'.format(r.request)
msg = ('Response was not empty after trying'
' to restart running testset:\n {0}').format(r.request)
self.assertTrue(r.is_empty, msg)
def test_nose_adapter_error_while_running_tests(self):

View File

@ -16,7 +16,6 @@ def main():
print cluster_id
return 0
if __name__ == '__main__':
sys.exit(main())

View File

@ -12,7 +12,6 @@
# License for the specific language governing permissions and limitations
# under the License.
from pecan import conf
from bottle import route, run

View File

@ -22,11 +22,27 @@ def main():
for item in data:
if 'argv' in data[item] and item in ['fuel_sanity', 'fuel_smoke']:
if "--with-xunit" not in data[item]['argv']:
data[item]['argv'].extend(["--with-xunit", '--xunit-file={0}.xml'.format(item)])
data[item]['argv'].extend(
["--with-xunit",
'--xunit-file={0}.xml'.format(item)]
)
elif item in ['fuel_sanity', 'fuel_smoke']:
data[item]['argv'] = ["--with-xunit", ]
test_apps = {"plugin_general": {"test_path": "fuel_plugin/tests/functional/dummy_tests/general_test.py", "driver": "nose"},
"plugin_stopped": {"test_path": "fuel_plugin/tests/functional/dummy_tests/stopped_test.py", "driver": "nose"}}
test_apps = {
"plugin_general": {
"test_path": ("fuel_plugin/tests/functional/"
"dummy_tests/general_test.py"),
"driver": "nose"
},
"plugin_stopped": {
"test_path": ("fuel_plugin/tests/functional/"
"dummy_tests/stopped_test.py"),
"driver": "nose"
}
}
if 'plugin_general' not in data or 'plugin_stopped' not in data:
data.update(test_apps)
commands.seek(0)
@ -34,4 +50,4 @@ def main():
commands.truncate()
if __name__ == '__main__':
main()
main()

View File

@ -55,7 +55,8 @@ class BaseTestNoseDiscovery(unittest2.TestCase):
self.mocked_get_session = lambda *args: self.session
self.session_patcher = patch(
'fuel_plugin.ostf_adapter.nose_plugin.nose_discovery.engine.get_session',
('fuel_plugin.ostf_adapter.nose_plugin.'
'nose_discovery.engine.get_session'),
self.mocked_get_session
)
self.session_patcher.start()
@ -106,7 +107,8 @@ class TestNoseDiscovery(BaseTestNoseDiscovery):
}
nose_discovery.discovery(
path='fuel_plugin.tests.functional.dummy_tests.deployment_types_tests.ha_deployment_test',
path=('fuel_plugin.tests.functional.dummy_tests.'
'deployment_types_tests.ha_deployment_test'),
deployment_info=self.fixtures['ha_deployment_test']
)
@ -127,13 +129,18 @@ class TestNoseDiscovery(BaseTestNoseDiscovery):
'results_count': 2,
'results_data': {
'names': [
'fuel_plugin.tests.functional.dummy_tests.deployment_types_tests.ha_deployment_test.HATest.test_ha_rhel_depl',
'fuel_plugin.tests.functional.dummy_tests.deployment_types_tests.ha_deployment_test.HATest.test_ha_depl'
('fuel_plugin.tests.functional.dummy_tests.'
'deployment_types_tests.ha_deployment_test.'
'HATest.test_ha_rhel_depl'),
('fuel_plugin.tests.functional.dummy_tests.'
'deployment_types_tests.ha_deployment_test.'
'HATest.test_ha_depl')
]
}
}
nose_discovery.discovery(
path='fuel_plugin.tests.functional.dummy_tests.deployment_types_tests.ha_deployment_test',
path=('fuel_plugin.tests.functional.dummy_tests.'
'deployment_types_tests.ha_deployment_test'),
deployment_info=self.fixtures['ha_deployment_test']
)
@ -148,7 +155,9 @@ class TestNoseDiscovery(BaseTestNoseDiscovery):
self.assertTrue(test.name in expected['results_data']['names'])
self.assertTrue(
set(test.deployment_tags)
.issubset(self.fixtures['ha_deployment_test']['deployment_tags'])
.issubset(
self.fixtures['ha_deployment_test']['deployment_tags']
)
)
def test_get_proper_description(self):
@ -165,7 +174,8 @@ class TestNoseDiscovery(BaseTestNoseDiscovery):
}
nose_discovery.discovery(
path='fuel_plugin.tests.functional.dummy_tests.deployment_types_tests.ha_deployment_test',
path=('fuel_plugin.tests.functional.dummy_tests.'
'deployment_types_tests.ha_deployment_test'),
deployment_info=self.fixtures['ha_deployment_test']
)

View File

@ -23,7 +23,10 @@ class TestDeplTagsGetter(unittest.TestCase):
def test_get_cluster_depl_tags(self):
expected = {
'cluster_id': 3,
'depl_tags': set(['ha', 'rhel', 'additional_components', 'murano'])
'depl_tags': set(
['ha', 'rhel', 'additional_components',
'murano', 'nova_network']
)
}
mocked_pecan_conf = mock.Mock()

View File

@ -105,23 +105,29 @@ class TestTestsController(BaseTestController):
'status': None,
'taken': None,
'step': None,
'testset': u'ha_deployment_test',
'name': u'fake empty test',
'testset': 'ha_deployment_test',
'name': 'fake empty test',
'duration': None,
'message': None,
'id': u'fuel_plugin.tests.functional.dummy_tests.deployment_types_tests.ha_deployment_test.HATest.test_ha_depl',
'description': u' This is empty test for any\n ha deployment\n ',
'id': ('fuel_plugin.tests.functional.dummy_tests.'
'deployment_types_tests.ha_deployment_test.'
'HATest.test_ha_depl'),
'description': (u' This is empty test for any\n'
' ha deployment\n '),
},
{
'status': None,
'taken': None,
'step': None,
'testset': u'ha_deployment_test',
'name': u'fake empty test',
'duration': u'0sec',
'testset': 'ha_deployment_test',
'name': 'fake empty test',
'duration': '0sec',
'message': None,
'id': u'fuel_plugin.tests.functional.dummy_tests.deployment_types_tests.ha_deployment_test.HATest.test_ha_rhel_depl',
'description': u' This is fake tests for ha\n rhel deployment\n '
'id': ('fuel_plugin.tests.functional.dummy_tests.'
'deployment_types_tests.ha_deployment_test.'
'HATest.test_ha_rhel_depl'),
'description': (' This is fake tests for ha\n'
' rhel deployment\n ')
}
]
}
@ -136,7 +142,8 @@ class TestTestsController(BaseTestController):
return discovery(**kwargs)
with patch(
'fuel_plugin.ostf_adapter.wsgi.wsgi_utils.nose_discovery.discovery',
('fuel_plugin.ostf_adapter.wsgi.wsgi_utils.'
'nose_discovery.discovery'),
discovery_mock
):
with patch(
@ -182,7 +189,8 @@ class TestTestSetsController(BaseTestController):
return discovery(**kwargs)
with patch(
'fuel_plugin.ostf_adapter.wsgi.wsgi_utils.nose_discovery.discovery',
('fuel_plugin.ostf_adapter.wsgi.wsgi_utils.'
'nose_discovery.discovery'),
discovery_mock
):
with patch(
@ -248,8 +256,12 @@ class TestTestRunsPostController(TestTestRunsController):
'cluster_id': 1,
'tests': {
'names': [
u'fuel_plugin.tests.functional.dummy_tests.deployment_types_tests.ha_deployment_test.HATest.test_ha_depl',
u'fuel_plugin.tests.functional.dummy_tests.deployment_types_tests.ha_deployment_test.HATest.test_ha_rhel_depl'
('fuel_plugin.tests.functional.dummy_tests.'
'deployment_types_tests.ha_deployment_test.'
'HATest.test_ha_depl'),
('fuel_plugin.tests.functional.dummy_tests.'
'deployment_types_tests.ha_deployment_test.'
'HATest.test_ha_rhel_depl')
]
}
}
@ -281,7 +293,7 @@ class TestTestRunsPostController(TestTestRunsController):
self.assertTrue(test_run)
testrun_tests = self.session.query(models.Test)\
.filter(models.Test.test_run_id != None)\
.filter(models.Test.test_run_id != (None))\
.all()
tests_names = [
@ -307,7 +319,8 @@ class TestTestRunsPutController(TestTestRunsController):
super(TestTestRunsPutController, self).setUp()
self.nose_adapter_session_patcher = patch(
'fuel_plugin.ostf_adapter.nose_plugin.nose_adapter.engine.get_session',
('fuel_plugin.ostf_adapter.nose_plugin.'
'nose_adapter.engine.get_session'),
lambda *args: self.session
)
self.nose_adapter_session_patcher.start()
@ -334,8 +347,12 @@ class TestTestRunsPutController(TestTestRunsController):
'cluster_id': 1,
'tests': {
'names': [
u'fuel_plugin.tests.functional.dummy_tests.deployment_types_tests.ha_deployment_test.HATest.test_ha_depl',
u'fuel_plugin.tests.functional.dummy_tests.deployment_types_tests.ha_deployment_test.HATest.test_ha_rhel_depl'
('fuel_plugin.tests.functional.dummy_tests.'
'deployment_types_tests.ha_deployment_test.'
'HATest.test_ha_depl'),
('fuel_plugin.tests.functional.dummy_tests.'
'deployment_types_tests.ha_deployment_test.'
'HATest.test_ha_rhel_depl')
]
}
}
@ -343,7 +360,9 @@ class TestTestRunsPutController(TestTestRunsController):
testruns_to_stop = [
{
'id': int(self.stored_test_run['id']),
'metadata': {'cluster_id': int(self.stored_test_run['cluster_id'])},
'metadata': {
'cluster_id': int(self.stored_test_run['cluster_id'])
},
'status': 'stopped'
}
]
@ -399,8 +418,8 @@ class TestClusterRedployment(BaseTestController):
'cluster_id': 1,
'old_test_set_id': 'ha_deployment_test',
'new_test_set_id': 'multinode_deployment_test',
'old_depl_tags': ['ha', 'rhel'],
'new_depl_tags': ['multinode', 'ubuntu']
'old_depl_tags': set(['ha', 'rhel', 'nova_network']),
'new_depl_tags': set(['multinode', 'ubuntu', 'nova_network'])
}
def discovery_mock(**kwargs):
@ -409,7 +428,8 @@ class TestClusterRedployment(BaseTestController):
#start discoverying for testsets and tests for given cluster info
with patch(
'fuel_plugin.ostf_adapter.wsgi.wsgi_utils.nose_discovery.discovery',
('fuel_plugin.ostf_adapter.wsgi.'
'wsgi_utils.nose_discovery.discovery'),
discovery_mock
):
with patch(
@ -428,8 +448,8 @@ class TestClusterRedployment(BaseTestController):
)
self.assertEqual(
cluster_state.deployment_tags,
expected['old_depl_tags']
set(cluster_state.deployment_tags),
set(expected['old_depl_tags'])
)
test_set = self.session.query(models.TestSet)\
@ -447,33 +467,36 @@ class TestClusterRedployment(BaseTestController):
#patch request_to_nailgun function in orded to emulate
#redeployment of cluster
cluster_data = set(
['multinode', 'ubuntu']
['multinode', 'ubuntu', 'nova_network']
)
with patch(
'fuel_plugin.ostf_adapter.wsgi.wsgi_utils._get_cluster_depl_tags',
('fuel_plugin.ostf_adapter.wsgi.'
'wsgi_utils._get_cluster_depl_tags'),
lambda *args: cluster_data
):
with patch(
'fuel_plugin.ostf_adapter.wsgi.wsgi_utils.nose_discovery.discovery',
('fuel_plugin.ostf_adapter.wsgi.'
'wsgi_utils.nose_discovery.discovery'),
discovery_mock
):
with patch(
'fuel_plugin.ostf_adapter.wsgi.wsgi_utils.conf',
self.pecan_conf_mock
):
res = self.controller.get(expected['cluster_id'])
self.controller.get(expected['cluster_id'])
new_cluster_state = self.session.query(models.ClusterState)\
.filter_by(id=expected['cluster_id'])\
.first()
self.assertEqual(
new_cluster_state.deployment_tags,
set(new_cluster_state.deployment_tags),
expected['new_depl_tags']
)
#check whether testset and bound with it test have been deleted from db
#check whether testset and bound with
#it test have been deleted from db
old_test_set = self.session.query(models.TestSet)\
.filter_by(id=expected['old_test_set_id'])\
.filter_by(cluster_id=expected['cluster_id'])\
@ -489,7 +512,7 @@ class TestClusterRedployment(BaseTestController):
.filter_by(cluster_id=expected['cluster_id'])\
.all()
if old_test_set:
if old_tests:
raise AssertionError(
"There must not be tests for old deployment in db"
)

View File

@ -51,26 +51,34 @@ class WsgiInterfaceTests(unittest2.TestCase):
@patch('fuel_plugin.ostf_adapter.wsgi.controllers.models')
def test_post_testruns(self, models, request):
testruns = [
{'testset': 'test_simple',
'metadata': {'cluster_id': 3}
{
'testset': 'test_simple',
'metadata': {'cluster_id': 3}
},
{'testset': 'test_simple',
'metadata': {'cluster_id': 4}
}]
{
'testset': 'test_simple',
'metadata': {'cluster_id': 4}
}
]
request.body = json.dumps(testruns)
models.TestRun.start.return_value = {}
self.app.post_json('/v1/testruns', testruns)
def test_put_testruns(self, request):
testruns = [
{'id': 2,
'metadata': {'cluster_id': 3},
'status': 'non_exist'
{
'id': 2,
'metadata': {'cluster_id': 3},
'status': 'non_exist'
},
{'id': 1,
'metadata': {'cluster_id': 4},
'status': 'non_exist'
}]
{
'id': 1,
'metadata': {'cluster_id': 4},
'status': 'non_exist'
}
]
request.body = json.dumps(testruns)
request.storage.get_test_run.return_value = MagicMock(frontend={})
self.app.put_json('/v1/testruns', testruns)