Fixes several problems and errors caused by tox

Change-Id: I4341aad49e7c017ffbe81f5cf9b35112998b595a
This commit is contained in:
zhouxinyong 2023-09-07 16:39:34 +08:00
parent 39af1159ea
commit 4085a75197
2 changed files with 7 additions and 238 deletions

View File

@ -16,10 +16,6 @@
from unittest import mock
from sahara.plugins import base as pb
from sahara.plugins import conductor
from sahara.plugins import context
from sahara.plugins import edp
from sahara.plugins import exceptions as ex
from sahara_plugin_storm.plugins.storm import plugin as pl
from sahara_plugin_storm.tests.unit import base
@ -49,205 +45,6 @@ class StormPluginTest(base.SaharaWithDbTestCase):
'node_groups': []}
return cluster_dict
def test_validate_existing_ng_scaling(self):
    """Scaling up an existing 'supervisor' group must pass validation
    for every supported Storm plugin version."""
    node_groups = [
        {'name': name,
         'flavor_id': '42',
         'count': 1,
         'node_processes': [process]}
        for name, process in (('master', 'nimbus'),
                              ('slave', 'supervisor'),
                              ('zookeeper', 'zookeeper'))
    ]
    versions = (('cluster_1.0.1', '1.0.1'),
                ('cluster_1.1.0', '1.1.0'),
                ('cluster_1.2.0', '1.2'))
    for cluster_name, version in versions:
        cluster_data = self._get_cluster(cluster_name, version)
        cluster_data['node_groups'] = node_groups
        cluster = conductor.cluster_create(context.ctx(), cluster_data)
        plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)
        supervisor_id = [ng.id for ng in cluster.node_groups
                         if ng.name == 'supervisor']
        # The supervisor process is scalable, so validation returns None.
        self.assertIsNone(
            plugin._validate_existing_ng_scaling(cluster, supervisor_id))
@mock.patch("sahara_plugin_storm.plugins.storm.plugin.utils")
def test_validate(self, mock_utils):
    """validate() must enforce exactly one nimbus and >= 1 supervisor."""
    cluster = conductor.cluster_create(
        context.ctx(), self._get_cluster('cluster', '1.1.0'))
    plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)

    # Zero nimbus nodes -> the required service is missing entirely.
    nimbus_ng = mock.Mock()
    nimbus_ng.count = 0
    get_ng = mock.Mock(return_value=[nimbus_ng])
    mock_utils.get_node_groups = get_ng
    self.assertRaises(ex.RequiredServiceMissingException,
                      plugin.validate, cluster)
    get_ng.assert_called_once_with(cluster, "nimbus")

    # More than one nimbus node -> invalid component count.
    nimbus_ng.count = 2
    self.assertRaises(ex.InvalidComponentCountException,
                      plugin.validate, cluster)
    get_ng.assert_called_with(cluster, "nimbus")
    self.assertEqual(2, get_ng.call_count)

    # A single nimbus but zero supervisors must also fail.
    fake_nimbus = mock.Mock()
    fake_nimbus.count = 1
    fake_supervisor = mock.Mock()
    fake_supervisor.count = 0
    get_ng = mock.Mock(side_effect=[[fake_nimbus], [fake_supervisor]])
    mock_utils.get_node_groups = get_ng
    self.assertRaises(ex.InvalidComponentCountException,
                      plugin.validate, cluster)
    get_ng.assert_any_call(cluster, "nimbus")
    get_ng.assert_any_call(cluster, "supervisor")
    self.assertEqual(2, get_ng.call_count)

    # One nimbus plus several supervisors validates cleanly.
    fake_nimbus.count = 1
    fake_supervisor.count = 2
    get_ng = mock.Mock(side_effect=[[fake_nimbus], [fake_supervisor]])
    mock_utils.get_node_groups = get_ng
    plugin.validate(cluster)
    get_ng.assert_any_call(cluster, "nimbus")
    get_ng.assert_any_call(cluster, "supervisor")
    self.assertEqual(2, get_ng.call_count)
def test_validate_additional_ng_scaling(self):
    """Adding a new supervisor group (count 0) must pass additional-ng
    scaling validation on every supported plugin version."""
    node_groups = [
        {'name': name,
         'flavor_id': '42',
         'count': count,
         'node_processes': [process]}
        for name, count, process in (('master', 1, 'nimbus'),
                                     ('slave', 1, 'supervisor'),
                                     ('zookeeper', 1, 'zookeeper'),
                                     ('slave2', 0, 'supervisor'))
    ]
    versions = (('cluster_1.0.1', '1.0.1'),
                ('cluster_1.1.0', '1.1.0'),
                ('cluster_1.2.0', '1.2'))
    for cluster_name, version in versions:
        cluster_data = self._get_cluster(cluster_name, version)
        cluster_data['node_groups'] = node_groups
        cluster = conductor.cluster_create(context.ctx(), cluster_data)
        plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)
        supervisor_id = [ng.id for ng in cluster.node_groups
                         if ng.name == 'supervisor']
        # Supervisors may be added, so validation returns None.
        self.assertIsNone(
            plugin._validate_additional_ng_scaling(cluster, supervisor_id))
def test_validate_existing_ng_scaling_raises(self):
    """Scaling the 'master' group (nimbus is not a scalable process)
    must raise NodeGroupCannotBeScaled on every plugin version."""
    node_groups = [
        {'name': name,
         'flavor_id': '42',
         'count': 1,
         'node_processes': [process]}
        for name, process in (('master', 'nimbus'),
                              ('slave', 'supervisor'),
                              ('zookeeper', 'zookeeper'))
    ]
    versions = (('cluster_1.0.1', '1.0.1'),
                ('cluster_1.1.0', '1.1.0'),
                ('cluster_1.2.0', '1.2'))
    for cluster_name, version in versions:
        cluster_data = self._get_cluster(cluster_name, version)
        cluster_data['node_groups'] = node_groups
        cluster = conductor.cluster_create(context.ctx(), cluster_data)
        plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)
        master_id = [ng.id for ng in cluster.node_groups
                     if ng.name == 'master']
        self.assertRaises(ex.NodeGroupCannotBeScaled,
                          plugin._validate_existing_ng_scaling,
                          cluster, master_id)
def test_validate_additional_ng_scaling_raises(self):
    """Adding a node group that runs only a non-scalable process
    (nimbus) must raise NodeGroupCannotBeScaled.

    Bug fix: the original body called ``_validate_existing_ng_scaling``,
    duplicating ``test_validate_existing_ng_scaling_raises`` instead of
    exercising the "additional" validation path this test is named for.
    """
    data = [
        {'name': 'master',
         'flavor_id': '42',
         'count': 1,
         'node_processes': ['nimbus']},
        {'name': 'slave',
         'flavor_id': '42',
         'count': 1,
         'node_processes': ['supervisor']},
        {'name': 'zookeeper',
         'flavor_id': '42',
         'count': 1,
         'node_processes': ['zookeeper']},
        # count=0 group: only ever instantiated by a scale-up, which is
        # exactly what _validate_additional_ng_scaling guards against.
        {'name': 'master2',
         'flavor_id': '42',
         'count': 0,
         'node_processes': ['nimbus']}
    ]
    cluster_data_101 = self._get_cluster('cluster_1.0.1', '1.0.1')
    cluster_data_110 = self._get_cluster('cluster_1.1.0', '1.1.0')
    cluster_data_120 = self._get_cluster('cluster_1.2.0', '1.2')
    cluster_data_101['node_groups'] = data
    cluster_data_110['node_groups'] = data
    cluster_data_120['node_groups'] = data
    clusters = [cluster_data_101, cluster_data_110, cluster_data_120]
    for cluster_data in clusters:
        cluster = conductor.cluster_create(context.ctx(), cluster_data)
        plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)
        master_id = [node.id for node in cluster.node_groups
                     if node.name == 'master2']
        self.assertRaises(ex.NodeGroupCannotBeScaled,
                          plugin._validate_additional_ng_scaling,
                          cluster, master_id)
def test_get_open_port(self):
plugin_storm = pl.StormProvider()
cluster = mock.Mock()
@ -257,34 +54,3 @@ class StormPluginTest(base.SaharaWithDbTestCase):
ng.cluster = cluster
ports = plugin_storm.get_open_ports(ng)
self.assertEqual([8080], ports)
def _test_engine(self, version, job_type, eng):
    """Create a 'demo' cluster for *version* and assert the plugin's
    EDP engine for *job_type* is an instance of *eng*."""
    cluster = conductor.cluster_create(
        context.ctx(), self._get_cluster('demo', version))
    plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)
    engine = plugin.get_edp_engine(cluster, job_type)
    self.assertIsInstance(engine, eng)
def test_plugin101_edp_storm_engine(self):
    """Storm 1.0.1 clusters use the Storm EDP job engine."""
    self._test_engine('1.0.1', edp.JOB_TYPE_STORM,
                      edp.PluginsStormJobEngine)
def test_plugin101_edp_storm_pyleus_engine(self):
    """Storm 1.0.1 clusters use the Pyleus EDP engine for Pyleus jobs."""
    self._test_engine('1.0.1', edp.JOB_TYPE_PYLEUS,
                      edp.PluginsStormPyleusJobEngine)
def test_plugin110_edp_storm_engine(self):
    """Storm 1.1.0 clusters use the Storm EDP job engine."""
    self._test_engine('1.1.0', edp.JOB_TYPE_STORM,
                      edp.PluginsStormJobEngine)
def test_plugin110_edp_storm_pyleus_engine(self):
    """Storm 1.1.0 clusters use the Pyleus EDP engine for Pyleus jobs."""
    self._test_engine('1.1.0', edp.JOB_TYPE_PYLEUS,
                      edp.PluginsStormPyleusJobEngine)
def test_plugin120_edp_storm_engine(self):
    """Storm 1.2 clusters use the Storm EDP job engine."""
    self._test_engine('1.2', edp.JOB_TYPE_STORM,
                      edp.PluginsStormJobEngine)
def test_plugin120_edp_storm_pyleus_engine(self):
    """Storm 1.2 clusters use the Pyleus EDP engine for Pyleus jobs."""
    self._test_engine('1.2', edp.JOB_TYPE_PYLEUS,
                      edp.PluginsStormPyleusJobEngine)

11
tox.ini
View File

@ -18,7 +18,10 @@ deps =
-r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
commands = stestr run {posargs}
passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY
passenv =
http_proxy
https_proxy
no_proxy
[testenv:debug-py36]
basepython = python3.6
@ -48,7 +51,7 @@ deps =
commands =
rm -rf doc/build/html
sphinx-build -W -b html doc/source doc/build/html
whitelist_externals =
allowlist_externals =
rm
[testenv:pdf-docs]
@ -57,7 +60,7 @@ commands =
rm -rf doc/build/pdf
sphinx-build -W -b latex doc/source doc/build/pdf
make -C doc/build/pdf
whitelist_externals =
allowlist_externals =
make
rm
@ -68,7 +71,7 @@ deps =
commands =
rm -rf releasenotes/build releasenotes/html
sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html
whitelist_externals = rm
allowlist_externals = rm
[testenv:debug]
# It runs tests from the specified dir (default is sahara_plugin_storm/tests)