Fixes several problems/errors caused by tox

Change-Id: I75d312ed6f1b9a71cc7cd2817c2dc60d3baa2971
This commit is contained in:
zhouxinyong 2023-09-07 14:34:52 +08:00
parent a888d6be6b
commit c271dd8b6c
5 changed files with 9 additions and 201 deletions

View File

@ -15,4 +15,4 @@ oslo.log>=3.36.0 # Apache-2.0
oslo.serialization!=2.19.1,>=2.18.0 # Apache-2.0
oslo.utils>=3.33.0 # Apache-2.0
requests>=2.14.2 # Apache-2.0
sahara>=10.0.0.0b1
sahara>=18.0.0

View File

@ -1,65 +0,0 @@
# Copyright (c) 2015 ISPRAS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import testtools
from unittest import mock
from sahara.plugins import base as pb
from sahara.plugins import conductor
from sahara.plugins import context
from sahara.plugins import edp
from sahara.plugins import exceptions as ex
from sahara_plugin_cdh.tests.unit import base
class SparkPluginTest(base.SaharaWithDbTestCase):
    """Tests for the CDH 5.11.0 plugin's Spark EDP engine integration."""

    def setUp(self):
        super(SparkPluginTest, self).setUp()
        # Restrict the loaded plugins to CDH so plugin lookup is deterministic.
        self.override_config("plugins", ["cdh"])
        pb.setup_plugins()

    def test_plugin_edp_engine_no_spark(self):
        """Job validation must fail when no SPARK_YARN_HISTORY_SERVER exists."""
        cluster_dict = {
            'name': 'cluster',
            'plugin_name': 'cdh',
            'hadoop_version': '5.11.0',
            'default_image_id': 'image'}

        job = mock.Mock()
        job.type = edp.JOB_TYPE_SPARK

        cluster = conductor.cluster_create(context.ctx(), cluster_dict)
        plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)
        edp_engine = plugin.get_edp_engine(cluster, edp.JOB_TYPE_SPARK)
        # Raw strings: \( and \) are regex escapes, not Python string escapes;
        # in a plain string they are invalid escape sequences and raise
        # SyntaxWarning on Python 3.12+.  The re engine still interprets
        # \n inside a raw string as a newline, so the match is unchanged.
        with testtools.ExpectedException(
                ex.InvalidComponentCountException,
                value_re=r"Hadoop cluster should contain 1 "
                         r"SPARK_YARN_HISTORY_SERVER component\(s\). Actual "
                         r"SPARK_YARN_HISTORY_SERVER count is 0\nError ID: .*"):
            edp_engine.validate_job_execution(cluster, job, mock.Mock())

    def test_plugin_edp_engine(self):
        """Requesting a Spark EDP engine must yield a PluginsSparkJobEngine."""
        cluster_dict = {
            'name': 'cluster',
            'plugin_name': 'cdh',
            'hadoop_version': '5.11.0',
            'default_image_id': 'image'}

        cluster = conductor.cluster_create(context.ctx(), cluster_dict)
        plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)
        self.assertIsInstance(
            plugin.get_edp_engine(cluster, edp.JOB_TYPE_SPARK),
            edp.PluginsSparkJobEngine)

View File

@ -1,65 +0,0 @@
# Copyright (c) 2015 ISPRAS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import testtools
from unittest import mock
from sahara.plugins import base as pb
from sahara.plugins import conductor
from sahara.plugins import context
from sahara.plugins import edp
from sahara.plugins import exceptions as ex
from sahara_plugin_cdh.tests.unit import base
class SparkPluginTest(base.SaharaWithDbTestCase):
    """Tests for the CDH 5.7.0 plugin's Spark EDP engine integration."""

    def setUp(self):
        super(SparkPluginTest, self).setUp()
        # Restrict the loaded plugins to CDH so plugin lookup is deterministic.
        self.override_config("plugins", ["cdh"])
        pb.setup_plugins()

    def test_plugin_edp_engine_no_spark(self):
        """Job validation must fail when no SPARK_YARN_HISTORY_SERVER exists."""
        cluster_dict = {
            'name': 'cluster',
            'plugin_name': 'cdh',
            'hadoop_version': '5.7.0',
            'default_image_id': 'image'}

        job = mock.Mock()
        job.type = edp.JOB_TYPE_SPARK

        cluster = conductor.cluster_create(context.ctx(), cluster_dict)
        plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)
        edp_engine = plugin.get_edp_engine(cluster, edp.JOB_TYPE_SPARK)
        # Raw strings: \( and \) are regex escapes, not Python string escapes;
        # in a plain string they are invalid escape sequences and raise
        # SyntaxWarning on Python 3.12+.  The re engine still interprets
        # \n inside a raw string as a newline, so the match is unchanged.
        with testtools.ExpectedException(
                ex.InvalidComponentCountException,
                value_re=r"Hadoop cluster should contain 1 "
                         r"SPARK_YARN_HISTORY_SERVER component\(s\). Actual "
                         r"SPARK_YARN_HISTORY_SERVER count is 0\nError ID: .*"):
            edp_engine.validate_job_execution(cluster, job, mock.Mock())

    def test_plugin_edp_engine(self):
        """Requesting a Spark EDP engine must yield a PluginsSparkJobEngine."""
        cluster_dict = {
            'name': 'cluster',
            'plugin_name': 'cdh',
            'hadoop_version': '5.7.0',
            'default_image_id': 'image'}

        cluster = conductor.cluster_create(context.ctx(), cluster_dict)
        plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)
        self.assertIsInstance(
            plugin.get_edp_engine(cluster, edp.JOB_TYPE_SPARK),
            edp.PluginsSparkJobEngine)

View File

@ -1,65 +0,0 @@
# Copyright (c) 2015 ISPRAS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import testtools
from unittest import mock
from sahara.plugins import base as pb
from sahara.plugins import conductor
from sahara.plugins import context
from sahara.plugins import edp
from sahara.plugins import exceptions as ex
from sahara.tests.unit import base
class SparkPluginTest(base.SaharaWithDbTestCase):
    """Tests for the CDH 5.9.0 plugin's Spark EDP engine integration."""

    def setUp(self):
        super(SparkPluginTest, self).setUp()
        # Restrict the loaded plugins to CDH so plugin lookup is deterministic.
        self.override_config("plugins", ["cdh"])
        pb.setup_plugins()

    def test_plugin_edp_engine_no_spark(self):
        """Job validation must fail when no SPARK_YARN_HISTORY_SERVER exists."""
        cluster_dict = {
            'name': 'cluster',
            'plugin_name': 'cdh',
            'hadoop_version': '5.9.0',
            'default_image_id': 'image'}

        job = mock.Mock()
        job.type = edp.JOB_TYPE_SPARK

        cluster = conductor.cluster_create(context.ctx(), cluster_dict)
        plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)
        edp_engine = plugin.get_edp_engine(cluster, edp.JOB_TYPE_SPARK)
        # Raw strings: \( and \) are regex escapes, not Python string escapes;
        # in a plain string they are invalid escape sequences and raise
        # SyntaxWarning on Python 3.12+.  The re engine still interprets
        # \n inside a raw string as a newline, so the match is unchanged.
        with testtools.ExpectedException(
                ex.InvalidComponentCountException,
                value_re=r"Hadoop cluster should contain 1 "
                         r"SPARK_YARN_HISTORY_SERVER component\(s\). Actual "
                         r"SPARK_YARN_HISTORY_SERVER count is 0\nError ID: .*"):
            edp_engine.validate_job_execution(cluster, job, mock.Mock())

    def test_plugin_edp_engine(self):
        """Requesting a Spark EDP engine must yield a PluginsSparkJobEngine."""
        cluster_dict = {
            'name': 'cluster',
            'plugin_name': 'cdh',
            'hadoop_version': '5.9.0',
            'default_image_id': 'image'}

        cluster = conductor.cluster_create(context.ctx(), cluster_dict)
        plugin = pb.PLUGINS.get_plugin(cluster.plugin_name)
        self.assertIsInstance(
            plugin.get_edp_engine(cluster, edp.JOB_TYPE_SPARK),
            edp.PluginsSparkJobEngine)

13
tox.ini
View File

@ -18,7 +18,10 @@ deps =
-r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
commands = stestr run {posargs}
passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY
passenv =
http_proxy
https_proxy
no_proxy
[testenv:debug-py36]
basepython = python3.6
@ -39,6 +42,7 @@ commands =
doc8 doc/source
# Run bashate checks
bash -c "find sahara_plugin_cdh -iname '*.sh' -print0 | xargs -0 bashate -v"
allowlist_externals = bash
[testenv:venv]
commands = {posargs}
@ -50,8 +54,7 @@ deps =
commands =
rm -rf doc/build/html
sphinx-build -W -b html doc/source doc/build/html
whitelist_externals =
rm
allowlist_externals = rm
[testenv:pdf-docs]
deps = {[testenv:docs]deps}
@ -59,7 +62,7 @@ commands =
rm -rf doc/build/pdf
sphinx-build -W -b latex doc/source doc/build/pdf
make -C doc/build/pdf
whitelist_externals =
allowlist_externals =
make
rm
@ -70,7 +73,7 @@ deps =
commands =
rm -rf releasenotes/build releasenotes/html
sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html
whitelist_externals = rm
allowlist_externals = rm
[testenv:debug]
# It runs tests from the specified dir (default is sahara_plugin_cdh/tests)