Split plugins from Sahara core

Change-Id: I0a2bc8ea401603720a77a237060e4fe1c95bdbd0
Author: Telles Nobrega, 2018-12-21 17:19:35 -03:00 (committed by Telles Nobrega)
Parent: 1a81d564d9
Commit: a14e3c276a
34 changed files with 284 additions and 207 deletions

.gitignore vendored (1 change)

@@ -8,6 +8,7 @@
.stestr
.venv
.idea
.un~
AUTHORS
ChangeLog
build


@@ -1,3 +1,3 @@
[DEFAULT]
test_path=./sahara/tests/unit
test_path=./sahara_plugin_storm/tests/unit
top_dir=./

README.rst (new file, 34 lines)

@@ -0,0 +1,34 @@
========================
Team and repository tags
========================
.. image:: https://governance.openstack.org/tc/badges/sahara.svg
:target: https://governance.openstack.org/tc/reference/tags/index.html
.. Change things from this point on
OpenStack Data Processing ("Sahara") project
============================================
Sahara at wiki.openstack.org: https://wiki.openstack.org/wiki/Sahara
Storyboard project: https://storyboard.openstack.org/#!/project/935
Sahara docs site: https://docs.openstack.org/sahara/latest/
Roadmap: https://wiki.openstack.org/wiki/Sahara/Roadmap
Quickstart guide: https://docs.openstack.org/sahara/latest/user/quickstart.html
How to participate: https://docs.openstack.org/sahara/latest/contributor/how-to-participate.html
Source: https://git.openstack.org/cgit/openstack/sahara
Bugs and feature requests: https://storyboard.openstack.org/#!/project/935
Release notes: https://docs.openstack.org/releasenotes/sahara/
License
-------
Apache License Version 2.0 http://www.apache.org/licenses/LICENSE-2.0

doc/requirements.txt (new file, 9 lines)

@@ -0,0 +1,9 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
openstackdocstheme>=1.18.1 # Apache-2.0
os-api-ref>=1.4.0 # Apache-2.0
reno>=2.5.0 # Apache-2.0
sphinx!=1.6.6,!=1.6.7,>=1.6.2 # BSD
sphinxcontrib-httpdomain>=1.3.0 # BSD
whereto>=0.3.0 # Apache-2.0

Binary file not shown.

lower-constraints.txt (new file, 162 lines)

@@ -0,0 +1,162 @@
alabaster==0.7.10
alembic==0.8.10
amqp==2.2.2
appdirs==1.4.3
asn1crypto==0.24.0
astroid==1.3.8
Babel==2.3.4
bandit==1.1.0
bashate==0.5.1
bcrypt==3.1.4
botocore==1.5.1
cachetools==2.0.1
castellan==0.16.0
certifi==2018.1.18
cffi==1.11.5
chardet==3.0.4
click==6.7
cliff==2.11.0
cmd2==0.8.1
contextlib2==0.5.5
coverage==4.0
cryptography==2.1.4
debtcollector==1.19.0
decorator==4.2.1
deprecation==2.0
doc8==0.6.0
docutils==0.14
dogpile.cache==0.6.5
dulwich==0.19.0
enum-compat==0.0.2
eventlet==0.18.2
extras==1.0.0
fasteners==0.14.1
fixtures==3.0.0
flake8==2.6.2
Flask==1.0.2
future==0.16.0
futurist==1.6.0
gitdb2==2.0.3
GitPython==2.1.8
greenlet==0.4.13
hacking==1.1.0
idna==2.6
imagesize==1.0.0
iso8601==0.1.11
itsdangerous==0.24
Jinja2==2.10
jmespath==0.9.3
jsonpatch==1.21
jsonpointer==2.0
jsonschema==2.6.0
keystoneauth1==3.4.0
keystonemiddleware==4.17.0
kombu==4.1.0
linecache2==1.0.0
logilab-common==1.4.1
Mako==1.0.7
MarkupSafe==1.0
mccabe==0.2.1
mock==2.0.0
monotonic==1.4
mox3==0.25.0
msgpack==0.5.6
munch==2.2.0
netaddr==0.7.19
netifaces==0.10.6
openstackdocstheme==1.18.1
openstacksdk==0.12.0
os-api-ref==1.4.0
os-client-config==1.29.0
os-service-types==1.2.0
osc-lib==1.10.0
oslo.cache==1.29.0
oslo.concurrency==3.26.0
oslo.config==5.2.0
oslo.context==2.19.2
oslo.db==4.27.0
oslo.i18n==3.15.3
oslo.log==3.36.0
oslo.messaging==5.29.0
oslo.middleware==3.31.0
oslo.policy==1.30.0
oslo.rootwrap==5.8.0
oslo.serialization==2.18.0
oslo.service==1.24.0
oslo.upgradecheck==0.1.0
oslo.utils==3.33.0
oslotest==3.2.0
packaging==17.1
paramiko==2.0.0
Paste==2.0.3
PasteDeploy==1.5.2
pbr==2.0.0
pika-pool==0.1.3
pika==0.10.0
prettytable==0.7.2
psycopg2==2.6.2
pyasn1==0.4.2
pycadf==2.7.0
pycparser==2.18
pycodestyle==2.4.0
pyflakes==0.8.1
Pygments==2.2.0
pyinotify==0.9.6
pylint==1.4.5
PyMySQL==0.7.6
PyNaCl==1.2.1
pyOpenSSL==17.5.0
pyparsing==2.2.0
pyperclip==1.6.0
python-barbicanclient==4.6.0
python-cinderclient==3.3.0
python-dateutil==2.7.0
python-editor==1.0.3
python-glanceclient==2.8.0
python-heatclient==1.10.0
python-keystoneclient==3.8.0
python-manilaclient==1.16.0
python-mimeparse==1.6.0
python-neutronclient==6.7.0
python-novaclient==9.1.0
python-openstackclient==3.14.0
python-saharaclient==1.4.0
python-subunit==1.2.0
python-swiftclient==3.2.0
pytz==2018.3
PyYAML==3.12
reno==2.5.0
repoze.lru==0.7
requests==2.14.2
requestsexceptions==1.4.0
restructuredtext-lint==1.1.3
rfc3986==1.1.0
Routes==2.4.1
simplejson==3.13.2
six==1.10.0
smmap2==2.0.3
snowballstemmer==1.2.1
Sphinx==1.6.2
sphinxcontrib-httpdomain==1.3.0
sphinxcontrib-websupport==1.0.1
sqlalchemy-migrate==0.11.0
SQLAlchemy==1.0.10
sqlparse==0.2.4
statsd==3.2.2
stestr==1.0.0
stevedore==1.20.0
Tempita==0.5.2
tenacity==4.9.0
testresources==2.0.0
testscenarios==0.4
testtools==2.2.0
tooz==1.58.0
traceback2==1.4.0
unittest2==1.1.0
urllib3==1.22
vine==1.1.4
voluptuous==0.11.1
warlock==1.3.0
WebOb==1.7.1
Werkzeug==0.14.1
wrapt==1.10.11

Binary file not shown.


@@ -39,6 +39,7 @@ python-swiftclient>=3.2.0 # Apache-2.0
python-neutronclient>=6.7.0 # Apache-2.0
python-heatclient>=1.10.0 # Apache-2.0
python-glanceclient>=2.8.0 # Apache-2.0
sahara
six>=1.10.0 # MIT
stevedore>=1.20.0 # Apache-2.0
SQLAlchemy!=1.1.5,!=1.1.6,!=1.1.7,!=1.1.8,>=1.0.10 # MIT


@@ -15,12 +15,10 @@
from oslo_config import cfg
from sahara import conductor as c
from sahara import exceptions as ex
from sahara.i18n import _
from sahara.plugins import exceptions as ex
from sahara_plugin_storm.i18n import _
conductor = c.API
CONF = cfg.CONF
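
The translation marker _ now comes from the plugin's own i18n module instead of sahara.i18n. That module is not shown in these hunks; below is a minimal sketch of what it usually contains, assuming the standard oslo_i18n pattern (the module contents and domain name are assumptions, not part of this commit):

    # Hypothetical sahara_plugin_storm/i18n.py -- shown only to illustrate
    # where the imported `_` would come from; the real file is not in this diff.
    import oslo_i18n

    _translators = oslo_i18n.TranslatorFactory(domain='sahara_plugin_storm')

    # Primary translation function, imported elsewhere as `_`.
    _ = _translators.primary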


@@ -13,12 +13,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara import exceptions as ex
from sahara.i18n import _
from sahara.service.edp.storm import engine as edp_engine
from sahara.plugins import edp
from sahara.plugins import exceptions as ex
from sahara_plugin_storm.i18n import _
class EdpStormEngine(edp_engine.StormJobEngine):
class EdpStormEngine(edp.PluginsStormJobEngine):
edp_base_version = "1.0.1"
@@ -28,14 +28,14 @@ class EdpStormEngine(edp_engine.StormJobEngine):
def validate_job_execution(self, cluster, job, data):
if not self.edp_supported(cluster.hadoop_version):
raise ex.InvalidDataException(
raise ex.PluginInvalidDataException(
_('Storm {base} required to run {type} jobs').format(
base=EdpStormEngine.edp_base_version, type=job.type))
super(EdpStormEngine, self).validate_job_execution(cluster, job, data)
class EdpPyleusEngine(edp_engine.StormPyleusJobEngine):
class EdpPyleusEngine(edp.PluginsStormPyleusJobEngine):
edp_base_version = "1.0.1"
@@ -45,7 +45,7 @@ class EdpPyleusEngine(edp_engine.StormPyleusJobEngine):
def validate_job_execution(self, cluster, job, data):
if not self.edp_supported(cluster.hadoop_version):
raise ex.InvalidDataException(
raise ex.PluginInvalidDataException(
_('Storm {base} required to run {type} jobs').format(
base=EdpPyleusEngine.edp_base_version, type=job.type))


@@ -19,20 +19,16 @@ from oslo_log import log as logging
import six
import yaml
from sahara import conductor
from sahara import context
from sahara.i18n import _
from sahara.plugins import conductor
from sahara.plugins import context
from sahara.plugins import exceptions as ex
from sahara.plugins import provisioning as p
from sahara.plugins.storm import config_helper as c_helper
from sahara.plugins.storm import edp_engine
from sahara.plugins.storm import run_scripts as run
from sahara.plugins import utils
from sahara.utils import cluster_progress_ops as cpo
from sahara.utils import general as ug
from sahara.utils import remote
from sahara_plugin_storm.i18n import _
from sahara_plugin_storm.plugins.storm import config_helper as c_helper
from sahara_plugin_storm.plugins.storm import edp_engine
from sahara_plugin_storm.plugins.storm import run_scripts as run
conductor = conductor.API
LOG = logging.getLogger(__name__)
@@ -188,10 +184,10 @@ class StormProvider(p.ProvisioningPluginBase):
return extra
@cpo.event_wrapper(
@utils.event_wrapper(
True, step=utils.start_process_event_message("StormMaster"))
def _start_storm_master(self, sm_instance):
with remote.get_remote(sm_instance) as r:
with utils.get_remote(sm_instance) as r:
run.start_storm_nimbus_and_ui(r)
LOG.info("Storm master at {host} has been started".format(
host=sm_instance.hostname()))
@@ -199,16 +195,16 @@ class StormProvider(p.ProvisioningPluginBase):
def _start_slave_processes(self, sl_instances):
if len(sl_instances) == 0:
return
cpo.add_provisioning_step(
utils.add_provisioning_step(
sl_instances[0].cluster_id,
utils.start_process_event_message("Slave"), len(sl_instances))
with context.ThreadGroup() as tg:
with context.PluginsThreadGroup() as tg:
for i in sl_instances:
tg.spawn('storm-start-sl-%s' % i.instance_name,
self._start_slaves, i)
@cpo.event_wrapper(True)
@utils.event_wrapper(True)
def _start_slaves(self, instance):
with instance.remote() as r:
run.start_storm_supervisor(r)
@@ -217,16 +213,16 @@ class StormProvider(p.ProvisioningPluginBase):
if len(zk_instances) == 0:
return
cpo.add_provisioning_step(
utils.add_provisioning_step(
zk_instances[0].cluster_id,
utils.start_process_event_message("Zookeeper"), len(zk_instances))
with context.ThreadGroup() as tg:
with context.PluginsThreadGroup() as tg:
for i in zk_instances:
tg.spawn('storm-start-zk-%s' % i.instance_name,
self._start_zookeeper, i)
@cpo.event_wrapper(True)
@utils.event_wrapper(True)
def _start_zookeeper(self, instance):
with instance.remote() as r:
run.start_zookeeper(r)
@@ -241,10 +237,10 @@ class StormProvider(p.ProvisioningPluginBase):
def _push_configs_to_nodes(self, cluster, extra, new_instances):
all_instances = utils.get_instances(cluster)
cpo.add_provisioning_step(
utils.add_provisioning_step(
cluster.id, _("Push configs to nodes"), len(all_instances))
with context.ThreadGroup() as tg:
with context.PluginsThreadGroup() as tg:
for instance in all_instances:
if instance in new_instances:
tg.spawn('storm-configure-%s' % instance.instance_name,
@@ -266,7 +262,7 @@ class StormProvider(p.ProvisioningPluginBase):
return stream
@cpo.event_wrapper(True)
@utils.event_wrapper(True)
def _push_configs_to_new_node(self, cluster, extra, instance):
ng_extra = extra[instance.node_group.id]
@@ -286,7 +282,7 @@ class StormProvider(p.ProvisioningPluginBase):
'/home/ubuntu/.pyleus.conf': ng_extra['pyleus_conf']
}
with remote.get_remote(instance) as r:
with utils.get_remote(instance) as r:
node_processes = instance.node_group.node_processes
r.write_files_to(files_storm, run_as_root=True)
if 'zookeeper' in node_processes:
@@ -297,7 +293,7 @@ class StormProvider(p.ProvisioningPluginBase):
if 'supervisor' in node_processes:
self._push_supervisor_configs(r, files_supervisor)
@cpo.event_wrapper(True)
@utils.event_wrapper(True)
def _push_configs_to_existing_node(self, cluster, extra, instance):
node_processes = instance.node_group.node_processes
need_storm_update = ('nimbus' in node_processes or
@@ -305,7 +301,7 @@ class StormProvider(p.ProvisioningPluginBase):
need_zookeeper_update = 'zookeeper' in node_processes
ng_extra = extra[instance.node_group.id]
r = remote.get_remote(instance)
r = utils.get_remote(instance)
if need_storm_update:
storm_path = '/usr/local/storm/conf/storm.yaml'
@@ -350,14 +346,14 @@ class StormProvider(p.ProvisioningPluginBase):
"host": master.hostname()
})
with remote.get_remote(master) as r:
with utils.get_remote(master) as r:
ret, stdout = r.execute_command(cmd)
names = stdout.split('\n')
topology_names = names[0:len(names)-1]
return topology_names
@cpo.event_wrapper(True, step=_("Rebalance Topology"),
param=('cluster', 1))
@utils.event_wrapper(True, step=_("Rebalance Topology"),
param=('cluster', 1))
def rebalance_topology(self, cluster):
topology_names = self._get_running_topologies_names(cluster)
master = utils.get_instance(cluster, "nimbus")
@@ -371,7 +367,7 @@ class StormProvider(p.ProvisioningPluginBase):
"topology_name": topology_name
})
with remote.get_remote(master) as r:
with utils.get_remote(master) as r:
ret, stdout = r.execute_command(cmd)
def validate_scaling(self, cluster, existing, additional):
@@ -392,7 +388,7 @@ class StormProvider(p.ProvisioningPluginBase):
scalable_processes = self._get_scalable_processes()
for ng_id in additional:
ng = ug.get_by_id(cluster.node_groups, ng_id)
ng = utils.get_by_id(cluster.node_groups, ng_id)
if not set(ng.node_processes).issubset(scalable_processes):
raise ex.NodeGroupCannotBeScaled(
ng.name, _("Storm plugin cannot scale nodegroup"
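
This hunk swaps Sahara-internal helpers (sahara.utils.cluster_progress_ops, sahara.utils.remote, sahara.utils.general, sahara.context) for the stable plugin-facing interface in sahara.plugins. A minimal sketch of the resulting pattern, using only names that appear in the hunk above; the provider class, method names, and command are illustrative placeholders, not part of the commit:

    from sahara.plugins import context
    from sahara.plugins import utils


    class ExampleProvider(object):
        """Illustrative provider fragment, not part of the commit."""

        @utils.event_wrapper(
            True, step=utils.start_process_event_message("ExampleProcess"))
        def _start_example_process(self, instance):
            # utils.get_remote replaces sahara.utils.remote.get_remote
            with utils.get_remote(instance) as r:
                r.execute_command("echo started")

        def _start_all(self, instances):
            utils.add_provisioning_step(
                instances[0].cluster_id,
                utils.start_process_event_message("ExampleProcess"),
                len(instances))
            # PluginsThreadGroup replaces sahara.context.ThreadGroup
            with context.PluginsThreadGroup() as tg:
                for i in instances:
                    tg.spawn('start-%s' % i.instance_name,
                             self._start_example_process, i)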


@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.utils import patches
from sahara_plugin_storm.utils import patches
patches.patch_all()
import oslo_i18n


@@ -14,8 +14,8 @@
from testtools import testcase
from sahara.plugins.storm import config_helper as s_config
from sahara.plugins.storm import plugin as s_plugin
from sahara_plugin_storm.plugins.storm import config_helper as s_config
from sahara_plugin_storm.plugins.storm import plugin as s_plugin
class TestStormConfigHelper(testcase.TestCase):


@@ -15,17 +15,13 @@
import mock
from sahara import conductor as cond
from sahara import context
from sahara.plugins import base as pb
from sahara.plugins import conductor
from sahara.plugins import context
from sahara.plugins import edp
from sahara.plugins import exceptions as ex
from sahara.plugins.storm import plugin as pl
from sahara.service.edp.storm import engine
from sahara.tests.unit import base
from sahara.utils import edp
conductor = cond.API
from sahara_plugin_storm.plugins.storm import plugin as pl
from sahara_plugin_storm.tests.unit import base
class StormPluginTest(base.SaharaWithDbTestCase):
@@ -87,7 +83,7 @@ class StormPluginTest(base.SaharaWithDbTestCase):
plugin._validate_existing_ng_scaling(cluster,
supervisor_id))
@mock.patch("sahara.plugins.storm.plugin.utils")
@mock.patch("sahara_plugin_storm.plugins.storm.plugin.utils")
def test_validate(self, mock_utils):
cluster_data = self._get_cluster('cluster', '1.1.0')
@@ -271,24 +267,24 @@ class StormPluginTest(base.SaharaWithDbTestCase):
def test_plugin101_edp_storm_engine(self):
self._test_engine('1.0.1', edp.JOB_TYPE_STORM,
engine.StormJobEngine)
edp.PluginsStormJobEngine)
def test_plugin101_edp_storm_pyleus_engine(self):
self._test_engine('1.0.1', edp.JOB_TYPE_PYLEUS,
engine.StormJobEngine)
edp.PluginsStormPyleusJobEngine)
def test_plugin110_edp_storm_engine(self):
self._test_engine('1.1.0', edp.JOB_TYPE_STORM,
engine.StormJobEngine)
edp.PluginsStormJobEngine)
def test_plugin110_edp_storm_pyleus_engine(self):
self._test_engine('1.1.0', edp.JOB_TYPE_PYLEUS,
engine.StormJobEngine)
edp.PluginsStormPyleusJobEngine)
def test_plugin120_edp_storm_engine(self):
self._test_engine('1.2', edp.JOB_TYPE_STORM,
engine.StormJobEngine)
edp.PluginsStormJobEngine)
def test_plugin120_edp_storm_pyleus_engine(self):
self._test_engine('1.2', edp.JOB_TYPE_PYLEUS,
engine.StormJobEngine)
edp.PluginsStormPyleusJobEngine)


@@ -1,6 +1,6 @@
[metadata]
name = sahara
summary = Sahara project
name = sahara_plugin_storm
summary = Storm Plugin for Sahara Project
description-file = README.rst
license = Apache Software License
classifiers =
@@ -23,80 +23,17 @@ setup-hooks = pbr.hooks.setup_hook
[files]
packages =
sahara
data_files =
etc/sahara =
etc/sahara/api-paste.ini
etc/sahara/rootwrap.conf
etc/sahara/rootwrap.d = etc/sahara/rootwrap.d/*
sahara_plugin_storm
[entry_points]
console_scripts =
sahara-all = sahara.cli.sahara_all:main
sahara-api = sahara.cli.sahara_api:main
sahara-engine = sahara.cli.sahara_engine:main
sahara-db-manage = sahara.db.migration.cli:main
sahara-rootwrap = oslo_rootwrap.cmd:main
_sahara-subprocess = sahara.cli.sahara_subprocess:main
sahara-templates = sahara.db.templates.cli:main
sahara-image-pack = sahara.cli.image_pack.cli:main
sahara-status = sahara.cli.sahara_status:main
wsgi_scripts =
sahara-wsgi-api = sahara.cli.sahara_api:setup_api
sahara.cluster.plugins =
vanilla = sahara.plugins.vanilla.plugin:VanillaProvider
ambari = sahara.plugins.ambari.plugin:AmbariPluginProvider
mapr = sahara.plugins.mapr.plugin:MapRPlugin
cdh = sahara.plugins.cdh.plugin:CDHPluginProvider
fake = sahara.plugins.fake.plugin:FakePluginProvider
spark = sahara.plugins.spark.plugin:SparkProvider
storm = sahara.plugins.storm.plugin:StormProvider
storm = sahara_plugin_storm.plugins.storm.plugin:StormProvider
sahara.data_source.types =
hdfs = sahara.service.edp.data_sources.hdfs.implementation:HDFSType
manila = sahara.service.edp.data_sources.manila.implementation:ManilaType
maprfs = sahara.service.edp.data_sources.maprfs.implementation:MapRFSType
swift = sahara.service.edp.data_sources.swift.implementation:SwiftType
s3 = sahara.service.edp.data_sources.s3.implementation:S3Type
[build_sphinx]
all_files = 1
build-dir = doc/build
source-dir = doc/source
warning-is-error = 1
sahara.job_binary.types =
internal-db = sahara.service.edp.job_binaries.internal_db.implementation:InternalDBType
manila = sahara.service.edp.job_binaries.manila.implementation:ManilaType
swift = sahara.service.edp.job_binaries.swift.implementation:SwiftType
s3 = sahara.service.edp.job_binaries.s3.implementation:S3Type
sahara.infrastructure.engine =
heat = sahara.service.heat.heat_engine:HeatEngine
sahara.remote =
ssh = sahara.utils.ssh_remote:SshRemoteDriver
sahara.run.mode =
all-in-one = sahara.service.ops:LocalOps
distributed = sahara.service.ops:RemoteOps
oslo.config.opts =
sahara.config = sahara.config:list_opts
oslo.config.opts.defaults =
sahara.config = sahara.common.config:set_cors_middleware_defaults
oslo.policy.policies =
sahara = sahara.common.policies:list_rules
[extract_messages]
keywords = _ gettext ngettext l_ lazy_gettext
mapping_file = babel.cfg
output_file = sahara/locale/sahara.pot
[compile_catalog]
directory = sahara/locale
domain = sahara
[update_catalog]
domain = sahara
output_dir = sahara/locale
input_file = sahara/locale/sahara.pot
[wheel]
universal = 1
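
The [entry_points] section now registers the storm plugin under the sahara.cluster.plugins namespace from the new package; Sahara discovers cluster plugins through this entry-point namespace. A minimal sketch of resolving that entry point with stevedore (pinned in lower-constraints.txt above) is shown below; Sahara core's actual loading code may differ, this only illustrates the namespace/name mapping declared here:

    # Illustrative only: resolve the entry point declared in setup.cfg.
    from stevedore import driver

    mgr = driver.DriverManager(
        namespace='sahara.cluster.plugins',  # namespace from [entry_points]
        name='storm',                        # entry point name
        invoke_on_load=True)                 # instantiate StormProvider()

    storm_provider = mgr.driver
    # storm_provider is a sahara_plugin_storm.plugins.storm.plugin.StormProvider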

tox.ini (89 changes)

@@ -1,6 +1,6 @@
[tox]
envlist = py35,py27,pep8,genpolicy
minversion = 1.6
envlist = py36,py35,py27,pep8
minversion = 2.0
skipsdist = True
[testenv]
@@ -8,7 +8,7 @@ usedevelop = True
install_command = pip install {opts} {packages}
setenv =
VIRTUAL_ENV={envdir}
DISCOVER_DIRECTORY=sahara/tests/unit
DISCOVER_DIRECTORY=sahara_plugin_storm/tests/unit
deps =
-c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt}
-r{toxinidir}/requirements.txt
@@ -16,19 +16,13 @@ deps =
commands = stestr run {posargs}
passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY
[testenv:cover]
basepython = python3
setenv =
PACKAGE_NAME=sahara
commands = {toxinidir}/tools/cover.sh {posargs}
[testenv:debug-py27]
basepython = python2.7
commands = oslo_debug_helper -t sahara/tests/unit {posargs}
commands = oslo_debug_helper -t sahara_plugin_storm/tests/unit {posargs}
[testenv:debug-py35]
basepython = python3.5
commands = oslo_debug_helper -t sahara/tests/unit {posargs}
commands = oslo_debug_helper -t sahara_plugin_storm/tests/unit {posargs}
[testenv:pep8]
basepython = python3
@@ -40,24 +34,11 @@ deps =
commands =
flake8 {posargs}
doc8 doc/source
# Run bashate checks
bash -c "find sahara -iname '*.sh' -print0 | xargs -0 bashate -v"
bash -c "find devstack -not -name \*.template -and -not -name README.rst -and -not -name \*.json -type f -print0 | xargs -0 bashate -v"
# Run security linter
bandit -c bandit.yaml -r sahara -n5 -p sahara_default -x tests
[testenv:genpolicy]
basepython = python3
commands = oslopolicy-sample-generator --config-file tools/config/sahara-policy-generator.conf
[testenv:venv]
basepython = python3
commands = {posargs}
[testenv:images]
sitepackages = True
commands = {posargs}
[testenv:docs]
basepython = python3
deps =
@@ -65,35 +46,9 @@ deps =
-r{toxinidir}/doc/requirements.txt
commands =
rm -rf doc/html doc/build
rm -rf api-ref/build api-ref/html
rm -rf doc/source/apidoc doc/source/api
sphinx-build -W -b html doc/source doc/build/html
sphinx-build -W -b html -d api-ref/build/doctrees api-ref/source api-ref/build/html
whereto doc/source/_extra/.htaccess doc/test/redirect-tests.txt
whitelist_externals = rm
[testenv:api-ref]
basepython = python3
deps =
-c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt}
-r{toxinidir}/doc/requirements.txt
install_command = pip install -U --force-reinstall {opts} {packages}
commands =
rm -rf api-ref/build api-ref/html
sphinx-build -W -b html -d api-ref/build/doctrees api-ref/source api-ref/build/html
whitelist_externals = rm
[testenv:pylint]
basepython = python3
setenv = VIRTUAL_ENV={envdir}
commands = bash tools/lintstack.sh
[testenv:genconfig]
basepython = python3
commands =
oslo-config-generator --config-file tools/config/config-generator.sahara.conf \
--output-file etc/sahara/sahara.conf.sample
whitelist_externals =
rm
[testenv:releasenotes]
basepython = python3
@@ -107,16 +62,11 @@ whitelist_externals = rm
[testenv:debug]
basepython = python3
# It runs tests from the specified dir (default is sahara/tests)
# It runs tests from the specified dir (default is sahara_plugin_storm/tests)
# in interactive mode, so, you could use pbr for tests debug.
# Example usage: tox -e debug -- -t sahara/tests/unit some.test.path
# Example usage: tox -e debug -- -t sahara_plugin_storm/tests/unit some.test.path
# https://docs.openstack.org/oslotest/latest/features.html#debugging-with-oslo-debug-helper
commands = oslo_debug_helper -t sahara/tests/unit {posargs}
[testenv:bandit]
basepython = python3
deps = -r{toxinidir}/test-requirements-bandit.txt
commands = bandit -c bandit.yaml -r sahara -n5 -p sahara_default -x tests
commands = oslo_debug_helper -t sahara_plugin_storm/tests/unit {posargs}
[flake8]
show-source = true
@@ -128,19 +78,12 @@ exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,tools
# [H204] Use assert(Not)Equal to check for equality
# [H205] Use assert(Greater|Less)(Equal) for comparison
enable-extensions=H904,H106,H203,H204,H205
[hacking]
import_exceptions = sahara.i18n
local-check-factory = sahara.utils.hacking.checks.factory
[testenv:bindep]
basepython = python3
# Do not install any requirements. We want this to be fast and work even if
# system dependencies are missing, since it's used to tell you what system
# dependencies are missing! This also means that bindep must be installed
# separately, outside of the requirements files.
deps = bindep
commands = bindep test
# [E123] Closing bracket does not match indentation of opening bracket's line
# [E226] Missing whitespace around arithmetic operator
# [E402] Module level import not at top of file
# [E731] Do not assign a lambda expression, use a def
# [W503] Line break occurred before a binary operator
ignore=E123,E226,E402,E731,W503
[testenv:lower-constraints]
basepython = python3