Plugins split from sahara core

Change-Id: I43e0beec6508f93a436a150749bfa23571986b9d
Telles Nobrega 2018-12-21 17:18:33 -03:00 committed by Telles Nobrega
parent e221c4b614
commit fbc20448ab
33 changed files with 332 additions and 303 deletions


@@ -1,3 +1,3 @@
 [DEFAULT]
-test_path=./sahara/tests/unit
+test_path=./sahara_plugin_spark/tests/unit
 top_dir=./

README.rst (new file)

@@ -0,0 +1,35 @@
========================
Team and repository tags
========================

.. image:: https://governance.openstack.org/tc/badges/sahara.svg
    :target: https://governance.openstack.org/tc/reference/tags/index.html

.. Change things from this point on

OpenStack Data Processing ("Sahara") project
============================================

Sahara at wiki.openstack.org: https://wiki.openstack.org/wiki/Sahara

Storyboard project: https://storyboard.openstack.org/#!/project/935

Sahara docs site: https://docs.openstack.org/sahara/latest/

Roadmap: https://wiki.openstack.org/wiki/Sahara/Roadmap

Quickstart guide: https://docs.openstack.org/sahara/latest/user/quickstart.html

How to participate: https://docs.openstack.org/sahara/latest/contributor/how-to-participate.html

Source: https://git.openstack.org/cgit/openstack/sahara

Bugs and feature requests: https://storyboard.openstack.org/#!/project/935

Release notes: https://docs.openstack.org/releasenotes/sahara/

License
-------

Apache License Version 2.0 http://www.apache.org/licenses/LICENSE-2.0

doc/requirements.txt (new file)

@@ -0,0 +1,9 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
openstackdocstheme>=1.18.1 # Apache-2.0
os-api-ref>=1.4.0 # Apache-2.0
reno>=2.5.0 # Apache-2.0
sphinx!=1.6.6,!=1.6.7,>=1.6.2 # BSD
sphinxcontrib-httpdomain>=1.3.0 # BSD
whereto>=0.3.0 # Apache-2.0

lower-constraints.txt (new file)

@@ -0,0 +1,162 @@
alabaster==0.7.10
alembic==0.8.10
amqp==2.2.2
appdirs==1.4.3
asn1crypto==0.24.0
astroid==1.3.8
Babel==2.3.4
bandit==1.1.0
bashate==0.5.1
bcrypt==3.1.4
botocore==1.5.1
cachetools==2.0.1
castellan==0.16.0
certifi==2018.1.18
cffi==1.11.5
chardet==3.0.4
click==6.7
cliff==2.11.0
cmd2==0.8.1
contextlib2==0.5.5
coverage==4.0
cryptography==2.1.4
debtcollector==1.19.0
decorator==4.2.1
deprecation==2.0
doc8==0.6.0
docutils==0.14
dogpile.cache==0.6.5
dulwich==0.19.0
enum-compat==0.0.2
eventlet==0.18.2
extras==1.0.0
fasteners==0.14.1
fixtures==3.0.0
flake8==2.6.2
Flask==1.0.2
future==0.16.0
futurist==1.6.0
gitdb2==2.0.3
GitPython==2.1.8
greenlet==0.4.13
hacking==1.1.0
idna==2.6
imagesize==1.0.0
iso8601==0.1.11
itsdangerous==0.24
Jinja2==2.10
jmespath==0.9.3
jsonpatch==1.21
jsonpointer==2.0
jsonschema==2.6.0
keystoneauth1==3.4.0
keystonemiddleware==4.17.0
kombu==4.1.0
linecache2==1.0.0
logilab-common==1.4.1
Mako==1.0.7
MarkupSafe==1.0
mccabe==0.2.1
mock==2.0.0
monotonic==1.4
mox3==0.25.0
msgpack==0.5.6
munch==2.2.0
netaddr==0.7.19
netifaces==0.10.6
openstackdocstheme==1.18.1
openstacksdk==0.12.0
os-api-ref==1.4.0
os-client-config==1.29.0
os-service-types==1.2.0
osc-lib==1.10.0
oslo.cache==1.29.0
oslo.concurrency==3.26.0
oslo.config==5.2.0
oslo.context==2.19.2
oslo.db==4.27.0
oslo.i18n==3.15.3
oslo.log==3.36.0
oslo.messaging==5.29.0
oslo.middleware==3.31.0
oslo.policy==1.30.0
oslo.rootwrap==5.8.0
oslo.serialization==2.18.0
oslo.service==1.24.0
oslo.upgradecheck==0.1.0
oslo.utils==3.33.0
oslotest==3.2.0
packaging==17.1
paramiko==2.0.0
Paste==2.0.3
PasteDeploy==1.5.2
pbr==2.0.0
pika-pool==0.1.3
pika==0.10.0
prettytable==0.7.2
psycopg2==2.6.2
pyasn1==0.4.2
pycadf==2.7.0
pycparser==2.18
pycodestyle==2.4.0
pyflakes==0.8.1
Pygments==2.2.0
pyinotify==0.9.6
pylint==1.4.5
PyMySQL==0.7.6
PyNaCl==1.2.1
pyOpenSSL==17.5.0
pyparsing==2.2.0
pyperclip==1.6.0
python-barbicanclient==4.6.0
python-cinderclient==3.3.0
python-dateutil==2.7.0
python-editor==1.0.3
python-glanceclient==2.8.0
python-heatclient==1.10.0
python-keystoneclient==3.8.0
python-manilaclient==1.16.0
python-mimeparse==1.6.0
python-neutronclient==6.7.0
python-novaclient==9.1.0
python-openstackclient==3.14.0
python-saharaclient==1.4.0
python-subunit==1.2.0
python-swiftclient==3.2.0
pytz==2018.3
PyYAML==3.12
reno==2.5.0
repoze.lru==0.7
requests==2.14.2
requestsexceptions==1.4.0
restructuredtext-lint==1.1.3
rfc3986==1.1.0
Routes==2.4.1
simplejson==3.13.2
six==1.10.0
smmap2==2.0.3
snowballstemmer==1.2.1
Sphinx==1.6.2
sphinxcontrib-httpdomain==1.3.0
sphinxcontrib-websupport==1.0.1
sqlalchemy-migrate==0.11.0
SQLAlchemy==1.0.10
sqlparse==0.2.4
statsd==3.2.2
stestr==1.0.0
stevedore==1.20.0
Tempita==0.5.2
tenacity==4.9.0
testresources==2.0.0
testscenarios==0.4
testtools==2.2.0
tooz==1.58.0
traceback2==1.4.0
unittest2==1.1.0
urllib3==1.22
vine==1.1.4
voluptuous==0.11.1
warlock==1.3.0
WebOb==1.7.1
Werkzeug==0.14.1
wrapt==1.10.11


@@ -39,6 +39,7 @@ python-swiftclient>=3.2.0 # Apache-2.0
 python-neutronclient>=6.7.0 # Apache-2.0
 python-heatclient>=1.10.0 # Apache-2.0
 python-glanceclient>=2.8.0 # Apache-2.0
+sahara
 six>=1.10.0 # MIT
 stevedore>=1.20.0 # Apache-2.0
 SQLAlchemy!=1.1.5,!=1.1.6,!=1.1.7,!=1.1.8,>=1.0.10 # MIT


@@ -17,25 +17,20 @@ from oslo_config import cfg
 from oslo_log import log as logging
 import six
-from sahara import conductor as c
 from sahara.plugins import provisioning as p
+from sahara.plugins import swift_helper as swift
+from sahara.plugins import topology_helper as topology
 from sahara.plugins import utils
-from sahara.swift import swift_helper as swift
-from sahara.topology import topology_helper as topology
-from sahara.utils import files as f
-from sahara.utils import types
-from sahara.utils import xmlutils as x
-conductor = c.API
 LOG = logging.getLogger(__name__)
 CONF = cfg.CONF
-CORE_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/spark/resources/core-default.xml')
+CORE_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/spark/resources/core-default.xml', 'sahara_plugin_spark')
-HDFS_DEFAULT = x.load_hadoop_xml_defaults(
-    'plugins/spark/resources/hdfs-default.xml')
+HDFS_DEFAULT = utils.load_hadoop_xml_defaults(
+    'plugins/spark/resources/hdfs-default.xml', 'sahara_plugin_spark')
 SWIFT_DEFAULTS = swift.read_default_swift_configs()

@@ -200,7 +195,7 @@ def _initialise_configs():
         if cfg.default_value in ["true", "false"]:
             cfg.config_type = "bool"
             cfg.default_value = (cfg.default_value == 'true')
-        elif types.is_int(cfg.default_value):
+        elif utils.is_int(cfg.default_value):
             cfg.config_type = "int"
             cfg.default_value = int(cfg.default_value)
         if config['name'] in CLUSTER_WIDE_CONFS:

@@ -296,8 +291,8 @@ def generate_xml_configs(configs, storage_path, nn_hostname, hadoop_port):
         core_all += topology.vm_awareness_core_config()
     xml_configs = {
-        'core-site': x.create_hadoop_xml(cfg, core_all),
-        'hdfs-site': x.create_hadoop_xml(cfg, HDFS_DEFAULT)
+        'core-site': utils.create_hadoop_xml(cfg, core_all),
+        'hdfs-site': utils.create_hadoop_xml(cfg, HDFS_DEFAULT)
     }
     return xml_configs

@@ -458,10 +453,12 @@ def generate_job_cleanup_config(cluster):
                     (args['minimum_cleanup_megabytes'] > 0
                      and args['minimum_cleanup_seconds'] > 0))}
     if job_conf['valid']:
-        job_conf['cron'] = f.get_file_text(
-            'plugins/spark/resources/spark-cleanup.cron'),
-        job_cleanup_script = f.get_file_text(
-            'plugins/spark/resources/tmp-cleanup.sh.template')
+        job_conf['cron'] = utils.get_file_text(
+            'plugins/spark/resources/spark-cleanup.cron',
+            'sahara_plugin_spark'),
+        job_cleanup_script = utils.get_file_text(
+            'plugins/spark/resources/tmp-cleanup.sh.template',
+            'sahara_plugin_spark')
         job_conf['script'] = job_cleanup_script.format(**args)
     return job_conf
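
The hunks just above are the heart of the split: resource files and type helpers that used to come from sahara.utils and sahara.swift are now reached through sahara.plugins.utils, with the owning package passed explicitly. A minimal sketch of the new call pattern, assuming only what the hunks themselves show (the sample config dict is illustrative):

# Sketch mirroring the calls in the hunks above; the second argument makes the
# helper resolve the resource path inside the sahara_plugin_spark package
# rather than inside sahara core.
from sahara.plugins import utils

CORE_DEFAULT = utils.load_hadoop_xml_defaults(
    'plugins/spark/resources/core-default.xml', 'sahara_plugin_spark')

# create_hadoop_xml() renders a config mapping against the loaded defaults,
# as generate_xml_configs() does above; the key/value here is only an example.
core_site = utils.create_hadoop_xml({'fs.defaultFS': 'hdfs://namenode:9000'},
                                    CORE_DEFAULT)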


@@ -18,13 +18,13 @@ import os
 import six
-from sahara import exceptions as ex
-from sahara.i18n import _
+from sahara.plugins import edp
+from sahara.plugins import exceptions as ex
 from sahara.plugins import utils as plugin_utils
-from sahara.service.edp.spark import engine as edp_engine
+from sahara_plugin_spark.i18n import _
-class EdpEngine(edp_engine.SparkJobEngine):
+class EdpEngine(edp.PluginsSparkJobEngine):
     edp_base_version = "1.6.0"

@@ -51,11 +51,11 @@ class EdpEngine(edp_engine.SparkJobEngine):
     @staticmethod
     def job_type_supported(job_type):
-        return job_type in edp_engine.SparkJobEngine.get_supported_job_types()
+        return job_type in edp.PluginsSparkJobEngine.get_supported_job_types()
     def validate_job_execution(self, cluster, job, data):
         if not self.edp_supported(cluster.hadoop_version):
-            raise ex.InvalidDataException(
+            raise ex.PluginInvalidDataException(
                 _('Spark {base} or higher required to run {type} jobs').format(
                     base=EdpEngine.edp_base_version, type=job.type))


@@ -19,27 +19,22 @@ import os
 from oslo_config import cfg
 from oslo_log import log as logging
-from sahara import conductor
-from sahara import context
-from sahara.i18n import _
+from sahara.plugins import conductor
+from sahara.plugins import context
 from sahara.plugins import exceptions as ex
 from sahara.plugins import provisioning as p
 from sahara.plugins import recommendations_utils as ru
-from sahara.plugins.spark import config_helper as c_helper
-from sahara.plugins.spark import edp_engine
-from sahara.plugins.spark import run_scripts as run
-from sahara.plugins.spark import scaling as sc
-from sahara.plugins.spark import shell_engine
+from sahara.plugins import swift_helper
+from sahara.plugins import topology_helper as th
 from sahara.plugins import utils
-from sahara.swift import swift_helper
-from sahara.topology import topology_helper as th
-from sahara.utils import cluster_progress_ops as cpo
-from sahara.utils import files as f
-from sahara.utils import general as ug
-from sahara.utils import remote
+from sahara_plugin_spark.i18n import _
+from sahara_plugin_spark.plugins.spark import config_helper as c_helper
+from sahara_plugin_spark.plugins.spark import edp_engine
+from sahara_plugin_spark.plugins.spark import run_scripts as run
+from sahara_plugin_spark.plugins.spark import scaling as sc
+from sahara_plugin_spark.plugins.spark import shell_engine
 conductor = conductor.API
 LOG = logging.getLogger(__name__)
 CONF = cfg.CONF

@@ -127,10 +122,10 @@ class SparkProvider(p.ProvisioningPluginBase):
     def configure_cluster(self, cluster):
         self._setup_instances(cluster)
-    @cpo.event_wrapper(
+    @utils.event_wrapper(
         True, step=utils.start_process_event_message("NameNode"))
     def _start_namenode(self, nn_instance):
-        with remote.get_remote(nn_instance) as r:
+        with utils.get_remote(nn_instance) as r:
             run.format_namenode(r)
             run.start_processes(r, "namenode")

@@ -139,10 +134,10 @@ class SparkProvider(p.ProvisioningPluginBase):
         if sm_instance:
             self._start_spark(cluster, sm_instance)
-    @cpo.event_wrapper(
+    @utils.event_wrapper(
         True, step=utils.start_process_event_message("SparkMasterNode"))
     def _start_spark(self, cluster, sm_instance):
-        with remote.get_remote(sm_instance) as r:
+        with utils.get_remote(sm_instance) as r:
             run.start_spark_master(r, self._spark_home(cluster))
             LOG.info("Spark service has been started")

@@ -159,7 +154,7 @@ class SparkProvider(p.ProvisioningPluginBase):
         LOG.info("Hadoop services have been started")
-        with remote.get_remote(nn_instance) as r:
+        with utils.get_remote(nn_instance) as r:
             r.execute_command("sudo -u hdfs hdfs dfs -mkdir -p /user/$USER/")
             r.execute_command("sudo -u hdfs hdfs dfs -chown $USER "
                               "/user/$USER/")

@@ -229,16 +224,16 @@ class SparkProvider(p.ProvisioningPluginBase):
         if len(dn_instances) == 0:
             return
-        cpo.add_provisioning_step(
+        utils.add_provisioning_step(
             dn_instances[0].cluster_id,
             utils.start_process_event_message("DataNodes"), len(dn_instances))
-        with context.ThreadGroup() as tg:
+        with context.PluginsThreadGroup() as tg:
             for i in dn_instances:
                 tg.spawn('spark-start-dn-%s' % i.instance_name,
                          self._start_datanode, i)
-    @cpo.event_wrapper(mark_successful_on_exit=True)
+    @utils.event_wrapper(mark_successful_on_exit=True)
     def _start_datanode(self, instance):
         with instance.remote() as r:
             run.start_processes(r, "datanode")

@@ -253,9 +248,9 @@ class SparkProvider(p.ProvisioningPluginBase):
     def _push_configs_to_nodes(self, cluster, extra, new_instances):
         all_instances = utils.get_instances(cluster)
-        cpo.add_provisioning_step(
+        utils.add_provisioning_step(
             cluster.id, _("Push configs to nodes"), len(all_instances))
-        with context.ThreadGroup() as tg:
+        with context.PluginsThreadGroup() as tg:
             for instance in all_instances:
                 extra = self._add_instance_ng_related_to_extra(
                     cluster, instance, extra)

@@ -268,7 +263,7 @@ class SparkProvider(p.ProvisioningPluginBase):
                              self._push_configs_to_existing_node, cluster,
                              extra, instance)
-    @cpo.event_wrapper(mark_successful_on_exit=True)
+    @utils.event_wrapper(mark_successful_on_exit=True)
     def _push_configs_to_new_node(self, cluster, extra, instance):
         files_hadoop = {
             os.path.join(c_helper.HADOOP_CONF_DIR,

@@ -308,7 +303,7 @@ class SparkProvider(p.ProvisioningPluginBase):
             'sudo chmod 755 %(nn_path)s %(dn_path)s' %
             {"nn_path": nn_path, "dn_path": dn_path})
-        with remote.get_remote(instance) as r:
+        with utils.get_remote(instance) as r:
             r.execute_command(
                 'sudo chown -R $USER:$USER /etc/hadoop'
             )

@@ -331,8 +326,9 @@ class SparkProvider(p.ProvisioningPluginBase):
         if c_helper.is_data_locality_enabled(cluster):
             r.write_file_to(
                 '/etc/hadoop/topology.sh',
-                f.get_file_text(
-                    'plugins/spark/resources/topology.sh'))
+                utils.get_file_text(
+                    'plugins/spark/resources/topology.sh',
+                    'sahara_plugin_spark'))
             r.execute_command(
                 'sudo chmod +x /etc/hadoop/topology.sh'
             )

@@ -341,7 +337,7 @@ class SparkProvider(p.ProvisioningPluginBase):
         self._push_master_configs(r, cluster, extra, instance)
         self._push_cleanup_job(r, cluster, extra, instance)
-    @cpo.event_wrapper(mark_successful_on_exit=True)
+    @utils.event_wrapper(mark_successful_on_exit=True)
     def _push_configs_to_existing_node(self, cluster, extra, instance):
         node_processes = instance.node_group.node_processes
         need_update_hadoop = (c_helper.is_data_locality_enabled(cluster) or

@@ -359,11 +355,11 @@ class SparkProvider(p.ProvisioningPluginBase):
                              sp_home,
                              'conf/spark-defaults.conf'): extra['sp_defaults']
            }
-            r = remote.get_remote(instance)
+            r = utils.get_remote(instance)
             r.write_files_to(files)
             self._push_cleanup_job(r, cluster, extra, instance)
         if need_update_hadoop:
-            with remote.get_remote(instance) as r:
+            with utils.get_remote(instance) as r:
                 self._write_topology_data(r, cluster, extra)
                 self._push_master_configs(r, cluster, extra, instance)

@@ -451,13 +447,13 @@ class SparkProvider(p.ProvisioningPluginBase):
     def scale_cluster(self, cluster, instances):
         master = utils.get_instance(cluster, "master")
-        r_master = remote.get_remote(master)
+        r_master = utils.get_remote(master)
         run.stop_spark(r_master, self._spark_home(cluster))
         self._setup_instances(cluster, instances)
         nn = utils.get_instance(cluster, "namenode")
-        run.refresh_nodes(remote.get_remote(nn), "dfsadmin")
+        run.refresh_nodes(utils.get_remote(nn), "dfsadmin")
         dn_instances = [instance for instance in instances if
                         'datanode' in instance.node_group.node_processes]
         self._start_datanode_processes(dn_instances)

@@ -473,7 +469,7 @@ class SparkProvider(p.ProvisioningPluginBase):
         scalable_processes = self._get_scalable_processes()
         for ng_id in additional:
-            ng = ug.get_by_id(cluster.node_groups, ng_id)
+            ng = utils.get_by_id(cluster.node_groups, ng_id)
             if not set(ng.node_processes).issubset(scalable_processes):
                 raise ex.NodeGroupCannotBeScaled(
                     ng.name, _("Spark plugin cannot scale nodegroup"


@@ -17,11 +17,9 @@ import os
 from oslo_log import log as logging
-from sahara.i18n import _
-from sahara.plugins.spark import config_helper as c_helper
 from sahara.plugins import utils
-from sahara.utils import cluster_progress_ops
-from sahara.utils import poll_utils
+from sahara_plugin_spark.i18n import _
+from sahara_plugin_spark.plugins.spark import config_helper as c_helper
 LOG = logging.getLogger(__name__)

@@ -63,7 +61,7 @@ def stop_spark(nn_remote, sp_home):
                                          "sbin/stop-all.sh"))
-@cluster_progress_ops.event_wrapper(
+@utils.event_wrapper(
     True, step=_("Await DataNodes start up"), param=("cluster", 0))
 def await_datanodes(cluster):
     datanodes_count = len(utils.get_instances(cluster, "datanode"))

@@ -72,7 +70,7 @@ def await_datanodes(cluster):
     log_msg = _("Waiting on %d DataNodes to start up") % datanodes_count
     with utils.get_instance(cluster, "namenode").remote() as r:
-        poll_utils.plugin_option_poll(
+        utils.plugin_option_poll(
             cluster, _check_datanodes_count,
             c_helper.DATANODES_STARTUP_TIMEOUT,
             log_msg, 1, {"remote": r, "count": datanodes_count})


@@ -17,17 +17,14 @@ import os
 import six
-from sahara import context
-from sahara.i18n import _
-from sahara.plugins.spark import config_helper as c_helper
-from sahara.plugins.spark import run_scripts as run
+from sahara.plugins import context
 from sahara.plugins import utils
-from sahara.utils import cluster_progress_ops as cpo
-from sahara.utils import poll_utils
-from sahara.utils import remote
+from sahara_plugin_spark.i18n import _
+from sahara_plugin_spark.plugins.spark import config_helper as c_helper
+from sahara_plugin_spark.plugins.spark import run_scripts as run
-@cpo.event_wrapper(True, step=_("Decommission %s") % "Slaves")
+@utils.event_wrapper(True, step=_("Decommission %s") % "Slaves")
 def decommission_sl(master, inst_to_be_deleted, survived_inst):
     if survived_inst is not None:
         slavenames = []

@@ -39,7 +36,7 @@ def decommission_sl(master, inst_to_be_deleted, survived_inst):
     cluster = master.cluster
     sp_home = utils.get_config_value_or_default("Spark", "Spark home", cluster)
-    r_master = remote.get_remote(master)
+    r_master = utils.get_remote(master)
     run.stop_spark(r_master, sp_home)
     # write new slave file to master

@@ -48,7 +45,7 @@ def decommission_sl(master, inst_to_be_deleted, survived_inst):
     # write new slaves file to each survived slave as well
     for i in survived_inst:
-        with remote.get_remote(i) as r:
+        with utils.get_remote(i) as r:
             r.write_files_to(files)
     run.start_spark_master(r_master, sp_home)

@@ -65,16 +62,16 @@ def _is_decommissioned(r, inst_to_be_deleted):
     return True
-@cpo.event_wrapper(True, step=_("Decommission %s") % "DataNodes")
+@utils.event_wrapper(True, step=_("Decommission %s") % "DataNodes")
 def decommission_dn(nn, inst_to_be_deleted, survived_inst):
-    with remote.get_remote(nn) as r:
+    with utils.get_remote(nn) as r:
         r.write_file_to('/etc/hadoop/dn.excl',
                         utils.generate_fqdn_host_names(
                             inst_to_be_deleted))
-        run.refresh_nodes(remote.get_remote(nn), "dfsadmin")
+        run.refresh_nodes(utils.get_remote(nn), "dfsadmin")
         context.sleep(3)
-        poll_utils.plugin_option_poll(
+        utils.plugin_option_poll(
            nn.cluster, _is_decommissioned, c_helper.DECOMMISSIONING_TIMEOUT,
            _("Decommission %s") % "DataNodes", 3, {
                'r': r, 'inst_to_be_deleted': inst_to_be_deleted})


@@ -13,16 +13,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from sahara.plugins import edp
 from sahara.plugins import utils as plugin_utils
-from sahara.service.edp.spark import engine as shell_engine
-class ShellEngine(shell_engine.SparkShellJobEngine):
+class ShellEngine(edp.PluginsSparkShellJobEngine):
     def __init__(self, cluster):
         super(ShellEngine, self).__init__(cluster)
         self.master = plugin_utils.get_instance(cluster, "master")
     @staticmethod
     def job_type_supported(job_type):
-        return (job_type in shell_engine.SparkShellJobEngine.
+        return (job_type in edp.PluginsSparkShellJobEngine.
                 get_supported_job_types())


@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from sahara.utils import patches
+from sahara_plugin_spark.utils import patches
 patches.patch_all()
 import oslo_i18n


@@ -13,13 +13,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import mock
 from oslotest import base
-from sahara import context
-from sahara.db import api as db_api
-from sahara import main
-from sahara.utils import rpc
+from sahara.plugins import context
+from sahara.plugins import db as db_api
+from sahara.plugins import main
+from sahara.plugins import utils
 class SaharaTestCase(base.BaseTestCase):

@@ -27,7 +26,7 @@ class SaharaTestCase(base.BaseTestCase):
     def setUp(self):
         super(SaharaTestCase, self).setUp()
         self.setup_context()
-        rpc.setup('all-in-one')
+        utils.rpc_setup('all-in-one')
     def setup_context(self, username="test_user", tenant_id="tenant_1",
                       auth_token="test_auth_token", tenant_name='test_tenant',

@@ -35,14 +34,14 @@ class SaharaTestCase(base.BaseTestCase):
         self.addCleanup(context.set_ctx,
                         context.ctx() if context.has_ctx() else None)
-        context.set_ctx(context.Context(
+        context.set_ctx(context.PluginsContext(
             username=username, tenant_id=tenant_id,
             auth_token=auth_token, service_catalog=service_catalog or {},
             tenant_name=tenant_name, **kwargs))
     def override_config(self, name, override, group=None):
-        main.CONF.set_override(name, override, group)
-        self.addCleanup(main.CONF.clear_override, name, group)
+        main.set_override(name, override, group)
+        self.addCleanup(main.clear_override, name, group)
 class SaharaWithDbTestCase(SaharaTestCase):

@@ -52,22 +51,3 @@ class SaharaWithDbTestCase(SaharaTestCase):
         self.override_config('connection', "sqlite://", group='database')
         db_api.setup_db()
         self.addCleanup(db_api.drop_db)
-class _ConsecutiveThreadGroup(context.ThreadGroup):
-    def __init__(self, _thread_pool_size=1000):
-        pass
-    def spawn(self, thread_description, func, *args, **kwargs):
-        func(*args, **kwargs)
-    def __enter__(self):
-        return self
-    def __exit__(self, *ex):
-        pass
-def mock_thread_group(func):
-    return mock.patch('sahara.context.ThreadGroup',
-                      new=_ConsecutiveThreadGroup)(func)


@@ -18,10 +18,10 @@ import xml.dom.minidom as xml
 import mock
-from sahara.plugins.spark import config_helper as c_helper
-from sahara.swift import swift_helper as swift
-from sahara.tests.unit import base as test_base
-from sahara.utils import xmlutils
+from sahara.plugins import swift_helper as swift
+from sahara.plugins import utils
+from sahara_plugin_spark.plugins.spark import config_helper as c_helper
+from sahara_plugin_spark.tests.unit import base as test_base
 class ConfigHelperUtilsTest(test_base.SaharaTestCase):

@@ -62,7 +62,7 @@ class ConfigHelperUtilsTest(test_base.SaharaTestCase):
         self.assertNotIn(configs, 'script')
         self.assertNotIn(configs, 'cron')
-    @mock.patch("sahara.swift.utils.retrieve_auth_url")
+    @mock.patch("sahara.plugins.swift_utils.retrieve_auth_url")
     def test_generate_xml_configs(self, auth_url):
         auth_url.return_value = "http://localhost:5000/v2/"

@@ -73,7 +73,7 @@ class ConfigHelperUtilsTest(test_base.SaharaTestCase):
         c = c_helper.generate_xml_configs({}, ['/mnt/one'], 'localhost', None)
         doc = xml.parseString(c['core-site'])
         configuration = doc.getElementsByTagName('configuration')
-        properties = xmlutils.get_property_dict(configuration[0])
+        properties = utils.get_property_dict(configuration[0])
         self.assertDictContainsSubset(swift_vals, properties)
         # Make sure that user values have precedence over defaults

@@ -82,7 +82,7 @@ class ConfigHelperUtilsTest(test_base.SaharaTestCase):
                                              ['/mnt/one'], 'localhost', None)
         doc = xml.parseString(c['core-site'])
         configuration = doc.getElementsByTagName('configuration')
-        properties = xmlutils.get_property_dict(configuration[0])
+        properties = utils.get_property_dict(configuration[0])
         mod_swift_vals = copy.copy(swift_vals)
         mod_swift_vals['fs.swift.service.sahara.tenant'] = 'fred'
         self.assertDictContainsSubset(mod_swift_vals, properties)

@@ -94,6 +94,6 @@ class ConfigHelperUtilsTest(test_base.SaharaTestCase):
                                              ['/mnt/one'], 'localhost', None)
         doc = xml.parseString(c['core-site'])
         configuration = doc.getElementsByTagName('configuration')
-        properties = xmlutils.get_property_dict(configuration[0])
+        properties = utils.get_property_dict(configuration[0])
         for key in mod_swift_vals.keys():
             self.assertNotIn(key, properties)


@@ -16,18 +16,14 @@
 import mock
 import testtools
-from sahara import conductor as cond
-from sahara import context
 from sahara.plugins import base as pb
+from sahara.plugins import conductor
+from sahara.plugins import context
+from sahara.plugins import edp
 from sahara.plugins import exceptions as pe
-from sahara.plugins.spark import plugin as pl
-from sahara.service.edp.spark import engine
-from sahara.tests.unit import base
-from sahara.tests.unit import testutils as tu
-from sahara.utils import edp
-conductor = cond.API
+from sahara.plugins import testutils as tu
+from sahara_plugin_spark.plugins.spark import plugin as pl
+from sahara_plugin_spark.tests.unit import base
 class SparkPluginTest(base.SaharaWithDbTestCase):

@@ -46,35 +42,35 @@ class SparkPluginTest(base.SaharaWithDbTestCase):
     def test_plugin11_edp_engine(self):
         self._test_engine('1.6.0', edp.JOB_TYPE_SPARK,
-                          engine.SparkJobEngine)
+                          edp.PluginsSparkJobEngine)
     def test_plugin12_shell_engine(self):
         self._test_engine('1.6.0', edp.JOB_TYPE_SHELL,
-                          engine.SparkShellJobEngine)
+                          edp.PluginsSparkShellJobEngine)
     def test_plugin21_edp_engine(self):
         self._test_engine('2.1.0', edp.JOB_TYPE_SPARK,
-                          engine.SparkJobEngine)
+                          edp.PluginsSparkJobEngine)
     def test_plugin21_shell_engine(self):
         self._test_engine('2.1.0', edp.JOB_TYPE_SHELL,
-                          engine.SparkShellJobEngine)
+                          edp.PluginsSparkShellJobEngine)
     def test_plugin22_edp_engine(self):
         self._test_engine('2.2', edp.JOB_TYPE_SPARK,
-                          engine.SparkJobEngine)
+                          edp.PluginsSparkJobEngine)
     def test_plugin22_shell_engine(self):
         self._test_engine('2.2', edp.JOB_TYPE_SHELL,
-                          engine.SparkShellJobEngine)
+                          edp.PluginsSparkShellJobEngine)
     def test_plugin23_edp_engine(self):
         self._test_engine('2.3', edp.JOB_TYPE_SPARK,
-                          engine.SparkJobEngine)
+                          edp.PluginsSparkJobEngine)
     def test_plugin23_shell_engine(self):
         self._test_engine('2.3', edp.JOB_TYPE_SHELL,
-                          engine.SparkShellJobEngine)
+                          edp.PluginsSparkShellJobEngine)
     def _test_engine(self, version, job_type, eng):
         cluster_dict = self._init_cluster_dict(version)


@@ -1,6 +1,6 @@
 [metadata]
-name = sahara
-summary = Sahara project
+name = sahara_plugin_spark
+summary = Spark Plugin for Sahara Project
 description-file = README.rst
 license = Apache Software License
 classifiers =

@@ -23,80 +23,17 @@ setup-hooks = pbr.hooks.setup_hook
 [files]
 packages =
-    sahara
+    sahara_plugin_spark
-data_files =
-    etc/sahara =
-        etc/sahara/api-paste.ini
-        etc/sahara/rootwrap.conf
-    etc/sahara/rootwrap.d = etc/sahara/rootwrap.d/*
 [entry_points]
-console_scripts =
-    sahara-all = sahara.cli.sahara_all:main
-    sahara-api = sahara.cli.sahara_api:main
-    sahara-engine = sahara.cli.sahara_engine:main
-    sahara-db-manage = sahara.db.migration.cli:main
-    sahara-rootwrap = oslo_rootwrap.cmd:main
-    _sahara-subprocess = sahara.cli.sahara_subprocess:main
-    sahara-templates = sahara.db.templates.cli:main
-    sahara-image-pack = sahara.cli.image_pack.cli:main
-    sahara-status = sahara.cli.sahara_status:main
-wsgi_scripts =
-    sahara-wsgi-api = sahara.cli.sahara_api:setup_api
 sahara.cluster.plugins =
-    vanilla = sahara.plugins.vanilla.plugin:VanillaProvider
-    ambari = sahara.plugins.ambari.plugin:AmbariPluginProvider
-    mapr = sahara.plugins.mapr.plugin:MapRPlugin
-    cdh = sahara.plugins.cdh.plugin:CDHPluginProvider
-    fake = sahara.plugins.fake.plugin:FakePluginProvider
-    spark = sahara.plugins.spark.plugin:SparkProvider
-    storm = sahara.plugins.storm.plugin:StormProvider
+    spark = sahara_plugin_spark.plugins.spark.plugin:SparkProvider
-sahara.data_source.types =
-    hdfs = sahara.service.edp.data_sources.hdfs.implementation:HDFSType
-    manila = sahara.service.edp.data_sources.manila.implementation:ManilaType
-    maprfs = sahara.service.edp.data_sources.maprfs.implementation:MapRFSType
-    swift = sahara.service.edp.data_sources.swift.implementation:SwiftType
-    s3 = sahara.service.edp.data_sources.s3.implementation:S3Type
-sahara.job_binary.types =
-    internal-db = sahara.service.edp.job_binaries.internal_db.implementation:InternalDBType
-    manila = sahara.service.edp.job_binaries.manila.implementation:ManilaType
-    swift = sahara.service.edp.job_binaries.swift.implementation:SwiftType
-    s3 = sahara.service.edp.job_binaries.s3.implementation:S3Type
-sahara.infrastructure.engine =
-    heat = sahara.service.heat.heat_engine:HeatEngine
-sahara.remote =
-    ssh = sahara.utils.ssh_remote:SshRemoteDriver
-sahara.run.mode =
-    all-in-one = sahara.service.ops:LocalOps
-    distributed = sahara.service.ops:RemoteOps
-oslo.config.opts =
-    sahara.config = sahara.config:list_opts
-oslo.config.opts.defaults =
-    sahara.config = sahara.common.config:set_cors_middleware_defaults
-oslo.policy.policies =
-    sahara = sahara.common.policies:list_rules
-[extract_messages]
-keywords = _ gettext ngettext l_ lazy_gettext
-mapping_file = babel.cfg
-output_file = sahara/locale/sahara.pot
-[compile_catalog]
-directory = sahara/locale
-domain = sahara
-[update_catalog]
-domain = sahara
-output_dir = sahara/locale
-input_file = sahara/locale/sahara.pot
+[build_sphinx]
+all_files = 1
+build-dir = doc/build
+source-dir = doc/source
+warning-is-error = 1
+[wheel]
+universal = 1
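
The entry_points section is what ties the new package back to sahara core: the single remaining entry point registers SparkProvider under the sahara.cluster.plugins namespace. A minimal sketch of how such an entry point could be resolved with stevedore follows; the loading side lives in sahara core, not in this commit, and the use of DriverManager here is an assumption for illustration.

# Assumes the standard stevedore driver API; the namespace and entry-point
# name come from the setup.cfg hunk above.
from stevedore import driver

mgr = driver.DriverManager(
    namespace='sahara.cluster.plugins',  # declared in [entry_points]
    name='spark',                        # entry-point name
    invoke_on_load=True)                 # instantiate the provider class
spark_provider = mgr.driver  # -> SparkProvider instance from sahara_plugin_spark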

tox.ini

@@ -1,14 +1,15 @@
 [tox]
-envlist = py35,py27,pep8,genpolicy
+envlist = py35,py27,pep8
 minversion = 1.6
 skipsdist = True
 [testenv]
+basepython = python3
 usedevelop = True
 install_command = pip install {opts} {packages}
 setenv =
     VIRTUAL_ENV={envdir}
-    DISCOVER_DIRECTORY=sahara/tests/unit
+    DISCOVER_DIRECTORY=sahara_plugin_spark/tests/unit
 deps =
     -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt}
     -r{toxinidir}/requirements.txt

@@ -16,107 +17,46 @@ deps =
 commands = stestr run {posargs}
 passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY
-[testenv:cover]
-basepython = python3
-setenv =
-    PACKAGE_NAME=sahara
-commands = {toxinidir}/tools/cover.sh {posargs}
 [testenv:debug-py27]
 basepython = python2.7
-commands = oslo_debug_helper -t sahara/tests/unit {posargs}
+commands = oslo_debug_helper -t sahara_plugin_spark/tests/unit {posargs}
 [testenv:debug-py35]
 basepython = python3.5
-commands = oslo_debug_helper -t sahara/tests/unit {posargs}
+commands = oslo_debug_helper -t sahara_plugin_spark/tests/unit {posargs}
 [testenv:pep8]
 basepython = python3
-deps =
-    -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt}
-    -r{toxinidir}/requirements.txt
-    -r{toxinidir}/test-requirements.txt
-    -r{toxinidir}/doc/requirements.txt
 commands =
     flake8 {posargs}
     doc8 doc/source
-    # Run bashate checks
-    bash -c "find sahara -iname '*.sh' -print0 | xargs -0 bashate -v"
-    bash -c "find devstack -not -name \*.template -and -not -name README.rst -and -not -name \*.json -type f -print0 | xargs -0 bashate -v"
-    # Run security linter
-    bandit -c bandit.yaml -r sahara -n5 -p sahara_default -x tests
-[testenv:genpolicy]
-basepython = python3
-commands = oslopolicy-sample-generator --config-file tools/config/sahara-policy-generator.conf
 [testenv:venv]
 basepython = python3
 commands = {posargs}
-[testenv:images]
-sitepackages = True
-commands = {posargs}
 [testenv:docs]
 basepython = python3
-deps =
-    -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt}
-    -r{toxinidir}/doc/requirements.txt
 commands =
     rm -rf doc/html doc/build
     rm -rf api-ref/build api-ref/html
     rm -rf doc/source/apidoc doc/source/api
-    sphinx-build -W -b html doc/source doc/build/html
+    python setup.py build_sphinx
     sphinx-build -W -b html -d api-ref/build/doctrees api-ref/source api-ref/build/html
-    whereto doc/source/_extra/.htaccess doc/test/redirect-tests.txt
-whitelist_externals = rm
+whitelist_externals =
+    rm
-[testenv:api-ref]
-basepython = python3
-deps =
-    -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt}
-    -r{toxinidir}/doc/requirements.txt
-install_command = pip install -U --force-reinstall {opts} {packages}
-commands =
-    rm -rf api-ref/build api-ref/html
-    sphinx-build -W -b html -d api-ref/build/doctrees api-ref/source api-ref/build/html
-whitelist_externals = rm
-[testenv:pylint]
-basepython = python3
-setenv = VIRTUAL_ENV={envdir}
-commands = bash tools/lintstack.sh
-[testenv:genconfig]
-basepython = python3
-commands =
-    oslo-config-generator --config-file tools/config/config-generator.sahara.conf \
-    --output-file etc/sahara/sahara.conf.sample
 [testenv:releasenotes]
 basepython = python3
-deps =
-    -c{env:UPPER_CONSTRAINTS_FILE:https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt}
-    -r{toxinidir}/doc/requirements.txt
-commands =
-    rm -rf releasenotes/build releasenotes/html
-    sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html
-whitelist_externals = rm
+commands = sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html
 [testenv:debug]
 basepython = python3
-# It runs tests from the specified dir (default is sahara/tests)
+# It runs tests from the specified dir (default is sahara_plugin_spark/tests)
 # in interactive mode, so, you could use pbr for tests debug.
-# Example usage: tox -e debug -- -t sahara/tests/unit some.test.path
+# Example usage: tox -e debug -- -t sahara_plugin_spark/tests/unit some.test.path
 # https://docs.openstack.org/oslotest/latest/features.html#debugging-with-oslo-debug-helper
-commands = oslo_debug_helper -t sahara/tests/unit {posargs}
+commands = oslo_debug_helper -t sahara_plugin_spark/tests/unit {posargs}
-[testenv:bandit]
-basepython = python3
-deps = -r{toxinidir}/test-requirements-bandit.txt
-commands = bandit -c bandit.yaml -r sahara -n5 -p sahara_default -x tests
 [flake8]
 show-source = true

@@ -129,22 +69,3 @@ exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,tools
 # [H205] Use assert(Greater|Less)(Equal) for comparison
 enable-extensions=H904,H106,H203,H204,H205
-[hacking]
-import_exceptions = sahara.i18n
-local-check-factory = sahara.utils.hacking.checks.factory
-[testenv:bindep]
-basepython = python3
-# Do not install any requirements. We want this to be fast and work even if
-# system dependencies are missing, since it's used to tell you what system
-# dependencies are missing! This also means that bindep must be installed
-# separately, outside of the requirements files.
-deps = bindep
-commands = bindep test
-[testenv:lower-constraints]
-basepython = python3
-deps =
-    -c{toxinidir}/lower-constraints.txt
-    -r{toxinidir}/test-requirements.txt
-    -r{toxinidir}/requirements.txt