Allow configurable SPARK_HOME

Spark may be installed in any location, so allow SPARK_HOME to be
specified in the conf file and use that value in the spark-submit
invocation carried out by the transform service.

Change-Id: I4d25ccaa0e271eeb783d186666cdc8aaf131097c
David C Kennedy 2016-06-03 16:55:05 +01:00
parent e4ade60711
commit 05c36ab8e5
5 changed files with 11 additions and 3 deletions
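
For reference, the new option is read from the [service] section of the conf file (the group name follows from CONF.service.spark_home in the service diff below); a minimal entry looks like:

    [service]
    spark_home = /opt/spark/current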

@@ -50,6 +50,9 @@ spark_jars_list = /opt/spark/current/lib/spark-streaming-kafka_2.10-1.6.1.jar,/o
 # A list of where the Spark master(s) should run
 spark_master_list = spark://localhost:7077
+# spark_home for the environment
+spark_home = /opt/spark/current
 # Python files for Spark to use
 spark_python_files = /opt/monasca/transform/lib/monasca-transform.zip

@@ -87,7 +87,7 @@ function install_java_libs {
 function link_spark_streaming_lib {
     pushd /opt/spark/current/lib
-    ln -sf spark-streaming-kafka.jar spark-streaming-kafka_2.10-1.6.0.jar
+    ln -sf spark-streaming-kafka.jar spark-streaming-kafka_2.10-1.6.1.jar
     popd
 }
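
Note the argument order: ln -sf makes the versioned name spark-streaming-kafka_2.10-1.6.1.jar a symlink to the generic spark-streaming-kafka.jar, so the versioned path listed in spark_jars_list above resolves to the installed jar.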

@@ -56,6 +56,9 @@ spark_jars_list = /opt/spark/current/lib/spark-streaming-kafka.jar,/opt/spark/cu
 # A list of where the Spark master(s) should run
 spark_master_list = spark://192.168.10.4:7077,192.168.10.5:7077
+# spark_home for the environment
+spark_home = /opt/spark/current
 # Python files for Spark to use
 spark_python_files = /opt/stack/monasca-transform/dist/monasca_transform-0.0.1.egg

@@ -98,7 +98,8 @@ class ConfigInitializer(object):
             cfg.StrOpt('spark_master_list'),
             cfg.StrOpt('spark_python_files'),
             cfg.IntOpt('stream_interval'),
-            cfg.StrOpt('work_dir')
+            cfg.StrOpt('work_dir'),
+            cfg.StrOpt('spark_home')
         ]
         service_group = cfg.OptGroup(name='service', title='service')
         cfg.CONF.register_group(service_group)
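
The option is registered without a default, so the value must come from the conf file. A minimal sketch of how oslo.config wires this together (standard oslo.config API; the default shown here is illustrative, not part of the change):

    from oslo_config import cfg

    # Declare the option; the default is an assumption for illustration.
    opts = [cfg.StrOpt('spark_home',
                       default='/opt/spark/current')]

    # Register under the [service] group, mirroring the diff above.
    service_group = cfg.OptGroup(name='service', title='service')
    cfg.CONF.register_group(service_group)
    cfg.CONF.register_opts(opts, group=service_group)

    # After cfg.CONF(...) parses the conf file, the value is read as:
    # cfg.CONF.service.spark_home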

@@ -95,7 +95,8 @@ class TransformService(threading.Thread):
         else:
             pyfiles = ''
         # Build the command to start the Spark driver
-        spark_cmd = ("export SPARK_HOME=/opt/spark/current && "
+        spark_cmd = ("export SPARK_HOME=" +
+                     CONF.service.spark_home + " && "
                      "spark-submit --supervise --master " +
                      CONF.service.spark_master_list +
                      " --conf spark.eventLog.enabled=" +