Removed spark-events from config

Allow spark to configure the location of spark-events.
Add spark events log config to spark-defaults in devstack plugin.
Move spark events logging to /var/log/spark/events for devstack plugin.
Set group permissions to ensure the spark events log directory is group-writable,
but not world-writable.

Change-Id: I26aef23a9a801a02a20e14899e1c89b10556e4d4
This commit is contained in:
David C Kennedy 2016-06-20 10:15:02 +01:00
parent 72f240b926
commit 02b23741a5
8 changed files with 18 additions and 14 deletions

View File

@@ -38,9 +38,6 @@ spark_driver = /opt/monasca/transform/lib/driver.py
# the location for the transform-service log
service_log_path=/var/log/monasca/transform/
# The location where Spark event logs should be written
spark_event_logging_dest = /var/log/spark-events
# Whether Spark event logging should be enabled (true/false)
spark_event_logging_enabled = true

View File

@@ -8,3 +8,4 @@ spark.executor.port 7115
spark.fileserver.port 7120
spark.speculation true
spark.speculation.interval 200
spark.eventLog.dir /var/log/spark/events

View File

@@ -13,6 +13,6 @@ export SPARK_WORKER_DIR=/var/run/spark/work
export SPARK_WORKER_MEMORY=2g
export SPARK_WORKER_CORES=2
export SPARK_HISTORY_OPTS="$SPARK_HISTORY_OPTS -Dspark.history.fs.logDirectory=file://var/log/spark/spark-events -Dspark.history.ui.port=18082"
export SPARK_HISTORY_OPTS="$SPARK_HISTORY_OPTS -Dspark.history.fs.logDirectory=file://var/log/spark/events -Dspark.history.ui.port=18082"
export SPARK_LOG_DIR=/var/log/spark
export SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.deploy.recoveryMode=ZOOKEEPER -Dspark.deploy.zookeeper.url=127.0.0.1:2181 -Dspark.deploy.zookeeper.dir=/var/run/spark"

View File

@@ -13,6 +13,6 @@ export SPARK_WORKER_DIR=/var/run/spark/work
export SPARK_WORKER_MEMORY=2g
export SPARK_WORKER_CORES=2
export SPARK_HISTORY_OPTS="$SPARK_HISTORY_OPTS -Dspark.history.fs.logDirectory=file://var/log/spark/spark-events -Dspark.history.ui.port=18082"
export SPARK_HISTORY_OPTS="$SPARK_HISTORY_OPTS -Dspark.history.fs.logDirectory=file://var/log/spark/events -Dspark.history.ui.port=18082"
export SPARK_LOG_DIR=/var/log/spark
export SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.deploy.recoveryMode=ZOOKEEPER -Dspark.deploy.zookeeper.url=127.0.0.1:2181 -Dspark.deploy.zookeeper.dir=/var/run/spark"

View File

@@ -168,8 +168,9 @@ function create_spark_directories {
sudo chmod 755 ${SPARK_DIRECTORY}
done
sudo mkdir -p /var/log/spark-events
sudo chmod "a+rw" /var/log/spark-events
sudo mkdir -p /var/log/spark/events
sudo chown spark:spark /var/log/spark/events
sudo chmod 775 /var/log/spark/events
}
@@ -180,7 +181,7 @@ function delete_spark_directories {
sudo rm -rf ${SPARK_DIRECTORY} || true
done
sudo rm -rf /var/log/spark-events || true
sudo rm -rf /var/log/spark/events || true
}
@@ -264,6 +265,7 @@ function install_monasca_transform {
sudo groupadd --system monasca-transform || true
sudo useradd --system -g monasca-transform monasca-transform || true
sudo usermod -a -G spark monasca-transform
create_monasca_transform_directories
copy_monasca_transform_files

View File

@@ -44,9 +44,6 @@ spark_driver = /opt/stack/monasca-transform/monasca_transform/driver/mon_metrics
# the location for the transform-service log
service_log_path=/opt/stack/monasca-transform
# The location where Spark event logs should be written
spark_event_logging_dest = /var/log/spark-events
# Whether Spark event logging should be enabled (true/false)
spark_event_logging_enabled = true

View File

@@ -94,6 +94,14 @@ class TransformService(threading.Thread):
pyfiles = " --py-files %s" % CONF.service.spark_python_files
else:
pyfiles = ''
if (CONF.service.spark_event_logging_enabled and
CONF.service.spark_event_logging_dest):
event_logging_dest = (" --conf spark.eventLog.dir=file://%s" %
CONF.service.spark_event_logging_dest)
else:
event_logging_dest = ''
# Build the command to start the Spark driver
spark_cmd = ("export SPARK_HOME=" +
CONF.service.spark_home + " && "
@@ -101,8 +109,7 @@ class TransformService(threading.Thread):
CONF.service.spark_master_list +
" --conf spark.eventLog.enabled=" +
CONF.service.spark_event_logging_enabled +
" --conf spark.eventLog.dir=file://" +
CONF.service.spark_event_logging_dest +
event_logging_dest +
" --jars " + CONF.service.spark_jars_list +
pyfiles +
" " + CONF.service.spark_driver)

View File

@@ -12,7 +12,7 @@ export SPARK_HOME=/opt/spark/current/
# control-c is pressed.
COUNTER=0
while [ $COUNTER -lt 2 ]; do
spark-submit --supervise --master spark://192.168.10.4:7077,192.168.10.5:7077 --conf spark.eventLog.enabled=true --conf spark.eventLog.dir=file:///var/log/spark-events --jars $JARS_PATH --py-files dist/$new_filename /opt/monasca/transform/lib/driver.py || break
spark-submit --supervise --master spark://192.168.10.4:7077,192.168.10.5:7077 --conf spark.eventLog.enabled=true --jars $JARS_PATH --py-files dist/$new_filename /opt/monasca/transform/lib/driver.py || break
let COUNTER=COUNTER+1
done
popd