diff --git a/devstack/files/monasca-transform/monasca-transform.conf b/devstack/files/monasca-transform/monasca-transform.conf index be44d3e..4cfd277 100644 --- a/devstack/files/monasca-transform/monasca-transform.conf +++ b/devstack/files/monasca-transform/monasca-transform.conf @@ -38,9 +38,6 @@ spark_driver = /opt/monasca/transform/lib/driver.py # the location for the transform-service log service_log_path=/var/log/monasca/transform/ -# The location where Spark event logs should be written -spark_event_logging_dest = /var/log/spark-events - # Whether Spark event logging should be enabled (true/false) spark_event_logging_enabled = true diff --git a/devstack/files/spark/spark-defaults.conf b/devstack/files/spark/spark-defaults.conf index 3131d72..7c83c21 100644 --- a/devstack/files/spark/spark-defaults.conf +++ b/devstack/files/spark/spark-defaults.conf @@ -8,3 +8,4 @@ spark.executor.port 7115 spark.fileserver.port 7120 spark.speculation true spark.speculation.interval 200 +spark.eventLog.dir /var/log/spark/events diff --git a/devstack/files/spark/spark-env.sh b/devstack/files/spark/spark-env.sh index ca03a60..ded2d65 100644 --- a/devstack/files/spark/spark-env.sh +++ b/devstack/files/spark/spark-env.sh @@ -13,6 +13,6 @@ export SPARK_WORKER_DIR=/var/run/spark/work export SPARK_WORKER_MEMORY=2g export SPARK_WORKER_CORES=2 -export SPARK_HISTORY_OPTS="$SPARK_HISTORY_OPTS -Dspark.history.fs.logDirectory=file://var/log/spark/spark-events -Dspark.history.ui.port=18082" +export SPARK_HISTORY_OPTS="$SPARK_HISTORY_OPTS -Dspark.history.fs.logDirectory=file:///var/log/spark/events -Dspark.history.ui.port=18082" export SPARK_LOG_DIR=/var/log/spark export SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.deploy.recoveryMode=ZOOKEEPER -Dspark.deploy.zookeeper.url=127.0.0.1:2181 -Dspark.deploy.zookeeper.dir=/var/run/spark" diff --git a/devstack/files/spark/spark-worker-env.sh b/devstack/files/spark/spark-worker-env.sh index ca03a60..ded2d65 100644 ---
a/devstack/files/spark/spark-worker-env.sh +++ b/devstack/files/spark/spark-worker-env.sh @@ -13,6 +13,6 @@ export SPARK_WORKER_DIR=/var/run/spark/work export SPARK_WORKER_MEMORY=2g export SPARK_WORKER_CORES=2 -export SPARK_HISTORY_OPTS="$SPARK_HISTORY_OPTS -Dspark.history.fs.logDirectory=file://var/log/spark/spark-events -Dspark.history.ui.port=18082" +export SPARK_HISTORY_OPTS="$SPARK_HISTORY_OPTS -Dspark.history.fs.logDirectory=file:///var/log/spark/events -Dspark.history.ui.port=18082" export SPARK_LOG_DIR=/var/log/spark export SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.deploy.recoveryMode=ZOOKEEPER -Dspark.deploy.zookeeper.url=127.0.0.1:2181 -Dspark.deploy.zookeeper.dir=/var/run/spark" diff --git a/devstack/plugin.sh b/devstack/plugin.sh index 6b47a56..f3ce1f6 100755 --- a/devstack/plugin.sh +++ b/devstack/plugin.sh @@ -168,8 +168,9 @@ function create_spark_directories { sudo chmod 755 ${SPARK_DIRECTORY} done - sudo mkdir -p /var/log/spark-events - sudo chmod "a+rw" /var/log/spark-events + sudo mkdir -p /var/log/spark/events + sudo chown spark:spark /var/log/spark/events + sudo chmod 775 /var/log/spark/events } @@ -180,7 +181,7 @@ function delete_spark_directories { sudo rm -rf ${SPARK_DIRECTORY} || true done - sudo rm -rf /var/log/spark-events || true + sudo rm -rf /var/log/spark/events || true } @@ -264,6 +265,7 @@ function install_monasca_transform { sudo groupadd --system monasca-transform || true sudo useradd --system -g monasca-transform monasca-transform || true + sudo usermod -a -G spark monasca-transform create_monasca_transform_directories copy_monasca_transform_files diff --git a/etc/monasca-transform.conf b/etc/monasca-transform.conf index 63ed73a..57e9692 100644 --- a/etc/monasca-transform.conf +++ b/etc/monasca-transform.conf @@ -44,9 +44,6 @@ spark_driver = /opt/stack/monasca-transform/monasca_transform/driver/mon_metrics # the location for the transform-service log service_log_path=/opt/stack/monasca-transform -# The location where
Spark event logs should be written -spark_event_logging_dest = /var/log/spark-events - # Whether Spark event logging should be enabled (true/false) spark_event_logging_enabled = true diff --git a/monasca_transform/service/transform_service.py b/monasca_transform/service/transform_service.py index cd51141..974cdba 100644 --- a/monasca_transform/service/transform_service.py +++ b/monasca_transform/service/transform_service.py @@ -94,6 +94,14 @@ class TransformService(threading.Thread): pyfiles = " --py-files %s" % CONF.service.spark_python_files else: pyfiles = '' + + if (CONF.service.spark_event_logging_enabled and + CONF.service.spark_event_logging_dest): + event_logging_dest = (" --conf spark.eventLog.dir=file://%s" % + CONF.service.spark_event_logging_dest) + else: + event_logging_dest = '' + # Build the command to start the Spark driver spark_cmd = ("export SPARK_HOME=" + CONF.service.spark_home + " && " @@ -101,8 +109,7 @@ class TransformService(threading.Thread): CONF.service.spark_master_list + " --conf spark.eventLog.enabled=" + CONF.service.spark_event_logging_enabled + - " --conf spark.eventLog.dir=file://" + - CONF.service.spark_event_logging_dest + + event_logging_dest + " --jars " + CONF.service.spark_jars_list + pyfiles + " " + CONF.service.spark_driver) diff --git a/scripts/submit_mon_metrics_kafka.sh b/scripts/submit_mon_metrics_kafka.sh index 3fddc87..6e9f303 100755 --- a/scripts/submit_mon_metrics_kafka.sh +++ b/scripts/submit_mon_metrics_kafka.sh @@ -12,7 +12,7 @@ export SPARK_HOME=/opt/spark/current/ # control-c is pressed. 
COUNTER=0 while [ $COUNTER -lt 2 ]; do - spark-submit --supervise --master spark://192.168.10.4:7077,192.168.10.5:7077 --conf spark.eventLog.enabled=true --conf spark.eventLog.dir=file:///var/log/spark-events --jars $JARS_PATH --py-files dist/$new_filename /opt/monasca/transform/lib/driver.py || break + spark-submit --supervise --master spark://192.168.10.4:7077,192.168.10.5:7077 --conf spark.eventLog.enabled=true --jars $JARS_PATH --py-files dist/$new_filename /opt/monasca/transform/lib/driver.py || break let COUNTER=COUNTER+1 done popd