From 98f3c606462887a6c1ffaea3f968baa4ff005bc8 Mon Sep 17 00:00:00 2001 From: Daisuke Fujita Date: Fri, 9 Nov 2018 06:18:53 -0800 Subject: [PATCH] Update Spark version This patch updates Spark version from 1.6.1 to 2.1.0 for maintenance reasons. Change-Id: Icfb6e678b38dcceba67b96f6bf72f48d6e18fba3 --- devstack/settings | 6 +++--- tools/test-setup.sh | 2 +- tox.ini | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/devstack/settings b/devstack/settings index 4d3034a..911c2d9 100644 --- a/devstack/settings +++ b/devstack/settings @@ -33,9 +33,9 @@ SPARK_DIRECTORIES=("/var/spark" "/var/log/spark" "/var/run/spark/work" "/etc/spa JDK_PKG="openjdk-8-jre-headless openjdk-8-jdk" -MAVEN="apache-maven-3.3.9" +MAVEN="apache-maven-3.5.3" MAVEN_TARBAL="$MAVEN-bin.tar.gz" -MAVEN_URL="ftp://mirror.reverse.net/pub/apache/maven/maven-3/3.3.9/binaries/$MAVEN_TARBAL" +MAVEN_URL="https://archive.apache.org/dist/maven/maven-3/3.5.3/binaries/$MAVEN_TARBAL" SCALA_VERSION=${SCALA_VERSION:-2.11} SCALA_MIN_VERSION=${SCALA_MIN_VERSION:-.7} @@ -45,7 +45,7 @@ KEYID=642AC823 SPARK_DIR="/opt/spark" SPARK_DOWNLOAD="$SPARK_DIR/download" -SPARK_VERSION=${SPARK_VERSION:-1.6.1} +SPARK_VERSION=${SPARK_VERSION:-2.1.0} SPARK_TARBALL_NAME="spark-${SPARK_VERSION}.tgz" SPARK_URL="http://archive.apache.org/dist/spark/spark-$SPARK_VERSION/$SPARK_TARBALL_NAME" diff --git a/tools/test-setup.sh b/tools/test-setup.sh index bf8bcac..f320923 100755 --- a/tools/test-setup.sh +++ b/tools/test-setup.sh @@ -6,7 +6,7 @@ HOME=${HOME:-/home/jenkins} SPARK_DIR=$HOME/spark -SPARK_VERSION=${SPARK_VERSION:-1.6.1} +SPARK_VERSION=${SPARK_VERSION:-2.1.0} SPARK_TARBALL_NAME=spark-$SPARK_VERSION.tgz SPARK_URL=http://archive.apache.org/dist/spark/spark-$SPARK_VERSION/$SPARK_TARBALL_NAME diff --git a/tox.ini b/tox.ini index 61999c3..6533406 100644 --- a/tox.ini +++ b/tox.ini @@ -9,7 +9,7 @@ setenv = PYTHONUNBUFFERED=1 VIRTUAL_ENV={envdir} DISCOVER_DIRECTORY=tests - 
PYTHONPATH={homedir}/spark/spark-1.6.1/python:{homedir}/spark/spark-1.6.1/python/lib/py4j-0.9-src.zip: + PYTHONPATH={homedir}/spark/spark-2.1.0/python:{homedir}/spark/spark-2.1.0/python/lib/py4j-0.10.4-src.zip: install_command = pip install -U {opts} {packages} deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt @@ -26,7 +26,7 @@ setenv = VIRTUAL_ENV={envdir} NOSE_COVER_BRANCHES=1 NOSE_COVER_HTML=1 NOSE_COVER_HTML_DIR={toxinidir}/cover - PYTHONPATH={homedir}/spark/spark-1.6.1/python:{homedir}/spark/spark-1.6.1/python/lib/py4j-0.9-src.zip: + PYTHONPATH={homedir}/spark/spark-2.1.0/python:{homedir}/spark/spark-2.1.0/python/lib/py4j-0.10.4-src.zip: [testenv:pep8] basepython = python3