summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJenkins <jenkins@review.openstack.org>2017-07-28 17:49:18 +0000
committerGerrit Code Review <review@openstack.org>2017-07-28 17:49:18 +0000
commitd12f6167bbb0d7e0b46821545a85bb4444de0adc (patch)
tree2005c1dffae6412f5b2b7d0e4145d5209b215c51
parent796a48490bc4db0b7a765f746ad57b7bf6e4473d (diff)
parenta77a9a978a655044a0b58a299df965c89391090d (diff)
Merge "Add S3 jar to Hadoop classpath"7.0.0.0rc17.0.0.0b3
-rwxr-xr-xdiskimage-create/diskimage-create.sh30
-rwxr-xr-xelements/hadoop/install.d/40-setup-hadoop5
-rw-r--r--elements/s3_hadoop/README.rst23
-rw-r--r--elements/s3_hadoop/element-deps1
-rw-r--r--elements/s3_hadoop/package-installs.yaml2
-rwxr-xr-xelements/s3_hadoop/post-install.d/89-add-amazon-jar36
-rw-r--r--elements/swift_hadoop/README.rst2
7 files changed, 82 insertions, 17 deletions
diff --git a/diskimage-create/diskimage-create.sh b/diskimage-create/diskimage-create.sh
index d778740..964b0c7 100755
--- a/diskimage-create/diskimage-create.sh
+++ b/diskimage-create/diskimage-create.sh
@@ -477,7 +477,7 @@ if [ -z "$PLUGIN" -o "$PLUGIN" = "vanilla" ]; then
477 export HIVE_VERSION=${HIVE_VERSION:-"0.11.0"} 477 export HIVE_VERSION=${HIVE_VERSION:-"0.11.0"}
478 export HADOOP_V2_7_1_NATIVE_LIBS_DOWNLOAD_URL=${HADOOP_V2_7_1_NATIVE_LIBS_DOWNLOAD_URL:-"https://tarballs.openstack.org/sahara/dist/common-artifacts/hadoop-native-libs-2.7.1.tar.gz"} 478 export HADOOP_V2_7_1_NATIVE_LIBS_DOWNLOAD_URL=${HADOOP_V2_7_1_NATIVE_LIBS_DOWNLOAD_URL:-"https://tarballs.openstack.org/sahara/dist/common-artifacts/hadoop-native-libs-2.7.1.tar.gz"}
479 export OOZIE_HADOOP_V2_7_1_DOWNLOAD_URL=${OOZIE_HADOOP_V2_7_1_FILE:-"http://sahara-files.mirantis.com/oozie-4.2.0-hadoop-2.7.1.tar.gz"} 479 export OOZIE_HADOOP_V2_7_1_DOWNLOAD_URL=${OOZIE_HADOOP_V2_7_1_FILE:-"http://sahara-files.mirantis.com/oozie-4.2.0-hadoop-2.7.1.tar.gz"}
480 export DIB_HDFS_LIB_DIR="/opt/hadoop/share/hadoop/tools/lib" 480 export DIB_HDFS_LIB_DIR="/opt/hadoop/share/hadoop/common/lib"
481 export plugin_type="vanilla" 481 export plugin_type="vanilla"
482 482
483 if [ "$DIB_SPARK_VERSION" = "1.3.1" ]; then 483 if [ "$DIB_SPARK_VERSION" = "1.3.1" ]; then
@@ -493,10 +493,10 @@ if [ -z "$PLUGIN" -o "$PLUGIN" = "vanilla" ]; then
493 export SPARK_HADOOP_DL=hadoop2.7 493 export SPARK_HADOOP_DL=hadoop2.7
494 fi 494 fi
495 495
496 ubuntu_elements_sequence="hadoop oozie mysql hive $JAVA_ELEMENT swift_hadoop spark" 496 ubuntu_elements_sequence="hadoop oozie mysql hive $JAVA_ELEMENT swift_hadoop spark s3_hadoop"
497 fedora_elements_sequence="hadoop oozie mysql disable-firewall hive $JAVA_ELEMENT swift_hadoop spark" 497 fedora_elements_sequence="hadoop oozie mysql disable-firewall hive $JAVA_ELEMENT swift_hadoop spark s3_hadoop"
498 centos_elements_sequence="hadoop oozie mysql disable-firewall hive $JAVA_ELEMENT swift_hadoop spark nc" 498 centos_elements_sequence="hadoop oozie mysql disable-firewall hive $JAVA_ELEMENT swift_hadoop spark nc s3_hadoop"
499 centos7_elements_sequence="hadoop oozie mysql disable-firewall hive $JAVA_ELEMENT swift_hadoop spark nc" 499 centos7_elements_sequence="hadoop oozie mysql disable-firewall hive $JAVA_ELEMENT swift_hadoop spark nc s3_hadoop"
500 500
501 # Workaround for https://bugs.launchpad.net/diskimage-builder/+bug/1204824 501 # Workaround for https://bugs.launchpad.net/diskimage-builder/+bug/1204824
502 # https://bugs.launchpad.net/sahara/+bug/1252684 502 # https://bugs.launchpad.net/sahara/+bug/1252684
@@ -571,7 +571,7 @@ if [ -z "$PLUGIN" -o "$PLUGIN" = "spark" ]; then
571 # Tell the cloudera element to install only hdfs 571 # Tell the cloudera element to install only hdfs
572 export DIB_CDH_HDFS_ONLY=1 572 export DIB_CDH_HDFS_ONLY=1
573 573
574 ubuntu_elements_sequence="$JAVA_ELEMENT swift_hadoop spark hadoop-cloudera" 574 ubuntu_elements_sequence="$JAVA_ELEMENT swift_hadoop spark hadoop-cloudera s3_hadoop"
575 export ubuntu_image_name=${ubuntu_spark_image_name:-"ubuntu_sahara_spark_latest"} 575 export ubuntu_image_name=${ubuntu_spark_image_name:-"ubuntu_sahara_spark_latest"}
576 576
577 # Creating Ubuntu cloud image 577 # Creating Ubuntu cloud image
@@ -619,19 +619,19 @@ if [ -z "$PLUGIN" -o "$PLUGIN" = "ambari" ]; then
619 619
620 if [ -z "$BASE_IMAGE_OS" -o "$BASE_IMAGE_OS" = "ubuntu" ]; then 620 if [ -z "$BASE_IMAGE_OS" -o "$BASE_IMAGE_OS" = "ubuntu" ]; then
621 ambari_ubuntu_image_name=${ambari_ubuntu_image_name:-ubuntu_sahara_ambari} 621 ambari_ubuntu_image_name=${ambari_ubuntu_image_name:-ubuntu_sahara_ambari}
622 ambari_element_sequence="ambari $JAVA_ELEMENT swift_hadoop kdc" 622 ambari_element_sequence="ambari $JAVA_ELEMENT swift_hadoop kdc s3_hadoop"
623 export DIB_RELEASE="trusty" 623 export DIB_RELEASE="trusty"
624 image_create ubuntu $ambari_ubuntu_image_name $ambari_element_sequence 624 image_create ubuntu $ambari_ubuntu_image_name $ambari_element_sequence
625 unset DIB_RELEASE 625 unset DIB_RELEASE
626 fi 626 fi
627 if [ "$BASE_IMAGE_OS" = "centos" ]; then 627 if [ "$BASE_IMAGE_OS" = "centos" ]; then
628 ambari_centos_image_name=${ambari_centos_image_name:-centos_sahara_ambari} 628 ambari_centos_image_name=${ambari_centos_image_name:-centos_sahara_ambari}
629 ambari_element_sequence="ambari $JAVA_ELEMENT disable-firewall swift_hadoop kdc nc" 629 ambari_element_sequence="ambari $JAVA_ELEMENT disable-firewall swift_hadoop kdc nc s3_hadoop"
630 image_create centos $ambari_centos_image_name $ambari_element_sequence 630 image_create centos $ambari_centos_image_name $ambari_element_sequence
631 fi 631 fi
632 if [ -z "$BASE_IMAGE_OS" -o "$BASE_IMAGE_OS" = "centos7" ]; then 632 if [ -z "$BASE_IMAGE_OS" -o "$BASE_IMAGE_OS" = "centos7" ]; then
633 ambari_centos7_image_name=${ambari_centos7_image_name:-"centos7-sahara-ambari"} 633 ambari_centos7_image_name=${ambari_centos7_image_name:-"centos7-sahara-ambari"}
634 ambari_element_sequence="disable-selinux ambari $JAVA_ELEMENT disable-firewall swift_hadoop kdc nc" 634 ambari_element_sequence="disable-selinux ambari $JAVA_ELEMENT disable-firewall swift_hadoop kdc nc s3_hadoop"
635 image_create centos7 $ambari_centos7_image_name $ambari_element_sequence 635 image_create centos7 $ambari_centos7_image_name $ambari_element_sequence
636 fi 636 fi
637 637
@@ -657,7 +657,7 @@ if [ -z "$PLUGIN" -o "$PLUGIN" = "cloudera" ]; then
657 HADOOP_VERSION=${DIB_CDH_MINOR_VERSION%.*} 657 HADOOP_VERSION=${DIB_CDH_MINOR_VERSION%.*}
658 fi 658 fi
659 659
660 cloudera_elements_sequence="hadoop-cloudera swift_hadoop kdc" 660 cloudera_elements_sequence="hadoop-cloudera swift_hadoop kdc s3_hadoop"
661 if [ -z "$BASE_IMAGE_OS" -o "$BASE_IMAGE_OS" = "ubuntu" ]; then 661 if [ -z "$BASE_IMAGE_OS" -o "$BASE_IMAGE_OS" = "ubuntu" ]; then
662 if [ -z "$HADOOP_VERSION" -o "$HADOOP_VERSION" = "5.5" ]; then 662 if [ -z "$HADOOP_VERSION" -o "$HADOOP_VERSION" = "5.5" ]; then
663 export DIB_CDH_VERSION="5.5" 663 export DIB_CDH_VERSION="5.5"
@@ -698,7 +698,7 @@ if [ -z "$PLUGIN" -o "$PLUGIN" = "cloudera" ]; then
698 fi 698 fi
699 699
700 if [ "$BASE_IMAGE_OS" = "centos" ]; then 700 if [ "$BASE_IMAGE_OS" = "centos" ]; then
701 centos_cloudera_elements_sequence="selinux-permissive disable-firewall nc" 701 centos_cloudera_elements_sequence="selinux-permissive disable-firewall nc s3_hadoop"
702 if [ -z "$HADOOP_VERSION" -o "$HADOOP_VERSION" = "5.5" ]; then 702 if [ -z "$HADOOP_VERSION" -o "$HADOOP_VERSION" = "5.5" ]; then
703 export DIB_CDH_VERSION="5.5" 703 export DIB_CDH_VERSION="5.5"
704 704
@@ -710,7 +710,7 @@ if [ -z "$PLUGIN" -o "$PLUGIN" = "cloudera" ]; then
710 fi 710 fi
711 711
712 if [ -z "$BASE_IMAGE_OS" -o "$BASE_IMAGE_OS" = "centos7" ]; then 712 if [ -z "$BASE_IMAGE_OS" -o "$BASE_IMAGE_OS" = "centos7" ]; then
713 centos7_cloudera_elements_sequence="selinux-permissive disable-firewall nc" 713 centos7_cloudera_elements_sequence="selinux-permissive disable-firewall nc s3_hadoop"
714 if [ -z "$HADOOP_VERSION" -o "$HADOOP_VERSION" = "5.5" ]; then 714 if [ -z "$HADOOP_VERSION" -o "$HADOOP_VERSION" = "5.5" ]; then
715 export DIB_CDH_VERSION="5.5" 715 export DIB_CDH_VERSION="5.5"
716 716
@@ -759,6 +759,7 @@ fi
759########################## 759##########################
760if [ -z "$PLUGIN" -o "$PLUGIN" = "mapr" ]; then 760if [ -z "$PLUGIN" -o "$PLUGIN" = "mapr" ]; then
761 export DIB_MAPR_VERSION=${DIB_MAPR_VERSION:-${DIB_DEFAULT_MAPR_VERSION}} 761 export DIB_MAPR_VERSION=${DIB_MAPR_VERSION:-${DIB_DEFAULT_MAPR_VERSION}}
762 export plugin_type="mapr"
762 763
763 export DIB_CLOUD_INIT_DATASOURCES=$CLOUD_INIT_DATASOURCES 764 export DIB_CLOUD_INIT_DATASOURCES=$CLOUD_INIT_DATASOURCES
764 765
@@ -766,8 +767,8 @@ if [ -z "$PLUGIN" -o "$PLUGIN" = "mapr" ]; then
766 #MapR repository requires additional space 767 #MapR repository requires additional space
767 export DIB_MIN_TMPFS=10 768 export DIB_MIN_TMPFS=10
768 769
769 mapr_ubuntu_elements_sequence="ssh hadoop-mapr $JAVA_ELEMENT" 770 mapr_ubuntu_elements_sequence="ssh hadoop-mapr $JAVA_ELEMENT s3_hadoop"
770 mapr_centos_elements_sequence="ssh hadoop-mapr selinux-permissive $JAVA_ELEMENT disable-firewall nc" 771 mapr_centos_elements_sequence="ssh hadoop-mapr selinux-permissive $JAVA_ELEMENT disable-firewall nc s3_hadoop"
771 772
772 if [ -z "$BASE_IMAGE_OS" -o "$BASE_IMAGE_OS" = "ubuntu" ]; then 773 if [ -z "$BASE_IMAGE_OS" -o "$BASE_IMAGE_OS" = "ubuntu" ]; then
773 export DIB_RELEASE=${DIB_RELEASE:-trusty} 774 export DIB_RELEASE=${DIB_RELEASE:-trusty}
@@ -794,6 +795,7 @@ if [ -z "$PLUGIN" -o "$PLUGIN" = "mapr" ]; then
794 795
795 unset DIB_CLOUD_INIT_DATASOURCES 796 unset DIB_CLOUD_INIT_DATASOURCES
796 fi 797 fi
798 unset plugin_type
797 799
798fi 800fi
799 801
diff --git a/elements/hadoop/install.d/40-setup-hadoop b/elements/hadoop/install.d/40-setup-hadoop
index c0d28dd..7dd995e 100755
--- a/elements/hadoop/install.d/40-setup-hadoop
+++ b/elements/hadoop/install.d/40-setup-hadoop
@@ -61,8 +61,9 @@ EOF
61 $HADOOP_HOME/etc/hadoop/yarn-env.sh 61 $HADOOP_HOME/etc/hadoop/yarn-env.sh
62 echo "source $JAVA_RC" >> $HADOOP_HOME/etc/hadoop/yarn-env.sh 62 echo "source $JAVA_RC" >> $HADOOP_HOME/etc/hadoop/yarn-env.sh
63 63
64 # enable swiftfs 64 # remove apache-built swiftfs
65 ln -s ${HADOOP_HOME}/share/hadoop/tools/lib/hadoop-openstack-${DIB_HADOOP_VERSION}.jar ${HADOOP_HOME}/share/hadoop/common/lib/ 65 rm ${HADOOP_HOME}/share/hadoop/tools/lib/hadoop-openstack-${DIB_HADOOP_VERSION}.jar
66
66} 67}
67 68
68case "$DISTRO_NAME" in 69case "$DISTRO_NAME" in
diff --git a/elements/s3_hadoop/README.rst b/elements/s3_hadoop/README.rst
new file mode 100644
index 0000000..3118dc6
--- /dev/null
+++ b/elements/s3_hadoop/README.rst
@@ -0,0 +1,23 @@
1=========
2s3_hadoop
3=========
4
5Copy the Hadoop S3 connector jar file into the Hadoop classpath.
6
7Environment Variables
8---------------------
9
10HADOOP_S3_JAR_ORIGIN
11 :Required: No
12 :Default: Depends on plugin.
13 :Description: Path to where the S3 jar is (already) located.
14
15HADOOP_S3_JAR_DOWNLOAD
16 :Required: No
17 :Default: None.
18 :Description: If set, download a specific S3 jar instead of using one already available on the image.
19
20DIB_HDFS_LIB_DIR
21 :Required: No
22 :Default: /usr/share/hadoop/lib
23 :Description: Directory in the guest where to save the S3 jar. Shared with swift_hadoop.
diff --git a/elements/s3_hadoop/element-deps b/elements/s3_hadoop/element-deps
new file mode 100644
index 0000000..7076aba
--- /dev/null
+++ b/elements/s3_hadoop/element-deps
@@ -0,0 +1 @@
package-installs
diff --git a/elements/s3_hadoop/package-installs.yaml b/elements/s3_hadoop/package-installs.yaml
new file mode 100644
index 0000000..cc77790
--- /dev/null
+++ b/elements/s3_hadoop/package-installs.yaml
@@ -0,0 +1,2 @@
1wget:
2 phase: post-install.d
diff --git a/elements/s3_hadoop/post-install.d/89-add-amazon-jar b/elements/s3_hadoop/post-install.d/89-add-amazon-jar
new file mode 100755
index 0000000..30e2c37
--- /dev/null
+++ b/elements/s3_hadoop/post-install.d/89-add-amazon-jar
@@ -0,0 +1,36 @@
1#!/bin/bash
2if [ "${DIB_DEBUG_TRACE:-0}" -gt 0 ]; then
3 set -x
4fi
5set -eu
6set -o pipefail
7
8if [ -z "${HADOOP_S3_JAR_ORIGIN:-}" ]; then
9 # The jar is not locally available during image-gen on Ambari/MapR plugins: relevant packages are installed later.
10 # The jar is not appropriate for the Storm plugin: you cannot stream data from an object store.
11 # For plugins not found in the switch statement below, a user-specified jar can still be downloaded.
12 case "$plugin_type" in
13 "vanilla")
14 HADOOP_S3_JAR_ORIGIN="/opt/hadoop/share/hadoop/tools/lib/hadoop-aws-$DIB_HADOOP_VERSION.jar"
15 ;;
16 "cloudera" | "spark" )
17 HADOOP_S3_JAR_ORIGIN="/usr/lib/hadoop/hadoop-aws.jar"
18 ;;
19 esac
20fi
21
22HDFS_LIB_DIR=${DIB_HDFS_LIB_DIR:-"/usr/share/hadoop/lib"} # matches swift_hadoop default
23
24if [ -z "${HADOOP_S3_JAR_DOWNLOAD:-}" ]; then
25 if [ "${HADOOP_S3_JAR_ORIGIN:-}" ]; then
26 cp $HADOOP_S3_JAR_ORIGIN $HDFS_LIB_DIR/hadoop-aws.jar
27 fi
28else
29 wget -O $HDFS_LIB_DIR/hadoop-aws.jar $HADOOP_S3_JAR_DOWNLOAD
30fi
31
32path=$HDFS_LIB_DIR/hadoop-aws.jar
33
34if [ -f $path ]; then
35 chmod 0644 $path
36fi
diff --git a/elements/swift_hadoop/README.rst b/elements/swift_hadoop/README.rst
index e447414..dc8b75d 100644
--- a/elements/swift_hadoop/README.rst
+++ b/elements/swift_hadoop/README.rst
@@ -17,7 +17,7 @@ swift_url
17DIB_HDFS_LIB_DIR 17DIB_HDFS_LIB_DIR
18 :Required: No 18 :Required: No
19 :Default: /usr/share/hadoop/lib 19 :Default: /usr/share/hadoop/lib
20 :Description: Directory in the guest where to save the swift jar. 20 :Description: Directory in the guest where to save the swift jar. Shared with s3_hadoop.
21 21
22DIB_HADOOP_SWIFT_JAR_NAME 22DIB_HADOOP_SWIFT_JAR_NAME
23 :Required: No 23 :Required: No