Merge "Image generation for CDH Plugin"
This commit is contained in:
commit
7bf5ed49bb
|
@ -170,7 +170,7 @@ support. See :doc:`image-gen`.
|
|||
|
||||
*Returns*: A sequence with items of type sahara.plugins.images.ImageArgument.
|
||||
|
||||
pack_image( self, hadoop_version, remote, reconcile=True, ... ):
|
||||
pack_image( self, hadoop_version, remote, test_only=False, ... ):
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Optional method which packs an image for registration in Glance and use by
|
||||
|
@ -179,7 +179,7 @@ the Sahara api or engine service. See :doc:`image-gen`.
|
|||
|
||||
*Returns*: None (modifies the image pointed to by the remote in-place.)
|
||||
|
||||
validate_images( self, cluster, reconcile=True, image_arguments=None ):
|
||||
validate_images( self, cluster, test_only=False, image_arguments=None ):
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Validates the image to be used to create a cluster, to ensure that it meets
|
||||
|
|
|
@ -0,0 +1,4 @@
|
|||
---
|
||||
features:
|
||||
- Enables the creation and validation of CDH 5.7.0 images using the
|
||||
new image generation process where libguestfs replaces the use of DIB.
|
|
@ -101,6 +101,11 @@ class ImageRemote(remote.TerminalOnlyRemote):
|
|||
    def get_os_distrib(self):
        # Return the distro name (e.g. 'centos', 'ubuntu') that libguestfs
        # inspection reports for the image's root drive.
        return self.guest.inspect_get_distro(self.root_drive)
|
||||
|
||||
    def write_file_to(self, path, script, run_as_root):
        # Write *script* into the guest image at *path* via libguestfs.
        # NOTE(review): run_as_root is accepted for remote-interface
        # compatibility but is ignored here — guestfs writes are performed
        # by the appliance regardless; confirm this is intentional.
        LOG.info("Writing script to : {path}".format(path=path))
        stdout = self.guest.write(path, script)
        # Mimic the (exit_code, stdout) contract of remote.execute_command;
        # guestfs.write raises on failure, so success is always code 0.
        return 0, stdout
|
||||
|
||||
|
||||
def setup_plugins():
    """Initialize the Sahara plugin framework (delegates to plugins_base)."""
    plugins_base.setup_plugins()
|
||||
|
@ -117,7 +122,6 @@ def get_plugin_arguments(plugin_name):
|
|||
def pack_image(image_path, plugin_name, plugin_version, image_arguments,
|
||||
root_drive=None, test_only=False):
|
||||
with ImageRemote(image_path, root_drive) as image_remote:
|
||||
reconcile = not test_only
|
||||
plugin = plugins_base.PLUGINS.get_plugin(plugin_name)
|
||||
plugin.pack_image(plugin_version, image_remote, reconcile=reconcile,
|
||||
plugin.pack_image(plugin_version, image_remote, test_only=test_only,
|
||||
image_arguments=image_arguments)
|
||||
|
|
|
@ -78,6 +78,19 @@ class AbstractVersionHandler(object):
|
|||
def on_terminate_cluster(self, cluster):
|
||||
dh.delete_passwords_from_keymanager(cluster)
|
||||
|
||||
    @abc.abstractmethod
    def get_image_arguments(self):
        """Return the image arguments supported by this plugin version.

        :returns: a sequence of sahara.plugins.images.ImageArgument, or
            NotImplemented when image generation is unsupported.
        """
        return NotImplemented
|
||||
|
||||
    @abc.abstractmethod
    def pack_image(self, hadoop_version, remote, test_only=False,
                   image_arguments=None):
        """Pack an image for this plugin version (optional).

        Modifies the image behind *remote* in place unless *test_only*
        is True, in which case the image is only checked.
        """
        pass
|
||||
|
||||
    @abc.abstractmethod
    def validate_images(self, cluster, test_only=False, image_arguments=None):
        """Validate the images used by *cluster*'s instances (optional).

        :param test_only: if True, only check; never modify the images.
        """
        pass
|
||||
|
||||
|
||||
class BaseVersionHandler(AbstractVersionHandler):
|
||||
|
||||
|
@ -202,3 +215,20 @@ class BaseVersionHandler(AbstractVersionHandler):
|
|||
|
||||
def get_health_checks(self, cluster):
|
||||
return health.get_health_checks(cluster, self.cloudera_utils)
|
||||
|
||||
def get_image_arguments(self):
|
||||
if hasattr(self, 'images'):
|
||||
return self.images.get_image_arguments()
|
||||
else:
|
||||
return NotImplemented
|
||||
|
||||
def pack_image(self, hadoop_version, remote, test_only=False,
|
||||
image_arguments=None):
|
||||
if hasattr(self, 'images'):
|
||||
self.images.pack_image(
|
||||
remote, test_only=test_only, image_arguments=image_arguments)
|
||||
|
||||
def validate_images(self, cluster, test_only=False, image_arguments=None):
|
||||
if hasattr(self, 'images'):
|
||||
self.images.validate_images(
|
||||
cluster, test_only=test_only, image_arguments=image_arguments)
|
||||
|
|
|
@ -110,3 +110,16 @@ class CDHPluginProvider(p.ProvisioningPluginBase):
|
|||
def get_health_checks(self, cluster):
|
||||
return self._get_version_handler(
|
||||
cluster.hadoop_version).get_health_checks(cluster)
|
||||
|
||||
def get_image_arguments(self, hadoop_version):
|
||||
return self._get_version_handler(hadoop_version).get_image_arguments()
|
||||
|
||||
def pack_image(self, hadoop_version, remote,
|
||||
test_only=False, image_arguments=None):
|
||||
version = self._get_version_handler(hadoop_version)
|
||||
version.pack_image(hadoop_version, remote, test_only=test_only,
|
||||
image_arguments=image_arguments)
|
||||
|
||||
def validate_images(self, cluster, test_only=False, image_arguments=None):
|
||||
self._get_version_handler(cluster.hadoop_version).validate_images(
|
||||
cluster, test_only=test_only, image_arguments=image_arguments)
|
||||
|
|
|
@ -0,0 +1,43 @@
|
|||
# Copyright (c) 2016 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from sahara.plugins import images
|
||||
from sahara.plugins import utils as plugin_utils
|
||||
|
||||
|
||||
_validator = images.SaharaImageValidator.from_yaml(
|
||||
'plugins/cdh/v5_7_0/resources/images/image.yaml',
|
||||
resource_roots=['plugins/cdh/v5_7_0/resources/images'])
|
||||
|
||||
|
||||
def get_image_arguments():
    """Return the argument list declared by the CDH 5.7.0 image spec.

    Items are sahara.plugins.images.ImageArgument instances parsed from
    image.yaml.
    """
    return _validator.get_argument_list()
|
||||
|
||||
|
||||
def pack_image(remote, test_only=False, image_arguments=None):
    """Pack the image behind *remote* against the CDH 5.7.0 spec.

    Packing is just validation with test_only=False: validators apply
    any missing changes to the image in place.

    :param remote: remote handle to the image (e.g. an ImageRemote).
    :param test_only: if True, only check; do not modify the image.
    :param image_arguments: dict of argument values for the validators.
    """
    _validator.validate(remote, test_only=test_only,
                        image_arguments=image_arguments)
|
||||
|
||||
|
||||
def validate_images(cluster, test_only=False, image_arguments=None):
    """Validate the cluster instances' images against the CDH 5.7.0 spec.

    :param cluster: cluster whose instance images are validated.
    :param test_only: if True, check only (never modify) and probe only
        the first instance, since all instances share a base image.
    :param image_arguments: argument values for the validators; when not
        given, the defaults declared in image.yaml are used.
    """
    # Honor caller-supplied arguments; the original unconditionally
    # discarded them in favor of the defaults.
    if image_arguments is None:
        image_arguments = get_image_arguments()
    if not test_only:
        instances = plugin_utils.get_instances(cluster)
    else:
        # [:1] keeps this a (one-element) list; the original used [0],
        # which produced a bare instance that the loop below would have
        # iterated over incorrectly.
        instances = plugin_utils.get_instances(cluster)[:1]
    for instance in instances:
        with instance.remote() as r:
            _validator.validate(r, test_only=test_only,
                                image_arguments=image_arguments)
|
|
@ -0,0 +1,20 @@
|
|||
#!/bin/bash

# Sahara image validator script: disable the firewall (iptables and, on
# systemd hosts, firewalld) so cluster services can communicate freely.
#
# $test_only is exported by the validator framework: 0 means apply
# changes ("pack" mode), non-zero means check-only.

# 1 iff the iptables unit is currently enabled.
check=$(systemctl --no-pager list-unit-files iptables.service | grep 'enabled' | wc -l)

if [ $check -eq 1 ]; then
    if [ $test_only -eq 0 ]; then
        if type -p systemctl && [[ "$(systemctl --no-pager list-unit-files firewalld)" =~ 'enabled' ]]; then
            systemctl disable firewalld
        fi

        if type -p service; then
            # Persist current rules, then disable on boot.
            service ip6tables save
            service iptables save
            chkconfig ip6tables off
            chkconfig iptables off
        fi
    else
        # Check-only mode: the firewall is enabled but we still report
        # success. NOTE(review): confirm whether a non-compliant image
        # should instead exit non-zero here.
        exit 0
    fi
fi
|
|
@ -0,0 +1,13 @@
|
|||
#!/bin/bash

# Sahara image validator script: put SELinux into permissive mode so it
# does not block Hadoop services.
#
# $test_only: 0 means apply changes, non-zero means check-only.

# Non-zero iff the config already requests permissive mode.
check=$(cat /etc/selinux/config | grep "SELINUX=permissive" | wc -l)
# NOTE(review): debug output left in; harmless but noisy in logs.
echo $check

if [ $check -eq 0 ]; then
    if [ $test_only -eq 0 ]; then
        # '>' intentionally rewrites the whole config with just these
        # two settings.
        echo "SELINUX=permissive" > /etc/selinux/config
        echo "SELINUXTYPE=targeted" >> /etc/selinux/config
    else
        exit 0
    fi
fi
|
|
@ -0,0 +1,33 @@
|
|||
#!/bin/bash

# Sahara image validator script (CentOS): disable auto-start of all CDH
# services so the cluster manager controls them instead of init.
#
# $test_only: 0 means apply changes, non-zero means check-only.

# Count of 'enabled' oozie units — 0 or 1, used as a proxy for whether
# services were already disabled.
check=$(systemctl --no-pager list-unit-files oozie.service | grep 'enabled' | wc -l)

# NOTE(review): $check is a number (0/1), so comparing it to the string
# "on" is always true and the body always runs. Presumably the intended
# test was against the count — confirm and fix upstream.
if [ "$check" != "on" ]; then
    if [ $test_only -eq 0 ]; then
        for i in cloudera-scm-agent \
                cloudera-scm-server \
                cloudera-scm-server-db \
                hadoop-hdfs-datanode \
                hadoop-hdfs-namenode \
                hadoop-hdfs-secondarynamenode \
                hadoop-mapreduce-historyserver \
                hadoop-yarn-nodemanager \
                hadoop-yarn-resourcemanager \
                hive-metastore \
                hive-server2 \
                hive-webhcat-server \
                hue \
                oozie \
                postgresql \
                impala-catalog \
                impala-server \
                impala-state-store \
                solr-server \
                spark-history-server
        do
            chkconfig $i off
        done
    else
        exit 0
    fi
fi
|
|
@ -0,0 +1,33 @@
|
|||
#!/bin/bash

# Sahara image validator script (CentOS): register the Cloudera yum
# repositories (CDH 5.7.0, Cloudera Manager, Navigator Keytrustee,
# Kafka) if they are not present yet.
#
# $test_only: 0 means apply changes, non-zero means check-only.

if [ ! -f /etc/yum.repos.d/cloudera-cdh5.repo ]; then
    if [ $test_only -eq 0 ]; then
        echo '[cloudera-cdh5]' > /etc/yum.repos.d/cloudera-cdh5.repo
        echo "name=Cloudera's Distribution for Hadoop, Version 5" >> /etc/yum.repos.d/cloudera-cdh5.repo
        echo "baseurl=http://archive.cloudera.com/cdh5/redhat/7/x86_64/cdh/5.7.0/" >> /etc/yum.repos.d/cloudera-cdh5.repo
        echo "gpgkey = http://archive.cloudera.com/cdh5/redhat/7/x86_64/cdh/RPM-GPG-KEY-cloudera" >> /etc/yum.repos.d/cloudera-cdh5.repo
        echo 'gpgcheck = 1' >> /etc/yum.repos.d/cloudera-cdh5.repo

        echo '[cloudera-manager]' > /etc/yum.repos.d/cloudera-manager.repo
        echo 'name=Cloudera Manager' >> /etc/yum.repos.d/cloudera-manager.repo
        echo "baseurl=http://archive.cloudera.com/cm5/redhat/7/x86_64/cm/5.7.0/" >> /etc/yum.repos.d/cloudera-manager.repo
        echo "gpgkey = http://archive.cloudera.com/cm5/redhat/7/x86_64/cm/RPM-GPG-KEY-cloudera" >> /etc/yum.repos.d/cloudera-manager.repo
        echo 'gpgcheck = 1' >> /etc/yum.repos.d/cloudera-manager.repo

        echo '[navigator-keytrustee]' > /etc/yum.repos.d/kms.repo
        echo "name=Cloudera's Distribution for navigator-Keytrustee, Version 5" >> /etc/yum.repos.d/kms.repo
        echo "baseurl=http://archive.cloudera.com/navigator-keytrustee5/redhat/7/x86_64/navigator-keytrustee/5.7.0/" >> /etc/yum.repos.d/kms.repo
        echo "gpgkey = http://archive.cloudera.com/navigator-keytrustee5/redhat/7/x86_64/navigator-keytrustee/RPM-GPG-KEY-cloudera" >> /etc/yum.repos.d/kms.repo
        echo 'gpgcheck = 1' >> /etc/yum.repos.d/kms.repo

        echo "[cloudera-kafka]" > /etc/yum.repos.d/cloudera-kafka.repo
        echo "name=Cloudera's Distribution for kafka, Version 2.0.2" >> /etc/yum.repos.d/cloudera-kafka.repo
        echo "baseurl=http://archive.cloudera.com/kafka/redhat/7/x86_64/kafka/2.0.2/" >> /etc/yum.repos.d/cloudera-kafka.repo
        echo "gpgkey = http://archive.cloudera.com/kafka/redhat/7/x86_64/kafka/RPM-GPG-KEY-cloudera" >> /etc/yum.repos.d/cloudera-kafka.repo
        echo "gpgcheck = 1" >> /etc/yum.repos.d/cloudera-kafka.repo

        # Force yum to pick up the new repo definitions.
        yum clean all
    else
        exit 0
    fi
fi
|
|
@ -0,0 +1,25 @@
|
|||
#!/usr/bin/env bash

# Sahara image validator script: install the Hadoop-Swift
# (hadoop-openstack) filesystem jar so jobs can read/write Swift.
#
# Env vars supplied by the validator framework:
#   $test_only     0 => apply changes, non-zero => check-only
#   $hdfs_lib_dir  target directory for the jar

hadoop="2.6.0"

HADOOP_SWIFT_BUILD_LOCATION="http://tarballs.openstack.org/sahara/dist/hadoop-openstack/master"
SWIFT_LIB_URI="$HADOOP_SWIFT_BUILD_LOCATION/hadoop-openstack-${hadoop}.jar"
# Installed under a version-less name so configs need not track versions.
# (A dead assignment of a cdh-versioned name, immediately overwritten
# here and referencing $plugin_version which this script is not given,
# was removed.)
HADOOP_SWIFT_JAR_NAME="hadoop-openstack.jar"

if [ ! -f $hdfs_lib_dir/$HADOOP_SWIFT_JAR_NAME ]; then
    if [ $test_only -eq 0 ]; then
        wget -O $hdfs_lib_dir/$HADOOP_SWIFT_JAR_NAME $SWIFT_LIB_URI

        if [ $? -ne 0 ]; then
            echo -e "Could not download Swift Hadoop FS implementation.\nAborting"
            exit 1
        fi

        chmod 0644 $hdfs_lib_dir/$HADOOP_SWIFT_JAR_NAME
    else
        exit 0
    fi
fi
|
@ -0,0 +1,17 @@
|
|||
#!/bin/bash

# Sahara image validator script: fetch the CDH-matched hadoop-openstack
# jar and create the oozie sharelib tarball symlinks oozie expects.
#
# $test_only: 0 => apply changes, non-zero => check-only.
# $plugin_version: CDH version, supplied by the validator framework.

HADOOP_OPENSTACK_5_7_x_URL="https://repository.cloudera.com/artifactory/repo/org/apache/hadoop/hadoop-openstack/2.6.0-cdh$plugin_version/hadoop-openstack-2.6.0-cdh$plugin_version.jar"

dest=/usr/lib/hadoop-mapreduce/hadoop-openstack.jar

# NOTE(review): the guard checks for the sharelib symlink, not for
# $dest, so the jar download is skipped whenever the links already
# exist — confirm that coupling is intended.
if [ ! -L "/usr/lib/oozie/oozie-sharelib-yarn.tar.gz" ]; then
    if [ $test_only -eq 0 ]; then
        wget -O $dest $HADOOP_OPENSTACK_5_7_x_URL

        ln -s /usr/lib/oozie/oozie-sharelib-yarn /usr/lib/oozie/oozie-sharelib-yarn.tar.gz
        ln -s /usr/lib/oozie/oozie-sharelib-mr1 /usr/lib/oozie/oozie-sharelib-mr1.tar.gz
        ln -s /usr/lib/oozie/oozie-sharelib-yarn.tar.gz /usr/lib/oozie/oozie-sharelib.tar.gz
    else
        exit 0
    fi
fi
|
|
@ -0,0 +1,22 @@
|
|||
#!/bin/bash

# Sahara image validator script: install the ExtJS library that the
# oozie web console requires.
#
# $test_only: 0 => apply changes, non-zero => check-only.
# $EXTJS_NO_UNPACK (optional): if set, keep the archive zipped.

EXTJS_DESTINATION_DIR="/var/lib/oozie"
EXTJS_DOWNLOAD_URL="http://tarballs.openstack.org/sahara/dist/common-artifacts/ext-2.2.zip"

extjs_archive=/tmp/$(basename $EXTJS_DOWNLOAD_URL)

# Install only when ExtJS is not already present (unpacked dir or the
# moved archive). The original guard `[ ! -n "$EXTJS_DESTINATION_DIR" ]`
# tested whether the constant above is an empty string — always false —
# so the script never installed anything.
if [ ! -d "$EXTJS_DESTINATION_DIR/ext-2.2" -a ! -f "$EXTJS_DESTINATION_DIR/$(basename $EXTJS_DOWNLOAD_URL)" ]; then
    if [ $test_only -eq 0 ]; then
        wget -O $extjs_archive $EXTJS_DOWNLOAD_URL

        mkdir -p $EXTJS_DESTINATION_DIR
        if [ -z "${EXTJS_NO_UNPACK:-}" ]; then
            unzip -d "$EXTJS_DESTINATION_DIR" $extjs_archive
            rm -f $extjs_archive
        else
            mv $extjs_archive $EXTJS_DESTINATION_DIR
        fi
    else
        exit 0
    fi
fi
|
|
@ -0,0 +1,11 @@
|
|||
#!/bin/bash

# Sahara image validator script: fetch the JCE Unlimited Strength policy
# jars (needed for Kerberos with strong crypto) into /tmp/UnlimitedPolicy/.
#
# $test_only: 0 => apply changes, non-zero => check-only.
# $unlimited_security_location: base URL for the policy jars.

# The original guard `[ ! -n /tmp/UnlimitedPolicy/ ]` tested whether a
# literal (non-empty) string is empty — always false — so the downloads
# never ran. Test for the directory instead.
if [ ! -d /tmp/UnlimitedPolicy/ ]; then
    if [ $test_only -eq 0 ]; then
        mkdir /tmp/UnlimitedPolicy/
        wget $unlimited_security_location/local_policy.jar -O /tmp/UnlimitedPolicy/local_policy.jar
        wget $unlimited_security_location/US_export_policy.jar -O /tmp/UnlimitedPolicy/US_export_policy.jar
    else
        exit 0
    fi
fi
|
|
@ -0,0 +1,132 @@
|
|||
arguments:
|
||||
java_distro:
|
||||
default: cloudera-jdk
|
||||
description: The distribution of Java to install. Defaults to cloudera-jdk.
|
||||
choices:
|
||||
- openjdk
|
||||
- oracle-java
|
||||
- cloudera-jdk
|
||||
plugin_version:
|
||||
default: 5.7.0
|
||||
description: The distribution of CDH to install. Defaults to 5.7.0.
|
||||
hidden: True
|
||||
required: False
|
||||
choices:
|
||||
- 5.7.0
|
||||
- 5.7.1
|
||||
- 5.7.2
|
||||
- 5.7.3
|
||||
- 5.7.4
|
||||
- 5.7.5
|
||||
hdfs_lib_dir:
|
||||
default: /usr/lib/hadoop-mapreduce
|
||||
description: The path to HDFS_LIB_DIR. Default to /usr/lib/hadoop-mapreduce
|
||||
required: False
|
||||
unlimited_security_location:
|
||||
default: http://sahara-files.mirantis.com/kerberos-artifacts/
|
||||
description: Place where UnlimitedSecurity polices are located
|
||||
required: False
|
||||
|
||||
validators:
|
||||
- script: common/install_extjs
|
||||
- os_case:
|
||||
- centos:
|
||||
- package: wget
|
||||
- script: centos/wget_repo
|
||||
- ubuntu:
|
||||
- script:
|
||||
ubuntu/wget_repo:
|
||||
env_vars: [plugin_version]
|
||||
- argument_case:
|
||||
argument_name: java_distro
|
||||
cases:
|
||||
openjdk:
|
||||
- any:
|
||||
- all:
|
||||
- package: java-1.8.0-openjdk-devel
|
||||
- all:
|
||||
- package: java-1.7.0-openjdk-devel
|
||||
cloudera-jdk:
|
||||
- all:
|
||||
- package: oracle-j2sdk1.7
|
||||
- package: ntp
|
||||
- package:
|
||||
- cloudera-manager-agent
|
||||
- cloudera-manager-daemons
|
||||
- cloudera-manager-server
|
||||
- cloudera-manager-server-db-2
|
||||
- package:
|
||||
- hadoop-hdfs-namenode
|
||||
- hadoop-hdfs-datanode
|
||||
- hadoop-hdfs-secondarynamenode
|
||||
- hadoop-mapreduce
|
||||
- hadoop-mapreduce-historyserver
|
||||
- hadoop-yarn-nodemanager
|
||||
- hadoop-yarn-resourcemanager
|
||||
- package:
|
||||
- hbase
|
||||
- hbase-solr
|
||||
- package:
|
||||
- hive-hcatalog
|
||||
- hive-metastore
|
||||
- hive-server2
|
||||
- hive-webhcat-server
|
||||
- hue
|
||||
- package:
|
||||
- oozie
|
||||
- spark-core
|
||||
- os_case:
|
||||
- centos:
|
||||
- package: spark-history-server
|
||||
- package: zookeeper
|
||||
- package: unzip
|
||||
- package: flume-ng
|
||||
- package: hadoop-kms
|
||||
- package:
|
||||
- impala
|
||||
- impala-server
|
||||
- impala-state-store
|
||||
- impala-catalog
|
||||
- impala-shell
|
||||
- package: keytrustee-keyprovider
|
||||
- package:
|
||||
- sentry
|
||||
- solr-server
|
||||
- solr-doc
|
||||
- search
|
||||
- sqoop2
|
||||
- package:
|
||||
- kafka
|
||||
- kafka-server
|
||||
- script:
|
||||
common/install_cloudera:
|
||||
env_vars: [plugin_version]
|
||||
- os_case:
|
||||
- centos:
|
||||
- script: centos/turn_off_services
|
||||
- ubuntu:
|
||||
- script: ubuntu/turn_off_services
|
||||
- script:
|
||||
common/add_jar:
|
||||
env_vars: [plugin_version, hdfs_lib_dir]
|
||||
- script:
|
||||
common/unlimited_security_artifacts:
|
||||
env_vars: [unlimited_security_location]
|
||||
- os_case:
|
||||
- centos:
|
||||
- package:
|
||||
- krb5-server
|
||||
- krb5-libs
|
||||
- krb5-workstation
|
||||
- rng-tools
|
||||
- package: iptables-services
|
||||
- script: centos/selinux_permissive
|
||||
- script: centos/disable_firewall
|
||||
- package: nmap-ncat
|
||||
- ubuntu:
|
||||
- package:
|
||||
- krb5-admin-server
|
||||
- libpam-krb5
|
||||
- ldap-utils
|
||||
- krb5-user
|
||||
- rng-tools
|
|
@ -0,0 +1,33 @@
|
|||
#!/bin/bash

# Sahara image validator script (Ubuntu): disable auto-start of all CDH
# services so the cluster manager controls them instead of init.
#
# $test_only: 0 means apply changes, non-zero means check-only.

# Count of 'enabled' oozie units — 0 or 1, used as a proxy for whether
# services were already disabled.
check=$(systemctl --no-pager list-unit-files oozie.service | grep 'enabled' | wc -l)

# NOTE(review): $check is a number (0/1), so comparing it to the string
# "on" is always true and the body always runs. Presumably the intended
# test was against the count — confirm and fix upstream.
if [ "$check" != "on" ]; then
    if [ $test_only -eq 0 ]; then
        for i in cloudera-scm-agent \
                cloudera-scm-server \
                cloudera-scm-server-db \
                hadoop-hdfs-datanode \
                hadoop-hdfs-namenode \
                hadoop-hdfs-secondarynamenode \
                hadoop-mapreduce-historyserver \
                hadoop-yarn-nodemanager \
                hadoop-yarn-resourcemanager \
                hive-metastore \
                hive-server2 \
                hive-webhcat-server \
                hue \
                oozie \
                postgresql \
                impala-catalog \
                impala-server \
                impala-state-store \
                solr-server \
                spark-history-server
        do
            update-rc.d -f $i remove
        done
    else
        exit 0
    fi
fi
|
|
@ -0,0 +1,36 @@
|
|||
#!/bin/bash

# Sahara image validator script (Ubuntu): register the Cloudera apt
# repositories (CDH, Cloudera Manager, Navigator Keytrustee, Kafka) and
# pin them if they are not present yet.
#
# $test_only: 0 => apply changes, non-zero => check-only.
# $plugin_version: CDH version, supplied by the validator framework.

if [ ! -f /etc/apt/sources.list.d/cdh5.list ]; then
    if [ $test_only -eq 0 ]; then
        # Add repository with postgresql package (it's dependency of cloudera packages)
        # Base image doesn't contain this repo
        echo "deb http://nova.clouds.archive.ubuntu.com/ubuntu/ trusty universe multiverse main" >> /etc/apt/sources.list

        # Cloudera repositories
        echo "deb [arch=amd64] http://archive.cloudera.com/cdh5/ubuntu/trusty/amd64/cdh trusty-cdh$plugin_version contrib" > /etc/apt/sources.list.d/cdh5.list
        echo "deb-src http://archive.cloudera.com/cdh5/ubuntu/trusty/amd64/cdh trusty-cdh$plugin_version contrib" >> /etc/apt/sources.list.d/cdh5.list

        wget -qO - http://archive-primary.cloudera.com/cdh5/ubuntu/trusty/amd64/cdh/archive.key | apt-key add -

        echo "deb [arch=amd64] http://archive.cloudera.com/cm5/ubuntu/trusty/amd64/cm trusty-cm$plugin_version contrib" > /etc/apt/sources.list.d/cm5.list
        echo "deb-src http://archive.cloudera.com/cm5/ubuntu/trusty/amd64/cm trusty-cm$plugin_version contrib" >> /etc/apt/sources.list.d/cm5.list

        wget -qO - http://archive-primary.cloudera.com/cm5/ubuntu/trusty/amd64/cm/archive.key | apt-key add -

        wget -O /etc/apt/sources.list.d/kms.list http://archive.cloudera.com/navigator-keytrustee5/ubuntu/trusty/amd64/navigator-keytrustee/cloudera.list
        wget -qO - http://archive.cloudera.com/navigator-keytrustee5/ubuntu/trusty/amd64/navigator-keytrustee/archive.key | apt-key add -

        # add Kafka repository
        echo 'deb http://archive.cloudera.com/kafka/ubuntu/trusty/amd64/kafka/ trusty-kafka2.0.2 contrib' >> /etc/apt/sources.list
        wget -qO - https://archive.cloudera.com/kafka/ubuntu/trusty/amd64/kafka/archive.key | apt-key add -

        #change repository priority
        echo 'Package: zookeeper' >> /etc/apt/preferences.d/cloudera-pin
        echo 'Pin: origin "archive.cloudera.com"' >> /etc/apt/preferences.d/cloudera-pin
        echo 'Pin-Priority: 1001' >> /etc/apt/preferences.d/cloudera-pin

        apt-get update
    else
        exit 0
    fi
fi
|
|
@ -19,6 +19,7 @@ from sahara.plugins.cdh.v5_7_0 import cloudera_utils
|
|||
from sahara.plugins.cdh.v5_7_0 import config_helper
|
||||
from sahara.plugins.cdh.v5_7_0 import deploy
|
||||
from sahara.plugins.cdh.v5_7_0 import edp_engine
|
||||
from sahara.plugins.cdh.v5_7_0 import images
|
||||
from sahara.plugins.cdh.v5_7_0 import plugin_utils
|
||||
from sahara.plugins.cdh.v5_7_0 import validation
|
||||
|
||||
|
@ -32,4 +33,5 @@ class VersionHandler(avm.BaseVersionHandler):
|
|||
self.plugin_utils = plugin_utils.PluginUtilsV570()
|
||||
self.deploy = deploy
|
||||
self.edp_engine = edp_engine
|
||||
self.images = images
|
||||
self.validation = validation.ValidatorV570()
|
||||
|
|
|
@ -13,6 +13,8 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
import abc
|
||||
import collections
|
||||
import copy
|
||||
|
@ -669,16 +671,20 @@ class SaharaScriptValidator(SaharaImageValidatorBase):
|
|||
distro representation, per `lsb_release -is`.
|
||||
:raises ImageValidationError: If validation fails.
|
||||
"""
|
||||
|
||||
arguments = copy.deepcopy(image_arguments)
|
||||
arguments[self.TEST_ONLY_KEY] = 1 if test_only else 0
|
||||
script = "\n".join(["%(env_vars)s",
|
||||
"%(script)s"])
|
||||
env_vars = "\n".join("export %s=%s" % (key, value) for (key, value)
|
||||
in six.iteritems(image_arguments)
|
||||
in six.iteritems(arguments)
|
||||
if key in self.env_vars)
|
||||
script = script % {"env_vars": env_vars,
|
||||
"script": self.script_contents}
|
||||
code, stdout = _sudo(remote, script)
|
||||
path = '/tmp/%s.sh' % uuidutils.generate_uuid()
|
||||
remote.write_file_to(path, script, run_as_root=True)
|
||||
_sudo(remote, 'chmod +x %s' % path)
|
||||
code, stdout = _sudo(remote, '%s' % path)
|
||||
if self.output_var:
|
||||
image_arguments[self.output_var] = stdout
|
||||
|
||||
|
|
|
@ -14,6 +14,7 @@
|
|||
# limitations under the License.
|
||||
|
||||
import mock
|
||||
from oslo_utils import uuidutils
|
||||
import yaml
|
||||
|
||||
from sahara import exceptions as ex
|
||||
|
@ -267,10 +268,12 @@ class TestImages(b.SaharaTestCase):
|
|||
run_as_root=True)]
|
||||
remote.execute_command.assert_has_calls(calls)
|
||||
|
||||
def test_script_validator(self):
|
||||
@mock.patch('oslo_utils.uuidutils.generate_uuid')
|
||||
def test_script_validator(self, uuid):
|
||||
hash_value = '00000000-0000-0000-0000-000000000000'
|
||||
uuidutils.generate_uuid.return_value = hash_value
|
||||
cls = images.SaharaScriptValidator
|
||||
image_arguments = {"distro": 'centos'}
|
||||
map_rep = "export distro=centos\n"
|
||||
cmd = "It's dangerous to go alone. Run this."
|
||||
validator = cls(cmd, env_vars=image_arguments.keys(),
|
||||
output_var="distro")
|
||||
|
@ -281,7 +284,10 @@ class TestImages(b.SaharaTestCase):
|
|||
|
||||
validator.validate(remote, test_only=False,
|
||||
image_arguments=image_arguments)
|
||||
call = [mock.call(map_rep + cmd, run_as_root=True)]
|
||||
call = [mock.call('chmod +x /tmp/%(hash_value)s.sh' %
|
||||
{'hash_value': hash_value}, run_as_root=True),
|
||||
mock.call('/tmp/%(hash_value)s.sh' %
|
||||
{'hash_value': hash_value}, run_as_root=True)]
|
||||
remote.execute_command.assert_has_calls(call)
|
||||
self.assertEqual(image_arguments['distro'], 'fedora')
|
||||
|
||||
|
|
|
@ -64,6 +64,9 @@ setup_build_env
|
|||
# build_images "<plugin_name>" "<plugin_version>" "<distribution> <distribution>"
|
||||
|
||||
case "$PLUGIN" in
|
||||
"cloudera")
|
||||
build_images "cdh" "5.7.0" "centos7"
|
||||
;;
|
||||
*)
|
||||
echo "Invalid version"
|
||||
;;
|
||||
|
|
Loading…
Reference in New Issue