Fix "Create hbase common lib shows warnings"

Change-Id: Ia6ffeedbbf038d04e19015856fc8f5e2290c4981
Closes-Bug: 1497947
This commit is contained in:
Marianne Linhares Monteiro 2017-02-17 09:47:31 -03:00
parent af5979e70b
commit b844c426fd
2 changed files with 13 additions and 13 deletions

View File

@@ -31,7 +31,7 @@ HBASE_COMMON_LIB_PATH = "/user/sahara-hbase-lib"
def create_hbase_common_lib(r):
r.execute_command(
- 'sudo su - -c "hadoop dfs -mkdir -p %s" hdfs' % (
+ 'sudo su - -c "hdfs dfs -mkdir -p %s" hdfs' % (
HBASE_COMMON_LIB_PATH))
ret_code, stdout = r.execute_command(
'hbase classpath')
@@ -39,7 +39,7 @@ def create_hbase_common_lib(r):
paths = stdout.split(':')
for p in paths:
if p.endswith(".jar"):
- r.execute_command('sudo su - -c "hadoop fs -put -p %s %s" hdfs'
+ r.execute_command('sudo su - -c "hdfs fs -put -p %s %s" hdfs'
% (p, HBASE_COMMON_LIB_PATH))
else:
raise ex.RequiredServiceMissingException('hbase')
@@ -53,26 +53,26 @@ def put_file_to_hdfs(r, file, file_name, path, hdfs_user):
def copy_from_local(r, source, target, hdfs_user):
- r.execute_command('sudo su - -c "hadoop dfs -copyFromLocal '
+ r.execute_command('sudo su - -c "hdfs dfs -copyFromLocal '
'%s %s" %s' % (source, target, hdfs_user))
def move_from_local(r, source, target, hdfs_user):
# using copyFromLocal followed by rm to address permission issues that
# arise when image user is not the same as hdfs user (permissions-wise).
- r.execute_command('sudo su - -c "hadoop dfs -copyFromLocal %(source)s '
+ r.execute_command('sudo su - -c "hdfs dfs -copyFromLocal %(source)s '
'%(target)s" %(user)s && sudo rm -f %(source)s' %
{"source": source, "target": target, "user": hdfs_user})
def create_dir_hadoop1(r, dir_name, hdfs_user):
r.execute_command(
- 'sudo su - -c "hadoop dfs -mkdir %s" %s' % (dir_name, hdfs_user))
+ 'sudo su - -c "hdfs dfs -mkdir %s" %s' % (dir_name, hdfs_user))
def create_dir_hadoop2(r, dir_name, hdfs_user):
r.execute_command(
- 'sudo su - -c "hadoop dfs -mkdir -p %s" %s' % (dir_name, hdfs_user))
+ 'sudo su - -c "hdfs dfs -mkdir -p %s" %s' % (dir_name, hdfs_user))
def _get_cluster_hosts_information(host, cluster):

View File

@@ -37,10 +37,10 @@ class HDFSHelperTestCase(base.SaharaTestCase):
helper.create_hbase_common_lib(self.cluster)
calls = [
- mock.call(('sudo su - -c "hadoop dfs -mkdir -p '
+ mock.call(('sudo su - -c "hdfs dfs -mkdir -p '
'/user/sahara-hbase-lib" hdfs')),
mock.call('hbase classpath'),
- mock.call(('sudo su - -c "hadoop fs -put -p may.jar '
+ mock.call(('sudo su - -c "hdfs fs -put -p may.jar '
'/user/sahara-hbase-lib" hdfs'))]
self.cluster.execute_command.assert_has_calls(calls)
@@ -59,23 +59,23 @@ class HDFSHelperTestCase(base.SaharaTestCase):
def test_copy_from_local(self):
helper.copy_from_local(self.cluster, 'Galaxy', 'Earth', 'BigBang')
self.cluster.execute_command.assert_called_once_with(
- 'sudo su - -c "hadoop dfs -copyFromLocal Galaxy Earth" BigBang')
+ 'sudo su - -c "hdfs dfs -copyFromLocal Galaxy Earth" BigBang')
def test_move_from_local(self):
helper.move_from_local(self.cluster, 'Galaxy', 'Earth', 'BigBang')
self.cluster.execute_command.assert_called_once_with(
- 'sudo su - -c "hadoop dfs -copyFromLocal Galaxy Earth" BigBang '
+ 'sudo su - -c "hdfs dfs -copyFromLocal Galaxy Earth" BigBang '
'&& sudo rm -f Galaxy')
def test_create_dir_hadoop1(self):
helper.create_dir_hadoop1(self.cluster, 'Earth', 'BigBang')
self.cluster.execute_command.assert_called_once_with(
- 'sudo su - -c "hadoop dfs -mkdir Earth" BigBang')
+ 'sudo su - -c "hdfs dfs -mkdir Earth" BigBang')
def test_create_dir_hadoop2(self):
helper.create_dir_hadoop2(self.cluster, 'Earth', 'BigBang')
self.cluster.execute_command.assert_called_once_with(
- 'sudo su - -c "hadoop dfs -mkdir -p Earth" BigBang')
+ 'sudo su - -c "hdfs dfs -mkdir -p Earth" BigBang')
@mock.patch('sahara.utils.cluster.generate_etc_hosts')
@mock.patch('sahara.plugins.utils.get_instances')
@@ -147,5 +147,5 @@ class HDFSHelperTestCase(base.SaharaTestCase):
helper.put_file_to_hdfs(self.cluster, open_get, 'workflow',
'/tmp', 'hdfs')
self.cluster.execute_command.assert_called_once_with(
- 'sudo su - -c "hadoop dfs -copyFromLocal /tmp/workflow.111'
+ 'sudo su - -c "hdfs dfs -copyFromLocal /tmp/workflow.111'
' /tmp/workflow" hdfs && sudo rm -f /tmp/workflow.111')