Fix working Spark with cinder volumes
Change-Id: I4eb0d00766066f0d387e632b0217e11cfed552c5
Closes-Bug: #1376790
This commit is contained in:
parent
ec880ae194
commit
5aca5fe75a
|
@ -379,9 +379,13 @@ def extract_name_values(configs):
|
|||
return dict((cfg['name'], cfg['value']) for cfg in configs)
|
||||
|
||||
|
||||
def make_hadoop_path(base_dirs, suffix):
    """Return each base directory with *suffix* appended to it.

    :param base_dirs: list of storage-path prefixes (e.g. mounted volumes)
    :param suffix: path fragment to append to every prefix
    :returns: list of concatenated paths, in the same order as *base_dirs*
    """
    return [path + suffix for path in base_dirs]
|
||||
|
||||
|
||||
def extract_hadoop_path(lst, hadoop_dir):
    """Build a comma-separated list of per-volume hadoop paths.

    Appends *hadoop_dir* to every entry of *lst* and joins the results
    with commas (the format HDFS expects for dfs.data.dir/dfs.name.dir).

    :param lst: list of storage-path prefixes; may be empty or None
    :param hadoop_dir: path fragment appended to every prefix
    :returns: the joined string, or None when *lst* is empty/None
    """
    if not lst:
        return None
    return ",".join(path + hadoop_dir for path in lst)
|
||||
|
||||
|
||||
def _set_config(cfg, gen_cfg, name=None):
|
||||
|
|
|
@ -232,8 +232,10 @@ class SparkProvider(p.ProvisioningPluginBase):
|
|||
'sudo chmod 600 $HOME/.ssh/id_rsa')
|
||||
|
||||
storage_paths = instance.node_group.storage_paths()
|
||||
dn_path = c_helper.extract_hadoop_path(storage_paths, '/dfs/dn')
|
||||
nn_path = c_helper.extract_hadoop_path(storage_paths, '/dfs/nn')
|
||||
dn_path = ' '.join(c_helper.make_hadoop_path(storage_paths,
|
||||
'/dfs/dn'))
|
||||
nn_path = ' '.join(c_helper.make_hadoop_path(storage_paths,
|
||||
'/dfs/nn'))
|
||||
|
||||
hdfs_dir_cmd = ('sudo mkdir -p %(nn_path)s %(dn_path)s &&'
|
||||
'sudo chown -R hdfs:hadoop %(nn_path)s %(dn_path)s &&'
|
||||
|
|
|
@ -0,0 +1,25 @@
|
|||
# Copyright (c) 2014 Mirantis Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from sahara.plugins.spark import config_helper as c_helper
|
||||
from sahara.tests.unit import base as test_base
|
||||
|
||||
|
||||
class ConfigHelperUtilsTest(test_base.SaharaTestCase):
    """Unit tests for the path helpers in the Spark config_helper module."""

    def test_make_hadoop_path(self):
        # Every storage prefix must come back with the suffix appended,
        # preserving the original ordering.
        dirs = ['/mnt/one', '/mnt/two']
        actual = c_helper.make_hadoop_path(dirs, '/spam')
        self.assertEqual(['/mnt/one/spam', '/mnt/two/spam'], actual)
|
Loading…
Reference in New Issue