Merge "Fix submitting hive job"

Jenkins 2013-09-30 08:23:25 +00:00 committed by Gerrit Code Review
commit bce2507a82
7 changed files with 30 additions and 19 deletions

View File

@@ -140,7 +140,7 @@ def get_plugin_configs():
     return PLUGIN_CONFIGS


-def set_general_configs(hive_hostname):
+def set_general_configs(hive_hostname, passwd_mysql):
     GENERAL_CONFS.update({
         ENABLE_SWIFT.name: {
             'default_value': ENABLE_SWIFT.default_value,
@@ -148,7 +148,7 @@ def set_general_configs(hive_hostname):
         },
         ENABLE_MYSQL.name: {
            'default_value': ENABLE_MYSQL.default_value,
-            'conf': m_h.get_required_mysql_configs(hive_hostname)
+            'conf': m_h.get_required_mysql_configs(hive_hostname, passwd_mysql)
         }
     })

@@ -168,9 +168,9 @@ def generate_cfg_from_general(cfg, configs, general_config,
     return cfg


-def generate_xml_configs(configs, storage_path, nn_hostname,
-                         jt_hostname, oozie_hostname, hive_hostname):
-    set_general_configs(hive_hostname)
+def generate_xml_configs(configs, storage_path, nn_hostname, jt_hostname,
+                         oozie_hostname, hive_hostname, passwd_hive_mysql):
+    set_general_configs(hive_hostname, passwd_hive_mysql)
     # inserting common configs depends on provisioned VMs and HDFS placement
     # TODO(aignatov): should be moved to cluster context
     cfg = {

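Note that callers of generate_xml_configs (the helper changed above) must now pass the MySQL password as the last argument. A hypothetical call with the new signature; every value below is a placeholder, not taken from the change:

    # Hypothetical caller of the updated helper; all values are illustrative.
    cfg = generate_xml_configs(
        configs={},                          # user-supplied plugin configs
        storage_path=['/volumes/disk1'],     # storage paths for the node group
        nn_hostname='master-001',
        jt_hostname='master-001',
        oozie_hostname='master-001',
        hive_hostname='master-001',
        passwd_hive_mysql='generated-uuid')  # None when the cluster has no hiveserver
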
View File

@@ -14,12 +14,13 @@
 # limitations under the License.


-def get_hive_mysql_configs(metastore_host):
+def get_hive_mysql_configs(metastore_host, passwd):
     return {
-        'javax.jdo.option.ConnectionURL': 'jdbc:mysql://localhost/metastore',
+        'javax.jdo.option.ConnectionURL': 'jdbc:mysql://%s/metastore' %
+                                          metastore_host,
         'javax.jdo.option.ConnectionDriverName': 'com.mysql.jdbc.Driver',
         'javax.jdo.option.ConnectionUserName': 'hive',
-        'javax.jdo.option.ConnectionPassword': 'hive',
+        'javax.jdo.option.ConnectionPassword': passwd,
         'datanucleus.autoCreateSchema': 'false',
         'datanucleus.fixedDatastore': 'true',
         'hive.metastore.uris': 'thrift://%s:9083' % metastore_host,
@@ -37,8 +38,8 @@ def get_oozie_mysql_configs():
     }


-def get_required_mysql_configs(hive_hostname):
+def get_required_mysql_configs(hive_hostname, passwd_mysql):
     configs = get_oozie_mysql_configs()
     if hive_hostname:
-        configs.update(get_hive_mysql_configs(hive_hostname))
+        configs.update(get_hive_mysql_configs(hive_hostname, passwd_mysql))
     return configs

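For illustration, the updated helpers (m_h is the mysql helper module shown above) now return the generated password instead of the hard-coded 'hive' one; the argument values below are placeholders:

    # Sketch of calls to the updated helpers with placeholder arguments.
    conf = m_h.get_hive_mysql_configs('metastore-host', 's3cr3t-uuid')
    # conf['javax.jdo.option.ConnectionURL'] == 'jdbc:mysql://metastore-host/metastore'
    # conf['javax.jdo.option.ConnectionPassword'] == 's3cr3t-uuid'
    # With no Hive host, only the Oozie configs come back:
    assert m_h.get_required_mysql_configs(None, None) == m_h.get_oozie_mysql_configs()
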
View File

@@ -16,6 +16,7 @@
 from savanna import conductor
 from savanna import context
 from savanna.openstack.common import log as logging
+from savanna.openstack.common import uuidutils
 from savanna.plugins.general import exceptions as ex
 from savanna.plugins.general import utils
 from savanna.plugins import provisioning as p
@@ -167,7 +168,7 @@ class VanillaProvider(p.ProvisioningPluginBase):
         LOG.info('Cluster %s has been started successfully' % cluster.name)
         self._set_cluster_info(cluster)

-    def _extract_configs_to_extra(self, cluster):
+    def _extract_configs_to_extra(self, cluster, passwd_hive_mysql):
         nn = utils.get_namenode(cluster)
         jt = utils.get_jobtracker(cluster)
         oozie = utils.get_oozie(cluster)
@@ -184,7 +185,8 @@ class VanillaProvider(p.ProvisioningPluginBase):
                     oozie.hostname
                     if oozie else None,
                     hive.hostname
-                    if hive else None),
+                    if hive else None,
+                    passwd_hive_mysql),
                 'setup_script': c_helper.generate_setup_script(
                     ng.storage_paths,
                     c_helper.extract_environment_confs(ng.configuration),
@@ -240,7 +242,9 @@ class VanillaProvider(p.ProvisioningPluginBase):
                 run.start_process(r, "tasktracker")

     def _push_configs_to_nodes(self, cluster, instances=None):
-        extra = self._extract_configs_to_extra(cluster)
+        passwd_mysql = uuidutils.generate_uuid() \
+            if utils.get_hiveserver(cluster) else None
+        extra = self._extract_configs_to_extra(cluster, passwd_mysql)
         if instances is None:
             instances = utils.get_instances(cluster)

@@ -307,6 +311,8 @@ class VanillaProvider(p.ProvisioningPluginBase):
             sql_script = f.get_file_text(
                 'plugins/vanilla/resources/create_hive_db.sql'
             )
+            sql_script = sql_script.replace('pass',
+                                            passwd_mysql)
             files.update({'/tmp/create_hive_db.sql': sql_script})
             remote.get_remote(hive_server).write_files_to(files)

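The password handling added above reduces to the pattern sketched below using only the standard library (uuidutils.generate_uuid() in the OpenStack common code is essentially str(uuid.uuid4()); the SQL template is shortened here for illustration):

    # Standalone sketch: generate a random password and splice it into the
    # SQL template in place of the 'pass' placeholder.
    import uuid

    passwd_mysql = str(uuid.uuid4())
    sql_template = "CREATE USER 'hive'@'localhost' IDENTIFIED BY 'pass';"
    sql_script = sql_template.replace('pass', passwd_mysql)
    # str.replace() rewrites every occurrence of 'pass', which is safe here
    # because the template uses 'pass' only as the password placeholder.
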
View File

@@ -1,8 +1,9 @@
 CREATE DATABASE metastore;
 USE metastore;
 SOURCE /opt/hive/scripts/metastore/upgrade/mysql/hive-schema-0.10.0.mysql.sql;
-CREATE USER 'hive'@'localhost' IDENTIFIED BY 'hive';
+CREATE USER 'hive'@'localhost' IDENTIFIED BY 'pass';
 REVOKE ALL PRIVILEGES, GRANT OPTION FROM 'hive'@'localhost';
-GRANT SELECT,INSERT,UPDATE,DELETE,LOCK TABLES,EXECUTE ON metastore.* TO 'hive'@'localhost';
+GRANT ALL PRIVILEGES ON metastore.* TO 'hive'@'localhost' IDENTIFIED BY 'pass';
+GRANT ALL PRIVILEGES ON metastore.* TO 'hive'@'%' IDENTIFIED BY 'pass';
 FLUSH PRIVILEGES;
 exit

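A quick standalone sanity check (not part of the change) that the single str.replace() in the version handler rewrites all three 'pass' placeholders in this template:

    # Verify one replace() call covers every placeholder in the shipped template.
    template = (
        "CREATE USER 'hive'@'localhost' IDENTIFIED BY 'pass';\n"
        "GRANT ALL PRIVILEGES ON metastore.* TO 'hive'@'localhost' IDENTIFIED BY 'pass';\n"
        "GRANT ALL PRIVILEGES ON metastore.* TO 'hive'@'%' IDENTIFIED BY 'pass';\n")
    rendered = template.replace('pass', 'generated-uuid')
    assert "'pass'" not in rendered
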
View File

@@ -21,6 +21,8 @@ class HiveWorkflowCreator(base_workflow.OozieWorkflowCreator):

     def __init__(self):
         super(HiveWorkflowCreator, self).__init__('hive')
+        hive_elem = self.doc.getElementsByTagName('hive')[0]
+        hive_elem.setAttribute('xmlns', 'uri:oozie:hive-action:0.2')

     def build_workflow_xml(self, script, job_xml, prepare={},
                            configuration=None, params={},

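The effect of those two added lines can be reproduced in isolation with xml.dom.minidom, whose DOM API matches the calls used here (standalone sketch, not the plugin code):

    # Standalone illustration of setAttribute adding the Oozie hive-action namespace.
    from xml.dom.minidom import parseString

    doc = parseString('<workflow-app><hive/></workflow-app>')
    hive_elem = doc.getElementsByTagName('hive')[0]
    hive_elem.setAttribute('xmlns', 'uri:oozie:hive-action:0.2')
    print(doc.documentElement.toxml())
    # <workflow-app><hive xmlns="uri:oozie:hive-action:0.2"/></workflow-app>

Oozie expects the hive action element to carry its action namespace, which is presumably why the workflow test at the end of this change now expects the namespaced tag.
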
View File

@@ -156,9 +156,10 @@ class VanillaPluginTest(unittest2.TestCase):
         self.assertDictEqual(cfg, all_configured)

     def test_get_mysql_configs(self):
-        cfg = m_h.get_required_mysql_configs(None)
+        cfg = m_h.get_required_mysql_configs(None, None)
         self.assertDictEqual(cfg, m_h.get_oozie_mysql_configs())
-        cfg = m_h.get_required_mysql_configs("metastore_host")
+        cfg = m_h.get_required_mysql_configs("metastore_host", "passwd")
         cfg_to_compare = m_h.get_oozie_mysql_configs()
-        cfg_to_compare.update(m_h.get_hive_mysql_configs("metastore_host"))
+        cfg_to_compare.update(m_h.get_hive_mysql_configs(
+            "metastore_host", "passwd"))
         self.assertDictEqual(cfg, cfg_to_compare)

View File

@@ -115,7 +115,7 @@ class TestPigWorkflowCreator(unittest2.TestCase):
                                          self.prepare, self.configuration,
                                          params, self.files, self.archives)
         res = hive_workflow.get_built_workflow_xml()
-        hive_action = """ <hive>
+        hive_action = """ <hive xmlns="uri:oozie:hive-action:0.2">
 <job-tracker>${jobTracker}</job-tracker>
 <name-node>${nameNode}</name-node>
 <prepare>