Merge "Adding Storm 1.2.0 and 1.2.1"

This commit is contained in:
Zuul 2018-07-13 14:39:27 +00:00 committed by Gerrit Code Review
commit 3d0c891f90
6 changed files with 40 additions and 11 deletions

View File

@@ -0,0 +1,4 @@
---
upgrade:
- Adding new versions of Storm, 1.2.0 and 1.2.1. Both will exist under the
same tag 1.2.

View File

@@ -56,12 +56,11 @@ def generate_storm_config(master_hostname, zk_hostnames, version):
"storm.local.dir": "/app/storm"
}
# Since pyleus is built using previous versions os Storm we need this
# Since pyleus is built using previous versions of Storm we need this
# option to allow the cluster to be compatible with pyleus topologies as
# well as with topologies built using older versions of Storm
if version in ['1.0.1', '1.1.0']:
cfg['client.jartransformer.class'] = (
"org.apache.storm.hack.StormShadeTransformer")
cfg['client.jartransformer.class'] = (
"org.apache.storm.hack.StormShadeTransformer")
return cfg

View File

@@ -20,7 +20,7 @@ from sahara.service.edp.storm import engine as edp_engine
class EdpStormEngine(edp_engine.StormJobEngine):
edp_base_version = "0.9.2"
edp_base_version = "1.0.1"
@staticmethod
def edp_supported(version):
@@ -37,7 +37,7 @@ class EdpStormEngine(edp_engine.StormJobEngine):
class EdpPyleusEngine(edp_engine.StormPyleusJobEngine):
edp_base_version = "0.9.2"
edp_base_version = "1.0.1"
@staticmethod
def edp_supported(version):

View File

@@ -57,13 +57,14 @@ class StormProvider(p.ProvisioningPluginBase):
'deprecated': {'status': True}}
result = {'plugin_labels': copy.deepcopy(default)}
result['version_labels'] = {
'1.2': copy.deepcopy(default),
'1.1.0': copy.deepcopy(default),
'1.0.1': copy.deepcopy(deprecated),
}
return result
def get_versions(self):
return ['1.0.1', '1.1.0']
return ['1.0.1', '1.1.0', '1.2']
def get_configs(self, storm_version):
return c_helper.get_plugin_configs()

View File

@@ -23,6 +23,7 @@ class TestStormConfigHelper(testcase.TestCase):
def test_generate_storm_config(self):
STORM_101 = '1.0.1'
STORM_110 = '1.1.0'
STORM_120 = '1.2'
tested_versions = []
master_hostname = "s-master"
zk_hostnames = ["s-zoo"]
@@ -42,5 +43,13 @@
self.assertEqual(configs_110['client.jartransformer.class'],
'org.apache.storm.hack.StormShadeTransformer')
tested_versions.append(STORM_110)
configs_120 = s_config.generate_storm_config(
master_hostname, zk_hostnames, STORM_120)
self.assertNotIn('nimbus.host', configs_120.keys())
self.assertIn('nimbus.seeds', configs_120.keys())
self.assertIn('client.jartransformer.class', configs_120.keys())
self.assertEqual(configs_120['client.jartransformer.class'],
'org.apache.storm.hack.StormShadeTransformer')
tested_versions.append(STORM_120)
storm = s_plugin.StormProvider()
self.assertEqual(storm.get_versions(), tested_versions)

View File

@@ -71,10 +71,12 @@ class StormPluginTest(base.SaharaWithDbTestCase):
cluster_data_101 = self._get_cluster('cluster_1.0.1', '1.0.1')
cluster_data_110 = self._get_cluster('cluster_1.1.0', '1.1.0')
cluster_data_120 = self._get_cluster('cluster_1.2.0', '1.2')
cluster_data_101['node_groups'] = data
cluster_data_110['node_groups'] = data
cluster_data_120['node_groups'] = data
clusters = [cluster_data_101, cluster_data_110]
clusters = [cluster_data_101, cluster_data_110, cluster_data_120]
for cluster_data in clusters:
cluster = conductor.cluster_create(context.ctx(), cluster_data)
@@ -162,10 +164,12 @@ class StormPluginTest(base.SaharaWithDbTestCase):
cluster_data_101 = self._get_cluster('cluster_1.0.1', '1.0.1')
cluster_data_110 = self._get_cluster('cluster_1.1.0', '1.1.0')
cluster_data_120 = self._get_cluster('cluster_1.2.0', '1.2')
cluster_data_101['node_groups'] = data
cluster_data_110['node_groups'] = data
cluster_data_120['node_groups'] = data
clusters = [cluster_data_101, cluster_data_110]
clusters = [cluster_data_101, cluster_data_110, cluster_data_120]
for cluster_data in clusters:
cluster = conductor.cluster_create(context.ctx(), cluster_data)
@@ -194,10 +198,12 @@ class StormPluginTest(base.SaharaWithDbTestCase):
cluster_data_101 = self._get_cluster('cluster_1.0.1', '1.0.1')
cluster_data_110 = self._get_cluster('cluster_1.1.0', '1.1.0')
cluster_data_120 = self._get_cluster('cluster_1.2.0', '1.2')
cluster_data_101['node_groups'] = data
cluster_data_110['node_groups'] = data
cluster_data_120['node_groups'] = data
clusters = [cluster_data_101, cluster_data_110]
clusters = [cluster_data_101, cluster_data_110, cluster_data_120]
for cluster_data in clusters:
cluster = conductor.cluster_create(context.ctx(), cluster_data)
@@ -230,10 +236,12 @@ class StormPluginTest(base.SaharaWithDbTestCase):
cluster_data_101 = self._get_cluster('cluster_1.0.1', '1.0.1')
cluster_data_110 = self._get_cluster('cluster_1.1.0', '1.1.0')
cluster_data_120 = self._get_cluster('cluster_1.2.0', '1.2')
cluster_data_101['node_groups'] = data
cluster_data_110['node_groups'] = data
cluster_data_120['node_groups'] = data
clusters = [cluster_data_101, cluster_data_110]
clusters = [cluster_data_101, cluster_data_110, cluster_data_120]
for cluster_data in clusters:
cluster = conductor.cluster_create(context.ctx(), cluster_data)
@@ -276,3 +284,11 @@ class StormPluginTest(base.SaharaWithDbTestCase):
def test_plugin110_edp_storm_pyleus_engine(self):
self._test_engine('1.1.0', edp.JOB_TYPE_PYLEUS,
engine.StormJobEngine)
def test_plugin120_edp_storm_engine(self):
self._test_engine('1.2', edp.JOB_TYPE_STORM,
engine.StormJobEngine)
def test_plugin120_edp_storm_pyleus_engine(self):
self._test_engine('1.2', edp.JOB_TYPE_PYLEUS,
engine.StormJobEngine)