Test templates: add the ocata/ dir, remove liberty/ etc

The current set matches the job tested on the Sahara CI on the
current master, which is going to become Ocata.
Remove the directory with Liberty templates (Liberty has been EOL
for a while).
Also remove mapr-5.0.0.mrv2.yaml.mako in the main test directory
(it is going to be removed completely in newer versions).
Finally, fix the unit tests which depended on the test templates
for Liberty (use Newton for now).

Change-Id: I61cae34c7ce3b7de22502f4ebb66464df9164647
This commit is contained in:
Luigi Toscano 2017-01-31 16:52:15 +01:00
parent dabfeb562f
commit 52ea7a6e03
13 changed files with 169 additions and 206 deletions

View File

@ -0,0 +1,22 @@
---
# Release note for the Ocata test-template reshuffle (reno format).
prelude: >
    Ocata test templates are now available, while Liberty
    test templates have been removed.
features:
  - A folder with test templates for Ocata has been created
    and initialized starting from the templates in the main
    directory, following the status of the jobs tested on
    the Sahara CI.
deprecations:
  - The Liberty-specific job templates have been removed.
    This means that starting from this release Liberty is
    not supported (it has been EOL for a while).
  - The MapR 5.0.0 test template has been removed from
    the master branch as well.

View File

@ -1,54 +0,0 @@
<%page args="is_proxy_gateway='true', use_auto_security_group='true', ci_flavor_id='m1.small'"/>
# Scenario test template for the HDP 2.0.6 plugin
# (deleted by this change; content shown with indentation restored).
clusters:
  - plugin_name: hdp
    plugin_version: 2.0.6
    image: ${hdp_206_image}
    node_group_templates:
      - name: master
        flavor: ${ci_flavor_id}
        node_processes:
          - AMBARI_SERVER
          - GANGLIA_SERVER
          - HISTORYSERVER
          - NAGIOS_SERVER
          - NAMENODE
          - OOZIE_SERVER
          - RESOURCEMANAGER
          - SECONDARY_NAMENODE
          - ZOOKEEPER_SERVER
        auto_security_group: ${use_auto_security_group}
        is_proxy_gateway: ${is_proxy_gateway}
      - name: worker
        flavor: ${ci_flavor_id}
        node_processes:
          - DATANODE
          - HDFS_CLIENT
          - MAPREDUCE2_CLIENT
          - NODEMANAGER
          - OOZIE_CLIENT
          - PIG
          - YARN_CLIENT
          - ZOOKEEPER_CLIENT
        volumes_per_node: 2
        volumes_size: 2
        auto_security_group: ${use_auto_security_group}
    cluster_template:
      name: hdp206
      node_group_templates:
        master: 1
        worker: 3
      cluster_configs:
        YARN:
          yarn.log-aggregation-enable: false
    cluster:
      name: ${cluster_name}
    scaling:
      - operation: add
        node_group: worker
        size: 1
    edp_jobs_flow:
      - pig_job
      - mapreduce_job
      - mapreduce_streaming_job
      - java_job

View File

@ -1,54 +0,0 @@
<%page args="is_proxy_gateway='true', use_auto_security_group='true', ci_flavor_id='m1.small'"/>
# Transient-cluster scenario test template for the vanilla 2.7.1 plugin
# (deleted by this change; content shown with indentation restored).
clusters:
  - plugin_name: vanilla
    plugin_version: 2.7.1
    image: ${vanilla_271_image}
    node_group_templates:
      - name: worker
        flavor: ${ci_flavor_id}
        node_processes:
          - datanode
          - nodemanager
        volumes_per_node: 2
        volumes_size: 2
        auto_security_group: ${use_auto_security_group}
        node_configs:
          # Anchor kept for reuse by other node groups in this file.
          &ng_configs
          MapReduce:
            yarn.app.mapreduce.am.resource.mb: 256
            yarn.app.mapreduce.am.command-opts: -Xmx256m
          YARN:
            yarn.scheduler.minimum-allocation-mb: 256
            yarn.scheduler.maximum-allocation-mb: 1024
            yarn.nodemanager.vmem-check-enabled: false
      - name: master
        flavor: ${ci_flavor_id}
        node_processes:
          - oozie
          - historyserver
          - resourcemanager
          - namenode
        auto_security_group: ${use_auto_security_group}
        is_proxy_gateway: ${is_proxy_gateway}
    cluster_template:
      name: transient
      node_group_templates:
        master: 1
        worker: 3
      cluster_configs:
        HDFS:
          dfs.replication: 1
        MapReduce:
          mapreduce.tasktracker.map.tasks.maximum: 16
          mapreduce.tasktracker.reduce.tasks.maximum: 16
        YARN:
          yarn.resourcemanager.scheduler.class: org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler
    cluster:
      name: ${cluster_name}
      is_transient: true
    scenario:
      - run_jobs
      - transient
    edp_jobs_flow: pig_job

View File

@ -1,56 +0,0 @@
<%page args="use_auto_security_group='true', mapr_master_flavor_id='mapr.master', mapr_worker_flavor_id='mapr.worker'"/>
# Scenario test template for the MapR 5.0.0.mrv2 plugin
# (deleted by this change; content shown with indentation restored).
clusters:
  - plugin_name: mapr
    plugin_version: 5.0.0.mrv2
    image: ${mapr_500mrv2_image}
    node_group_templates:
      - name: master
        flavor:
          name: ${mapr_master_flavor_id}
          vcpus: 4
          ram: 8192
          root_disk: 80
          ephemeral_disk: 40
        node_processes:
          - Metrics
          - Webserver
          - ZooKeeper
          - HTTPFS
          - Oozie
          - FileServer
          - CLDB
          - Flume
          - Hue
          - NodeManager
          - HistoryServer
          - ResourceManager
          - HiveServer2
          - HiveMetastore
          - Sqoop2-Client
          - Sqoop2-Server
        auto_security_group: ${use_auto_security_group}
      - name: worker
        flavor:
          name: ${mapr_worker_flavor_id}
          vcpus: 2
          ram: 4096
          root_disk: 40
          ephemeral_disk: 40
        node_processes:
          - NodeManager
          - FileServer
        auto_security_group: ${use_auto_security_group}
    cluster_template:
      name: mapr500mrv2
      node_group_templates:
        master: 1
        worker: 1
    cluster:
      name: ${cluster_name}
    scaling:
      - operation: add
        node_group: worker
        size: 1
    edp_jobs_flow:
      - mapr

View File

@ -2,7 +2,7 @@
clusters: clusters:
- plugin_name: ambari - plugin_name: ambari
plugin_version: '2.3' plugin_version: '2.4'
image: ${ambari_22_image} image: ${ambari_22_image}
node_group_templates: node_group_templates:
- name: master - name: master
@ -58,8 +58,10 @@ clusters:
timeout: 30 timeout: 30
cluster: cluster:
name: ${cluster_name} name: ${cluster_name}
scenario: scaling:
- run_jobs - operation: add
node_group: worker
size: 1
edp_jobs_flow: edp_jobs_flow:
- java_job - java_job
- spark_pi - spark_pi

View File

@ -1,9 +1,9 @@
<%page args="is_proxy_gateway='true', use_auto_security_group='true', ci_flavor_id='m1.small', medium_flavor_id='m1.medium', large_flavor_id='m1.large'"/> <%page args="is_proxy_gateway='true', use_auto_security_group='true', ci_flavor_id='m1.small', large_flavor_id='m1.large'"/>
clusters: clusters:
- plugin_name: cdh - plugin_name: cdh
plugin_version: 5.4.0 plugin_version: 5.7.0
image: ${cdh_540_image} image: ${cdh_570_image}
node_group_templates: node_group_templates:
- name: worker-dn - name: worker-dn
flavor: ${ci_flavor_id} flavor: ${ci_flavor_id}
@ -39,7 +39,7 @@ clusters:
is_proxy_gateway: ${is_proxy_gateway} is_proxy_gateway: ${is_proxy_gateway}
auto_security_group: ${use_auto_security_group} auto_security_group: ${use_auto_security_group}
- name: master-core - name: master-core
flavor: ${medium_flavor_id} flavor: ${large_flavor_id}
node_processes: node_processes:
- HDFS_NAMENODE - HDFS_NAMENODE
- YARN_RESOURCEMANAGER - YARN_RESOURCEMANAGER
@ -48,7 +48,7 @@ clusters:
- ZOOKEEPER_SERVER - ZOOKEEPER_SERVER
auto_security_group: ${use_auto_security_group} auto_security_group: ${use_auto_security_group}
- name: master-additional - name: master-additional
flavor: ${medium_flavor_id} flavor: ${large_flavor_id}
node_processes: node_processes:
- OOZIE_SERVER - OOZIE_SERVER
- YARN_JOBHISTORY - YARN_JOBHISTORY
@ -58,8 +58,15 @@ clusters:
- HIVE_SERVER2 - HIVE_SERVER2
- SPARK_YARN_HISTORY_SERVER - SPARK_YARN_HISTORY_SERVER
auto_security_group: ${use_auto_security_group} auto_security_group: ${use_auto_security_group}
# In 5.7 the defaults of following configs are too large,
# restrict them to save memory for scenario testing.
node_configs:
HIVEMETASTORE:
hive_metastore_java_heapsize: 2147483648
HIVESERVER:
hiveserver2_java_heapsize: 2147483648
cluster_template: cluster_template:
name: cdh540 name: cdh570
node_group_templates: node_group_templates:
manager: 1 manager: 1
master-core: 1 master-core: 1

View File

@ -0,0 +1,90 @@
<%page args="is_proxy_gateway='true', use_auto_security_group='true', ci_flavor_id='m1.small', large_flavor_id='m1.large'"/>
# Scenario test template for the CDH 5.9.0 plugin
# (added by this change; content shown with indentation restored).
clusters:
  - plugin_name: cdh
    plugin_version: 5.9.0
    image: ${cdh_590_image}
    node_group_templates:
      - name: worker-dn
        flavor: ${ci_flavor_id}
        node_processes:
          - HDFS_DATANODE
        volumes_per_node: 2
        volumes_size: 2
        auto_security_group: ${use_auto_security_group}
        node_configs:
          # Anchor reused below by the worker-nm-dn node group.
          &ng_configs
          DATANODE:
            dfs_datanode_du_reserved: 0
      - name: worker-nm
        flavor: ${ci_flavor_id}
        node_processes:
          - YARN_NODEMANAGER
        auto_security_group: ${use_auto_security_group}
      - name: worker-nm-dn
        flavor: ${ci_flavor_id}
        node_processes:
          - YARN_NODEMANAGER
          - HDFS_DATANODE
        volumes_per_node: 2
        volumes_size: 2
        auto_security_group: ${use_auto_security_group}
        node_configs:
          *ng_configs
      - name: manager
        flavor: ${large_flavor_id}
        node_processes:
          - CLOUDERA_MANAGER
          - KMS
        is_proxy_gateway: ${is_proxy_gateway}
        auto_security_group: ${use_auto_security_group}
      - name: master-core
        flavor: ${large_flavor_id}
        node_processes:
          - HDFS_NAMENODE
          - YARN_RESOURCEMANAGER
          - SENTRY_SERVER
          - YARN_NODEMANAGER
          - ZOOKEEPER_SERVER
        auto_security_group: ${use_auto_security_group}
      - name: master-additional
        flavor: ${large_flavor_id}
        node_processes:
          - OOZIE_SERVER
          - YARN_JOBHISTORY
          - YARN_NODEMANAGER
          - HDFS_SECONDARYNAMENODE
          - HIVE_METASTORE
          - HIVE_SERVER2
          - SPARK_YARN_HISTORY_SERVER
        auto_security_group: ${use_auto_security_group}
        # In 5.9 the defaults of following configs are too large,
        # restrict them to save memory for scenario testing.
        node_configs:
          HIVEMETASTORE:
            hive_metastore_java_heapsize: 2147483648
          HIVESERVER:
            hiveserver2_java_heapsize: 2147483648
    cluster_template:
      name: cdh590
      node_group_templates:
        manager: 1
        master-core: 1
        master-additional: 1
        worker-nm-dn: 1
        worker-nm: 1
        worker-dn: 1
      cluster_configs:
        HDFS:
          dfs_replication: 1
    cluster:
      name: ${cluster_name}
    scenario:
      - run_jobs
      - sentry
    edp_jobs_flow:
      - pig_job
      - mapreduce_job
      - mapreduce_streaming_job
      - java_job
      - spark_wordcount

View File

@ -2,8 +2,8 @@
clusters: clusters:
- plugin_name: mapr - plugin_name: mapr
plugin_version: 5.0.0.mrv2 plugin_version: 5.2.0.mrv2
image: ${mapr_500mrv2_image} image: ${mapr_520mrv2_image}
node_group_templates: node_group_templates:
- name: master - name: master
flavor: flavor:
@ -42,7 +42,7 @@ clusters:
- FileServer - FileServer
auto_security_group: ${use_auto_security_group} auto_security_group: ${use_auto_security_group}
cluster_template: cluster_template:
name: mapr500mrv2 name: mapr520mrv2
node_group_templates: node_group_templates:
master: 1 master: 1
worker: 1 worker: 1

View File

@ -2,8 +2,8 @@
clusters: clusters:
- plugin_name: spark - plugin_name: spark
plugin_version: 1.3.1 plugin_version: 1.6.0
image: ${spark_131_image} image: ${spark_160_image}
node_group_templates: node_group_templates:
- name: master - name: master
flavor: ${ci_flavor_id} flavor: ${ci_flavor_id}
@ -19,7 +19,7 @@ clusters:
- slave - slave
auto_security_group: ${use_auto_security_group} auto_security_group: ${use_auto_security_group}
cluster_template: cluster_template:
name: spark131 name: spark160
node_group_templates: node_group_templates:
master: 1 master: 1
worker: 1 worker: 1

View File

@ -1,34 +1,37 @@
<%page args="is_proxy_gateway='true', use_auto_security_group='true', ci_flavor_id='m1.small'"/> <%page args="is_proxy_gateway='true', use_auto_security_group='true', ci_flavor_id='m1.small', medium_flavor_id='m1.medium'"/>
clusters: clusters:
- plugin_name: fake - plugin_name: storm
plugin_version: "0.1" plugin_version: 1.0.1
image: ${fake_plugin_image} image: ${storm_101_image}
node_group_templates: node_group_templates:
- name: worker
flavor: ${ci_flavor_id}
node_processes:
- datanode
- tasktracker
volumes_per_node: 2
volumes_size: 2
auto_security_group: ${use_auto_security_group}
- name: master - name: master
flavor: ${ci_flavor_id} flavor: ${ci_flavor_id}
node_processes: node_processes:
- jobtracker - nimbus
- namenode
auto_security_group: ${use_auto_security_group} auto_security_group: ${use_auto_security_group}
is_proxy_gateway: ${is_proxy_gateway} is_proxy_gateway: ${is_proxy_gateway}
- name: worker
flavor: ${ci_flavor_id}
node_processes:
- supervisor
auto_security_group: ${use_auto_security_group}
- name: zookeeper
flavor: ${medium_flavor_id}
node_processes:
- zookeeper
auto_security_group: ${use_auto_security_group}
cluster_template: cluster_template:
name: fake01 name: storm101
node_group_templates: node_group_templates:
master: 1 master: 1
worker: 1 worker: 1
zookeeper: 1
cluster: cluster:
name: ${cluster_name} name: ${cluster_name}
scaling: scaling:
- operation: add - operation: add
node_group: worker node_group: worker
size: 1 size: 1
edp_jobs_flow: pig_job scenario:
- scale

View File

@ -25,13 +25,14 @@ clusters:
volumes_per_node: 2 volumes_per_node: 2
volumes_size: 2 volumes_size: 2
auto_security_group: ${use_auto_security_group} auto_security_group: ${use_auto_security_group}
- name: master-rm-nn-hvs - name: master-rm-nn-hvs-sp
flavor: ${ci_flavor_id} flavor: ${ci_flavor_id}
node_processes: node_processes:
- namenode - namenode
- resourcemanager - resourcemanager
- hiveserver - hiveserver
- nodemanager - nodemanager
- spark history server
auto_security_group: ${use_auto_security_group} auto_security_group: ${use_auto_security_group}
- name: master-oo-hs-sn - name: master-oo-hs-sn
flavor: ${ci_flavor_id} flavor: ${ci_flavor_id}
@ -45,7 +46,7 @@ clusters:
cluster_template: cluster_template:
name: vanilla271 name: vanilla271
node_group_templates: node_group_templates:
master-rm-nn-hvs: 1 master-rm-nn-hvs-sp: 1
master-oo-hs-sn: 1 master-oo-hs-sn: 1
worker-dn-nm: 2 worker-dn-nm: 2
worker-dn: 1 worker-dn: 1
@ -77,3 +78,4 @@ clusters:
- mapreduce_streaming_job - mapreduce_streaming_job
- java_job - java_job
- hive_job - hive_job
- spark_wordcount

View File

@ -4,8 +4,8 @@ network_private_name: private
network_public_name: public network_public_name: public
vanilla_26_image: centos_sahara_vanilla_hadoop_2_6_latest vanilla_26_image: centos_sahara_vanilla_hadoop_2_6_latest
vanilla_271_image: vanilla271 vanilla_271_image: vanilla271
spark_131_image: spark spark_160_image: spark
cdh_540_image: cdh540 cdh_550_image: cdh550
cluster_name: cluster cluster_name: cluster
ci_flavor_id: '2' ci_flavor_id: '2'
medium_flavor_id: '3' medium_flavor_id: '3'

View File

@ -375,11 +375,11 @@ class RunnerUnitTest(testtools.TestCase):
@mock.patch('subprocess.Popen', @mock.patch('subprocess.Popen',
return_value=_create_subprocess_communicate_mock()) return_value=_create_subprocess_communicate_mock())
@mock.patch('sys.exit', return_value=None) @mock.patch('sys.exit', return_value=None)
def test_default_templates_kilo(self, mock_sys, mock_sub, mock_validate): def test_default_templates(self, mock_sys, mock_sub, mock_validate):
sys.argv = ['sahara_tests/scenario/runner.py', sys.argv = ['sahara_tests/scenario/runner.py',
'-V', '-V',
'sahara_tests/unit/scenario/templatevars_complete.ini', 'sahara_tests/unit/scenario/templatevars_complete.ini',
'-p', 'spark', '-v', '1.3.1', '-r', 'liberty', '-p', 'spark', '-v', '1.6.0', '-r', 'newton',
'--os-username', 'demo', '--os-password', 'demopwd', '--os-username', 'demo', '--os-password', 'demopwd',
'--os-project-name', 'demo', '--os-project-name', 'demo',
'--os-auth-url', 'http://127.0.0.1:5000/v2'] '--os-auth-url', 'http://127.0.0.1:5000/v2']
@ -387,7 +387,7 @@ class RunnerUnitTest(testtools.TestCase):
self.assertEqual('spark', self.assertEqual('spark',
mock_validate.call_args[0][0]['clusters'][0][ mock_validate.call_args[0][0]['clusters'][0][
'plugin_name']) 'plugin_name'])
self.assertEqual('1.3.1', self.assertEqual('1.6.0',
mock_validate.call_args[0][0]['clusters'][0][ mock_validate.call_args[0][0]['clusters'][0][
'plugin_version']) 'plugin_version'])
@ -398,7 +398,7 @@ class RunnerUnitTest(testtools.TestCase):
sys.argv = ['sahara_tests/scenario/runner.py', sys.argv = ['sahara_tests/scenario/runner.py',
'-V', '-V',
'sahara_tests/unit/scenario/templatevars_complete.ini', 'sahara_tests/unit/scenario/templatevars_complete.ini',
'-p', 'spark', '-v', '1.3.1', '--release', 'liberty', '-p', 'spark', '-v', '1.6.0', '--release', 'newton',
'--count', '4', '--count', '4',
'--os-username', 'demo', '--os-password', 'demopwd', '--os-username', 'demo', '--os-password', 'demopwd',
'--os-project-name', 'demo', '--os-project-name', 'demo',
@ -412,14 +412,15 @@ class RunnerUnitTest(testtools.TestCase):
sys.argv = ['sahara_tests/scenario/runner.py', sys.argv = ['sahara_tests/scenario/runner.py',
'-V', '-V',
'sahara_tests/unit/scenario/templatevars_complete.ini', 'sahara_tests/unit/scenario/templatevars_complete.ini',
'sahara_tests/scenario/defaults/liberty', 'sahara_tests/scenario/defaults/newton',
'sahara_tests/scenario/defaults/edp.yaml.mako', 'sahara_tests/scenario/defaults/edp.yaml.mako',
'--os-username', 'demo', '--os-password', 'demopwd', '--os-username', 'demo', '--os-password', 'demopwd',
'--os-project-name', 'demo', '--os-project-name', 'demo',
'--os-auth-url', 'http://127.0.0.1:5000/v2', '--args', '--os-auth-url', 'http://127.0.0.1:5000/v2', '--args',
'ambari_22_image:ambari', 'fake_plugin_image:fake', 'ambari_22_image:ambari', 'fake_plugin_image:fake',
'mapr_500mrv2_image:mapr', 'spark_131_image:spark', 'mapr_510mrv2_image:mapr', 'mapr_520mrv2_image:mapr',
'hdp_206_image:hdp'] 'cdh_570_image:cdh', 'spark_160_image:spark',
'storm_101_image:storm']
runner.main() runner.main()
@mock.patch('sahara_tests.scenario.validation.validate') @mock.patch('sahara_tests.scenario.validation.validate')