author    Jenkins <jenkins@review.openstack.org>  2016-09-10 13:21:29 +0000
committer Gerrit Code Review <review@openstack.org>  2016-09-10 13:21:29 +0000
commit    5eeeab432b95f59b2a1c2db85f1f1a297bd22668 (patch)
tree      72b2ff44631c903b7e540aac1bca878da40d0c61
parent    a2a07ae79267eabb3a26814078e0401ee75d1910 (diff)
parent    b3447fbf9034546983ddead59a0fdc4f5eca4c89 (diff)
Merge "Remove Tempest-like tests for clients (see sahara-tests)"
-rw-r--r--  sahara/tests/tempest/__init__.py  0
-rw-r--r--  sahara/tests/tempest/scenario/__init__.py  0
-rw-r--r--  sahara/tests/tempest/scenario/data_processing/README.rst  116
-rw-r--r--  sahara/tests/tempest/scenario/data_processing/__init__.py  0
-rw-r--r--  sahara/tests/tempest/scenario/data_processing/client_tests/__init__.py  0
-rw-r--r--  sahara/tests/tempest/scenario/data_processing/client_tests/base.py  281
-rw-r--r--  sahara/tests/tempest/scenario/data_processing/client_tests/test_cluster_templates.py  97
-rw-r--r--  sahara/tests/tempest/scenario/data_processing/client_tests/test_data_sources.py  92
-rw-r--r--  sahara/tests/tempest/scenario/data_processing/client_tests/test_job_binaries.py  132
-rw-r--r--  sahara/tests/tempest/scenario/data_processing/client_tests/test_job_binary_internals.py  61
-rw-r--r--  sahara/tests/tempest/scenario/data_processing/client_tests/test_job_executions.py  317
-rw-r--r--  sahara/tests/tempest/scenario/data_processing/client_tests/test_jobs.py  79
-rw-r--r--  sahara/tests/tempest/scenario/data_processing/client_tests/test_node_group_templates.py  76
-rw-r--r--  sahara/tests/tempest/scenario/data_processing/client_tests/test_plugins.py  45
-rw-r--r--  sahara/tests/tempest/scenario/data_processing/config.py  34
-rw-r--r--  sahara/tests/tempest/scenario/data_processing/etc/sahara_tests.conf  7
-rw-r--r--  sahara/tests/tempest/scenario/data_processing/etc/sahara_tests.conf.sample  28
-rw-r--r--  sahara/tests/tempest/scenario/data_processing/plugin.py  37
-rw-r--r--  test-requirements.txt  5
19 files changed, 0 insertions, 1407 deletions
diff --git a/sahara/tests/tempest/__init__.py b/sahara/tests/tempest/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/sahara/tests/tempest/__init__.py
+++ /dev/null
diff --git a/sahara/tests/tempest/scenario/__init__.py b/sahara/tests/tempest/scenario/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/sahara/tests/tempest/scenario/__init__.py
+++ /dev/null
diff --git a/sahara/tests/tempest/scenario/data_processing/README.rst b/sahara/tests/tempest/scenario/data_processing/README.rst
deleted file mode 100644
index 1d9f3f9..0000000
--- a/sahara/tests/tempest/scenario/data_processing/README.rst
+++ /dev/null
@@ -1,116 +0,0 @@
-Tests for Sahara Client in Tempest
-====================================
-
-How to run
-----------
-
-Get the latest sahara resources from the appropriate mirror:
-
-.. sourcecode:: console
-
-    $ git clone https://github.com/openstack/sahara.git
-..
-
-Install sahara, in order to register the tempest plugin interface:
-
-.. sourcecode:: console
-
-    $ pip install $SAHARA_ROOT_DIR
-..
-
-Get the latest python-saharaclient resources from the appropriate mirror:
-
-.. sourcecode:: console
-
-    $ git clone https://github.com/openstack/python-saharaclient.git
-..
-
-Install python-saharaclient:
-
-.. sourcecode:: console
-
-    $ pip install $SAHARACLIENT_ROOT_DIR
-..
-
-Get the latest tempest resources from the appropriate mirror:
-
-.. sourcecode:: console
-
-    $ git clone https://github.com/openstack/tempest.git
-..
-
-Create a configuration file ``tempest/etc/tempest.conf`` for tempest.
-The sample file can be generated and used for this purpose:
-
-.. sourcecode:: console
-
-    $ cd $TEMPEST_ROOT_DIR
-    $ tox -e genconfig
-    $ cp etc/tempest.conf.sample etc/tempest.conf
-..
-
-Some configuration options are required for running tests. Here is the list:
-
-.. sourcecode:: ini
-
-    [auth]
-    admin_username=
-    admin_project_name=
-    admin_password=
-
-    [identity]
-    uri=
-    uri_v3=
-
-    [compute]
-    fixed_network_name=
-    flavor_ref=
-
-    [network]
-    floating_network_name=
-    public_network_id=
-
-    [data-processing]
-    fake_image_id=
-
-    [validation]
-    image_ssh_user=
-
-    [service_available]
-    sahara=true
-    neutron=true
-
-..
-
-All the parameters above are defined by tempest, with the exception of
-data_processing.fake_image_id, which is defined by the scenario python
-client tests here.
-
-Other relevant parameters (all defined by scenario python client tests):
-
-.. sourcecode:: ini
-
-    [data-processing]
-    ...
-    endpoint_type=
-    catalog_type=
-    saharaclient_version=1.1
-    sahara_url=
-    cluster_timeout=1800
-    request_timeout=10
-
-..
-
-When configuration is finished, you can launch the tests from tempest with:
-
-.. sourcecode:: console
-
-    $ tox -e all-plugin -- tempest.scenario.data_processing.client_tests
-..
-
-If you want to launch all Sahara tests in Tempest, you can do this with ``data_processing`` tag:
-
-.. sourcecode:: console
-
-    $ tox -e all-plugin -- data_processing
-..
diff --git a/sahara/tests/tempest/scenario/data_processing/__init__.py b/sahara/tests/tempest/scenario/data_processing/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/sahara/tests/tempest/scenario/data_processing/__init__.py
+++ /dev/null
diff --git a/sahara/tests/tempest/scenario/data_processing/client_tests/__init__.py b/sahara/tests/tempest/scenario/data_processing/client_tests/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/sahara/tests/tempest/scenario/data_processing/client_tests/__init__.py
+++ /dev/null
diff --git a/sahara/tests/tempest/scenario/data_processing/client_tests/base.py b/sahara/tests/tempest/scenario/data_processing/client_tests/base.py
deleted file mode 100644
index 325bfbb..0000000
--- a/sahara/tests/tempest/scenario/data_processing/client_tests/base.py
+++ /dev/null
@@ -1,281 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import time
-
-from oslo_utils import timeutils
-from saharaclient.api import base as sab
-from saharaclient import client as sahara_client
-from tempest import config
-from tempest import exceptions
-from tempest.scenario import manager
-
-
-TEMPEST_CONF = config.CONF
-
-# cluster status
-CLUSTER_STATUS_ACTIVE = "Active"
-CLUSTER_STATUS_ERROR = "Error"
-
-
-class BaseDataProcessingTest(manager.ScenarioTest):
-    @classmethod
-    def resource_setup(cls):
-        cls.set_network_resources()
-        super(BaseDataProcessingTest, cls).resource_setup()
-
-        endpoint_type = TEMPEST_CONF.data_processing.endpoint_type
-        catalog_type = TEMPEST_CONF.data_processing.catalog_type
-        auth_url = TEMPEST_CONF.identity.uri
-
-        credentials = cls.os_primary.credentials
-
-        cls.client = sahara_client.Client(
-            TEMPEST_CONF.data_processing.saharaclient_version,
-            credentials.username,
-            credentials.password,
-            project_name=credentials.tenant_name,
-            endpoint_type=endpoint_type,
-            service_type=catalog_type,
-            auth_url=auth_url,
-            sahara_url=TEMPEST_CONF.data_processing.sahara_url)
-
-        cls.object_client = cls.os_primary.object_client
-        cls.container_client = cls.os_primary.container_client
-        cls.networks_client = cls.os_primary.compute_networks_client
-
-        cls.floating_ip_pool = TEMPEST_CONF.network.floating_network_name
-        if TEMPEST_CONF.service_available.neutron:
-            cls.floating_ip_pool = cls.get_floating_ip_pool_id_for_neutron()
-
-        cls.worker_template = {
-            'description': 'Test node group template',
-            'plugin_name': 'fake',
-            'hadoop_version': '0.1',
-            'node_processes': [
-                'datanode',
-                'tasktracker'
-            ],
-            'flavor_id': TEMPEST_CONF.compute.flavor_ref,
-            'floating_ip_pool': cls.floating_ip_pool
-        }
-
-        cls.master_template = {
-            'description': 'Test node group template',
-            'plugin_name': 'fake',
-            'hadoop_version': '0.1',
-            'node_processes': [
-                'namenode',
-                'jobtracker'
-            ],
-            'flavor_id': TEMPEST_CONF.compute.flavor_ref,
-            'floating_ip_pool': cls.floating_ip_pool,
-            'auto_security_group': True
-        }
-
-        cls.cluster_template = {
-            'description': 'Test cluster template',
-            'plugin_name': 'fake',
-            'hadoop_version': '0.1'
-        }
-
-        cls.swift_data_source_with_creds = {
-            'url': 'swift://sahara-container/input-source',
-            'description': 'Test data source',
-            'type': 'swift',
-            'credentials': {
-                'user': 'test',
-                'password': '123'
-            }
-        }
-
-        cls.local_hdfs_data_source = {
-            'url': 'input-source',
-            'description': 'Test data source',
-            'type': 'hdfs',
-        }
-
-        cls.external_hdfs_data_source = {
-            'url': 'hdfs://test-master-node/usr/hadoop/input-source',
-            'description': 'Test data source',
-            'type': 'hdfs'
-        }
-
-    @classmethod
-    def get_floating_ip_pool_id_for_neutron(cls):
-        net_id = cls._find_network_by_name(
-            TEMPEST_CONF.network.floating_network_name)
-        if not net_id:
-            raise exceptions.NotFound(
-                'Floating IP pool \'%s\' not found in pool list.'
-                % TEMPEST_CONF.network.floating_network_name)
-        return net_id
-
-    @classmethod
-    def get_private_network_id(cls):
-        net_id = cls._find_network_by_name(
-            TEMPEST_CONF.compute.fixed_network_name)
-        if not net_id:
-            raise exceptions.NotFound(
-                'Private network \'%s\' not found in network list.'
-                % TEMPEST_CONF.compute.fixed_network_name)
-        return net_id
-
-    @classmethod
-    def _find_network_by_name(cls, network_name):
-        for network in cls.networks_client.list_networks()['networks']:
-            if network['label'] == network_name:
-                return network['id']
-        return None
-
-    def create_node_group_template(self, name, **kwargs):
-
-        resp_body = self.client.node_group_templates.create(
-            name, **kwargs)
-
-        self.addCleanup(self.delete_resource,
-                        self.client.node_group_templates, resp_body.id)
-
-        return resp_body
-
-    def create_cluster_template(self, name, **kwargs):
-
-        resp_body = self.client.cluster_templates.create(
-            name, **kwargs)
-
-        self.addCleanup(self.delete_resource,
-                        self.client.cluster_templates, resp_body.id)
-
-        return resp_body
-
-    def create_data_source(self, name, url, description, type,
-                           credentials=None):
-
-        user = credentials['user'] if credentials else None
-        pas = credentials['password'] if credentials else None
-
-        resp_body = self.client.data_sources.create(
-            name, description, type, url, credential_user=user,
-            credential_pass=pas)
-
-        self.addCleanup(self.delete_resource,
-                        self.client.data_sources, resp_body.id)
-
-        return resp_body
-
-    def create_job_binary(self, name, url, description, extra=None):
-
-        resp_body = self.client.job_binaries.create(
-            name, url, description, extra)
-
-        self.addCleanup(self.delete_resource,
-                        self.client.job_binaries, resp_body.id)
-
-        return resp_body
-
-    def create_job_binary_internal(self, name, data):
-
-        resp_body = self.client.job_binary_internals.create(name, data)
-
-        self.addCleanup(self.delete_resource,
-                        self.client.job_binary_internals, resp_body.id)
-
-        return resp_body
-
-    def create_job(self, name, job_type, mains, libs=None, description=None):
-
-        libs = libs or ()
-        description = description or ''
-
-        resp_body = self.client.jobs.create(
-            name, job_type, mains, libs, description)
-
-        self.addCleanup(self.delete_resource, self.client.jobs, resp_body.id)
-
-        return resp_body
-
-    def create_cluster(self, name, **kwargs):
-
-        resp_body = self.client.clusters.create(name, **kwargs)
-
-        self.addCleanup(self.delete_resource, self.client.clusters,
-                        resp_body.id)
-
-        return resp_body
-
-    def check_cluster_active(self, cluster_id):
-        timeout = TEMPEST_CONF.data_processing.cluster_timeout
-        s_time = timeutils.utcnow()
-        while timeutils.delta_seconds(s_time, timeutils.utcnow()) < timeout:
-            cluster = self.client.clusters.get(cluster_id)
-            if cluster.status == CLUSTER_STATUS_ACTIVE:
-                return
-            if cluster.status == CLUSTER_STATUS_ERROR:
-                raise exceptions.BuildErrorException(
-                    'Cluster failed to build and is in %s status.' %
-                    CLUSTER_STATUS_ERROR)
-            time.sleep(TEMPEST_CONF.data_processing.request_timeout)
-        raise exceptions.TimeoutException(
-            'Cluster failed to get to %s status within %d seconds.'
-            % (CLUSTER_STATUS_ACTIVE, timeout))
-
-    def create_job_execution(self, **kwargs):
-
-        resp_body = self.client.job_executions.create(**kwargs)
-
-        self.addCleanup(self.delete_resource, self.client.job_executions,
-                        resp_body.id)
-
-        return resp_body
-
-    def create_container(self, name):
-
-        self.container_client.create_container(name)
-
-        self.addCleanup(self.delete_swift_container, name)
-
-    def delete_resource(self, resource_client, resource_id):
-        try:
-            resource_client.delete(resource_id)
-        except sab.APIException:
-            pass
-        else:
-            self.delete_timeout(resource_client, resource_id)
-
-    def delete_timeout(
-            self, resource_client, resource_id,
-            timeout=TEMPEST_CONF.data_processing.cluster_timeout):
-
-        start = timeutils.utcnow()
-        while timeutils.delta_seconds(start, timeutils.utcnow()) < timeout:
-            try:
-                resource_client.get(resource_id)
-            except sab.APIException as sahara_api_exception:
-                if 'not found' in sahara_api_exception.message:
-                    return
-                raise sahara_api_exception
-
-            time.sleep(TEMPEST_CONF.data_processing.request_timeout)
-
-        raise exceptions.TimeoutException(
-            'Failed to delete resource "%s" in %d seconds.'
-            % (resource_id, timeout))
-
-    def delete_swift_container(self, container):
-        objects = ([obj['name'] for obj in
-                    self.container_client.list_all_container_objects(
-                        container)])
-        for obj in objects:
-            self.object_client.delete_object(container, obj)
-        self.container_client.delete_container(container)
diff --git a/sahara/tests/tempest/scenario/data_processing/client_tests/test_cluster_templates.py b/sahara/tests/tempest/scenario/data_processing/client_tests/test_cluster_templates.py
deleted file mode 100644
index 2435dad..0000000
--- a/sahara/tests/tempest/scenario/data_processing/client_tests/test_cluster_templates.py
+++ /dev/null
@@ -1,97 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from tempest import config
-from tempest.lib.common.utils import data_utils
-
-from sahara.tests.tempest.scenario.data_processing.client_tests import base
-
-
-TEMPEST_CONF = config.CONF
-
-
-class ClusterTemplateTest(base.BaseDataProcessingTest):
-    def _check_create_cluster_template(self):
-        ng_template_name = data_utils.rand_name('sahara-ng-template')
-        ng_template = self.create_node_group_template(ng_template_name,
-                                                      **self.worker_template)
-
-        full_cluster_template = self.cluster_template.copy()
-        full_cluster_template['node_groups'] = [
-            {
-                'name': 'master-node',
-                'flavor_id': TEMPEST_CONF.compute.flavor_ref,
-                'node_processes': ['namenode'],
-                'count': 1
-            },
-            {
-                'name': 'worker-node',
-                'node_group_template_id': ng_template.id,
-                'count': 3
-            }
-        ]
-
-        template_name = data_utils.rand_name('sahara-cluster-template')
-
-        # create cluster template
-        resp_body = self.create_cluster_template(template_name,
-                                                 **full_cluster_template)
-
-        # check that template created successfully
-        self.assertEqual(template_name, resp_body.name)
-        self.assertDictContainsSubset(self.cluster_template,
-                                      resp_body.__dict__)
-
-        return resp_body.id, template_name
-
-    def _check_cluster_template_list(self, template_id, template_name):
-        # check for cluster template in list
-        template_list = self.client.cluster_templates.list()
-        templates_info = [(template.id, template.name)
-                          for template in template_list]
-        self.assertIn((template_id, template_name), templates_info)
-
-    def _check_cluster_template_get(self, template_id, template_name):
-        # check cluster template fetch by id
-        template = self.client.cluster_templates.get(
-            template_id)
-        self.assertEqual(template_name, template.name)
-        self.assertDictContainsSubset(self.cluster_template, template.__dict__)
-
-    def _check_cluster_template_update(self, template_id):
-        values = {
-            'name': data_utils.rand_name('updated-sahara-ct'),
-            'description': 'description',
-        }
-
-        # check updating of cluster template
-        template = self.client.cluster_templates.update(
-            template_id, **values)
-        self.assertDictContainsSubset(values, template.__dict__)
-
-    def _check_cluster_template_delete(self, template_id):
-        # delete cluster template by id
-        self.client.cluster_templates.delete(
-            template_id)
-
-        # check that cluster template really deleted
-        templates = self.client.cluster_templates.list()
-        self.assertNotIn(template_id, [template.id for template in templates])
-
-    def test_cluster_templates(self):
-        template_id, template_name = self._check_create_cluster_template()
-        self._check_cluster_template_list(template_id, template_name)
-        self._check_cluster_template_get(template_id, template_name)
-        self._check_cluster_template_update(template_id)
-        self._check_cluster_template_delete(template_id)
diff --git a/sahara/tests/tempest/scenario/data_processing/client_tests/test_data_sources.py b/sahara/tests/tempest/scenario/data_processing/client_tests/test_data_sources.py
deleted file mode 100644
index 71b6867..0000000
--- a/sahara/tests/tempest/scenario/data_processing/client_tests/test_data_sources.py
+++ /dev/null
@@ -1,92 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from tempest.lib.common.utils import data_utils
-
-from sahara.tests.tempest.scenario.data_processing.client_tests import base
-
-
-class DataSourceTest(base.BaseDataProcessingTest):
-    def _check_data_source_create(self, source_body):
-        source_name = data_utils.rand_name('sahara-data-source')
-        # create data source
-        resp_body = self.create_data_source(source_name, **source_body)
-        # check that source created successfully
-        self.assertEqual(source_name, resp_body.name)
-        if source_body['type'] == 'swift':
-            source_body = self.swift_data_source
-        self.assertDictContainsSubset(source_body, resp_body.__dict__)
-
-        return resp_body.id, source_name
-
-    def _check_data_source_list(self, source_id, source_name):
-        # check for data source in list
-        source_list = self.client.data_sources.list()
-        sources_info = [(source.id, source.name) for source in source_list]
-        self.assertIn((source_id, source_name), sources_info)
-
-    def _check_data_source_get(self, source_id, source_name, source_body):
-        # check data source fetch by id
-        source = self.client.data_sources.get(source_id)
-        self.assertEqual(source_name, source.name)
-        self.assertDictContainsSubset(source_body, source.__dict__)
-
-    def _check_data_source_update(self, source_id):
-        values = {
-            'name': data_utils.rand_name('updated-sahara-data-source'),
-            'description': 'description',
-            'type': 'hdfs',
-            'url': 'hdfs://user/foo'
-        }
-
-        source = self.client.data_sources.update(source_id, values)
-
-        self.assertDictContainsSubset(values, source.data_source)
-
-    def _check_data_source_delete(self, source_id):
-        # delete data source
-        self.client.data_sources.delete(source_id)
-        # check that data source really deleted
-        source_list = self.client.data_sources.list()
-        self.assertNotIn(source_id, [source.id for source in source_list])
-
-    def test_swift_data_source(self):
-        # Create extra self.swift_data_source variable to use for comparison to
-        # data source response body because response body has no 'credentials'
-        # field.
-        self.swift_data_source = self.swift_data_source_with_creds.copy()
-        del self.swift_data_source['credentials']
-        source_id, source_name = self._check_data_source_create(
-            self.swift_data_source_with_creds)
-        self._check_data_source_list(source_id, source_name)
-        self._check_data_source_get(source_id, source_name,
-                                    self.swift_data_source)
-        self._check_data_source_delete(source_id)
-
-    def test_local_hdfs_data_source(self):
-        source_id, source_name = self._check_data_source_create(
-            self.local_hdfs_data_source)
-        self._check_data_source_list(source_id, source_name)
-        self._check_data_source_get(source_id, source_name,
-                                    self.local_hdfs_data_source)
-        self._check_data_source_delete(source_id)
-
-    def test_external_hdfs_data_source(self):
-        source_id, source_name = self._check_data_source_create(
-            self.external_hdfs_data_source)
-        self._check_data_source_list(source_id, source_name)
-        self._check_data_source_get(source_id, source_name,
-                                    self.external_hdfs_data_source)
-        self._check_data_source_update(source_id)
-        self._check_data_source_delete(source_id)
diff --git a/sahara/tests/tempest/scenario/data_processing/client_tests/test_job_binaries.py b/sahara/tests/tempest/scenario/data_processing/client_tests/test_job_binaries.py
deleted file mode 100644
index d078076..0000000
--- a/sahara/tests/tempest/scenario/data_processing/client_tests/test_job_binaries.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from tempest.lib.common.utils import data_utils
-
-from sahara.tests.tempest.scenario.data_processing.client_tests import base
-
-
-class JobBinariesTest(base.BaseDataProcessingTest):
-    def _check_job_binary_create(self, binary_body):
-        binary_name = data_utils.rand_name('sahara-job-binary')
-
-        # create job binary
-        resp_body = self.create_job_binary(binary_name, **binary_body)
-
-        # ensure that binary created successfully
-        self.assertEqual(binary_name, resp_body.name)
-        if 'swift' in binary_body['url']:
-            binary_body = self.swift_job_binary
-        else:
-            binary_body = self.internal_db_binary
-        self.assertDictContainsSubset(binary_body, resp_body.__dict__)
-
-        return resp_body.id, binary_name
-
-    def _check_job_binary_list(self, binary_id, binary_name):
-        # check for job binary in list
-        binary_list = self.client.job_binaries.list()
-        binaries_info = [(binary.id, binary.name) for binary in binary_list]
-        self.assertIn((binary_id, binary_name), binaries_info)
-
-    def _check_job_binary_delete(self, binary_id):
-        # delete job binary by id
-        self.client.job_binaries.delete(binary_id)
-        # check that job binary really deleted
-        binary_list = self.client.job_binaries.list()
-        self.assertNotIn(binary_id, [binary.id for binary in binary_list])
-
-    def _check_swift_job_binary_create(self):
-        self.swift_job_binary_with_extra = {
-            'url': 'swift://sahara-container/example.jar',
-            'description': 'Test job binary',
-            'extra': {
-                'user': 'test',
-                'password': '123'
-            }
-        }
-        # Create extra self.swift_job_binary variable to use for comparison to
-        # job binary response body because response body has no 'extra' field.
-        self.swift_job_binary = self.swift_job_binary_with_extra.copy()
-        del self.swift_job_binary['extra']
-        return self._check_job_binary_create(self.swift_job_binary_with_extra)
-
-    def _check_swift_job_binary_get(self, binary_id, binary_name):
-        # check job binary fetch by id
-        binary = self.client.job_binaries.get(binary_id)
-        self.assertEqual(binary_name, binary.name)
-        self.assertDictContainsSubset(self.swift_job_binary, binary.__dict__)
-
-    def _check_swift_job_binary_update(self, binary_id):
-        values = {
-            'url': 'swift://user/foo',
-            'description': 'description'
-        }
-        # check updating of job binary in swift
-        binary = self.client.job_binaries.update(binary_id, values)
-        self.assertDictContainsSubset(values, binary.__dict__)
-
-    def _check_internal_db_job_binary_create(self):
-        name = data_utils.rand_name('sahara-internal-job-binary')
-        self.job_binary_data = 'Some data'
-        job_binary_internal = (
-            self.create_job_binary_internal(name, self.job_binary_data))
-        self.internal_db_binary_with_extra = {
-            'url': 'internal-db://%s' % job_binary_internal.id,
-            'description': 'Test job binary',
-            'extra': {
-                'user': 'test',
-                'password': '123'
-            }
-        }
-        # Create extra self.internal_db_binary variable to use for comparison
-        # to job binary response body because response body has no 'extra'
-        # field.
-        self.internal_db_binary = self.internal_db_binary_with_extra.copy()
-        del self.internal_db_binary['extra']
-        return self._check_job_binary_create(
-            self.internal_db_binary_with_extra)
-
-    def _check_internal_db_job_binary_get(self, binary_id, binary_name):
-        # check job binary fetch by id
-        binary = self.client.job_binaries.get(binary_id)
-        self.assertEqual(binary_name, binary.name)
-        self.assertDictContainsSubset(self.internal_db_binary, binary.__dict__)
-
-    def _check_internal_db_job_binary_update(self, binary_id):
-        values = {
-            'description': 'description'
-        }
-        # check updating of job binary in internal db
-        binary = self.client.job_binaries.update(binary_id, values)
-        self.assertDictContainsSubset(values, binary.__dict__)
-
-    def _check_job_binary_get_file(self, binary_id):
-        data = self.client.job_binaries.get_file(binary_id)
-        self.assertEqual(self.job_binary_data, data)
-
-    def test_swift_job_binaries(self):
-        binary_id, binary_name = self._check_swift_job_binary_create()
-        self._check_job_binary_list(binary_id, binary_name)
-        self._check_swift_job_binary_get(binary_id, binary_name)
-        self._check_swift_job_binary_update(binary_id)
-        self._check_job_binary_delete(binary_id)
-
-    def test_internal_job_binaries(self):
-        binary_id, binary_name = self._check_internal_db_job_binary_create()
-        self._check_job_binary_list(binary_id, binary_name)
-        self._check_internal_db_job_binary_get(binary_id, binary_name)
-        self._check_job_binary_get_file(binary_id)
-        self._check_internal_db_job_binary_update(binary_id)
-        self._check_job_binary_delete(binary_id)
diff --git a/sahara/tests/tempest/scenario/data_processing/client_tests/test_job_binary_internals.py b/sahara/tests/tempest/scenario/data_processing/client_tests/test_job_binary_internals.py
deleted file mode 100644
index 736b0a4..0000000
--- a/sahara/tests/tempest/scenario/data_processing/client_tests/test_job_binary_internals.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from tempest.lib.common.utils import data_utils
-
-from sahara.tests.tempest.scenario.data_processing.client_tests import base
-
-
-class JobBinaryInternalsTest(base.BaseDataProcessingTest):
-    def _check_job_binary_internal_create(self):
-        name = data_utils.rand_name('sahara-internal-job-binary')
-        self.job_binary_data = 'Some data'
-        # create job binary internal
-        resp_body = self.create_job_binary_internal(name, self.job_binary_data)
-        # check that job_binary_internal created successfully
-        self.assertEqual(name, resp_body.name)
-        return resp_body.id, resp_body.name
-
-    def _check_job_binary_internal_list(self, binary_id, binary_name):
-        # check for job binary internal in list
-        binary_list = self.client.job_binary_internals.list()
-        binaries_info = [(binary.id, binary.name) for binary in binary_list]
-        self.assertIn((binary_id, binary_name), binaries_info)
-
-    def _check_job_binary_internal_get(self, binary_id, binary_name):
-        # check job binary internal fetch by id
-        binary = self.client.job_binary_internals.get(binary_id)
-        self.assertEqual(binary_name, binary.name)
-
-    def _check_job_binary_internal_update(self, binary_id):
-        values = {
-            'name': data_utils.rand_name('sahara-internal-job-binary'),
-            'is_public': True
-        }
-        binary = self.client.job_binary_internals.update(binary_id, **values)
-        self.assertDictContainsSubset(values, binary.job_binary_internal)
-
-    def _check_job_binary_internal_delete(self, binary_id):
-        # delete job binary internal by id
-        self.client.job_binary_internals.delete(binary_id)
-        # check that job binary internal really deleted
-        binary_list = self.client.job_binary_internals.list()
-        self.assertNotIn(binary_id, [binary.id for binary in binary_list])
-
-    def test_job_binary_internal(self):
-        binary_id, binary_name = self._check_job_binary_internal_create()
-        self._check_job_binary_internal_list(binary_id, binary_name)
-        self._check_job_binary_internal_get(binary_id, binary_name)
-        self._check_job_binary_internal_update(binary_id)
-        self._check_job_binary_internal_delete(binary_id)
diff --git a/sahara/tests/tempest/scenario/data_processing/client_tests/test_job_executions.py b/sahara/tests/tempest/scenario/data_processing/client_tests/test_job_executions.py
deleted file mode 100644
index e382872..0000000
--- a/sahara/tests/tempest/scenario/data_processing/client_tests/test_job_executions.py
+++ /dev/null
@@ -1,317 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import time
-
-from oslo_utils import timeutils
-from saharaclient.api import base as sab
-from tempest import config
-from tempest import exceptions
-from tempest.lib.common.utils import data_utils
-from tempest.lib import decorators
-from tempest import test
-
-from sahara.tests.tempest.scenario.data_processing.client_tests import base
-
-
-TEMPEST_CONF = config.CONF
-
-
-class JobExecutionTest(base.BaseDataProcessingTest):
-    def _check_register_image(self, image_id):
-        self.client.images.update_image(
-            image_id, TEMPEST_CONF.scenario.ssh_user, '')
-        reg_image = self.client.images.get(image_id)
-
-        self.assertDictContainsSubset(
-            {'_sahara_username': TEMPEST_CONF.scenario.ssh_user},
-            reg_image.metadata)
-
-    def _check_image_get(self, image_id):
-        image = self.client.images.get(image_id)
-
-        self.assertEqual(image_id, image.id)
-
-    def _check_image_list(self, image_id):
-        # check for image in list
-        image_list = self.client.images.list()
-        images_info = [image.id for image in image_list]
-
-        self.assertIn(image_id, images_info)
-
-    def _check_adding_tags(self, image_id):
-        # adding new tags
-        self.client.images.update_tags(image_id, ['fake', '0.1'])
-        image = self.client.images.get(image_id)
-
-        self.assertDictContainsSubset({'_sahara_tag_fake': 'True',
-                                       '_sahara_tag_0.1': 'True'},
-                                      image.metadata)
-
-    def _check_deleting_tags(self, image_id):
-        # deleting tags
-        self.client.images.update_tags(image_id, [])
-        image = self.client.images.get(image_id)
-
-        self.assertNotIn('_sahara_tag_fake', image.metadata)
-        self.assertNotIn('_sahara_tag_0.1', image.metadata)
-
-    def _check_unregister_image(self, image_id):
-        # unregister image
-        self.client.images.unregister_image(image_id)
-
-        # check that image really unregistered
-        image_list = self.client.images.list()
-        self.assertNotIn(image_id, [image.id for image in image_list])
-
-    def _check_cluster_create(self):
-        worker = self.create_node_group_template(
-            data_utils.rand_name('sahara-ng-template'), **self.worker_template)
-
-        master = self.create_node_group_template(
-            data_utils.rand_name('sahara-ng-template'), **self.master_template)
-
-        cluster_templ = self.cluster_template.copy()
-        cluster_templ['node_groups'] = [
-            {
-                'name': 'master',
-                'node_group_template_id': master.id,
-                'count': 1
-            },
-            {
-                'name': 'worker',
-                'node_group_template_id': worker.id,
-                'count': 3
-            }
-        ]
-        if TEMPEST_CONF.service_available.neutron:
-            cluster_templ['net_id'] = self.get_private_network_id()
-
-        cluster_template = self.create_cluster_template(
-            data_utils.rand_name('sahara-cluster-template'), **cluster_templ)
-        cluster_name = data_utils.rand_name('sahara-cluster')
-        self.cluster_info = {
-            'name': cluster_name,
-            'plugin_name': 'fake',
-            'hadoop_version': '0.1',
-            'cluster_template_id': cluster_template.id,
-            'default_image_id': TEMPEST_CONF.data_processing.fake_image_id
-        }
-
-        # create cluster
-        cluster = self.create_cluster(**self.cluster_info)
-
-        # wait until cluster moves to active state
-        self.check_cluster_active(cluster.id)
-
-        # check that cluster created successfully
-        self.assertEqual(cluster_name, cluster.name)
-        self.assertDictContainsSubset(self.cluster_info, cluster.__dict__)
-
-        return cluster.id, cluster.name
-
-    def _check_cluster_list(self, cluster_id, cluster_name):
-        # check for cluster in list
-        cluster_list = self.client.clusters.list()
-        clusters_info = [(clust.id, clust.name) for clust in cluster_list]
-        self.assertIn((cluster_id, cluster_name), clusters_info)
-
-    def _check_cluster_get(self, cluster_id, cluster_name):
-        # check cluster fetch by id
-        cluster = self.client.clusters.get(cluster_id)
-        self.assertEqual(cluster_name, cluster.name)
-        self.assertDictContainsSubset(self.cluster_info, cluster.__dict__)
-
-    def _check_cluster_update(self, cluster_id):
-        values = {
-            'name': data_utils.rand_name('updated-sahara-cluster'),
-            'description': 'description'
-        }
-        # check updating of cluster
-        cluster = self.client.clusters.update(cluster_id)
-        self.assertDictContainsSubset(values, cluster.__dict__)
-
-    def _check_cluster_scale(self, cluster_id):
-        big_worker = self.create_node_group_template(
-            data_utils.rand_name('sahara-ng-template'), **self.worker_template)
-
-        scale_body = {
-            'resize_node_groups': [
-                {
-                    'count': 2,
-                    'name': 'worker'
-                },
-                {
-                    "count": 2,
-                    "name": 'master'
-                }
-            ],
-            'add_node_groups': [
-                {
-                    'count': 1,
-                    'name': 'big-worker',
-                    'node_group_template_id': big_worker.id
-
-                }
-            ]
-        }
-
-        self.client.clusters.scale(cluster_id, scale_body)
-        self.check_cluster_active(cluster_id)
-
-        cluster = self.client.clusters.get(cluster_id)
-        for ng in cluster.node_groups:
-            if ng['name'] == scale_body['resize_node_groups'][0]['name']:
-                self.assertDictContainsSubset(
-                    scale_body['resize_node_groups'][0], ng)
-            elif ng['name'] == scale_body['resize_node_groups'][1]['name']:
-                self.assertDictContainsSubset(
-                    scale_body['resize_node_groups'][1], ng)
-            elif ng['name'] == scale_body['add_node_groups'][0]['name']:
-                self.assertDictContainsSubset(
-                    scale_body['add_node_groups'][0], ng)
-
-    def _check_cluster_delete(self, cluster_id):
-        self.client.clusters.delete(cluster_id)
-
-        # check that cluster moved to deleting state
-        cluster = self.client.clusters.get(cluster_id)
-        self.assertEqual('Deleting', cluster.status)
-
-        timeout = TEMPEST_CONF.data_processing.cluster_timeout
-        s_time = timeutils.utcnow()
-        while timeutils.delta_seconds(s_time, timeutils.utcnow()) < timeout:
-            try:
-                self.client.clusters.get(cluster_id)
-            except sab.APIException:
-                # cluster is deleted
-                return
-            time.sleep(TEMPEST_CONF.data_processing.request_timeout)
-
-        raise exceptions.TimeoutException('Cluster failed to terminate'
-                                          'in %d seconds.' % timeout)
-
-    def _check_job_execution_create(self, cluster_id):
-        # create swift container
-        container_name = data_utils.rand_name('test-container')
-        self.create_container(container_name)
-
-        # create input data source
-        input_file_name = data_utils.rand_name('input')
-        self.object_client.create_object(container_name, input_file_name,
-                                         'some-data')
-
-        input_file_url = 'swift://%s/%s' % (container_name, input_file_name)
-        input_source_name = data_utils.rand_name('input-data-source')
-        input_source = self.create_data_source(
-            input_source_name, input_file_url, '', 'swift',
-            {'user': 'test', 'password': '123'})
-
-        # create output data source
-        output_dir_name = data_utils.rand_name('output')
-        output_dir_url = 'swift://%s/%s' % (container_name, output_dir_name)
-        output_source_name = data_utils.rand_name('output-data-source')
-        output_source = self.create_data_source(
-            output_source_name, output_dir_url, '', 'swift',
-            {'user': 'test', 'password': '123'})
-
-        job_binary = {
-            'name': data_utils.rand_name('sahara-job-binary'),
-            'url': input_file_url,
-            'description': 'Test job binary',
-            'extra': {
-                'user': 'test',
-                'password': '123'
-            }
-        }
-        # create job_binary
-        job_binary = self.create_job_binary(**job_binary)
-
-        # create job
-        job_name = data_utils.rand_name('test-job')
-        job = self.create_job(job_name, 'Pig', [job_binary.id])
-
-        self.job_exec_info = {
-            'job_id': job.id,
-            'cluster_id': cluster_id,
-            'input_id': input_source.id,
-            'output_id': output_source.id,
-            'configs': {}
-        }
-        # create job execution
-        job_execution = self.create_job_execution(**self.job_exec_info)
-
-        return job_execution.id
-
-    def _check_job_execution_list(self, job_exec_id):
-        # check for job_execution in list
-        job_exec_list = self.client.job_executions.list()
-        self.assertIn(job_exec_id, [job_exec.id for job_exec in job_exec_list])
-
-    def _check_job_execution_get(self, job_exec_id):
-        # check job_execution fetch by id
-        job_exec = self.client.job_executions.get(job_exec_id)
-        # Create extra cls.swift_job_binary variable to use for comparison to
-        # job binary response body because response body has no 'extra' field.
-        job_exec_info = self.job_exec_info.copy()
-        del job_exec_info['configs']
-        self.assertDictContainsSubset(job_exec_info, job_exec.__dict__)
-
-    def _check_job_execution_update(self, job_exec_id):
-        values = {
-            'is_public': True
-        }
-        job_exec = self.client.job_executions.update(job_exec_id, **values)
-        self.assertDictContainsSubset(values, job_exec.__dict__)
-
-    def _check_job_execution_delete(self, job_exec_id):
-        # delete job_execution by id
-        self.client.job_executions.delete(job_exec_id)
-        # check that job_execution really deleted
-        job_exec_list = self.client.jobs.list()
-        self.assertNotIn(job_exec_id, [job_exec.id for
-                                       job_exec in job_exec_list])
-
-    @decorators.skip_because(bug="1430252")
-    @test.attr(type='slow')
-    def test_job_executions(self):
-        image_id = TEMPEST_CONF.data_processing.fake_image_id
-        self._check_register_image(image_id)
-        self._check_image_get(image_id)
-        self._check_image_list(image_id)
-        self._check_adding_tags(image_id)
-
-        cluster_id, cluster_name = self._check_cluster_create()
-        self._check_cluster_list(cluster_id, cluster_name)
-        self._check_cluster_get(cluster_id, cluster_name)
-        self._check_cluster_update(cluster_id)
-        self._check_cluster_scale(cluster_id)
-
-        job_exec_id = self._check_job_execution_create(cluster_id)
-        self._check_job_execution_list(job_exec_id)
-        self._check_job_execution_get(job_exec_id)
-        self._check_job_execution_update(job_exec_id)
-
-        self._check_job_execution_delete(job_exec_id)
-        self._check_cluster_delete(cluster_id)
-        self._check_deleting_tags(image_id)
-        self._check_unregister_image(image_id)
-
-    @classmethod
-    def tearDownClass(cls):
-        image_list = cls.client.images.list()
-        image_id = TEMPEST_CONF.data_processing.fake_image_id
-        if image_id in [image.id for image in image_list]:
-            cls.client.images.unregister_image(image_id)
-        super(JobExecutionTest, cls).tearDownClass()
diff --git a/sahara/tests/tempest/scenario/data_processing/client_tests/test_jobs.py b/sahara/tests/tempest/scenario/data_processing/client_tests/test_jobs.py
deleted file mode 100644
index 5b87abf..0000000
--- a/sahara/tests/tempest/scenario/data_processing/client_tests/test_jobs.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from tempest.lib.common.utils import data_utils
-
-from sahara.tests.tempest.scenario.data_processing.client_tests import base
-
-
-class JobTest(base.BaseDataProcessingTest):
-    def _check_create_job(self):
-        job_binary = {
-            'name': data_utils.rand_name('sahara-job-binary'),
-            'url': 'swift://sahara-container.sahara/example.jar',
-            'description': 'Test job binary',
-            'extra': {
-                'user': 'test',
-                'password': '123'
-            }
-        }
-        # create job_binary
-        job_binary = self.create_job_binary(**job_binary)
-
-        self.job = {
-            'job_type': 'Pig',
-            'mains': [job_binary.id]
-        }
-        job_name = data_utils.rand_name('sahara-job')
-        # create job
-        job = self.create_job(job_name, **self.job)
-        # check that job created successfully
-        self.assertEqual(job_name, job.name)
-
-        return job.id, job.name
-
-    def _check_job_list(self, job_id, job_name):
-        # check for job in list
-        job_list = self.client.jobs.list()
-        jobs_info = [(job.id, job.name) for job in job_list]
-        self.assertIn((job_id, job_name), jobs_info)
-
-    def _check_get_job(self, job_id, job_name):
-        # check job fetch by id
-        job = self.client.jobs.get(job_id)
-        self.assertEqual(job_name, job.name)
-
-    def _check_job_update(self, job_id):
-        # check updating of job
-        values = {
-            'name': data_utils.rand_name('updated-sahara-job'),
-            'description': 'description'
-
-        }
-        job = self.client.jobs.update(job_id, **values)
-        self.assertDictContainsSubset(values, job.job)
-
-    def _check_delete_job(self, job_id):
-        # delete job by id
-        self.client.jobs.delete(job_id)
-        # check that job really deleted
-        job_list = self.client.jobs.list()
-        self.assertNotIn(job_id, [job.id for job in job_list])
-
-    def test_job(self):
-        job_id, job_name = self._check_create_job()
-        self._check_job_list(job_id, job_name)
-        self._check_get_job(job_id, job_name)
-        self._check_job_update(job_id)
-        self._check_delete_job(job_id)
diff --git a/sahara/tests/tempest/scenario/data_processing/client_tests/test_node_group_templates.py b/sahara/tests/tempest/scenario/data_processing/client_tests/test_node_group_templates.py
deleted file mode 100644
index 443920b..0000000
--- a/sahara/tests/tempest/scenario/data_processing/client_tests/test_node_group_templates.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from tempest.lib.common.utils import data_utils
-
-from sahara.tests.tempest.scenario.data_processing.client_tests import base
-
-
-class NodeGroupTemplateTest(base.BaseDataProcessingTest):
-    def _check_create_node_group_template(self):
-        template_name = data_utils.rand_name('sahara-ng-template')
-
-        # create node group template
-        resp_body = self.create_node_group_template(template_name,
-                                                    **self.worker_template)
-        # check that template created successfully
-        self.assertEqual(template_name, resp_body.name)
-        self.assertDictContainsSubset(self.worker_template,
-                                      resp_body.__dict__)
-
-        return resp_body.id, template_name
-
-    def _check_node_group_template_list(self, template_id, template_name):
-        # check for node group template in list
-        template_list = self.client.node_group_templates.list()
-        templates_info = [(template.id, template.name)
-                          for template in template_list]
-        self.assertIn((template_id, template_name), templates_info)
-
-    def _check_node_group_template_get(self, template_id, template_name):
-        # check node group template fetch by id
-        template = self.client.node_group_templates.get(
-            template_id)
-        self.assertEqual(template_name, template.name)
-        self.assertDictContainsSubset(self.worker_template,
-                                      template.__dict__)
-
-    def _check_node_group_template_update(self, template_id):
-        values = {
-            'name': data_utils.rand_name('updated-sahara-ng-template'),
-            'description': 'description',
-            'volumes_per_node': 2,
-            'volumes_size': 2,
-        }
-
-        resp_body = self.client.node_group_templates.update(template_id,
-                                                            **values)
-        # check that template updated successfully
-        self.assertDictContainsSubset(values,
-                                      resp_body.__dict__)
-
-    def _check_node_group_template_delete(self, template_id):
-        # delete node group template by id
-        self.client.node_group_templates.delete(template_id)
-
-        # check that node group really deleted
-        templates = self.client.node_group_templates.list()
-        self.assertNotIn(template_id, [template.id for template in templates])
-
-    def test_node_group_templates(self):
-        template_id, template_name = self._check_create_node_group_template()
-        self._check_node_group_template_list(template_id, template_name)
-        self._check_node_group_template_get(template_id, template_name)
-        self._check_node_group_template_update(template_id)
-        self._check_node_group_template_delete(template_id)
diff --git a/sahara/tests/tempest/scenario/data_processing/client_tests/test_plugins.py b/sahara/tests/tempest/scenario/data_processing/client_tests/test_plugins.py
deleted file mode 100644
index e8469b5..0000000
--- a/sahara/tests/tempest/scenario/data_processing/client_tests/test_plugins.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from sahara.tests.tempest.scenario.data_processing.client_tests import base
-
-
-class PluginsTest(base.BaseDataProcessingTest):
-
-    def _check_plugins_list(self):
-        plugins = self.client.plugins.list()
-        plugins_names = [plugin.name for plugin in plugins]
-        self.assertIn('fake', plugins_names)
-
-        return plugins_names
-
-    def _check_plugins_get(self, plugins_names):
-        for plugin_name in plugins_names:
-            plugin = self.client.plugins.get(plugin_name)
-            self.assertEqual(plugin_name, plugin.name)
-
-            # check get_version_details
-            for plugin_version in plugin.versions:
-                detailed_plugin = self.client.plugins.get_version_details(
-                    plugin_name, plugin_version)
-                self.assertEqual(plugin_name, detailed_plugin.name)
-
-                # check that required image tags contains name and version
-                image_tags = detailed_plugin.required_image_tags
-                self.assertIn(plugin_name, image_tags)
-                self.assertIn(plugin_version, image_tags)
-
-    def test_plugins(self):
-        plugins_names = self._check_plugins_list()
-        self._check_plugins_get(plugins_names)
diff --git a/sahara/tests/tempest/scenario/data_processing/config.py b/sahara/tests/tempest/scenario/data_processing/config.py
deleted file mode 100644
index 0aef4ad..0000000
--- a/sahara/tests/tempest/scenario/data_processing/config.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (c) 2014 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from __future__ import print_function
-
-from oslo_config import cfg
-
-
-DataProcessingGroup = [
-    cfg.IntOpt('cluster_timeout',
-               default=3600,
-               help='Timeout (in seconds) to wait for cluster deployment.'),
-    cfg.IntOpt('request_timeout',
-               default=10,
-               help='Timeout (in seconds) between status checks.'),
-    cfg.StrOpt('fake_image_id',
-               help='ID of an image which is used for cluster creation.'),
-    cfg.StrOpt('saharaclient_version',
-               default='1.1',
-               help='Version of python-saharaclient'),
-    cfg.StrOpt('sahara_url',
-               help='Sahara url as http://ip:port/api_version/tenant_id'),
-]
diff --git a/sahara/tests/tempest/scenario/data_processing/etc/sahara_tests.conf b/sahara/tests/tempest/scenario/data_processing/etc/sahara_tests.conf
deleted file mode 100644
index 6fbf3ba..0000000
--- a/sahara/tests/tempest/scenario/data_processing/etc/sahara_tests.conf
+++ /dev/null
@@ -1,7 +0,0 @@
-[data_processing]
-
-floating_ip_pool='a454832b-5101-421a-a225-5445a98667d4'
-private_network_id='cdbfcaa0-d17c-4ec4-9271-eb12c975d825'
-fake_image_id='16a534fc-9e1e-43d6-b577-9f805212dc0b'
-flavor_id=2
-ssh_username='ubuntu'
\ No newline at end of file
diff --git a/sahara/tests/tempest/scenario/data_processing/etc/sahara_tests.conf.sample b/sahara/tests/tempest/scenario/data_processing/etc/sahara_tests.conf.sample
deleted file mode 100644
index 410a882..0000000
--- a/sahara/tests/tempest/scenario/data_processing/etc/sahara_tests.conf.sample
+++ /dev/null
@@ -1,28 +0,0 @@
-[data_processing]
-
-# Timeout (in seconds) to wait for cluster deployment.
-#cluster_timeout=3600
-
-# Timeout (in seconds) between status checks.
-#request_timeout=10
-
-# Name of IP pool.
-#floating_ip_pool=
-
-# Name of the private network that provides internal connectivity.
-#private_network=
-
-# ID of an image which is used for cluster creation.
-#fake_image_id=
-
-# ID of a flavor.
-#flavor_id=
-
-# Version of python-saharaclient
-#saharaclient_version=1.1
-
-# Sahara url as http://ip:port/api_version/tenant_id
-#sahara_url=
-
-# Username which is used to log into remote nodes via SSH.
-#ssh_username=
diff --git a/sahara/tests/tempest/scenario/data_processing/plugin.py b/sahara/tests/tempest/scenario/data_processing/plugin.py
deleted file mode 100644
index bdc54eb..0000000
--- a/sahara/tests/tempest/scenario/data_processing/plugin.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright (c) 2015 Red Hat, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-import os
-
-from tempest.test_discover import plugins
-
-import sahara.tests.tempest.scenario.data_processing.config as sahara_config
-
-
-class SaharaClientsScenarioPlugin(plugins.TempestPlugin):
-    def load_tests(self):
-        relative_test_dir = 'sahara/tests/tempest/scenario/data_processing'
-        test_dir = os.path.dirname(os.path.abspath(__file__))
-        top_level_dir = test_dir[:test_dir.find(relative_test_dir)-1]
-        return test_dir, top_level_dir
-
-    def register_opts(self, conf):
-        # additional options in the data_processing section
-        conf.register_opts(sahara_config.DataProcessingGroup,
-                           'data-processing')
-
-    def get_opt_lists(self):
-        return [('data-processing', sahara_config.DataProcessingGroup)]
diff --git a/test-requirements.txt b/test-requirements.txt
index efdb57c..a456df3 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -4,7 +4,6 @@
 
 hacking<0.11,>=0.10.0
 
-Mako>=0.4.0 # MIT
 PyMySQL!=0.7.7,>=0.6.2 # MIT License
 bandit>=1.1.0 # Apache-2.0
 bashate>=0.2 # Apache-2.0
@@ -21,10 +20,6 @@ pylint==1.4.5 # GPLv2
 reno>=1.8.0 # Apache2
 sphinx!=1.3b1,<1.3,>=1.2.1 # BSD
 sphinxcontrib-httpdomain # BSD
-tempest-lib>=0.14.0 # Apache-2.0
-testrepository>=0.0.18 # Apache-2.0/BSD
 testresources>=0.2.4 # Apache-2.0/BSD
 testscenarios>=0.4 # Apache-2.0/BSD
 testtools>=1.4.0 # MIT
-
-python-saharaclient>=0.18.0 # Apache-2.0