author     Vladimir Kuklin <vkuklin@mirantis.com>   2016-09-08 13:17:07 +0300
committer  Vladimir Kuklin <vkuklin@mirantis.com>   2016-09-08 14:18:35 +0300
commit     caebcc64b736b0bddb9ee4b3569c8a2054c05d27 (patch)
tree       b64e918c8a25d2660a9e0a05eb3d5ae3cecf256e
parent     ab8ea030924871a3c4fc782e736547829480600c (diff)
Add Nailgun Converted serializers base code
This commit introduces the Nailgun converted serializers extension, which allows
Fuel Mitaka LCM features to be used with pre-Mitaka releases. It essentially runs
the old serializers and patches the result so that the LCM deployment engine can
work with the serialized data.
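The data flow described above is easiest to see as a small, self-contained sketch. This is illustrative only and not part of the commit: the function names and sample data below are invented stand-ins for the Nailgun objects used by ConvertPreLCMtoLCM.process_deployment in the diff further down.

    # Conceptual sketch of the conversion pipeline. legacy_serialize() is a
    # stand-in for the serializer returned by get_serializer_for_cluster();
    # the dict shapes are simplified assumptions.
    def legacy_serialize(cluster, nodes):
        return [{'uid': node['uid'], 'roles': node['roles']} for node in nodes]


    def process_deployment(lcm_data, cluster, nodes):
        # Run the pre-Mitaka (pre-LCM) serializer...
        real_data = legacy_serialize(cluster, nodes)
        # ...then patch the result: re-attach the 'cluster' section produced by
        # the LCM serializer so task parameter interpolation (e.g. CLUSTER_ID)
        # keeps working.
        cluster_section = lcm_data[0]['cluster']
        for node_data in real_data:
            node_data['cluster'] = cluster_section
        return real_data


    if __name__ == '__main__':
        lcm_data = [{'uid': '1', 'cluster': {'id': 42}}]
        nodes = [{'uid': '1', 'roles': ['controller']}]
        print(process_deployment(lcm_data, {'release': 'liberty-8.0'}, nodes))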
-rw-r--r--  .coveragerc                                      |   6
-rw-r--r--  .gitignore                                       | 105
-rw-r--r--  MANIFEST.in                                      |   6
-rw-r--r--  README.rst                                       |  10
-rw-r--r--  bindep.txt                                       |   6
-rw-r--r--  conftest.py                                      |  24
-rw-r--r--  converted_serializers/__init__.py                |   0
-rw-r--r--  converted_serializers/extension.py               |  72
-rw-r--r--  converted_serializers/tests/__init__.py          |   0
-rw-r--r--  converted_serializers/tests/test_pipelines.py    | 647
-rw-r--r--  nailgun-test-settings.yaml                       |  15
-rw-r--r--  requirements.txt                                 |   5
-rw-r--r--  setup.cfg                                        |  28
-rw-r--r--  setup.py                                         |  29
-rw-r--r--  specs/fuel-nailgun-extension-converted-serializers.spec | 38
-rw-r--r--  test-requirements.txt                            |   6
-rw-r--r--  tox.ini                                          |  38
17 files changed, 967 insertions, 68 deletions
diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 0000000..0519c90
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,6 @@
[run]
branch = True
source = converted_serializers

[report]
ignore_errors = True
diff --git a/.gitignore b/.gitignore
index 72364f9..2562132 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,89 +1,58 @@
-# Byte-compiled / optimized / DLL files
-__pycache__/
 *.py[cod]
-*$py.class
 
 # C extensions
 *.so
 
-# Distribution / packaging
-.Python
-env/
-build/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
-*.egg-info/
+# Packages
+*.egg*
+*.egg-info
+dist
+build
+eggs
+parts
+bin
+var
+sdist
+develop-eggs
 .installed.cfg
-*.egg
-
-# PyInstaller
-# Usually these files are written by a python script from a template
-# before PyInstaller builds the exe, so as to inject date/other infos into it.
-*.manifest
-*.spec
+lib
+lib64
 
 # Installer logs
 pip-log.txt
-pip-delete-this-directory.txt
 
 # Unit test / coverage reports
-htmlcov/
-.tox/
-.coverage
-.coverage.*
-.cache
+cover/
+.coverage*
+!.coveragerc
+.tox
 nosetests.xml
-coverage.xml
-*,cover
-.hypothesis/
+.testrepository
+.venv
 
 # Translations
 *.mo
-*.pot
-
-# Django stuff:
-*.log
-local_settings.py
-
-# Flask stuff:
-instance/
-.webassets-cache
-
-# Scrapy stuff:
-.scrapy
-
-# Sphinx documentation
-docs/_build/
-
-# PyBuilder
-target/
-
-# IPython Notebook
-.ipynb_checkpoints
 
-# pyenv
-.python-version
+# Mr Developer
+.mr.developer.cfg
+.project
+.pydevproject
 
-# celery beat schedule file
-celerybeat-schedule
+# Complexity
+output/*.html
+output/*/index.html
 
-# dotenv
-.env
+# Sphinx
+doc/build
 
-# virtualenv
-venv/
-ENV/
+# pbr generates these
+AUTHORS
+ChangeLog
 
-# Spyder project settings
-.spyderproject
+# Editors
+*~
+.*.swp
+.*sw?
 
-# Rope project settings
-.ropeproject
+# Files created by releasenotes build
+extension.xml
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..c978a52
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,6 @@
include AUTHORS
include ChangeLog
exclude .gitignore
exclude .gitreview

global-exclude *.pyc
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..2ca0eea
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,10 @@
Fuel Nailgun extension for converted serializers
=================================================

This extension for Nailgun provides a conversion layer which triggers
pre-Mitaka serializers to generate deployment data, so that pre-9.x clusters
can leverage Fuel Mitaka LCM features.

Installation
------------
Just install the package `fuel-nailgun-extension-converted-serializers`.
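As an aside on the installation step above (not part of the commit): setup.cfg further down registers the extension under the `nailgun.extensions` entry-point group, so one hedged way to confirm the installed package is visible is to list that group via setuptools. This is an assumption-level sketch, not an official Fuel check.

    # List the 'nailgun.extensions' entry points and confirm that
    # 'converted_serializers' is among them. Uses only the setuptools
    # pkg_resources API; the expected attribute values come from
    # extension.py in this commit.
    import pkg_resources

    for ep in pkg_resources.iter_entry_points('nailgun.extensions'):
        if ep.name == 'converted_serializers':
            ext_cls = ep.load()
            print(ext_cls.name, ext_cls.version)  # expected: converted_serializers 0.0.1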
diff --git a/bindep.txt b/bindep.txt
new file mode 100644
index 0000000..568f269
--- /dev/null
+++ b/bindep.txt
@@ -0,0 +1,6 @@
libpq-dev
postgresql
postgresql-client
# We don't use these, but the mysql-prep step is in the template job
mysql-client
mysql-server
diff --git a/conftest.py b/conftest.py
new file mode 100644
index 0000000..a345985
--- /dev/null
+++ b/conftest.py
@@ -0,0 +1,24 @@
# coding: utf-8

# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


def pytest_configure(config):
    from nailgun import db
    db.dropdb()
    db.syncdb()


def pytest_unconfigure(config):
    from nailgun import db
    db.dropdb()
diff --git a/converted_serializers/__init__.py b/converted_serializers/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/converted_serializers/__init__.py
diff --git a/converted_serializers/extension.py b/converted_serializers/extension.py
new file mode 100644
index 0000000..810fdd0
--- /dev/null
+++ b/converted_serializers/extension.py
@@ -0,0 +1,72 @@
# -*- coding: utf-8 -*-

# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging

from nailgun import extensions
from nailgun import objects
from nailgun.orchestrator.deployment_serializers import \
    get_serializer_for_cluster


logger = logging.getLogger(__name__)


class ConvertPreLCMtoLCM(extensions.BasePipeline):

    @classmethod
    def pre_process_data(cls, data, cluster, nodes, **kwargs):
        return data

    @classmethod
    def post_process_data(cls, data, cluster, nodes, **kwargs):
        return data

    @classmethod
    def serialize(cls, data, cluster, nodes, **kwargs):
        if objects.Release.is_lcm_supported(cluster.release):
            return data
        serializer = get_serializer_for_cluster(cluster)()
        real_data = serializer.serialize(cluster, nodes, **kwargs)
        return real_data

    @classmethod
    def process_deployment(cls, data, cluster, nodes, **kwargs):
        pre_processed_data = cls.pre_process_data(data,
                                                  cluster, nodes, **kwargs)
        real_data = cls.serialize(pre_processed_data, cluster, nodes, **kwargs)
        post_processed_data = cls.post_process_data(real_data,
                                                    cluster, nodes, **kwargs)
        # Copy the cluster-specific values from the LCM serializer.
        # This is needed for task parameter interpolation, e.g. CLUSTER_ID.
        cluster_data = data[0]['cluster']
        for node_data in post_processed_data:
            node_data['cluster'] = cluster_data
        return post_processed_data

    @classmethod
    def process_provisioning(cls, data, cluster, nodes, **kwargs):
        return data


class ConvertedSerializersExtension(extensions.BaseExtension):
    name = 'converted_serializers'
    version = '0.0.1'
    description = "Serializers Conversion extension"
    weight = 100

    data_pipelines = [
        ConvertPreLCMtoLCM,
    ]
diff --git a/converted_serializers/tests/__init__.py b/converted_serializers/tests/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/converted_serializers/tests/__init__.py
diff --git a/converted_serializers/tests/test_pipelines.py b/converted_serializers/tests/test_pipelines.py
new file mode 100644
index 0000000..c63f466
--- /dev/null
+++ b/converted_serializers/tests/test_pipelines.py
@@ -0,0 +1,647 @@
# -*- coding: utf-8 -*-

# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


from copy import deepcopy
import mock
import six

import nailgun

from nailgun import consts
from nailgun.db.sqlalchemy import models
from nailgun import objects
from nailgun import rpc

from nailgun.orchestrator import deployment_serializers
from nailgun.orchestrator.deployment_serializers import \
    get_serializer_for_cluster
from nailgun.orchestrator.neutron_serializers import \
    NeutronNetworkDeploymentSerializer80
from nailgun.orchestrator.neutron_serializers import \
    NeutronNetworkTemplateSerializer80
from nailgun.test.integration.test_orchestrator_serializer import \
    BaseDeploymentSerializer
from nailgun.test.integration.test_orchestrator_serializer import \
    TestSerializeInterfaceDriversData
from nailgun.test.integration.test_orchestrator_serializer_70 import \
    TestDeploymentHASerializer70
from nailgun.test.integration.test_orchestrator_serializer_80 import \
    TestSerializer80Mixin


class TestSerializerWrapper(deployment_serializers.DeploymentLCMSerializer):

    def serialize(self, cluster, nodes, ignore_customized=False):
        return deployment_serializers.serialize_for_lcm(
            cluster, nodes, ignore_customized=ignore_customized)

    def get_net_provider_serializer(cls, cluster):
        return deployment_serializers\
            .DeploymentHASerializer80.get_net_provider_serializer(cluster)


class TestSerializerConverter80To90MixIn(TestSerializer80Mixin):
    env_version = "liberty-8.0"
    task_deploy = True
    is_propagate_task_deploy = True
    enforce_lcm = True

    @classmethod
    def create_serializer(cls, cluster):
        serializer_type = TestSerializerWrapper
        return serializer_type(None)


class TestNetworkTemplateSerializer80MixIn(
    TestSerializerConverter80To90MixIn,
    BaseDeploymentSerializer
):
    legacy_serializer = NeutronNetworkDeploymentSerializer80
    template_serializer = NeutronNetworkTemplateSerializer80

    def setUp(self, *args):
        super(TestNetworkTemplateSerializer80MixIn, self).setUp()
        self.env.create(
            release_kwargs={'version': self.env_version},
            cluster_kwargs={
                'mode': consts.CLUSTER_MODES.ha_compact,
                'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
                'net_segment_type': consts.NEUTRON_SEGMENT_TYPES.vlan})
        self.net_template = self.env.read_fixtures(['network_template_80'])[0]
        self.cluster = self.env.clusters[-1]
        self.cluster.extensions = ['volume_manager', 'converted_serializers']
        self.serializer = self.create_serializer(self.cluster)

    def test_get_net_provider_serializer(self):
        self.cluster.network_config.configuration_template = None

        net_serializer = self.serializer.\
            get_net_provider_serializer(self.cluster)
        self.assertIs(net_serializer, self.legacy_serializer)

        self.cluster.network_config.configuration_template = \
            self.net_template
        net_serializer = self.serializer.\
            get_net_provider_serializer(self.cluster)
        self.assertIs(net_serializer, self.template_serializer)

    def test_baremetal_neutron_attrs(self):
        brmtl_template = deepcopy(
            self.net_template['adv_net_template']['default'])
        brmtl_template['network_assignments']['baremetal'] = {
            'ep': 'br-baremetal'}
        brmtl_template['templates_for_node_role']['controller'].append(
            'baremetal')
        brmtl_template['nic_mapping']['default']['if8'] = 'eth7'
        brmtl_template['network_scheme']['baremetal'] = {
            'endpoints': ['br-baremetal'],
            'transformations': [],
            'roles': {'baremetal': 'br-baremetal'}}
        self.cluster.network_config.configuration_template = {
            'adv_net_template': {'default': brmtl_template}, 'pk': 1}
        self._check_baremetal_neutron_attrs(self.cluster)

    def test_network_schemes_priorities(self):
        expected = [
            {
                "action": "add-br",
                "name": "br-prv",
                "provider": "ovs"
            },
            {
                "action": "add-br",
                "name": "br-aux"
            },
            {
                "action": "add-patch",
                "bridges": [
                    "br-prv",
                    "br-aux"
                ],
                "provider": "ovs",
                "mtu": 65000
            },
            {
                "action": "add-port",
                "bridge": "br-aux",
                "name": "eth3.101"
            },
            {
                "action": "add-br",
                "name": "br-fw-admin"
            },
            {
                "action": "add-port",
                "bridge": "br-fw-admin",
                "name": "eth0"
            },
            {
                "action": "add-br",
                "name": "br-mgmt"
            },
            {
                "action": "add-port",
                "bridge": "br-mgmt",
                "name": "eth1.104"
            },
            {
                "action": "add-br",
                "name": "br-storage"
            },
            {
                "action": "add-port",
                "bridge": "br-storage",
                "name": "eth2"
            }
        ]

        objects.Cluster.set_network_template(
            self.cluster,
            self.net_template
        )

        node = self.env.create_nodes_w_interfaces_count(
            1, 8, roles=['compute', 'cinder'],
            cluster_id=self.cluster.id
        )[0]

        self.serializer = get_serializer_for_cluster(self.cluster)
        net_serializer = self.serializer.get_net_provider_serializer(
            self.cluster)

        nm = objects.Cluster.get_network_manager(self.cluster)
        network_scheme = net_serializer.generate_network_scheme(
            node, nm.get_node_networks(node))
        self.assertEqual(expected, network_scheme['transformations'])


class TestDeploymentTasksSerialization80MixIn(
    TestSerializerConverter80To90MixIn,
    BaseDeploymentSerializer
):
    tasks_for_rerun = {"globals", "netconfig"}

    def setUp(self):
        super(TestDeploymentTasksSerialization80MixIn, self).setUp()
        self.env.create(
            release_kwargs={'version': self.env_version},
            cluster_kwargs={
                'mode': consts.CLUSTER_MODES.ha_compact,
                'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
                'net_segment_type': consts.NEUTRON_SEGMENT_TYPES.vlan,
                'status': consts.CLUSTER_STATUSES.operational},
            nodes_kwargs=[
                {'roles': ['controller'],
                 'status': consts.NODE_STATUSES.ready}]
        )

        self.cluster = self.env.clusters[-1]
        self.cluster.extensions = ['volume_manager', 'converted_serializers']
        if not self.task_deploy:
            self.env.disable_task_deploy(self.cluster)

    def add_node(self, role):
        return self.env.create_node(
            cluster_id=self.cluster.id,
            pending_roles=[role],
            pending_addition=True
        )

    def get_rpc_args(self):
        self.env.launch_deployment()
        args, kwargs = nailgun.task.manager.rpc.cast.call_args
        return args[1][1]['args']

    def check_add_node_for_task_deploy(self, rpc_message):
        tasks_graph = rpc_message['tasks_graph']
        for node_id, tasks in six.iteritems(tasks_graph):
            if node_id is None or node_id == consts.MASTER_NODE_UID:
                # skip virtual node
                continue

            task_ids = {
                t['id'] for t in tasks
                if t['type'] != consts.ORCHESTRATOR_TASK_TYPES.skipped
            }
            # all tasks are run on all nodes
            self.assertTrue(self.tasks_for_rerun.issubset(task_ids))

    def check_add_compute_for_granular_deploy(self, new_node_uid, rpc_message):
        for node in rpc_message['deployment_info']:
            task_ids = {t['id'] for t in node['tasks']}
            if node['tasks'][0]['uids'] == [new_node_uid]:
                # all tasks are run on a new node
                self.assertTrue(
                    self.tasks_for_rerun.issubset(task_ids))
            else:
                # only selected tasks are run on a deployed node
                self.assertItemsEqual(self.tasks_for_rerun, task_ids)

    def check_add_controller_for_granular_deploy(self, rpc_message):
        for node in rpc_message['deployment_info']:
            task_ids = {t['id'] for t in node['tasks']}
            # controller is redeployed when other one is added
            # so all tasks are run on all nodes
            self.assertTrue(
                self.tasks_for_rerun.issubset(task_ids))

    @mock.patch('nailgun.rpc.cast')
    def test_add_compute(self, _):
        new_node = self.add_node('compute')
        rpc_deploy_message = self.get_rpc_args()
        if self.task_deploy:
            self.check_add_node_for_task_deploy(rpc_deploy_message)
        else:
            self.check_add_compute_for_granular_deploy(
                new_node.uid, rpc_deploy_message
            )

    @mock.patch('nailgun.rpc.cast')
    def test_add_controller(self, _):
        self.add_node('controller')
        rpc_deploy_message = self.get_rpc_args()

        if self.task_deploy:
            self.check_add_node_for_task_deploy(rpc_deploy_message)
        else:
            self.check_add_controller_for_granular_deploy(rpc_deploy_message)


class TestDeploymentAttributesSerialization80MixIn(
    TestSerializerConverter80To90MixIn,
    BaseDeploymentSerializer
):
    def setUp(self):
        super(TestDeploymentAttributesSerialization80MixIn, self).setUp()
        self.cluster = self.env.create(
            release_kwargs={
                'version': self.env_version,
                'operating_system': consts.RELEASE_OS.ubuntu},
            cluster_kwargs={
                'mode': consts.CLUSTER_MODES.ha_compact,
                'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
                'net_segment_type': consts.NEUTRON_SEGMENT_TYPES.vlan})
        self.cluster_db = self.db.query(models.Cluster).get(self.cluster['id'])
        self.cluster.extensions = ['volume_manager', 'converted_serializers']
        self.serializer = self.create_serializer(self.cluster_db)

    def test_neutron_attrs(self):
        self.env.create_node(
            cluster_id=self.cluster_db.id,
            roles=['controller'], primary_roles=['controller']
        )
        objects.Cluster.prepare_for_deployment(self.cluster_db)
        serialized_for_astute = self.serializer.serialize(
            self.cluster_db, self.cluster_db.nodes)
        for node in serialized_for_astute:
            self.assertEqual(
                {
                    "bridge": consts.DEFAULT_BRIDGES_NAMES.br_floating,
                    "vlan_range": None
                },
                node['quantum_settings']['L2']['phys_nets']['physnet1']
            )
            l2 = (node["quantum_settings"]["predefined_networks"]
                  [self.cluster_db.network_config.floating_name]["L2"])

            self.assertEqual("physnet1", l2["physnet"])
            self.assertEqual("flat", l2["network_type"])

    def test_baremetal_transformations(self):
        self.env._set_additional_component(self.cluster_db, 'ironic', True)
        self.env.create_node(cluster_id=self.cluster_db.id,
                             roles=['primary-controller'])
        objects.Cluster.prepare_for_deployment(self.cluster_db)
        serialized_for_astute = self.serializer.serialize(
            self.cluster_db, self.cluster_db.nodes)
        for node in serialized_for_astute:
            if node['uid'] == 'master':
                continue
            transformations = node['network_scheme']['transformations']
            baremetal_brs = filter(lambda t: t.get('name') ==
                                   consts.DEFAULT_BRIDGES_NAMES.br_baremetal,
                                   transformations)
            baremetal_ports = filter(lambda t: t.get('name') == "eth0.104",
                                     transformations)
            expected_patch = {
                'action': 'add-patch',
                'bridges': [consts.DEFAULT_BRIDGES_NAMES.br_ironic,
                            consts.DEFAULT_BRIDGES_NAMES.br_baremetal],
                'provider': 'ovs'}
            self.assertEqual(len(baremetal_brs), 1)
            self.assertEqual(len(baremetal_ports), 1)
            self.assertEqual(baremetal_ports[0]['bridge'],
                             consts.DEFAULT_BRIDGES_NAMES.br_baremetal)
            self.assertIn(expected_patch, transformations)

    def test_disks_attrs(self):
        disks = [
            {
                "model": "TOSHIBA MK1002TS",
                "name": "sda",
                "disk": "sda",
                "size": 1004886016
            },
        ]
        expected_node_volumes_hash = [
            {
                u'name': u'sda',
                u'bootable': True,
                u'extra': [],
                u'free_space': 330,
                u'volumes': [
                    {
                        u'type': u'boot',
                        u'size': 300
                    },
                    {
                        u'mount': u'/boot',
                        u'type': u'partition',
                        u'file_system': u'ext2',
                        u'name': u'Boot',
                        u'size': 200
                    },
                    {
                        u'type': u'lvm_meta_pool',
                        u'size': 64
                    },
                    {
                        u'vg': u'os',
                        u'type': u'pv',
                        u'lvm_meta_size': 64,
                        u'size': 394
                    },
                    {
                        u'vg': u'vm',
                        u'type': u'pv',
                        u'lvm_meta_size': 0,
                        u'size': 0
                    }
                ],
                u'type': u'disk',
                u'id': u'sda',
                u'size': 958
            },
            {
                u'_allocate_size': u'min',
                u'label': u'Base System',
                u'min_size': 19456,
                u'volumes': [
                    {
                        u'mount': u'/',
                        u'size': -3766,
                        u'type': u'lv',
                        u'name': u'root',
                        u'file_system': u'ext4'
                    },
                    {
                        u'mount': u'swap',
                        u'size': 4096,
                        u'type': u'lv',
                        u'name': u'swap',
                        u'file_system': u'swap'
                    }
                ],
                u'type': u'vg',
                u'id': u'os'
            },
            {
                u'_allocate_size': u'all',
                u'label': u'Virtual Storage',
                u'min_size': 5120,
                u'volumes': [
                    {
                        u'mount': u'/var/lib/nova',
                        u'size': 0,
                        u'type': u'lv',
                        u'name': u'nova',
                        u'file_system': u'xfs'
                    }
                ],
                u'type': u'vg',
                u'id': u'vm'
            }
        ]
        self.env.create_node(
            cluster_id=self.cluster_db.id,
            roles=['compute'],
            meta={"disks": disks},
        )
        objects.Cluster.prepare_for_deployment(self.cluster_db)
        serialized_for_astute = self.serializer.serialize(
            self.cluster_db, self.cluster_db.nodes)
        for node in serialized_for_astute:
            if node['uid'] == 'master':
                continue
            self.assertIn("node_volumes", node)
            self.assertItemsEqual(
                expected_node_volumes_hash, node["node_volumes"])

    def test_attributes_contains_plugins(self):
        self.env.create_plugin(
            cluster=self.cluster_db,
            name='plugin_1',
            attributes_metadata={'attributes': {'name': 'plugin_1'}},
            package_version='4.0.0',
            fuel_version=['8.0'])
        self.env.create_plugin(
            cluster=self.cluster_db,
            name='plugin_2',
            attributes_metadata={'attributes': {'name': 'plugin_2'}},
            package_version='4.0.0',
            fuel_version=['8.0'])
        self.env.create_plugin(
            cluster=self.cluster_db,
            enabled=False,
            name='plugin_3',
            attributes_metadata={'attributes': {'name': 'plugin_3'}},
            package_version='4.0.0',
            fuel_version=['8.0'])

        expected_plugins_list = ['plugin_1', 'plugin_2']
        self.env.create_node(
            cluster_id=self.cluster_db.id,
            roles=['compute']
        )
        objects.Cluster.prepare_for_deployment(self.cluster_db)
        serialized_for_astute = self.serializer.serialize(
            self.cluster_db, self.cluster_db.nodes)
        for node in serialized_for_astute:
            if node['uid'] == 'master':
                continue
            self.assertIn('plugins', node)
            self.assertItemsEqual(
                expected_plugins_list, node['plugins'])
            self.assertTrue(all(name in node for name
                                in expected_plugins_list))

    def test_common_attributes_contains_plugin_metadata(self):
        expected_value = 'check_value'
        plugin = self.env.create_plugin(
            cluster=self.cluster_db,
            name='test_plugin',
            package_version='4.0.0',
            fuel_version=['8.0'],
            attributes_metadata={
                'attributes': {
                    'config': {
                        'description': "Description",
                        'weight': 52,
                        'value': expected_value
                    }
                }
            }
        )
        attrs = self.serializer.get_common_attrs(self.cluster_db)
        self.assertIn('test_plugin', attrs)
        self.assertIn('metadata', attrs['test_plugin'])
        self.assertEqual(
            plugin.id, attrs['test_plugin']['metadata']['plugin_id']
        )
        self.assertEqual(expected_value, attrs['test_plugin']['config'])


class TestMultiNodeGroupsSerialization80MixIn(
    TestSerializerConverter80To90MixIn,
    BaseDeploymentSerializer
):
    def setUp(self):
        super(TestMultiNodeGroupsSerialization80MixIn, self).setUp()
        cluster = self.env.create(
            release_kwargs={'version': self.env_version},
            cluster_kwargs={
                'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
                'net_segment_type': consts.NEUTRON_SEGMENT_TYPES.vlan}
        )
        self.env.create_nodes_w_interfaces_count(
            nodes_count=3,
            if_count=2,
            roles=['controller', 'cinder'],
            pending_addition=True,
            cluster_id=cluster['id'])
        self.cluster_db = self.db.query(models.Cluster).get(cluster['id'])
        cluster.extensions = ['volume_manager', 'converted_serializers']
        self.serializer = self.create_serializer(cluster)

    def _add_node_group_with_node(self, cidr_start, node_address):
        node_group = self.env.create_node_group(
            api=False, cluster_id=self.cluster_db.id,
            name='ng_' + cidr_start + '_' + str(node_address))

        with mock.patch.object(rpc, 'cast'):
            resp = self.env.setup_networks_for_nodegroup(
                cluster_id=self.cluster_db.id, node_group=node_group,
                cidr_start=cidr_start)
        self.assertEqual(resp.status_code, 200)

        self.db.query(models.Task).filter_by(
            name=consts.TASK_NAMES.update_dnsmasq
        ).delete(synchronize_session=False)

        self.env.create_nodes_w_interfaces_count(
            nodes_count=1,
            if_count=2,
            roles=['compute'],
            pending_addition=True,
            cluster_id=self.cluster_db.id,
            group_id=node_group.id,
            ip='{0}.9.{1}'.format(cidr_start, node_address))

    def _check_routes_count(self, count):
        objects.Cluster.prepare_for_deployment(self.cluster_db)
        facts = self.serializer.serialize(
            self.cluster_db, self.cluster_db.nodes)

        for node in facts:
            if node['uid'] == 'master':
                continue
            endpoints = node['network_scheme']['endpoints']
            for name, descr in six.iteritems(endpoints):
                if descr['IP'] == 'none':
                    self.assertNotIn('routes', descr)
                else:
                    self.assertEqual(len(descr['routes']), count)

    def test_routes_with_no_shared_networks_2_nodegroups(self):
        self._add_node_group_with_node('199.99', 3)
        # all networks have different CIDRs
        self._check_routes_count(1)

    def test_routes_with_no_shared_networks_3_nodegroups(self):
        self._add_node_group_with_node('199.99', 3)
        self._add_node_group_with_node('199.77', 3)
        # all networks have different CIDRs
        self._check_routes_count(2)

    def test_routes_with_shared_networks_3_nodegroups(self):
        self._add_node_group_with_node('199.99', 3)
        self._add_node_group_with_node('199.99', 4)
        # networks in two racks have equal CIDRs
        self._check_routes_count(1)


class TestBlockDeviceDevicesSerialization80MixIn(
    TestSerializerConverter80To90MixIn,
    BaseDeploymentSerializer
):
    def setUp(self):
        super(TestBlockDeviceDevicesSerialization80MixIn, self).setUp()
        self.cluster = self.env.create(
            release_kwargs={'version': self.env_version},
            cluster_kwargs={
                'mode': consts.CLUSTER_MODES.ha_compact,
                'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
                'net_segment_type': consts.NEUTRON_SEGMENT_TYPES.vlan})
        self.cluster_db = self.db.query(models.Cluster).get(self.cluster['id'])
        self.cluster.extensions = ['volume_manager', 'converted_serializers']
        self.serializer = self.create_serializer(self.cluster_db)

    def test_block_device_disks(self):
        self.env.create_node(
            cluster_id=self.cluster_db.id,
            roles=['cinder-block-device']
        )
        self.env.create_node(
            cluster_id=self.cluster_db.id,
            roles=['controller']
        )
        objects.Cluster.prepare_for_deployment(self.cluster_db)
        serialized_for_astute = self.serializer.serialize(
            self.cluster_db, self.cluster_db.nodes)
        for node in serialized_for_astute:
            if node['uid'] == 'master':
                continue
            self.assertIn("node_volumes", node)
            for node_volume in node["node_volumes"]:
                if node_volume["id"] == "cinder-block-device":
                    self.assertEqual(node_volume["volumes"], [])
                else:
                    self.assertNotEqual(node_volume["volumes"], [])


class TestSerializeInterfaceDriversData80MixIn(
    TestSerializerConverter80To90MixIn,
    TestSerializeInterfaceDriversData
):
    pass


class TestDeploymentHASerializer80MixIn(
    TestSerializerConverter80To90MixIn,
    TestDeploymentHASerializer70
):
    pass
diff --git a/nailgun-test-settings.yaml b/nailgun-test-settings.yaml
new file mode 100644
index 0000000..d0be7e3
--- /dev/null
+++ b/nailgun-test-settings.yaml
@@ -0,0 +1,15 @@
DEVELOPMENT: 1
DATABASE:
  name: "openstack_citest"
  engine: "postgresql"
  host: "localhost"
  port: "5432"
  user: "openstack_citest"
  passwd: "openstack_citest"
API_LOG: "logs/api.log"
APP_LOG: "logs/app.log"
APP_LOGLEVEL: "ERROR"
RPC_CONSUMER_LOG_PATH: "logs/receiverd.log"
ASSASSIN_LOG_PATH: "logs/assassind.log"
STATS_LOGS_PATH: "logs/"
LCM_SERIALIZERS_CONCURRENCY_FACTOR: 1
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..30806d5
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,5 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.

pbr>=1.6
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..e29fa63
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,28 @@
[metadata]
name = fuel-nailgun-extension-converted-serializers
summary = Converted serializers extension for Fuel
description-file = README.rst
author = Mirantis Inc.
author-email = product@mirantis.com
home-page = http://mirantis.com
classifier =
    Environment :: OpenStack
    Intended Audience :: Information Technology
    Intended Audience :: System Administrators
    License :: OSI Approved :: Apache Software License
    Operating System :: POSIX :: Linux
    Programming Language :: Python
    Programming Language :: Python :: 2
    Programming Language :: Python :: 2.7
    Programming Language :: Python :: 3
    Programming Language :: Python :: 3.3
    Programming Language :: Python :: 3.4

[files]
packages =
    converted_serializers

[entry_points]
nailgun.extensions =
    converted_serializers = converted_serializers.extension:ConvertedSerializersExtension
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..056c16c
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,29 @@
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools

# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
    import multiprocessing  # noqa
except ImportError:
    pass

setuptools.setup(
    setup_requires=['pbr'],
    pbr=True)
diff --git a/specs/fuel-nailgun-extension-converted-serializers.spec b/specs/fuel-nailgun-extension-converted-serializers.spec
new file mode 100644
index 0000000..5a62927
--- /dev/null
+++ b/specs/fuel-nailgun-extension-converted-serializers.spec
@@ -0,0 +1,38 @@
Name: fuel-nailgun-extension-converted-serializers
Version: 10.0~b1
Release: 1%{?dist}
Summary: Converted serializers extension for Fuel
License: Apache-2.0
Url: https://git.openstack.org/cgit/openstack/fuel-nailgun-extension-converted-serializers/
Source0: %{name}-%{version}.tar.gz
BuildArch: noarch

BuildRequires: python-devel
BuildRequires: python-pbr
BuildRequires: python-setuptools

Requires: fuel-nailgun
Requires: python-pbr

%description
Converted serializers extension for Fuel

%prep
%setup -q -c -n %{name}-%{version}

%build
export OSLO_PACKAGE_VERSION=%{version}
%py2_build

%install
export OSLO_PACKAGE_VERSION=%{version}
%py2_install

%files
%license LICENSE
%{python2_sitelib}/converted_serializers
%{python2_sitelib}/*.egg-info

%changelog
* Thu Sep 8 2016 Vladimir Kuklin <vkuklin@mirantis.com> - 10.0~b1-1
- Initial package.
diff --git a/test-requirements.txt b/test-requirements.txt
new file mode 100644
index 0000000..b2427f4
--- /dev/null
+++ b/test-requirements.txt
@@ -0,0 +1,6 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.

hacking
pytest
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..631a27b
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,38 @@
[tox]
minversion = 2.0
envlist = pep8,py27
skipsdist = True

[base]
NAILGUN_REPO = git+https://github.com/openstack/fuel-web.git
NAILGUN_CONFIG = {toxinidir}/nailgun-test-settings.yaml
NAILGUN_BRANCH = {env:ZUUL_BRANCH:master}

[testenv]
deps = -r{toxinidir}/test-requirements.txt
setenv = VIRTUAL_ENV={envdir}

[testenv:py27]
usedevelop = True
deps = {[testenv]deps}
       -r{toxinidir}/requirements.txt
       -e{[base]NAILGUN_REPO}@{[base]NAILGUN_BRANCH}#egg=nailgun[test]&subdirectory=nailgun
setenv = {[testenv]setenv}
         NAILGUN_CONFIG={[base]NAILGUN_CONFIG}

commands = py.test -v --junit-xml {toxinidir}/extension.xml {posargs}

[testenv:pep8]
commands = flake8 {posargs}

[testenv:venv]
commands = {posargs}

[flake8]
# E123, E125 skipped as they are invalid PEP-8.
# H101 - Don't force author's name on TODOs
# H304 is "No relative imports" error, required for extensions
show-source = True
ignore = E123,E125,H101,H304
builtins = _
exclude = .venv,.git,.tox,dist,doc,*lib/python*,*egg,build