Add Nailgun Converted serializers base code
This commit introduces the Nailgun converted serializers extension, which allows usage of Fuel Mitaka LCM features in pre-Mitaka releases. It essentially runs the old serializers and patches the result properly, so that the LCM deployment engine can work with this serialized data.
This commit is contained in:
parent
ab8ea03092
commit
caebcc64b7
|
@ -0,0 +1,6 @@
|
|||
[run]
|
||||
branch = True
|
||||
source = converted_serializers
|
||||
|
||||
[report]
|
||||
ignore_errors = True
|
|
@ -1,89 +1,58 @@
|
|||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
env/
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
*.egg-info/
|
||||
# Packages
|
||||
*.egg*
|
||||
*.egg-info
|
||||
dist
|
||||
build
|
||||
eggs
|
||||
parts
|
||||
bin
|
||||
var
|
||||
sdist
|
||||
develop-eggs
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
lib
|
||||
lib64
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
cover/
|
||||
.coverage*
|
||||
!.coveragerc
|
||||
.tox
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*,cover
|
||||
.hypothesis/
|
||||
.testrepository
|
||||
.venv
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
# Mr Developer
|
||||
.mr.developer.cfg
|
||||
.project
|
||||
.pydevproject
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
# Complexity
|
||||
output/*.html
|
||||
output/*/index.html
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
# Sphinx
|
||||
doc/build
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
# pbr generates these
|
||||
AUTHORS
|
||||
ChangeLog
|
||||
|
||||
# PyBuilder
|
||||
target/
|
||||
# Editors
|
||||
*~
|
||||
.*.swp
|
||||
.*sw?
|
||||
|
||||
# IPython Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
|
||||
# celery beat schedule file
|
||||
celerybeat-schedule
|
||||
|
||||
# dotenv
|
||||
.env
|
||||
|
||||
# virtualenv
|
||||
venv/
|
||||
ENV/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
# Files created by releasenotes build
|
||||
extension.xml
|
||||
|
|
|
@ -0,0 +1,6 @@
|
|||
include AUTHORS
|
||||
include ChangeLog
|
||||
exclude .gitignore
|
||||
exclude .gitreview
|
||||
|
||||
global-exclude *.pyc
|
|
@ -0,0 +1,10 @@
|
|||
Fuel nailgun extension for converted serializers
|
||||
=================================================
|
||||
|
||||
This extension for Nailgun provides conversion layer which triggers pre-Mitaka
|
||||
serializers to generate deployment data, so that pre-9.x clusters can leverage
|
||||
Fuel Mitaka LCM features.
|
||||
|
||||
Installation
|
||||
------------
|
||||
Just install the package `fuel-nailgun-extension-converted-serializers`
|
|
@ -0,0 +1,6 @@
|
|||
libpq-dev
|
||||
postgresql
|
||||
postgresql-client
|
||||
# We don't use these, but mysql-prep step is in template job
|
||||
mysql-client
|
||||
mysql-server
|
|
@ -0,0 +1,24 @@
|
|||
# coding: utf-8
|
||||
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
def pytest_configure(config):
    """Pytest hook: bring the Nailgun database to a clean state.

    Drops any leftover schema from a previous run and re-creates it so
    the test session starts fresh.  The nailgun import is deferred to
    call time so conftest collection works without nailgun on the path.
    """
    from nailgun import db as nailgun_db

    nailgun_db.dropdb()
    nailgun_db.syncdb()
|
||||
|
||||
|
||||
def pytest_unconfigure(config):
    """Pytest hook: drop the Nailgun database after the test session.

    Mirrors :func:`pytest_configure`; leaves no schema behind.
    """
    from nailgun import db as nailgun_db

    nailgun_db.dropdb()
|
|
@ -0,0 +1,72 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright 2015 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import logging
|
||||
|
||||
from nailgun import extensions
|
||||
from nailgun import objects
|
||||
from nailgun.orchestrator.deployment_serializers import \
|
||||
get_serializer_for_cluster
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ConvertPreLCMtoLCM(extensions.BasePipeline):
    """Data pipeline that adapts pre-LCM serializer output for the LCM engine.

    For clusters whose release does not support LCM, this pipeline re-runs
    the appropriate pre-Mitaka serializer and patches the result so the
    Mitaka LCM deployment engine can consume it.  Clusters that already
    support LCM pass through unchanged.
    """

    @classmethod
    def pre_process_data(cls, data, cluster, nodes, **kwargs):
        # Extension hook: no pre-processing by default; returns data as-is.
        return data

    @classmethod
    def post_process_data(cls, data, cluster, nodes, **kwargs):
        # Extension hook: no post-processing by default; returns data as-is.
        return data

    @classmethod
    def serialize(cls, data, cluster, nodes, **kwargs):
        # LCM-capable releases need no conversion — keep the LCM
        # serializer's output untouched.
        if objects.Release.is_lcm_supported(cluster.release):
            return data
        # Otherwise run the legacy (pre-LCM) serializer for this cluster
        # and use its output instead of the incoming LCM data.
        serializer = get_serializer_for_cluster(cluster)()
        real_data = serializer.serialize(cluster, nodes, **kwargs)
        return real_data

    @classmethod
    def process_deployment(cls, data, cluster, nodes, **kwargs):
        # Full pipeline: pre-process -> (re-)serialize -> post-process.
        pre_processed_data = cls.pre_process_data(data,
                                                  cluster, nodes, **kwargs)
        real_data = cls.serialize(pre_processed_data, cluster, nodes, **kwargs)
        post_processed_data = cls.post_process_data(real_data,
                                                    cluster, nodes, **kwargs)
        # copypaste cluster specific values from LCM serializer.
        # This is needed for tasks parameters interpolation like CLUSTER_ID.
        # NOTE(review): assumes `data` is a non-empty list of per-node dicts
        # each carrying a 'cluster' key — confirm against the LCM serializer.
        cluster_data = data[0]['cluster']
        for node_data in post_processed_data:
            node_data['cluster'] = cluster_data
        return post_processed_data

    @classmethod
    def process_provisioning(cls, data, cluster, nodes, **kwargs):
        # Provisioning data needs no conversion; pass it through.
        return data
|
||||
|
||||
|
||||
class ConvertedSerializersExtension(extensions.BaseExtension):
    """Nailgun extension registering the serializer-conversion pipeline.

    Registered via the ``nailgun.extensions`` entry point (see setup.cfg);
    enabled per-cluster by listing 'converted_serializers' in
    ``cluster.extensions``.
    """
    # Unique extension identifier used in cluster.extensions lists.
    name = 'converted_serializers'
    version = '0.0.1'
    description = "Serializers Conversion extension"
    # NOTE(review): weight presumably orders this extension's pipelines
    # relative to others — confirm against nailgun's extension manager.
    weight = 100

    # Pipelines this extension contributes to data serialization.
    data_pipelines = [
        ConvertPreLCMtoLCM,
    ]
|
|
@ -0,0 +1,647 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright 2015 Mirantis, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from copy import deepcopy
|
||||
import mock
|
||||
import six
|
||||
|
||||
import nailgun
|
||||
|
||||
from nailgun import consts
|
||||
from nailgun.db.sqlalchemy import models
|
||||
from nailgun import objects
|
||||
from nailgun import rpc
|
||||
|
||||
from nailgun.orchestrator import deployment_serializers
|
||||
from nailgun.orchestrator.deployment_serializers import \
|
||||
get_serializer_for_cluster
|
||||
from nailgun.orchestrator.neutron_serializers import \
|
||||
NeutronNetworkDeploymentSerializer80
|
||||
from nailgun.orchestrator.neutron_serializers import \
|
||||
NeutronNetworkTemplateSerializer80
|
||||
from nailgun.test.integration.test_orchestrator_serializer import \
|
||||
BaseDeploymentSerializer
|
||||
from nailgun.test.integration.test_orchestrator_serializer import \
|
||||
TestSerializeInterfaceDriversData
|
||||
from nailgun.test.integration.test_orchestrator_serializer_70 import \
|
||||
TestDeploymentHASerializer70
|
||||
from nailgun.test.integration.test_orchestrator_serializer_80 import \
|
||||
TestSerializer80Mixin
|
||||
|
||||
|
||||
class TestSerializerWrapper(deployment_serializers.DeploymentLCMSerializer):
    """LCM serializer facade used by the converter tests.

    Routes full serialization through the module-level LCM entry point
    while borrowing the 8.0 HA serializer's network-provider selection.
    """

    def serialize(self, cluster, nodes, ignore_customized=False):
        # Delegate to the module-level LCM serialization entry point
        # instead of the inherited instance implementation.
        return deployment_serializers.serialize_for_lcm(
            cluster, nodes, ignore_customized=ignore_customized)

    def get_net_provider_serializer(cls, cluster):
        # NOTE(review): first argument is named `cls` but there is no
        # @classmethod decorator, so this is an instance method in
        # practice; callers invoke it on instances, which works — confirm
        # this shadowing of the base classmethod is intentional.
        return deployment_serializers\
            .DeploymentHASerializer80.get_net_provider_serializer(cluster)
|
||||
|
||||
|
||||
class TestSerializerConverter80To90MixIn(TestSerializer80Mixin):
    """Mix-in running the 8.0 serializer test suite through the converter.

    Pins an 8.0 (liberty) environment while forcing LCM-style behavior,
    so the inherited assertions exercise the conversion pipeline.
    """
    env_version = "liberty-8.0"
    task_deploy = True
    is_propagate_task_deploy = True
    # Force the LCM code path even though the release itself is pre-LCM.
    enforce_lcm = True

    @classmethod
    def create_serializer(cls, cluster):
        # The wrapper ignores its constructor argument, hence None.
        serializer_type = TestSerializerWrapper
        return serializer_type(None)
|
||||
|
||||
|
||||
class TestNetworkTemplateSerializer80MixIn(
        TestSerializerConverter80To90MixIn,
        BaseDeploymentSerializer
):
    """Network-template serialization tests run through the converter."""

    # Serializer chosen when no configuration template is set vs. when
    # one is present.
    legacy_serializer = NeutronNetworkDeploymentSerializer80
    template_serializer = NeutronNetworkTemplateSerializer80

    def setUp(self, *args):
        """Create an 8.0 neutron/vlan cluster with this extension enabled."""
        super(TestNetworkTemplateSerializer80MixIn, self).setUp()
        self.env.create(
            release_kwargs={'version': self.env_version},
            cluster_kwargs={
                'mode': consts.CLUSTER_MODES.ha_compact,
                'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
                'net_segment_type': consts.NEUTRON_SEGMENT_TYPES.vlan})
        self.net_template = self.env.read_fixtures(['network_template_80'])[0]
        self.cluster = self.env.clusters[-1]
        # Enable the extension under test alongside volume_manager.
        self.cluster.extensions = ['volume_manager', 'converted_serializers']
        self.serializer = self.create_serializer(self.cluster)

    def test_get_net_provider_serializer(self):
        """Template presence selects template vs. legacy net serializer."""
        self.cluster.network_config.configuration_template = None

        net_serializer = self.serializer.\
            get_net_provider_serializer(self.cluster)
        self.assertIs(net_serializer, self.legacy_serializer)

        self.cluster.network_config.configuration_template = \
            self.net_template
        net_serializer = self.serializer.\
            get_net_provider_serializer(self.cluster)
        self.assertIs(net_serializer, self.template_serializer)

    def test_baremetal_neutron_attrs(self):
        """Baremetal network in the template produces the expected attrs."""
        # Extend the fixture template with a baremetal network wired to
        # the controller role on an extra NIC.
        brmtl_template = deepcopy(
            self.net_template['adv_net_template']['default'])
        brmtl_template['network_assignments']['baremetal'] = {
            'ep': 'br-baremetal'}
        brmtl_template['templates_for_node_role']['controller'].append(
            'baremetal')
        brmtl_template['nic_mapping']['default']['if8'] = 'eth7'
        brmtl_template['network_scheme']['baremetal'] = {
            'endpoints': ['br-baremetal'],
            'transformations': [],
            'roles': {'baremetal': 'br-baremetal'}}
        self.cluster.network_config.configuration_template = {
            'adv_net_template': {'default': brmtl_template}, 'pk': 1}
        self._check_baremetal_neutron_attrs(self.cluster)

    def test_network_schemes_priorities(self):
        """Transformations are emitted in the expected priority order."""
        expected = [
            {
                "action": "add-br",
                "name": "br-prv",
                "provider": "ovs"
            },
            {
                "action": "add-br",
                "name": "br-aux"
            },
            {
                "action": "add-patch",
                "bridges": [
                    "br-prv",
                    "br-aux"
                ],
                "provider": "ovs",
                "mtu": 65000
            },
            {
                "action": "add-port",
                "bridge": "br-aux",
                "name": "eth3.101"
            },
            {
                "action": "add-br",
                "name": "br-fw-admin"
            },
            {
                "action": "add-port",
                "bridge": "br-fw-admin",
                "name": "eth0"
            },
            {
                "action": "add-br",
                "name": "br-mgmt"
            },
            {
                "action": "add-port",
                "bridge": "br-mgmt",
                "name": "eth1.104"
            },
            {
                "action": "add-br",
                "name": "br-storage"
            },
            {
                "action": "add-port",
                "bridge": "br-storage",
                "name": "eth2"
            }
        ]

        objects.Cluster.set_network_template(
            self.cluster,
            self.net_template
        )

        node = self.env.create_nodes_w_interfaces_count(
            1, 8, roles=['compute', 'cinder'],
            cluster_id=self.cluster.id
        )[0]

        self.serializer = get_serializer_for_cluster(self.cluster)
        net_serializer = self.serializer.get_net_provider_serializer(
            self.cluster)

        nm = objects.Cluster.get_network_manager(self.cluster)
        network_scheme = net_serializer.generate_network_scheme(
            node, nm.get_node_networks(node))
        self.assertEqual(expected, network_scheme['transformations'])
|
||||
|
||||
|
||||
class TestDeploymentTasksSerialization80MixIn(
        TestSerializerConverter80To90MixIn,
        BaseDeploymentSerializer
):
    """Task (re)serialization tests for adding nodes to a deployed cluster."""

    # Tasks expected to be re-run on already-deployed nodes.
    tasks_for_rerun = {"globals", "netconfig"}

    def setUp(self):
        """Create an operational cluster with one ready controller."""
        super(TestDeploymentTasksSerialization80MixIn, self).setUp()
        self.env.create(
            release_kwargs={'version': self.env_version},
            cluster_kwargs={
                'mode': consts.CLUSTER_MODES.ha_compact,
                'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
                'net_segment_type': consts.NEUTRON_SEGMENT_TYPES.vlan,
                'status': consts.CLUSTER_STATUSES.operational},
            nodes_kwargs=[
                {'roles': ['controller'],
                 'status': consts.NODE_STATUSES.ready}]
        )

        self.cluster = self.env.clusters[-1]
        self.cluster.extensions = ['volume_manager', 'converted_serializers']
        if not self.task_deploy:
            self.env.disable_task_deploy(self.cluster)

    def add_node(self, role):
        """Add a pending node with the given role to the cluster."""
        return self.env.create_node(
            cluster_id=self.cluster.id,
            pending_roles=[role],
            pending_addition=True
        )

    def get_rpc_args(self):
        """Launch deployment and capture the args of the mocked rpc.cast."""
        self.env.launch_deployment()
        args, kwargs = nailgun.task.manager.rpc.cast.call_args
        return args[1][1]['args']

    def check_add_node_for_task_deploy(self, rpc_message):
        """Assert all rerun tasks are scheduled on every real node."""
        tasks_graph = rpc_message['tasks_graph']
        for node_id, tasks in six.iteritems(tasks_graph):
            if node_id is None or node_id == consts.MASTER_NODE_UID:
                # skip virtual node
                continue

            task_ids = {
                t['id'] for t in tasks
                if t['type'] != consts.ORCHESTRATOR_TASK_TYPES.skipped
            }
            # all tasks are run on all nodes
            self.assertTrue(self.tasks_for_rerun.issubset(task_ids))

    def check_add_compute_for_granular_deploy(self, new_node_uid, rpc_message):
        """Granular deploy: full tasks on the new node, rerun set elsewhere."""
        for node in rpc_message['deployment_info']:
            task_ids = {t['id'] for t in node['tasks']}
            if node['tasks'][0]['uids'] == [new_node_uid]:
                # all tasks are run on a new node
                self.assertTrue(
                    self.tasks_for_rerun.issubset(task_ids))
            else:
                # only selected tasks are run on a deployed node
                self.assertItemsEqual(self.tasks_for_rerun, task_ids)

    def check_add_controller_for_granular_deploy(self, rpc_message):
        """Granular deploy: adding a controller redeploys every node."""
        for node in rpc_message['deployment_info']:
            task_ids = {t['id'] for t in node['tasks']}
            # controller is redeployed when other one is added
            # so all tasks are run on all nodes
            self.assertTrue(
                self.tasks_for_rerun.issubset(task_ids))

    @mock.patch('nailgun.rpc.cast')
    def test_add_compute(self, _):
        new_node = self.add_node('compute')
        rpc_deploy_message = self.get_rpc_args()
        if self.task_deploy:
            self.check_add_node_for_task_deploy(rpc_deploy_message)
        else:
            self.check_add_compute_for_granular_deploy(
                new_node.uid, rpc_deploy_message
            )

    @mock.patch('nailgun.rpc.cast')
    def test_add_controller(self, _):
        self.add_node('controller')
        rpc_deploy_message = self.get_rpc_args()

        if self.task_deploy:
            self.check_add_node_for_task_deploy(rpc_deploy_message)
        else:
            self.check_add_controller_for_granular_deploy(rpc_deploy_message)
|
||||
|
||||
|
||||
class TestDeploymentAttributesSerialization80MixIn(
        TestSerializerConverter80To90MixIn,
        BaseDeploymentSerializer
):
    """Attribute-serialization tests run through the conversion pipeline.

    Covers neutron L2 attributes, ironic/baremetal transformations, disk
    volume layout, and plugin metadata in the serialized output.
    """

    def setUp(self):
        """Create an 8.0 Ubuntu neutron/vlan cluster with the extension on."""
        super(TestDeploymentAttributesSerialization80MixIn, self).setUp()
        self.cluster = self.env.create(
            release_kwargs={
                'version': self.env_version,
                'operating_system': consts.RELEASE_OS.ubuntu},
            cluster_kwargs={
                'mode': consts.CLUSTER_MODES.ha_compact,
                'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
                'net_segment_type': consts.NEUTRON_SEGMENT_TYPES.vlan})
        self.cluster_db = self.db.query(models.Cluster).get(self.cluster['id'])
        self.cluster.extensions = ['volume_manager', 'converted_serializers']
        self.serializer = self.create_serializer(self.cluster_db)

    def test_neutron_attrs(self):
        """Serialized quantum settings expose the expected flat physnet."""
        self.env.create_node(
            cluster_id=self.cluster_db.id,
            roles=['controller'], primary_roles=['controller']
        )
        objects.Cluster.prepare_for_deployment(self.cluster_db)
        serialized_for_astute = self.serializer.serialize(
            self.cluster_db, self.cluster_db.nodes)
        for node in serialized_for_astute:
            self.assertEqual(
                {
                    "bridge": consts.DEFAULT_BRIDGES_NAMES.br_floating,
                    "vlan_range": None
                },
                node['quantum_settings']['L2']['phys_nets']['physnet1']
            )
            l2 = (node["quantum_settings"]["predefined_networks"]
                  [self.cluster_db.network_config.floating_name]["L2"])

            self.assertEqual("physnet1", l2["physnet"])
            self.assertEqual("flat", l2["network_type"])

    def test_baremetal_transformations(self):
        """Ironic enables the baremetal bridge/port/patch transformations."""
        self.env._set_additional_component(self.cluster_db, 'ironic', True)
        self.env.create_node(cluster_id=self.cluster_db.id,
                             roles=['primary-controller'])
        objects.Cluster.prepare_for_deployment(self.cluster_db)
        serialized_for_astute = self.serializer.serialize(
            self.cluster_db, self.cluster_db.nodes)
        for node in serialized_for_astute:
            if node['uid'] == 'master':
                continue
            transformations = node['network_scheme']['transformations']
            # Use list comprehensions rather than filter(): on Python 3
            # filter() returns an iterator, which breaks the len() and
            # indexing below (the package declares py3 support).
            baremetal_brs = [
                t for t in transformations
                if t.get('name') == consts.DEFAULT_BRIDGES_NAMES.br_baremetal
            ]
            baremetal_ports = [
                t for t in transformations
                if t.get('name') == "eth0.104"
            ]
            expected_patch = {
                'action': 'add-patch',
                'bridges': [consts.DEFAULT_BRIDGES_NAMES.br_ironic,
                            consts.DEFAULT_BRIDGES_NAMES.br_baremetal],
                'provider': 'ovs'}
            self.assertEqual(len(baremetal_brs), 1)
            self.assertEqual(len(baremetal_ports), 1)
            self.assertEqual(baremetal_ports[0]['bridge'],
                             consts.DEFAULT_BRIDGES_NAMES.br_baremetal)
            self.assertIn(expected_patch, transformations)

    def test_disks_attrs(self):
        """node_volumes reflects the volume manager layout for one disk."""
        disks = [
            {
                "model": "TOSHIBA MK1002TS",
                "name": "sda",
                "disk": "sda",
                "size": 1004886016
            },
        ]
        expected_node_volumes_hash = [
            {
                u'name': u'sda',
                u'bootable': True,
                u'extra': [],
                u'free_space': 330,
                u'volumes': [
                    {
                        u'type': u'boot',
                        u'size': 300
                    },
                    {
                        u'mount': u'/boot',
                        u'type': u'partition',
                        u'file_system': u'ext2',
                        u'name': u'Boot',
                        u'size': 200
                    },
                    {
                        u'type': u'lvm_meta_pool',
                        u'size': 64
                    },
                    {
                        u'vg': u'os',
                        u'type': u'pv',
                        u'lvm_meta_size': 64,
                        u'size': 394
                    },
                    {
                        u'vg': u'vm',
                        u'type': u'pv',
                        u'lvm_meta_size': 0,
                        u'size': 0
                    }
                ],
                u'type': u'disk',
                u'id': u'sda',
                u'size': 958
            },
            {
                u'_allocate_size': u'min',
                u'label': u'Base System',
                u'min_size': 19456,
                u'volumes': [
                    {
                        u'mount': u'/',
                        u'size': -3766,
                        u'type': u'lv',
                        u'name': u'root',
                        u'file_system': u'ext4'
                    },
                    {
                        u'mount': u'swap',
                        u'size': 4096,
                        u'type': u'lv',
                        u'name': u'swap',
                        u'file_system': u'swap'
                    }
                ],
                u'type': u'vg',
                u'id': u'os'
            },
            {
                u'_allocate_size': u'all',
                u'label': u'Virtual Storage',
                u'min_size': 5120,
                u'volumes': [
                    {
                        u'mount': u'/var/lib/nova',
                        u'size': 0,
                        u'type': u'lv',
                        u'name': u'nova',
                        u'file_system': u'xfs'
                    }
                ],
                u'type': u'vg',
                u'id': u'vm'
            }
        ]
        self.env.create_node(
            cluster_id=self.cluster_db.id,
            roles=['compute'],
            meta={"disks": disks},
        )
        objects.Cluster.prepare_for_deployment(self.cluster_db)
        serialized_for_astute = self.serializer.serialize(
            self.cluster_db, self.cluster_db.nodes)
        for node in serialized_for_astute:
            if node['uid'] == 'master':
                continue
            self.assertIn("node_volumes", node)
            self.assertItemsEqual(
                expected_node_volumes_hash, node["node_volumes"])

    def test_attributes_contains_plugins(self):
        """Only enabled plugins appear in each serialized node."""
        self.env.create_plugin(
            cluster=self.cluster_db,
            name='plugin_1',
            attributes_metadata={'attributes': {'name': 'plugin_1'}},
            package_version='4.0.0',
            fuel_version=['8.0'])
        self.env.create_plugin(
            cluster=self.cluster_db,
            name='plugin_2',
            attributes_metadata={'attributes': {'name': 'plugin_2'}},
            package_version='4.0.0',
            fuel_version=['8.0'])
        # plugin_3 is disabled and must be excluded from the output.
        self.env.create_plugin(
            cluster=self.cluster_db,
            enabled=False,
            name='plugin_3',
            attributes_metadata={'attributes': {'name': 'plugin_3'}},
            package_version='4.0.0',
            fuel_version=['8.0'])

        expected_plugins_list = ['plugin_1', 'plugin_2']
        self.env.create_node(
            cluster_id=self.cluster_db.id,
            roles=['compute']
        )
        objects.Cluster.prepare_for_deployment(self.cluster_db)
        serialized_for_astute = self.serializer.serialize(
            self.cluster_db, self.cluster_db.nodes)
        for node in serialized_for_astute:
            if node['uid'] == 'master':
                continue
            self.assertIn('plugins', node)
            self.assertItemsEqual(
                expected_plugins_list, node['plugins'])
            self.assertTrue(all(name in node for name
                                in expected_plugins_list))

    def test_common_attributes_contains_plugin_metadata(self):
        """Common attrs carry per-plugin metadata (plugin_id) and config."""
        expected_value = 'check_value'
        plugin = self.env.create_plugin(
            cluster=self.cluster_db,
            name='test_plugin',
            package_version='4.0.0',
            fuel_version=['8.0'],
            attributes_metadata={
                'attributes': {
                    'config': {
                        'description': "Description",
                        'weight': 52,
                        'value': expected_value
                    }
                }
            }
        )
        attrs = self.serializer.get_common_attrs(self.cluster_db)
        self.assertIn('test_plugin', attrs)
        self.assertIn('metadata', attrs['test_plugin'])
        self.assertEqual(
            plugin.id, attrs['test_plugin']['metadata']['plugin_id']
        )
        self.assertEqual(expected_value, attrs['test_plugin']['config'])
|
||||
|
||||
|
||||
class TestMultiNodeGroupsSerialization80MixIn(
        TestSerializerConverter80To90MixIn,
        BaseDeploymentSerializer
):
    """Route serialization across multiple node groups (racks)."""

    def setUp(self):
        """Create a cluster with three controller/cinder nodes."""
        super(TestMultiNodeGroupsSerialization80MixIn, self).setUp()
        cluster = self.env.create(
            release_kwargs={'version': self.env_version},
            cluster_kwargs={
                'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
                'net_segment_type': consts.NEUTRON_SEGMENT_TYPES.vlan}
        )
        self.env.create_nodes_w_interfaces_count(
            nodes_count=3,
            if_count=2,
            roles=['controller', 'cinder'],
            pending_addition=True,
            cluster_id=cluster['id'])
        self.cluster_db = self.db.query(models.Cluster).get(cluster['id'])
        cluster.extensions = ['volume_manager', 'converted_serializers']
        self.serializer = self.create_serializer(cluster)

    def _add_node_group_with_node(self, cidr_start, node_address):
        """Create a node group on cidr_start and add one compute node to it."""
        node_group = self.env.create_node_group(
            api=False, cluster_id=self.cluster_db.id,
            name='ng_' + cidr_start + '_' + str(node_address))

        # rpc.cast is mocked: network setup triggers an update_dnsmasq
        # task we do not want actually sent.
        with mock.patch.object(rpc, 'cast'):
            resp = self.env.setup_networks_for_nodegroup(
                cluster_id=self.cluster_db.id, node_group=node_group,
                cidr_start=cidr_start)
        self.assertEqual(resp.status_code, 200)

        # Drop the pending update_dnsmasq task so it does not block
        # subsequent deployment-related operations.
        self.db.query(models.Task).filter_by(
            name=consts.TASK_NAMES.update_dnsmasq
        ).delete(synchronize_session=False)

        self.env.create_nodes_w_interfaces_count(
            nodes_count=1,
            if_count=2,
            roles=['compute'],
            pending_addition=True,
            cluster_id=self.cluster_db.id,
            group_id=node_group.id,
            ip='{0}.9.{1}'.format(cidr_start, node_address))

    def _check_routes_count(self, count):
        """Assert each routed endpoint of every node has `count` routes."""
        objects.Cluster.prepare_for_deployment(self.cluster_db)
        facts = self.serializer.serialize(
            self.cluster_db, self.cluster_db.nodes)

        for node in facts:
            if node['uid'] == 'master':
                continue
            endpoints = node['network_scheme']['endpoints']
            for name, descr in six.iteritems(endpoints):
                if descr['IP'] == 'none':
                    # Endpoints without an IP carry no routes at all.
                    self.assertNotIn('routes', descr)
                else:
                    self.assertEqual(len(descr['routes']), count)

    def test_routes_with_no_shared_networks_2_nodegroups(self):
        self._add_node_group_with_node('199.99', 3)
        # all networks have different CIDRs
        self._check_routes_count(1)

    def test_routes_with_no_shared_networks_3_nodegroups(self):
        self._add_node_group_with_node('199.99', 3)
        self._add_node_group_with_node('199.77', 3)
        # all networks have different CIDRs
        self._check_routes_count(2)

    def test_routes_with_shared_networks_3_nodegroups(self):
        self._add_node_group_with_node('199.99', 3)
        self._add_node_group_with_node('199.99', 4)
        # networks in two racks have equal CIDRs
        self._check_routes_count(1)
|
||||
|
||||
|
||||
class TestBlockDeviceDevicesSerialization80MixIn(
        TestSerializerConverter80To90MixIn,
        BaseDeploymentSerializer
):
    """cinder-block-device role serialization through the converter."""

    def setUp(self):
        """Create an 8.0 neutron/vlan cluster with the extension enabled."""
        super(TestBlockDeviceDevicesSerialization80MixIn, self).setUp()
        self.cluster = self.env.create(
            release_kwargs={'version': self.env_version},
            cluster_kwargs={
                'mode': consts.CLUSTER_MODES.ha_compact,
                'net_provider': consts.CLUSTER_NET_PROVIDERS.neutron,
                'net_segment_type': consts.NEUTRON_SEGMENT_TYPES.vlan})
        self.cluster_db = self.db.query(models.Cluster).get(self.cluster['id'])
        self.cluster.extensions = ['volume_manager', 'converted_serializers']
        self.serializer = self.create_serializer(self.cluster_db)

    def test_block_device_disks(self):
        """cinder-block-device nodes get empty volume lists; others do not."""
        self.env.create_node(
            cluster_id=self.cluster_db.id,
            roles=['cinder-block-device']
        )
        self.env.create_node(
            cluster_id=self.cluster_db.id,
            roles=['controller']
        )
        objects.Cluster.prepare_for_deployment(self.cluster_db)
        serialized_for_astute = self.serializer.serialize(
            self.cluster_db, self.cluster_db.nodes)
        for node in serialized_for_astute:
            if node['uid'] == 'master':
                continue
            self.assertIn("node_volumes", node)
            for node_volume in node["node_volumes"]:
                if node_volume["id"] == "cinder-block-device":
                    self.assertEqual(node_volume["volumes"], [])
                else:
                    self.assertNotEqual(node_volume["volumes"], [])
|
||||
|
||||
|
||||
class TestSerializeInterfaceDriversData80MixIn(
        TestSerializerConverter80To90MixIn,
        TestSerializeInterfaceDriversData
):
    """Re-run the interface-drivers test suite through the converter.

    All assertions are inherited; the converter mix-in only changes the
    environment/serializer setup.
    """
    pass
|
||||
|
||||
|
||||
class TestDeploymentHASerializer80MixIn(
        TestSerializerConverter80To90MixIn,
        TestDeploymentHASerializer70
):
    """Re-run the 7.0 HA serializer test suite through the converter.

    All assertions are inherited; the converter mix-in only changes the
    environment/serializer setup.
    """
    pass
|
|
@ -0,0 +1,15 @@
|
|||
DEVELOPMENT: 1
|
||||
DATABASE:
|
||||
name: "openstack_citest"
|
||||
engine: "postgresql"
|
||||
host: "localhost"
|
||||
port: "5432"
|
||||
user: "openstack_citest"
|
||||
passwd: "openstack_citest"
|
||||
API_LOG: "logs/api.log"
|
||||
APP_LOG: "logs/app.log"
|
||||
APP_LOGLEVEL: "ERROR"
|
||||
RPC_CONSUMER_LOG_PATH: "logs/receiverd.log"
|
||||
ASSASSIN_LOG_PATH: "logs/assassind.log"
|
||||
STATS_LOGS_PATH: "logs/"
|
||||
LCM_SERIALIZERS_CONCURRENCY_FACTOR: 1
|
|
@ -0,0 +1,5 @@
|
|||
# The order of packages is significant, because pip processes them in the order
|
||||
# of appearance. Changing the order has an impact on the overall integration
|
||||
# process, which may cause wedges in the gate later.
|
||||
|
||||
pbr>=1.6
|
|
@ -0,0 +1,28 @@
|
|||
[metadata]
|
||||
name = fuel-nailgun-extension-converted-serializers
|
||||
summary = Converted serializers extension for Fuel
|
||||
description-file = README.rst
|
||||
author = Mirantis Inc.
|
||||
author-email = product@mirantis.com
|
||||
home-page = http://mirantis.com
|
||||
classifier =
|
||||
Environment :: OpenStack
|
||||
Intended Audience :: Information Technology
|
||||
Intended Audience :: System Administrators
|
||||
License :: OSI Approved :: Apache Software License
|
||||
Operating System :: POSIX :: Linux
|
||||
Programming Language :: Python
|
||||
Programming Language :: Python :: 2
|
||||
Programming Language :: Python :: 2.7
|
||||
Programming Language :: Python :: 3
|
||||
Programming Language :: Python :: 3.3
|
||||
Programming Language :: Python :: 3.4
|
||||
|
||||
[files]
|
||||
packages =
|
||||
converted_serializers
|
||||
|
||||
[entry_points]
|
||||
nailgun.extensions =
|
||||
converted_serializers = converted_serializers.extension:ConvertedSerializersExtension
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
|
||||
import setuptools
|
||||
|
||||
# In python < 2.7.4, a lazy loading of package `pbr` will break
|
||||
# setuptools if some other modules registered functions in `atexit`.
|
||||
# solution from: http://bugs.python.org/issue15881#msg170215
|
||||
try:
|
||||
import multiprocessing # noqa
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
setuptools.setup(
|
||||
setup_requires=['pbr'],
|
||||
pbr=True)
|
|
@ -0,0 +1,38 @@
|
|||
Name: fuel-nailgun-extension-converted-serializers
|
||||
Version: 10.0~b1
|
||||
Release: 1%{?dist}
|
||||
Summary: Converted serializers extension for Fuel
|
||||
License: Apache-2.0
|
||||
Url: https://git.openstack.org/cgit/openstack/fuel-nailgun-extension-converted-serializers/
|
||||
Source0: %{name}-%{version}.tar.gz
|
||||
BuildArch: noarch
|
||||
|
||||
BuildRequires: python-devel
|
||||
BuildRequires: python-pbr
|
||||
BuildRequires: python-setuptools
|
||||
|
||||
Requires: fuel-nailgun
|
||||
Requires: python-pbr
|
||||
|
||||
%description
|
||||
Converted serializers extension for Fuel
|
||||
|
||||
%prep
|
||||
%setup -q -c -n %{name}-%{version}
|
||||
|
||||
%build
|
||||
export OSLO_PACKAGE_VERSION=%{version}
|
||||
%py2_build
|
||||
|
||||
%install
|
||||
export OSLO_PACKAGE_VERSION=%{version}
|
||||
%py2_install
|
||||
|
||||
%files
|
||||
%license LICENSE
|
||||
%{python2_sitelib}/converted_serializers
|
||||
%{python2_sitelib}/*.egg-info
|
||||
|
||||
%changelog
|
||||
* Thu Sep 8 2016 Vladimir Kuklin <vkuklin@mirantis.com> - 10.0~b1-1
|
||||
- Initial package.
|
|
@ -0,0 +1,6 @@
|
|||
# The order of packages is significant, because pip processes them in the order
|
||||
# of appearance. Changing the order has an impact on the overall integration
|
||||
# process, which may cause wedges in the gate later.
|
||||
|
||||
hacking
|
||||
pytest
|
|
@ -0,0 +1,38 @@
|
|||
[tox]
|
||||
minversion = 2.0
|
||||
envlist = pep8,py27
|
||||
skipsdist = True
|
||||
|
||||
[base]
|
||||
NAILGUN_REPO = git+https://github.com/openstack/fuel-web.git
|
||||
NAILGUN_CONFIG = {toxinidir}/nailgun-test-settings.yaml
|
||||
NAILGUN_BRANCH={env:ZUUL_BRANCH:master}
|
||||
|
||||
[testenv]
|
||||
deps = -r{toxinidir}/test-requirements.txt
|
||||
setenv = VIRTUAL_ENV={envdir}
|
||||
|
||||
[testenv:py27]
|
||||
usedevelop = True
|
||||
deps = {[testenv]deps}
|
||||
-r{toxinidir}/requirements.txt
|
||||
-e{[base]NAILGUN_REPO}@{[base]NAILGUN_BRANCH}#egg=nailgun[test]&subdirectory=nailgun
|
||||
setenv = {[testenv]setenv}
|
||||
NAILGUN_CONFIG={[base]NAILGUN_CONFIG}
|
||||
|
||||
commands = py.test -v --junit-xml {toxinidir}/extension.xml {posargs}
|
||||
|
||||
[testenv:pep8]
|
||||
commands = flake8 {posargs}
|
||||
|
||||
[testenv:venv]
|
||||
commands = {posargs}
|
||||
|
||||
[flake8]
|
||||
# E123, E125 skipped as they are invalid PEP-8.
|
||||
# H101 - Don't force author's name on TODOs
|
||||
# H304 is "No relative imports" error, required for extensions
|
||||
show-source = True
|
||||
ignore = E123,E125,H101,H304
|
||||
builtins = _
|
||||
exclude = .venv,.git,.tox,dist,doc,*lib/python*,*egg,build
|
Loading…
Reference in New Issue