Plugins v5.0.0 support in plugin adapters

It is now possible:
* To ship a release as a plugin (see the illustrative record below)
* To define multiple graph types for a plugin

Change-Id: Idcac14e6f4055ca0e488efc2cc23945036a493d9
Implements-Blueprint: release-as-a-plugin
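For illustration, a sketch of what a v5.0.0 plugin release record can look like, written as the equivalent Python dict; the field values are illustrative and mirror the functional test test_release_as_plugin further down:

# Sketch of a v5.0.0 "releases" record (values illustrative, taken
# from the functional test below).
release_record = {
    'is_release': True,  # marks a new-style, self-contained release record
    'name': 'ExampleRelease',
    'operating_system': 'ubuntu',
    'version': '0.0.1',
    'deployment_scripts_path': 'deployment_scripts/',
    'repository_path': 'repositories/ubuntu',
    'graphs': [
        {'type': 'custom-graph-embedded',
         'graph': {'name': 'deployment-graph-name',
                   'tasks': [{'id': 'task', 'type': 'shell'}]}},
        # a graph may reference a tasks file instead of embedding tasks
        {'type': 'custom-graph-ref',
         'graph': {'name': 'deployment-graph-name',
                   'tasks_path': 'deployment_tasks.yaml'}},
    ],
}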
Ilya Kutukov 2016-08-23 22:41:58 +03:00
parent b8a2ee2c07
commit 84c438604d
26 changed files with 1395 additions and 287 deletions

View File

@ -582,12 +582,10 @@ class DeferredTaskHandler(BaseHandler):
)
logger.info(self.log_message.format(env_id=cluster_id))
try:
options = self.get_options()
except ValueError as e:
raise self.http(400, six.text_type(e))
try:
self.validator.validate(cluster)
task_manager = self.task_manager(cluster_id=cluster.id)
@ -614,7 +612,6 @@ class DeferredTaskHandler(BaseHandler):
)
# let it be 500
raise
self.raise_task(task)
@ -673,12 +670,10 @@ class OrchestratorDeploymentTasksHandler(SingleHandler):
"""
obj = self.get_object_or_404(self.single, obj_id)
graph_type = web.input(graph_type=None).graph_type or None
data = self.checked_data(
self.validator.validate_update,
instance=obj
)
deployment_graph = objects.DeploymentGraph.get_for_model(
obj, graph_type=graph_type)
if deployment_graph:

View File

@ -12,19 +12,22 @@
# License for the specific language governing permissions and limitations
# under the License.
# todo(ikutukov): align this schema with plugin builder
PLUGIN_RELEASE_SCHEMA = {
'type': 'object',
'additionalProperties': True,
'properties': {
'name': {'type': 'string'},
'repository_path': {'type': 'string'},
'version': {'type': 'string'},
'operating_system': {'type': 'string'},
'os': {'type': 'string'},
'deployment_scripts_path': {'type': 'string'},
'mode': {
'type': 'array',
'items': {'type': 'string'}}
},
'required': ['version', 'os', 'mode']
'required': ['version']
}
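A minimal sketch of checking a record against this relaxed schema, assuming the jsonschema library (used elsewhere in nailgun validators):

import jsonschema

# only 'version' is required now; 'os' and 'mode' became optional
jsonschema.validate({'version': '0.0.1', 'os': 'ubuntu'},
                    PLUGIN_RELEASE_SCHEMA)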

View File

@ -23,3 +23,7 @@ class PluginException(NailgunException):
class PackageVersionIsNotCompatible(PluginException):
message = "Package version is not compatible"
class NoPluginFileFound(PluginException):
message = "Plugin file not found"

View File

@ -155,9 +155,6 @@ class DeploymentGraph(NailgunObject):
@classmethod
def get_tasks(cls, deployment_graph_instance):
if not isinstance(deployment_graph_instance, models.DeploymentGraph):
raise Exception('This method is allowed only for '
'the deployment graph instance.')
return DeploymentGraphTaskCollection.get_by_deployment_graph_uid(
deployment_graph_instance.id
)

View File

@ -21,13 +21,14 @@ import operator
import six
from nailgun import consts
from nailgun.db import db
from nailgun.db.sqlalchemy import models
from nailgun.objects import DeploymentGraph
from nailgun.objects import NailgunCollection
from nailgun.objects import NailgunObject
from nailgun.objects.serializers.plugin import PluginSerializer
from nailgun.plugins.adapters import wrap_plugin
from nailgun import plugins
class Plugin(NailgunObject):
@ -37,20 +38,58 @@ class Plugin(NailgunObject):
@classmethod
def create(cls, data):
# probably incidental: this way of creating tasks has only been seen in tests
deployment_tasks = data.pop('deployment_tasks', [])
new_plugin = super(Plugin, cls).create(data)
graphs = data.pop("graphs", {})
deployment_tasks = data.pop("deployment_tasks", [])
# create default graph in any case
DeploymentGraph.create_for_model(
{'tasks': deployment_tasks}, new_plugin)
if not graphs.get(consts.DEFAULT_DEPLOYMENT_GRAPH_TYPE):
graphs[consts.DEFAULT_DEPLOYMENT_GRAPH_TYPE] = \
{'tasks': deployment_tasks}
plugin_obj = super(Plugin, cls).create(data)
plugin_adapter = wrap_plugin(new_plugin)
cls.update(new_plugin, plugin_adapter.get_metadata())
for graph_type, graph_data in six.iteritems(graphs):
DeploymentGraph.create_for_model(
graph_data, plugin_obj, graph_type)
ClusterPlugin.add_compatible_clusters(new_plugin)
plugin_adapter = plugins.wrap_plugin(plugin_obj)
return new_plugin
# todo(ikutukov): this update is a smell from the current plugin
# installation schema. Remove it.
cls.update(plugin_obj, plugin_adapter.get_metadata())
ClusterPlugin.add_compatible_clusters(plugin_obj)
return plugin_obj
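The net effect of the rewritten create(): legacy deployment_tasks are folded into the default graph type unless the graphs mapping already provides one. A condensed, standalone sketch, assuming consts.DEFAULT_DEPLOYMENT_GRAPH_TYPE is 'default':

def normalize_graphs(data, default_type='default'):
    # standalone sketch of the normalization done in Plugin.create
    graphs = data.pop('graphs', {})
    deployment_tasks = data.pop('deployment_tasks', [])
    if not graphs.get(default_type):
        graphs[default_type] = {'tasks': deployment_tasks}
    return graphs

assert normalize_graphs({'deployment_tasks': [{'id': 't1'}]}) == \
    {'default': {'tasks': [{'id': 't1'}]}}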
# todo(ikutukov): plugin update is currently a vague operation, so this
# graphs attachment on update is commented out.
# @classmethod
# def update(cls, instance, data):
# """Update existing plugin instance with specified parameters.
#
# :param instance: object (model) instance
# :param data: dictionary of key-value pairs as object fields
# :returns: instance of an object (model)
# """
#
# graphs = data.pop("graphs", {})
# deployment_tasks = data.pop("deployment_tasks", [])
#
# if not graphs.get(consts.DEFAULT_DEPLOYMENT_GRAPH_TYPE):
# graphs[consts.DEFAULT_DEPLOYMENT_GRAPH_TYPE] = \
# {'tasks': deployment_tasks}
#
# super(Plugin, cls).update(instance, data)
#
# for graph_type, graph_data in six.iteritems(graphs):
# g = DeploymentGraph.get_for_model(instance, graph_type)
# if g:
# DeploymentGraph.update(g, graph_data)
# else:
# DeploymentGraph.create_for_model(
# graph_data, instance, graph_type)
#
# return instance
@classmethod
def get_by_name_version(cls, name, version):
@ -94,8 +133,11 @@ class PluginCollection(NailgunCollection):
get_name = operator.attrgetter('name')
grouped_by_name = groupby(sorted(cls.all(), key=get_name), get_name)
for name, plugins in grouped_by_name:
newest_plugin = max(plugins, key=lambda p: LooseVersion(p.version))
for name, plugins_group in grouped_by_name:
newest_plugin = max(
plugins_group,
key=lambda p: LooseVersion(p.version)
)
newest_plugins.append(newest_plugin)
@ -147,7 +189,7 @@ class ClusterPlugin(NailgunObject):
:return: True if compatible, False if not
:rtype: bool
"""
plugin_adapter = wrap_plugin(plugin)
plugin_adapter = plugins.wrap_plugin(plugin)
return plugin_adapter.validate_compatibility(cluster)

View File

@ -21,6 +21,8 @@ Release object and collection
import copy
from distutils.version import StrictVersion
import itertools
import six
import yaml
from nailgun import consts
@ -54,11 +56,18 @@ class Release(NailgunObject):
# roles array. since fuel 7.0 we don't use it anymore, and
# we don't require it even for old releases.
data.pop("roles", None)
graphs = data.pop("graphs", {})
deployment_tasks = data.pop("deployment_tasks", [])
if not graphs.get(consts.DEFAULT_DEPLOYMENT_GRAPH_TYPE):
graphs[consts.DEFAULT_DEPLOYMENT_GRAPH_TYPE] = \
{'tasks': deployment_tasks}
release_obj = super(Release, cls).create(data)
DeploymentGraph.create_for_model(
{'tasks': deployment_tasks}, release_obj)
for graph_type, graph_data in six.iteritems(graphs):
DeploymentGraph.create_for_model(
graph_data, release_obj, graph_type)
return release_obj
@classmethod
@ -73,12 +82,27 @@ class Release(NailgunObject):
# roles array. since fuel 7.0 we don't use it anymore, and
# we don't require it even for old releases.
data.pop("roles", None)
deployment_tasks = data.pop("deployment_tasks", None)
graphs = data.pop("graphs", {})
deployment_tasks = data.pop("deployment_tasks", [])
existing_default_graph = DeploymentGraph.get_for_model(
instance, consts.DEFAULT_DEPLOYMENT_GRAPH_TYPE)
if (existing_default_graph and len(deployment_tasks)) \
or not existing_default_graph:
graphs[consts.DEFAULT_DEPLOYMENT_GRAPH_TYPE] = \
{'tasks': deployment_tasks}
release_obj = super(Release, cls).update(instance, data)
if deployment_tasks:
deployment_graph_instance = DeploymentGraph.get_for_model(instance)
DeploymentGraph.update(deployment_graph_instance,
{'tasks': deployment_tasks})
for graph_type, graph_data in six.iteritems(graphs):
g = DeploymentGraph.get_for_model(instance, graph_type)
if g:
DeploymentGraph.update(g, graph_data)
else:
DeploymentGraph.create_for_model(
graph_data, instance, graph_type)
return release_obj
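The default-graph rule in update() is subtle: the default graph is written only when tasks are supplied or when it does not exist yet; an existing default graph with no new tasks is left alone. A sketch of that decision:

def touches_default_graph(existing_default_graph, deployment_tasks):
    # sketch of the condition used in Release.update above
    return bool((existing_default_graph and len(deployment_tasks))
                or not existing_default_graph)

assert touches_default_graph(None, []) is True             # create missing default
assert touches_default_graph(object(), []) is False        # keep existing untouched
assert touches_default_graph(object(), [{'id': 't'}]) is True  # overwrite tasks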
@classmethod

View File

@ -25,7 +25,7 @@ from nailgun import consts
from nailgun import extensions
from nailgun.logger import logger
from nailgun import objects
from nailgun.plugins import adapters
from nailgun import plugins
from nailgun.settings import settings
from nailgun import utils
from nailgun.utils.role_resolver import NameMatchingPolicy
@ -734,7 +734,7 @@ class DeploymentLCMSerializer(DeploymentHASerializer90):
@classmethod
def serialize_plugin(cls, cluster, plugin):
os_name = cluster.release.operating_system
adapter = adapters.wrap_plugin(plugin)
adapter = plugins.wrap_plugin(plugin)
result = {
'name': plugin['name'],
'scripts': [

View File

@ -11,3 +11,6 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from adapters import wrap_plugin

View File

@ -14,19 +14,18 @@
import abc
import copy
from distutils.version import StrictVersion
import glob
import os
from distutils.version import StrictVersion
from urlparse import urljoin
import six
import yaml
import loaders
import nailgun
from nailgun import consts
from nailgun import errors
from nailgun.logger import logger
from nailgun.objects.deployment_graph import DeploymentGraph
from nailgun.settings import settings
@ -38,57 +37,45 @@ class PluginAdapterBase(object):
2. Uploading tasks and deployment tasks
3. Providing repositories/deployment scripts related info to clients
"""
config_metadata = 'metadata.yaml'
config_tasks = 'tasks.yaml'
loader_class = loaders.PluginLoaderBase
def __init__(self, plugin):
self.plugin = plugin
self._attributes_metadata = None
self._tasks = None
self.plugin_path = os.path.join(settings.PLUGINS_PATH, self.path_name)
self.db_cfg_mapping = {
'attributes_metadata': 'environment_config.yaml'
self.loader = self.loader_class(self.plugin_path)
@property
def attributes_processors(self):
return {
'attributes_metadata':
lambda data: (data or {}).get('attributes', {}),
'tasks':
lambda data: data or []
}
@abc.abstractmethod
def path_name(self):
"""A name which is used to create path to plugin scripts and repos"""
"""A name which is used to create path to plugin scripts and repo"""
def get_metadata(self):
"""Get parsed plugin metadata from config yaml files.
"""Get plugin data tree.
:return: All plugin metadata
:rtype: dict
"""
metadata = self._load_config(self.config_metadata) or {}
metadata['tasks'] = self._load_tasks()
data_tree, report = self.loader.load()
if report.is_failed():
logger.error(report.render())
logger.error('Problem with loading plugin {0}'.format(
self.plugin_path))
return data_tree
for field in data_tree:
if field in self.attributes_processors:
data_tree[field] = \
self.attributes_processors[field](data_tree.get(field))
for attribute, config in six.iteritems(self.db_cfg_mapping):
attribute_data = self._load_config(config)
# Plugin columns have constraints for nullable data,
# so we need to check it
if attribute_data is not None:
if attribute == 'attributes_metadata':
attribute_data = attribute_data['attributes']
metadata[attribute] = attribute_data
return metadata
def _load_config(self, file_name):
config = os.path.join(self.plugin_path, file_name)
if os.access(config, os.R_OK):
with open(config, "r") as conf:
try:
return yaml.safe_load(conf.read())
except yaml.YAMLError as exc:
logger.warning(exc)
raise errors.ParseError(
'Problem with loading YAML file {0}'.format(config))
else:
logger.warning("Config {0} is not readable.".format(config))
def _load_tasks(self):
return self._load_config(self.config_tasks) or []
data_tree = {k: v for k, v in six.iteritems(data_tree) if v}
return data_tree
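In short, get_metadata() now lets the loader build the data tree, applies per-field post-processors, and drops empty values at the end; a rough, self-contained sketch:

# Rough sketch of the processor pass in get_metadata().
data_tree = {'attributes_metadata': {'attributes': {'foo': 'bar'}},
             'tasks': None}
attributes_processors = {
    'attributes_metadata': lambda data: (data or {}).get('attributes', {}),
    'tasks': lambda data: data or [],
}
for field in list(data_tree):
    if field in attributes_processors:
        data_tree[field] = attributes_processors[field](data_tree.get(field))
data_tree = {k: v for k, v in data_tree.items() if v}
assert data_tree == {'attributes_metadata': {'foo': 'bar'}}  # empty 'tasks' dropped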
@property
def plugin_release_versions(self):
@ -110,43 +97,53 @@ class PluginAdapterBase(object):
plugin_name=self.path_name)
def get_attributes_metadata(self):
if self._attributes_metadata is None:
if self.plugin.attributes_metadata:
self._attributes_metadata = self.plugin.attributes_metadata
else:
self._attributes_metadata = self._load_config(
'environment_config.yaml') or {}
return self._attributes_metadata
return self.plugin.attributes_metadata
@property
def attributes_metadata(self):
return self.get_attributes_metadata()
def _add_defaults_to_task(self, task, roles_metadata):
"""Add required fault tolerance and cwd params to tasks.
:param task: task
:type task: dict
:param roles_metadata: node roles metadata
:type roles_metadata: dict
:return: task
:rtype: dict
"""
if task.get('parameters'):
task['parameters'].setdefault(
'cwd', self.slaves_scripts_path)
if task.get('type') == consts.ORCHESTRATOR_TASK_TYPES.group:
try:
task.setdefault(
'fault_tolerance',
roles_metadata[task['id']]['fault_tolerance']
)
except KeyError:
pass
return task
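A standalone copy of this helper for illustration; 'group' stands in for consts.ORCHESTRATOR_TASK_TYPES.group, and the cwd value is hypothetical:

def add_defaults_to_task(task, roles_metadata, slaves_scripts_path):
    # mirrors PluginAdapterBase._add_defaults_to_task
    if task.get('parameters'):
        task['parameters'].setdefault('cwd', slaves_scripts_path)
    if task.get('type') == 'group':
        try:
            task.setdefault(
                'fault_tolerance',
                roles_metadata[task['id']]['fault_tolerance'])
        except KeyError:
            pass
    return task

task = add_defaults_to_task(
    {'id': 'ceph-osd', 'type': 'group', 'parameters': {'cmd': 'deploy.sh'}},
    {'ceph-osd': {'fault_tolerance': '2%'}},
    '/etc/fuel/plugins/example-1.0')
assert task['parameters']['cwd'] == '/etc/fuel/plugins/example-1.0'
assert task['fault_tolerance'] == '2%'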
def get_deployment_graph(self, graph_type=None):
if graph_type is None:
graph_type = consts.DEFAULT_DEPLOYMENT_GRAPH_TYPE
deployment_tasks = []
graph_metadata = {}
graph_instance = DeploymentGraph.get_for_model(self.plugin, graph_type)
graph_instance = nailgun.objects.DeploymentGraph.get_for_model(
self.plugin, graph_type)
roles_metadata = self.plugin.roles_metadata
if graph_instance:
graph_metadata = DeploymentGraph.get_metadata(graph_instance)
for task in DeploymentGraph.get_tasks(graph_instance):
if task.get('parameters'):
task['parameters'].setdefault(
'cwd', self.slaves_scripts_path)
if task.get('type') == consts.ORCHESTRATOR_TASK_TYPES.group:
try:
task.setdefault(
'fault_tolerance',
roles_metadata[task['id']]['fault_tolerance']
)
except KeyError:
pass
deployment_tasks.append(task)
graph_metadata = nailgun.objects.DeploymentGraph.get_metadata(
graph_instance)
for task in nailgun.objects.DeploymentGraph.get_tasks(
graph_instance):
deployment_tasks.append(
self._add_defaults_to_task(task, roles_metadata)
)
graph_metadata['tasks'] = deployment_tasks
return graph_metadata
@ -154,20 +151,16 @@ class PluginAdapterBase(object):
return self.get_deployment_graph(graph_type)['tasks']
def get_tasks(self):
if self._tasks is None:
if self.plugin.tasks:
self._tasks = self.plugin.tasks
else:
self._tasks = self._load_tasks()
tasks = self.plugin.tasks
slave_path = self.slaves_scripts_path
for task in tasks:
task['roles'] = task.get('role')
slave_path = self.slaves_scripts_path
for task in self._tasks:
task['roles'] = task['role']
parameters = task.get('parameters')
if parameters is not None:
parameters.setdefault('cwd', slave_path)
parameters = task.get('parameters')
if parameters is not None:
parameters.setdefault('cwd', slave_path)
return self._tasks
return tasks
@property
def tasks(self):
@ -245,19 +238,23 @@ class PluginAdapterBase(object):
# plugin writer should be able to specify ha in release['mode']
# and know nothing about ha_compact
if not any(
cluster.mode.startswith(mode) for mode in release['mode']
cluster.mode.startswith(mode) for mode in release['mode']
):
continue
if not self._is_release_version_compatible(
cluster.release.version, release['version']
cluster.release.version, release['version']
):
continue
return True
return False
def get_release_info(self, release):
"""Get plugin release information which corresponds to given release"""
"""Get plugin release information which corresponds to given release.
:returns: release info
:rtype: dict
"""
rel_os = release.operating_system.lower()
version = release.version
@ -284,7 +281,10 @@ class PluginAdapterBase(object):
master_ip=settings.MASTER_IP,
plugin_name=self.path_name)
return urljoin(repo_base, release_info['repository_path'])
return urljoin(
repo_base,
release_info['repository_path']
)
def master_scripts_path(self, cluster):
release_info = self.get_release_info(cluster.release)
@ -301,6 +301,42 @@ class PluginAdapterBase(object):
class PluginAdapterV1(PluginAdapterBase):
"""Plugins attributes class for package version 1.0.0"""
loader_class = loaders.PluginLoaderV1
@property
def attributes_processors(self):
ap = super(PluginAdapterV1, self).attributes_processors
ap.update({
'tasks': self._process_legacy_tasks
})
return ap
@staticmethod
def _process_legacy_tasks(tasks):
if tasks:
for task in tasks:
role = task['role']
if isinstance(role, list) and 'controller' in role:
role.append('primary-controller')
return tasks
def get_tasks(self):
tasks = self.plugin.tasks
slave_path = self.slaves_scripts_path
for task in tasks:
task['roles'] = task.get('role')
role = task['role']
if isinstance(role, list) \
and ('controller' in role) \
and ('primary-controller' not in role):
role.append('primary-controller')
parameters = task.get('parameters')
if parameters is not None:
parameters.setdefault('cwd', slave_path)
return tasks
@property
def path_name(self):
"""Returns a name and full version
@ -310,22 +346,12 @@ class PluginAdapterV1(PluginAdapterBase):
"""
return self.full_name
def _load_tasks(self):
data = super(PluginAdapterV1, self)._load_tasks()
for item in data:
# backward compatibility for plugins added in version 6.0,
# and it is expected that task with role: [controller]
# will be executed on all controllers
role = item['role']
if (isinstance(role, list) and 'controller' in role):
role.append('primary-controller')
return data
class PluginAdapterV2(PluginAdapterBase):
"""Plugins attributes class for package version 2.0.0"""
loader_class = loaders.PluginLoaderV1
@property
def path_name(self):
"""Returns a name and major version of the plugin
@ -357,54 +383,124 @@ class PluginAdapterV2(PluginAdapterBase):
class PluginAdapterV3(PluginAdapterV2):
"""Plugin wrapper class for package version 3.0.0"""
def __init__(self, plugin):
super(PluginAdapterV3, self).__init__(plugin)
self.db_cfg_mapping['network_roles_metadata'] = 'network_roles.yaml'
self.db_cfg_mapping['roles_metadata'] = 'node_roles.yaml'
self.db_cfg_mapping['volumes_metadata'] = 'volumes.yaml'
loader_class = loaders.PluginLoaderV3
def get_metadata(self, graph_type=None):
dg = DeploymentGraph.get_for_model(self.plugin, graph_type)
def _process_deployment_tasks(self, deployment_tasks):
dg = nailgun.objects.DeploymentGraph.get_for_model(
self.plugin, graph_type=consts.DEFAULT_DEPLOYMENT_GRAPH_TYPE)
if dg:
DeploymentGraph.update(
dg,
{'tasks': self._load_config('deployment_tasks.yaml')})
nailgun.objects.DeploymentGraph.update(
dg, {'tasks': deployment_tasks})
else:
DeploymentGraph.create_for_model(
{'tasks': self._load_config('deployment_tasks.yaml')},
self.plugin,
graph_type)
nailgun.objects.DeploymentGraph.create_for_model(
{'tasks': deployment_tasks}, self.plugin)
return deployment_tasks
return super(PluginAdapterV3, self).get_metadata()
@property
def attributes_processors(self):
ap = super(PluginAdapterV3, self).attributes_processors
ap.update({
'deployment_tasks': self._process_deployment_tasks
})
return ap
class PluginAdapterV4(PluginAdapterV3):
"""Plugin wrapper class for package version 4.0.0"""
def __init__(self, plugin):
super(PluginAdapterV4, self).__init__(plugin)
self.db_cfg_mapping['components_metadata'] = 'components.yaml'
loader_class = loaders.PluginLoaderV4
class PluginAdapterV5(PluginAdapterV4):
"""Plugin wrapper class for package version 5.0.0"""
def __init__(self, plugin):
super(PluginAdapterV5, self).__init__(plugin)
self.db_cfg_mapping['nic_attributes_metadata'] = 'nic_config.yaml'
self.db_cfg_mapping['bond_attributes_metadata'] = 'bond_config.yaml'
self.db_cfg_mapping['node_attributes_metadata'] = 'node_config.yaml'
loader_class = loaders.PluginLoaderV5
@property
def attributes_processors(self):
ap = super(PluginAdapterV5, self).attributes_processors
ap.update({
'releases': self._process_releases,
'graphs': self._make_graphs_dict_by_type
})
return ap
def _make_graphs_dict_by_type(self, graphs_list):
graphs_to_create = {}
for graph in graphs_list:
graphs_to_create[graph.pop('type')] = graph
return graphs_to_create
def _create_release_from_configuration(self, configuration):
"""Create templated release and graphs for given configuration.
:param configuration:
:return:
"""
# deployment tasks not supposed for the release description
# but we fix this developer mistake automatically
# apply base template
base_release = configuration.pop('base_release', None)
if base_release:
base_release.update(configuration)
configuration = base_release
# process graphs
graphs_by_type = {}
graphs_list = configuration.pop('graphs', [])
for graph in graphs_list:
graphs_by_type[graph['type']] = graph['graph']
configuration['graphs'] = graphs_by_type
nailgun.objects.Release.create(configuration)
def _process_releases(self, releases_records):
"""Split new release records from old-style release-deps records.
:param releases_records: list of plugins and releases data
:type releases_records: list
:return: configurations that extend existing releases
:rtype: list
"""
extend_releases = []
for release in releases_records:
is_basic_release = release.get('is_release', False)
if is_basic_release:
self._create_release_from_configuration(release)
else:
extend_releases.append(release)
return extend_releases
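A small sketch of the split: records flagged is_release become templated releases immediately, while everything else stays in the metadata as old-style release-deps records:

releases_records = [
    {'is_release': True, 'name': 'ExampleRelease', 'version': '0.0.1'},
    {'version': 'mitaka-9.0', 'os': 'ubuntu', 'mode': ['ha']},  # old style
]
extend_releases = [r for r in releases_records
                   if not r.get('is_release', False)]
assert extend_releases == [{'version': 'mitaka-9.0', 'os': 'ubuntu',
                            'mode': ['ha']}]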
__version_mapping = {
__plugins_mapping = {
'1.0.': PluginAdapterV1,
'2.0.': PluginAdapterV2,
'3.0.': PluginAdapterV3,
'4.0.': PluginAdapterV4,
'5.0.': PluginAdapterV5,
'5.0.': PluginAdapterV5
}
def get_supported_versions():
return list(__plugins_mapping)
def get_adapter_for_package_version(plugin_version):
"""Get plugin adapter class for plugin version.
:param plugin_version: plugin version string
:type plugin_version: basestring|str
:return: plugin adapter class
:rtype: type|None
"""
for plugin_version_head in __plugins_mapping:
if plugin_version.startswith(plugin_version_head):
return __plugins_mapping[plugin_version_head]
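Usage is a plain prefix match on the major version, e.g.:

assert get_adapter_for_package_version('5.0.1') is PluginAdapterV5
assert get_adapter_for_package_version('9.9.9') is None  # unknown version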
def wrap_plugin(plugin):
"""Creates plugin object with specific class version
@ -413,16 +509,10 @@ def wrap_plugin(plugin):
"""
package_version = plugin.package_version
attr_class = None
# Filter by major version
for version, klass in six.iteritems(__version_mapping):
if package_version.startswith(version):
attr_class = klass
break
attr_class = get_adapter_for_package_version(package_version)
if not attr_class:
supported_versions = ', '.join(__version_mapping.keys())
supported_versions = ', '.join(get_supported_versions())
raise errors.PackageVersionIsNotCompatible(
'Plugin id={0} package_version={1} '

View File

@ -0,0 +1,75 @@
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import yaml
import mapping
from nailgun import errors
from nailgun.logger import logger
PLUGIN_ROOT_FILE = 'metadata.yaml'
PLUGIN_PACKAGE_VERSION_FIELD = 'package_version'
def _get_package_version_from_path(plugin_path):
config = os.path.join(plugin_path, PLUGIN_ROOT_FILE)
if os.access(config, os.R_OK):
with open(config, "r") as conf:
try:
return yaml.safe_load(conf.read()).get(
PLUGIN_PACKAGE_VERSION_FIELD)
except yaml.YAMLError as exc:
logger.warning(exc)
raise errors.ParseError(
'Problem with loading YAML file {0}'.format(config))
else:
raise Exception("Config {0} is not readable.".format(config))
def sync(plugin_path):
"""Sync plugin data from given path.
:param plugin_path: plugin folder path
:type plugin_path: str|basestring
:return:
:rtype:
"""
plugin_package_version = _get_package_version_from_path(plugin_path)
loader_class = mapping.get_loader_for_package_version(
plugin_package_version)
adapter_class = mapping.get_adapter_for_package_version(
plugin_package_version)
if not loader_class or not adapter_class:
raise Exception('No such plugin package version: {}'.format(
plugin_package_version))
loader = loader_class(plugin_path)
data, report = loader.load()
if report.is_failed():
raise Exception(report.render())
# adapter = adapter_class()
plugin_object = None
return plugin_object
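A hypothetical call (path illustrative): sync() raises on unreadable metadata, an unknown package version, or a failed loader report, and for now returns None until the adapter wiring lands:

plugin = sync('/var/www/nailgun/plugins/example-plugin-1.0')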

View File

@ -0,0 +1,5 @@
from loader_base import PluginLoaderBase
from loader_v1 import PluginLoaderV1
from loader_v3 import PluginLoaderV3
from loader_v4 import PluginLoaderV4
from loader_v5 import PluginLoaderV5

View File

@ -0,0 +1,249 @@
# -*- coding: utf-8 -*-
#
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import glob
import json
import os
import yaml
from nailgun import errors
def deserializer_json(raw_data, *args, **kwargs):
"""Load JSON data from file object.
:param raw_data: raw data
:type raw_data: basestring
:return: data
:rtype: list|dict
"""
return json.loads(raw_data, *args, **kwargs)
def deserializer_yaml(raw_data, loader=yaml.Loader, *args, **kwargs):
"""Load YAML data from file object.
:param raw_data: raw data
:type raw_data: basestring
:param loader: YAML-specific loader
:type loader: yaml.Loader
:return: data
:rtype: list|dict
"""
result = yaml.load(raw_data, Loader=loader)
return result
def deserializer_plaintext(raw_data, *args, **kwargs):
"""Load plaintext data from file object.
Not doing anything except passing data throug.
:param raw_data: text
:type raw_data: basestring
:return: data
:rtype: list|dict
"""
return raw_data
def serializer_json(data, *args, **kwargs):
"""Load JSON data from file object.
:param data: data
:type data: dict|list
:return: raw data
:rtype: basestring
"""
return json.dumps(data, *args, **kwargs)
def serializer_yaml(data, dumper=yaml.SafeDumper, *args, **kwargs):
"""Load YAML data from file object.
:param data: data
:type data: dict|list
:param dumper: YAML-specific dumper
:type dumper: yaml.Dumper
:return: data
:rtype: basestring
"""
return yaml.dump(data, Dumper=dumper, **kwargs)
def serializer_plaintext(data, *args, **kwargs):
"""Serialize plaintext to string.
Not doing anything except passing data throug.
:param data: data
:type data: basestring
:return: data
:rtype: basestring
"""
return data
DESERIALIZERS = {
"json": deserializer_json,
"yaml": deserializer_yaml,
"txt": deserializer_plaintext
}
SERIALIZERS = {
"json": serializer_json,
"yaml": serializer_yaml,
"txt": serializer_plaintext
}
class FilesManager(object):
"""Files Manager is responsive for data serialization and files operations.
All files loading and saving operations are recommended to be
performed via FilesManager class.
Also, it's recommended to work with FM using absolute paths to avoid
relative paths mess.
"""
def _get_normalized_extension(self, path):
"""Get normalized file extension.
:param path: path
:type path: str|basestring
:return: lowercased extension without dot
:rtype: str|basestring
"""
extension = os.path.splitext(path)[1].lower()
if extension:
if extension[0] == '.':
extension = extension[1:]
return extension
def _get_files_by_mask(self, path_mask, allowed_formats=None):
"""Find all files of allowed format in path.
:param path_mask: path mask like ./my-file.*
:type path_mask: str|basestring
:param allowed_formats: available file formats
allow all if not defined
:type allowed_formats: iterable|None
:return: list of sorted files paths
:rtype: list
"""
paths = []
for path in glob.glob(path_mask):
extension = self._get_normalized_extension(path)
if not allowed_formats or extension in allowed_formats:
paths.append(path)
if paths:
return sorted(paths)
raise errors.NoPluginFileFound(
u"Can't find file. "
u"Ensure that file is on its place and have one of the following "
u"data files formats: {}.".format(
u", ".join(list(allowed_formats))
)
)
def _merge_data_records(self, data_records):
"""Merge data records.
Accepting lists and dict structures respecting order of records.
:param data_records: list of data records
:type data_records: list[list|dict]
:return: resulting data
:rtype: list|dict
"""
dicts_to_merge = []
merged_list = []
for data_record in data_records:
if isinstance(data_record, dict):
dicts_to_merge.append(data_record)
elif isinstance(data_record, list):
merged_list.extend(data_record)
if len(merged_list): # we have list as root structure
merged_list.extend(dicts_to_merge)
return merged_list
elif len(dicts_to_merge):
merged_dict = {}
for dict_to_merge in dicts_to_merge:
merged_dict.update(dict_to_merge)
return merged_dict
def load(self, path_mask, skip_unknown_files=False, *args, **kwargs):
"""Load file from path mask or direct path.
:param path_mask: path
:type path_mask: str
:param skip_unknown_files: do not fail on files with unsupported formats
:type skip_unknown_files: bool
:return: data
:rtype: list|dict
"""
paths = self._get_files_by_mask(path_mask, list(DESERIALIZERS))
data_records = []
for path in paths:
extension = self._get_normalized_extension(path)
deserializer = DESERIALIZERS.get(extension)
if deserializer is not None:
with open(path, 'r') as content_file:
raw_content = content_file.read()
data_records.append(
deserializer(raw_content, *args, **kwargs)
)
elif not skip_unknown_files:
raise IOError(
path,
list(DESERIALIZERS)
)
return self._merge_data_records(data_records)
def save(self, path, data, *args, **kwargs):
    """Save data to path applying serializer.

    :param path: full path with extension that will define serialization
                 format.
    :type path: str
    :param data: data to serialize
    :type data: list|dict
    :return: serialized data
    :rtype: basestring
    """
    extension = self._get_normalized_extension(path)
    serializer = SERIALIZERS.get(extension)
    if serializer is not None:
        return serializer(data, *args, **kwargs)
else:
raise errors.InvalidFileFormat(path, list(SERIALIZERS))
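A usage sketch (paths illustrative): the file extension selects the deserializer, and a glob mask loads and merges every matching file:

fm = FilesManager()
# single file: '.yaml' selects the YAML deserializer
metadata = fm.load('/var/www/nailgun/plugins/example-1.0/metadata.yaml')
# mask: all matching json/yaml/txt files are loaded and merged
merged = fm.load('/var/www/nailgun/plugins/example-1.0/metadata.*')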

View File

@ -0,0 +1,177 @@
# -*- coding: utf-8 -*-
#
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import six
from files_manager import FilesManager
from nailgun import errors
from nailgun import utils
class PluginLoaderBase(object):
"""Plugin loader.
Loader deals with the file structure providing ability to load, combine
and form the data tree from the plugin directory.
If loader fails it raising exception with the report attached.
"""
_metadata_path = "metadata.yaml"
_path_suffix = "_path"
_dont_resolve_path_keys = {'repository_path', 'deployment_scripts_path'}
def __init__(self, plugin_path=None):
self.files_manager = FilesManager()
self.plugin_path = plugin_path
paths_to_fields = {}
def _get_absolute_path(self, path):
"""Get absolute path from the relative to the plugins folder.
:param path: relative path
:type path: str
:return: path string
:rtype: str
"""
return os.path.join(self.plugin_path, path)
@property
def _root_metadata_path(self):
"""Where is the root plugin data file located."""
return self._get_absolute_path(self._metadata_path)
def _recursive_process_paths(self, data, report):
"""Recursively processed nested list/dict.
:param data: data
:type data: iterable
:param report: report node
:type report: utils.ReportNode
:returns: data
:rtype: list|dict
"""
if isinstance(data, dict):
new_data = {}
for key in tuple(data):
value = data[key]
# if we have a key with a path we can do 3 things:
#
# * if it points to a directory, check that the dir exists and
#   leave the path intact
#
# * if it is a `glob`-compatible mask, iterate over the files
#   that match this mask and are compatible with the
#   FilesManager, then merge these files' data if they share a
#   list or dict as the common data root.
#   Then remove the _path suffix from the key.
#
# * if it is a file compatible with the FilesManager, read this
#   file and remove the _path suffix from the key.
if key.endswith(self._path_suffix) \
and isinstance(value, six.string_types):
if os.path.isdir(self._get_absolute_path(value)):
report.info(u"{} is valid directory".format(
value))
# leave directories as is
new_data[key] = value
elif key in self._dont_resolve_path_keys:
report.info(u"{}:{} was not checked".format(
key, value))
new_data[key] = value
else:
cleaned_key = key[:- len(self._path_suffix)]
try:
loaded_data = self.files_manager.load(
self._get_absolute_path(value)
)
new_data[cleaned_key] = loaded_data
except Exception as exc:
path_node = utils.ReportNode(data[key])
report.add_nodes(path_node.error(exc))
# keep path as is
new_data[key] = value
else:
new_data[key] = self._recursive_process_paths(
data[key], report)
elif isinstance(data, list):
new_data = [
self._recursive_process_paths(record, report)
for record in data
]
else:
new_data = data
return new_data
def _load_root_metadata_file(self):
"""Get plugin root data (usually, it's metadata.yaml).
:return: data
:rtype: list|dict
"""
report = utils.ReportNode(u"Loading root metadata file:{}".format(
self._root_metadata_path
))
# todo(ikutukov): current loading schema and testing relies on case
# when no metadata.yaml file is possible. So we are skipping all
# exeptions.
try:
data = self.files_manager.load(self._root_metadata_path)
except Exception as exc:
report.warning(exc)
return {}, report
data = self._recursive_process_paths(data, report)
return data, report
def load(self, plugin_path=None):
"""Loads data from the given plugin path and producing data tree.
:param plugin_path: plugin root path
:param plugin_path: str|basestring|None
:return: data tree starting from the data in root metadata file
:rtype: tuple(dict, utils.ReportNode)
"""
plugin_path = plugin_path or self.plugin_path
report = utils.ReportNode(
u"File structure validation: {}".format(plugin_path))
data, root_report = self._load_root_metadata_file()
report.add_nodes(root_report)
# load files with fixed location
for key, file_path in six.iteritems(self.paths_to_fields):
file_report = utils.ReportNode(file_path)
try:
data[key] = self.files_manager.load(
self._get_absolute_path(file_path)
)
except errors.NoPluginFileFound as exc:
file_report.warning(exc)
except Exception as exc:
file_report.error(exc)
finally:
report.add_nodes(file_report)
if report.is_failed():
raise errors.ParseError(report.render())
return data, report
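A loading sketch using one of the concrete loaders defined further below (path illustrative):

loader = PluginLoaderV5('/var/www/nailgun/plugins/example-1.0')
data, report = loader.load()  # raises errors.ParseError if the report failed
print(data.get('attributes_metadata'))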

View File

@ -0,0 +1,24 @@
# -*- coding: utf-8 -*-
#
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from loader_base import PluginLoaderBase
class PluginLoaderV1(PluginLoaderBase):
paths_to_fields = {
'attributes_metadata': 'environment_config.yaml',
'tasks': 'tasks.yaml',
}

View File

@ -0,0 +1,31 @@
# -*- coding: utf-8 -*-
#
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from loader_base import PluginLoaderBase
class PluginLoaderV3(PluginLoaderBase):
paths_to_fields = {
'attributes_metadata': 'environment_config.yaml',
'tasks': 'tasks.yaml',
'deployment_tasks': 'deployment_tasks.yaml',
'network_roles_metadata': 'network_roles.yaml',
'roles_metadata': 'node_roles.yaml',
'volumes_metadata': 'volumes.yaml',
}

View File

@ -0,0 +1,33 @@
# -*- coding: utf-8 -*-
#
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from loader_base import PluginLoaderBase
class PluginLoaderV4(PluginLoaderBase):
paths_to_fields = {
'attributes_metadata': 'environment_config.yaml',
'tasks': 'tasks.yaml',
'deployment_tasks': 'deployment_tasks.yaml',
'network_roles_metadata': 'network_roles.yaml',
'roles_metadata': 'node_roles.yaml',
'volumes_metadata': 'volumes.yaml',
'components_metadata': 'components.yaml'
}

View File

@ -0,0 +1,41 @@
# -*- coding: utf-8 -*-
#
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from loader_base import PluginLoaderBase
class PluginLoaderV5(PluginLoaderBase):
paths_to_fields = {
'attributes_metadata': 'environment_config.yaml',
'tasks': 'tasks.yaml',
'deployment_tasks': 'deployment_tasks.yaml',
'network_roles_metadata': 'network_roles.yaml',
'roles_metadata': 'node_roles.yaml',
'volumes_metadata': 'volumes.yaml',
'components_metadata': 'components.yaml',
'nic_attributes_metadata': 'nic_config.yaml',
'bond_attributes_metadata': 'bond_config.yaml',
'node_attributes_metadata': 'node_config.yaml'
}
@property
def _root_metadata_path(self):
"""Where is the root plugin data file located."""
return self._get_absolute_path('metadata.*')

View File

@ -14,22 +14,22 @@
import copy
from distutils.version import StrictVersion
import six
from six.moves import map
from adapters import wrap_plugin
from nailgun import consts
from nailgun import errors
from nailgun.logger import logger
from nailgun.objects.plugin import ClusterPlugin
from nailgun.objects.plugin import Plugin
from nailgun.objects.plugin import PluginCollection
from nailgun.plugins.adapters import wrap_plugin
from nailgun.utils import dict_update
from nailgun.utils import get_in
class PluginManager(object):
@classmethod
def contains_legacy_tasks(cls, plugin):
if plugin.tasks:
@ -71,14 +71,14 @@ class PluginManager(object):
logger.warning(
'Plugin with id "%s" is not found, skip it', plugin_id)
continue
enabled = container['enabled']\
enabled = container['enabled'] \
and plugin_id == container['chosen_id']
legacy_tasks_are_ignored = not get_in(
attributes, 'common', 'propagate_task_deploy', 'value')
if (enabled and
legacy_tasks_are_ignored and
cls.contains_legacy_tasks(
wrap_plugin(Plugin.get_by_uid(plugin.id)))):
legacy_tasks_are_ignored and
cls.contains_legacy_tasks(
wrap_plugin(Plugin.get_by_uid(plugin.id)))):
raise errors.InvalidData(
'Cannot enable plugin with legacy tasks unless '
'propagate_task_deploy attribute is set')
@ -263,10 +263,11 @@ class PluginManager(object):
raise errors.AlreadyExists(
'Plugin {0} is overlapping with plugin {1} '
'by introducing the same deployment task with '
'id {2}'
.format(plugin_adapter.full_name,
processed_tasks[t_id],
t_id)
'id {2}'.format(
plugin_adapter.full_name,
processed_tasks[t_id],
t_id
)
)
processed_tasks[t_id] = plugin_adapter.full_name
@ -333,15 +334,19 @@ class PluginManager(object):
if volume_id in release_volumes_ids:
raise errors.AlreadyExists(
'Plugin {0} is overlapping with release '
'by introducing the same volume with id "{1}"'
.format(plugin_adapter.full_name, volume_id))
'by introducing the same volume with '
'id "{1}"'.format(plugin_adapter.full_name, volume_id)
)
elif volume_id in processed_volumes:
raise errors.AlreadyExists(
'Plugin {0} is overlapping with plugin {1} '
'by introducing the same volume with id "{2}"'
.format(plugin_adapter.full_name,
processed_volumes[volume_id],
volume_id))
'by introducing the same volume with '
'id "{2}"'.format(
plugin_adapter.full_name,
processed_volumes[volume_id],
volume_id
)
)
processed_volumes[volume_id] = plugin_adapter.full_name
@ -374,10 +379,12 @@ class PluginManager(object):
if seen_components.get(name, plugin_name) != plugin_name:
raise errors.AlreadyExists(
'Plugin {0} is overlapping with {1} by introducing '
'the same component with name "{2}"'
.format(plugin_adapter.name,
seen_components[name],
name))
'the same component with name "{2}"'.format(
plugin_adapter.name,
seen_components[name],
name
)
)
if name not in seen_components:
seen_components[name] = plugin_adapter.name
@ -385,6 +392,7 @@ class PluginManager(object):
return components
# ENTRY POINT
@classmethod
def sync_plugins_metadata(cls, plugin_ids=None):
"""Sync metadata for plugins by given IDs.
@ -398,7 +406,6 @@ class PluginManager(object):
plugins = PluginCollection.get_by_uids(plugin_ids)
else:
plugins = PluginCollection.all()
for plugin in plugins:
plugin_adapter = wrap_plugin(plugin)
metadata = plugin_adapter.get_metadata()

View File

@ -527,9 +527,11 @@ class EnvironmentManager(object):
resp = self.neutron_networks_put(cluster_id, netconfig)
return resp
@mock.patch('nailgun.plugins.adapters.PluginAdapterBase._load_config')
def create_plugin(self, m_load_conf, sample=None, api=False, cluster=None,
enabled=True, expect_errors=False, **kwargs):
@mock.patch('nailgun.plugins.loaders.files_manager.FilesManager.load')
@mock.patch('nailgun.plugins.loaders.loader_base.os.path.isdir')
def create_plugin(self, is_dir_m, files_manager_m, sample=None, api=False,
cluster=None, enabled=True, expect_errors=False,
directories=None, **kwargs):
if sample:
plugin_data = sample
plugin_data.update(**kwargs)
@ -547,6 +549,7 @@ class EnvironmentManager(object):
node_config = plugin_data.pop('node_config', None)
mocked_metadata = {
'metadata.*': plugin_data,
'metadata.yaml': plugin_data,
'environment_config.yaml': env_config,
'node_roles.yaml': node_roles,
@ -559,9 +562,22 @@ class EnvironmentManager(object):
'bond_config.yaml': bond_config,
'node_config.yaml': node_config
}
# this works only when everything is located in the root dir
files_manager_m.side_effect = lambda key: copy.deepcopy(
mocked_metadata.get(os.path.basename(key))
)
m_load_conf.side_effect = lambda key: copy.deepcopy(
mocked_metadata[key])
# mock is_dir
directories = (set(directories) if directories else set()).union({
'deployment_scripts/',
'repositories/ubuntu',
'repositories/centos'
})
def define_dir(path):
return any(path.endswith(d) for d in directories)
is_dir_m.side_effect = define_dir
if api:
return self.app.post(

View File

@ -17,7 +17,7 @@
import yaml
from nailgun import objects
from nailgun.plugins import adapters
from nailgun import plugins
from nailgun.test import base
@ -168,7 +168,7 @@ class TestClusterRolesHandler(base.BaseTestCase):
plugin.id,
enabled=True)
self.db.flush()
plugin_adapter = adapters.wrap_plugin(plugin)
plugin_adapter = plugins.wrap_plugin(plugin)
role = self.app.get(
url=base.reverse(

View File

@ -23,7 +23,7 @@ import unittest2
from nailgun import consts
from nailgun import objects
from nailgun.orchestrator import deployment_serializers
from nailgun.plugins import adapters
from nailgun import plugins
from nailgun.utils import reverse
from nailgun.extensions.network_manager.serializers.neutron_serializers \
@ -543,7 +543,7 @@ class TestDeploymentLCMSerialization90(
)
@mock.patch.object(
adapters.PluginAdapterBase, 'repo_files',
plugins.adapters.PluginAdapterBase, 'repo_files',
mock.MagicMock(return_value=True)
)
def test_plugins_in_serialized(self):
@ -596,7 +596,7 @@ class TestDeploymentLCMSerialization90(
'priority': 1100
}]
}
for p in six.moves.map(adapters.wrap_plugin, [plugin1, plugin2])
for p in six.moves.map(plugins.wrap_plugin, [plugin1, plugin2])
]
objects.Cluster.prepare_for_deployment(self.cluster_db)
serialized = self.serializer.serialize(

View File

@ -21,12 +21,11 @@ import yaml
from nailgun import consts
from nailgun.db.sqlalchemy.models import DeploymentGraph
from nailgun import objects
from nailgun.plugins import adapters
from nailgun import plugins
from nailgun.test import base
class BasePluginTest(base.BaseIntegrationTest):
TASKS_CONFIG = [
{'priority': 10,
'role': ['controller'],
@ -133,7 +132,6 @@ class TestPluginsApi(BasePluginTest):
self.assertEqual(resp.status_code, 201)
metadata = resp.json
del metadata['id']
self.assertEqual(metadata, self.sample_plugin)
def test_env_create_and_load_env_config(self):
@ -205,6 +203,102 @@ class TestPluginsApi(BasePluginTest):
self.assertEqual(plugin_id, updated_plugin_id)
self.assertEqual(updated_data, data)
def test_release_as_plugin(self):
resp = self.env.create_plugin(
api=True,
directories={'repositories/ubuntu', 'deployment_scripts/'},
package_version='5.0.0',
deployment_tasks=[
{
'id': 'embedded-task',
'type': 'puppet'
}
],
releases=[
{
"is_release": True,
"name": "ExampleRelease",
"description": "Example Release Description",
"operating_system": "ubuntu",
"version": "0.0.1",
"deployment_scripts_path": "deployment_scripts/",
"repository_path": "repositories/ubuntu",
"graphs": [
{
"type": "custom-graph-embedded",
"graph": {
"name": "deployment-graph-name",
"tasks": [
{
"id": "task",
"type": "shell"
}
]
}
},
{
"type": "custom-graph-ref",
"graph": {
"name": "deployment-graph-name",
"tasks_path": "deployment_tasks.yaml"
}
}
]
}
]
)
self.assertEqual(resp.status_code, 201)
release_obj = objects.ReleaseCollection.filter_by(
None, name="ExampleRelease").first()
graph_obj = objects.DeploymentGraph.get_for_model(
release_obj, graph_type="custom-graph-embedded")
self.assertEqual(
{
'tasks': [
{
'id': 'task',
'task_name': 'task',
'version': '1.0.0',
'type': 'shell'
}
],
'id': graph_obj.id,
'relations': [
{
'model_id': release_obj.id,
'model': 'release',
'type': 'custom-graph-embedded'
}
],
'name': 'deployment-graph-name'
},
objects.DeploymentGraph.to_dict(graph_obj)
)
graph_obj = objects.DeploymentGraph.get_for_model(
release_obj, graph_type="custom-graph-ref")
self.assertEqual(
{
'tasks': [
{
'id': 'embedded-task',
'task_name': 'embedded-task',
'type': 'puppet',
'version': '1.0.0'
}
],
'id': graph_obj.id,
'relations': [
{
'model_id': release_obj.id,
'model': 'release',
'type': 'custom-graph-ref'
}
],
'name': 'deployment-graph-name'
},
objects.DeploymentGraph.to_dict(graph_obj)
)
def test_default_attributes_after_plugin_is_created(self):
self.env.create_plugin(api=True)
cluster = self.create_cluster()
@ -270,16 +364,18 @@ class TestPluginsApi(BasePluginTest):
self.disable_plugin(cluster, 'multiversion_plugin')
self.assertEqual(get_num_enabled(cluster.id), 0)
def test_sync_all_plugins(self):
@mock.patch('nailgun.plugins.manager.wrap_plugin')
def test_sync_all_plugins(self, wrap_m):
self._create_new_and_old_version_plugins_for_sync()
wrap_m.get_metadata.return_value = {}
resp = self.sync_plugins()
self.assertEqual(resp.status_code, 200)
def test_sync_specific_plugins(self):
@mock.patch('nailgun.plugins.manager.wrap_plugin')
def test_sync_specific_plugins(self, wrap_m):
plugin_ids = self._create_new_and_old_version_plugins_for_sync()
ids = plugin_ids[:1]
wrap_m.get_metadata.return_value = {}
resp = self.sync_plugins(params={'ids': ids})
self.assertEqual(resp.status_code, 200)
@ -302,19 +398,21 @@ class TestPluginsApi(BasePluginTest):
'Cannot enable plugin with legacy tasks unless '
'propagate_task_deploy attribute is set')
@mock.patch('nailgun.plugins.adapters.open', create=True)
@mock.patch('nailgun.plugins.adapters.os.access')
def test_sync_with_invalid_yaml_files(self, maccess, mopen):
@mock.patch('nailgun.plugins.loaders.files_manager.open', create=True)
@mock.patch('nailgun.plugins.loaders.files_manager.os.access')
@mock.patch('nailgun.plugins.loaders.files_manager.FilesManager.'
'_get_files_by_mask')
def test_sync_with_invalid_yaml_files(self, files_list_m, maccess, mopen):
maccess.return_value = True
files_list_m.return_value = ['metadata.yaml']
self._create_new_and_old_version_plugins_for_sync()
with mock.patch.object(yaml, 'safe_load') as yaml_safe_load:
yaml_safe_load.side_effect = yaml.YAMLError()
with mock.patch.object(yaml, 'load') as yaml_load:
yaml_load.side_effect = yaml.YAMLError()
resp = self.sync_plugins(expect_errors=True)
self.assertEqual(resp.status_code, 400)
self.assertRegexpMatches(
resp.json_body["message"],
'Problem with loading YAML file')
'YAMLError')
def _create_new_and_old_version_plugins_for_sync(self):
plugin_ids = []
@ -371,7 +469,6 @@ class TestPluginsApi(BasePluginTest):
class TestPrePostHooks(BasePluginTest):
def setUp(self):
super(TestPrePostHooks, self).setUp()
@ -381,7 +478,7 @@ class TestPrePostHooks(BasePluginTest):
self._requests_mock.start()
resp = self.env.create_plugin(api=True, tasks=self.TASKS_CONFIG)
self.plugin = adapters.wrap_plugin(
self.plugin = plugins.wrap_plugin(
objects.Plugin.get_by_uid(resp.json['id']))
self.cluster = self.create_cluster([
{'roles': ['controller'], 'pending_addition': True},
@ -447,7 +544,6 @@ class TestPrePostHooks(BasePluginTest):
class TestPluginValidation(BasePluginTest):
def test_valid(self):
sample = {
'name': 'test_name',
@ -526,7 +622,6 @@ class TestPluginValidation(BasePluginTest):
class TestPluginSyncValidation(BasePluginTest):
def test_valid(self):
resp = self.sync_plugins()
self.assertEqual(resp.status_code, 200)

View File

@ -1230,7 +1230,7 @@ class TestClusterObject(BaseTestCase):
default_tasks_count = len(objects.Release.get_deployment_tasks(
cluster.release))
plugin_tasks_count = len(plugins.adapters.wrap_plugin(
plugin_tasks_count = len(plugins.wrap_plugin(
cluster.plugins[0]).get_deployment_tasks())
self.assertEqual(

View File

@ -26,9 +26,10 @@ from nailgun.expression import Expression
from nailgun.objects import ClusterPlugin
from nailgun.objects import DeploymentGraph
from nailgun.objects import Plugin
from nailgun.plugins import adapters
from nailgun import plugins
from nailgun.settings import settings
from nailgun.test import base
from nailgun.utils import ReportNode
@six.add_metaclass(abc.ABCMeta)
@ -37,7 +38,7 @@ class TestPluginBase(base.BaseTestCase):
# Prevent running tests in base class
__test__ = False
# Should be overridden in child
package_version = None
package_version = '1.0.0'
def setUp(self):
super(TestPluginBase, self).setUp()
@ -72,7 +73,7 @@ class TestPluginBase(base.BaseTestCase):
'operating_system': 'Ubuntu',
'modes': [consts.CLUSTER_MODES.multinode,
consts.CLUSTER_MODES.ha_compact]})
self.plugin_adapter = adapters.wrap_plugin(self.plugin)
self.plugin_adapter = plugins.wrap_plugin(self.plugin)
self.env_config = self.env.get_default_plugin_env_config()
self.get_config = lambda *args: mock.mock_open(
read_data=yaml.dump(self.env_config))()
@ -146,18 +147,14 @@ class TestPluginBase(base.BaseTestCase):
def test_get_metadata(self):
plugin_metadata = self.env.get_default_plugin_metadata()
attributes_metadata = self.env.get_default_plugin_env_config()
tasks = self.env.get_default_plugin_tasks()
plugin_metadata['environment_config'] = \
self.env.get_default_plugin_env_config()
mocked_metadata = {
self._find_path('metadata'): plugin_metadata,
self._find_path('environment_config'): attributes_metadata,
self._find_path('tasks'): tasks
}
plugin_metadata['tasks'] = self.env.get_default_plugin_tasks()
with mock.patch.object(
self.plugin_adapter, '_load_config') as load_conf:
load_conf.side_effect = lambda key: mocked_metadata[key]
self.plugin_adapter, 'loader') as loader:
loader.load.return_value = plugin_metadata, ReportNode()
Plugin.update(self.plugin, self.plugin_adapter.get_metadata())
for key, val in six.iteritems(plugin_metadata):
@ -177,7 +174,7 @@ class TestPluginBase(base.BaseTestCase):
self.assertEqual(depl_task['parameters'].get('cwd'),
self.plugin_adapter.slaves_scripts_path)
@mock.patch('nailgun.plugins.adapters.DeploymentGraph')
@mock.patch('nailgun.plugins.adapters.nailgun.objects.DeploymentGraph')
def test_fault_tolerance_set_for_task_groups(self, deployment_graph_mock):
deployment_graph_mock.get_for_model.return_value = True
deployment_graph_mock.get_metadata.return_value = {}
@ -253,13 +250,10 @@ class TestPluginV1(TestPluginBase):
package_version = '1.0.0'
def test_primary_added_for_version(self):
with mock.patch.object(
self.plugin_adapter, '_load_config') as load_conf:
load_conf.return_value = [{'role': ['controller']}]
tasks = self.plugin_adapter._load_tasks()
self.assertItemsEqual(
tasks[0]['role'], ['primary-controller', 'controller'])
self.plugin.tasks = [{'role': ['controller']}]
tasks = self.plugin_adapter.get_tasks()
self.assertItemsEqual(
tasks[0]['role'], ['primary-controller', 'controller'])
def test_path_name(self):
self.assertEqual(
@ -273,13 +267,10 @@ class TestPluginV2(TestPluginBase):
package_version = '2.0.0'
def test_role_not_changed_for_version(self):
with mock.patch.object(
self.plugin_adapter, '_load_config') as load_conf:
load_conf.return_value = [{'role': ['controller']}]
tasks = self.plugin_adapter._load_tasks()
self.assertItemsEqual(
tasks[0]['role'], ['controller'])
self.plugin.tasks = [{'role': ['controller']}]
tasks = self.plugin_adapter.get_tasks()
self.assertItemsEqual(
tasks[0]['role'], ['controller'])
def test_path_name(self):
self.assertEqual(
@ -302,19 +293,18 @@ class TestPluginV3(TestPluginBase):
deployment_tasks = self.env.get_default_plugin_deployment_tasks()
tasks = self.env.get_default_plugin_tasks()
mocked_metadata = {
self._find_path('metadata'): plugin_metadata,
self._find_path('environment_config'): attributes_metadata,
self._find_path('node_roles'): roles_metadata,
self._find_path('volumes'): volumes_metadata,
self._find_path('network_roles'): network_roles_metadata,
self._find_path('deployment_tasks'): deployment_tasks,
self._find_path('tasks'): tasks,
}
plugin_metadata.update({
'attributes_metadata': attributes_metadata,
'roles_metadata': roles_metadata,
'volumes_metadata': volumes_metadata,
'network_roles_metadata': network_roles_metadata,
'deployment_tasks': deployment_tasks,
'tasks': tasks,
})
with mock.patch.object(
self.plugin_adapter, '_load_config') as load_conf:
load_conf.side_effect = lambda key: mocked_metadata[key]
self.plugin_adapter, 'loader') as loader:
loader.load.return_value = (plugin_metadata, ReportNode())
Plugin.update(self.plugin, self.plugin_adapter.get_metadata())
for key, val in six.iteritems(plugin_metadata):
@ -358,20 +348,19 @@ class TestPluginV4(TestPluginBase):
tasks = self.env.get_default_plugin_tasks()
components_metadata = self.env.get_default_components()
mocked_metadata = {
self._find_path('metadata'): plugin_metadata,
self._find_path('environment_config'): attributes_metadata,
self._find_path('node_roles'): roles_metadata,
self._find_path('volumes'): volumes_metadata,
self._find_path('network_roles'): network_roles_metadata,
self._find_path('deployment_tasks'): deployment_tasks,
self._find_path('tasks'): tasks,
self._find_path('components'): components_metadata
}
plugin_metadata.update({
'attributes_metadata': attributes_metadata,
'roles_metadata': roles_metadata,
'volumes_metadata': volumes_metadata,
'network_roles_metadata': network_roles_metadata,
'deployment_tasks': deployment_tasks,
'tasks': tasks,
'components_metadata': components_metadata
})
with mock.patch.object(
self.plugin_adapter, '_load_config') as load_conf:
load_conf.side_effect = lambda key: mocked_metadata[key]
self.plugin_adapter, 'loader') as loader:
loader.load.return_value = (plugin_metadata, ReportNode())
Plugin.update(self.plugin, self.plugin_adapter.get_metadata())
for key, val in six.iteritems(plugin_metadata):
@ -403,15 +392,6 @@ class TestPluginV4(TestPluginBase):
self.plugin_adapter.get_deployment_tasks()[0][k],
v)
def test_empty_task_file_not_failing(self):
with mock.patch.object(
self.plugin_adapter, '_load_config') as load_conf:
with mock.patch('nailgun.plugins.adapters.os') as os:
os.path.exists.return_value = True
load_conf.return_value = None
self.assertNotRaises(
ValueError, self.plugin_adapter._load_tasks)
class TestPluginV5(TestPluginBase):
@ -421,9 +401,6 @@ class TestPluginV5(TestPluginBase):
def test_get_metadata(self):
plugin_metadata = self.env.get_default_plugin_metadata()
attributes_metadata = self.env.get_default_plugin_env_config()
nic_attributes_metadata = self.env.get_default_plugin_nic_config()
bond_attributes_metadata = self.env.get_default_plugin_bond_config()
node_attributes_metadata = self.env.get_default_plugin_node_config()
roles_metadata = self.env.get_default_plugin_node_roles_config()
volumes_metadata = self.env.get_default_plugin_volumes_config()
network_roles_metadata = self.env.get_default_network_roles_config()
@ -431,23 +408,26 @@ class TestPluginV5(TestPluginBase):
tasks = self.env.get_default_plugin_tasks()
components_metadata = self.env.get_default_components()
mocked_metadata = {
self._find_path('metadata'): plugin_metadata,
self._find_path('environment_config'): attributes_metadata,
self._find_path('node_roles'): roles_metadata,
self._find_path('volumes'): volumes_metadata,
self._find_path('network_roles'): network_roles_metadata,
self._find_path('deployment_tasks'): deployment_tasks,
self._find_path('tasks'): tasks,
self._find_path('components'): components_metadata,
self._find_path('nic_config'): nic_attributes_metadata,
self._find_path('bond_config'): bond_attributes_metadata,
self._find_path('node_config'): node_attributes_metadata
}
nic_attributes_metadata = self.env.get_default_plugin_nic_config()
bond_attributes_metadata = self.env.get_default_plugin_bond_config()
node_attributes_metadata = self.env.get_default_plugin_node_config()
plugin_metadata.update({
'attributes_metadata': attributes_metadata,
'roles_metadata': roles_metadata,
'volumes_metadata': volumes_metadata,
'network_roles_metadata': network_roles_metadata,
'deployment_tasks': deployment_tasks,
'tasks': tasks,
'components_metadata': components_metadata,
'nic_attributes_metadata': nic_attributes_metadata,
'bond_attributes_metadata': bond_attributes_metadata,
'node_attributes_metadata': node_attributes_metadata
})
with mock.patch.object(
self.plugin_adapter, '_load_config') as load_conf:
load_conf.side_effect = lambda key: mocked_metadata[key]
self.plugin_adapter, 'loader') as loader:
loader.load.return_value = (plugin_metadata, ReportNode())
Plugin.update(self.plugin, self.plugin_adapter.get_metadata())
for key, val in six.iteritems(plugin_metadata):
@ -475,6 +455,8 @@ class TestPluginV5(TestPluginBase):
self.plugin.node_attributes_metadata,
bond_attributes_metadata)
# deployment tasks are returned with all fields populated (not only the
# ones defined in JSON), so the check must differ from the JSON-stored fields
plugin_tasks = self.env.get_default_plugin_deployment_tasks()
self.assertGreater(len(plugin_tasks), 0)
for k, v in six.iteritems(plugin_tasks[0]):
@ -497,7 +479,7 @@ class TestClusterCompatibilityValidation(base.BaseTestCase):
'version': '2014.2-6.0',
'os': 'ubuntu',
'mode': ['ha']}]))
self.plugin_adapter = adapters.PluginAdapterV1(self.plugin)
self.plugin_adapter = plugins.adapters.PluginAdapterV1(self.plugin)
def cluster_mock(self, os, mode, version):
release = mock.Mock(operating_system=os, version=version)

View File

@ -36,6 +36,7 @@ from uuid import uuid4
from nailgun.logger import logger
from nailgun.settings import settings
from reports import ReportNode
def get_in(dictionary, *args):
"""This convenience function improves readability of the code like this:

View File

@ -0,0 +1,214 @@
# -*- coding: utf-8 -*-
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
import traceback
import six
# Number of spaces to indent each nesting level when rendering a report
REPORT_INDENT_SIZE = 4
# Marker prepended to failed nodes when rendering a report
REPORT_FAILURE_POINTER = '> '
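# The failure pointer replaces the trailing characters of the indent (see
# error_indent in _render below), so the text of failed and non-failed
# nodes at the same nesting level stays aligned.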
class ReportNode(object):
"""Basic unit of Reports tree.
Any ReportNode can be rendered as a report together with its whole
tree of children.
"""
text = None
children = None
_failed = False
def __init__(self, text=None, children=None, failed=False):
"""Basic unit of report tree.
:param text: node text
:type text: basestring
:param children: list of child ReportNodes
:type children: list[ReportNode]
:param failed: failure flag that affects rendering
:type failed: boolean
"""
self.text = text
self.children = children if children is not None else []
self._failed = failed
def _render(self, level=0):
"""Render report tree to the validation result and messages list.
:param level: indent level
:type level: int
:return: list of message lines and the failed flag
:rtype: (list[str], bool)
"""
indent_size = REPORT_INDENT_SIZE * level
error_indent_size = \
max(indent_size - len(REPORT_FAILURE_POINTER), 0)
indent = indent_size * ' '
error_indent = error_indent_size * ' '
lines = []
failed = self._failed
# children are not indented deeper if this node renders no text of its own
next_level = level + (1 if self.text else 0)
for child in self.children:
child_strings, child_failed = child._render(next_level)
failed = child_failed or failed
lines.extend(child_strings)
if self.text:
output = ''.join([
error_indent if failed else indent,
REPORT_FAILURE_POINTER if failed else '',
u"{}".format(self.text)
])
lines.insert(0, output)
return lines, failed
def add_nodes(self, *nodes):
"""Add single node or several nodes.
:param nodes: one or several report nodes
:type nodes: list[ReportNode]
:raises: TypeError
:return: self
:rtype: ReportNode
"""
for node in nodes:
if not isinstance(node, ReportNode):
raise TypeError(
u"This value is not ReportNode {0}".format(node))
self.children.append(node)
return self
def _process_message_or_exception(
self, prefix, msg_or_exc, *args, **kwargs):
if isinstance(msg_or_exc, six.string_types):
self.add_nodes(
ReportNode(prefix + u'{}'.format(msg_or_exc))
)
elif isinstance(msg_or_exc, Exception):
self.add_nodes(
ReportNode(prefix + (msg_or_exc.message or repr(msg_or_exc)))
)
# print_exc() only writes to stderr and returns None; capture the
# traceback of the exception currently being handled (if any) instead
if sys.exc_info()[0] is not None:
    self.error(traceback.format_exc())
self.add_nodes(
*(
ReportNode(prefix + u'{}'.format(arg))
for arg in args
)
)
self.add_nodes(
*(
ReportNode(prefix + u'{}: {}'.format(key, kwargs[key]))
for key in kwargs
)
)
return
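# For example (hypothetical values), calling
#     node.warning(u'bad value', u'see docs', path=u'tasks.yaml')
# adds three child nodes that render as:
#     WARNING: bad value
#     WARNING: see docs
#     WARNING: path: tasks.yaml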
def error(self, msg_or_exc, *args, **kwargs):
"""Add child ReportNode with error message.
:param msg_or_exc: message or exception
:type msg_or_exc: str|basestring|Exception
:return: self
:rtype: ReportNode
"""
self._failed = True
self._process_message_or_exception(
u"ERROR: ", msg_or_exc, *args, **kwargs)
return self
def warning(self, msg_or_exc, *args, **kwargs):
"""Add child ReportNode with warning message.
:param msg_or_exc: message or exception
:type msg_or_exc: str|basestring|Exception
:return: self
:rtype: ReportNode
"""
self._process_message_or_exception(
u"WARNING: ", msg_or_exc, *args, **kwargs)
return self
def info(self, msg_or_exc, *args, **kwargs):
"""Add child ReportNode with info message.
:param msg_or_exc: message or exception
:type msg_or_exc: str|basestring|Exception
:return: self
:rtype: ReportNode
"""
self._process_message_or_exception(
u"INFO: ", msg_or_exc, *args, **kwargs)
return self
def render(self, add_summary=True):
"""Render report tree to the text.
:param add_summary: include statistics and result
:type add_summary: bool
:return: report strings
:rtype: str|basestring
"""
strings, _ = self._render()
if add_summary:
strings.append('')
fail_count = self.count_failures()
if fail_count:
strings += [
u'Failure!',
u'Please fix {} error(s) listed above.'.format(fail_count)
]
else:
strings += [
u'Success!'
]
return "\n".join(strings)
def count_failures(self, start_from=0):
"""Count failure messages inside report.
:param start_from: initial value for the counter
:type start_from: int
:return: errors count
:rtype: int
"""
count = start_from
if self._failed:
count += 1
for child in self.children:
count = child.count_failures(count)
return count
def is_failed(self):
"""Is this report about failure.
:return: is failed
:rtype: boolean
"""
return bool(self.count_failures())
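# A minimal usage sketch (illustrative only; the names and messages are
# hypothetical, not part of this module's API): build a small report
# tree, record a failure, and render it.
if __name__ == '__main__':
    report = ReportNode(u'Plugin validation')
    tasks_check = ReportNode(u'Checking deployment tasks')
    # error() marks the node as failed and adds an "ERROR: ..." child
    tasks_check.error(u'missing "version" field')
    report.add_nodes(tasks_check)
    report.info(u'1 file checked')
    # the nested failure is visible from the root via count_failures()
    assert report.is_failed()
    # failed branches are marked with the "> " pointer and the summary
    # reports the total number of errors
    print(report.render())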