Proton server to support multiple API models
This commit adds the capability for the proton-server to support multiple API models. Adding this capability required a large amount of restructuring and refactoring of code. The following is a summary of the changes: - Created loadable modules for managers using Stevedore - Moved all generic manager functionality to a base manager class - Reworked generated API structure to start at /proton instead of /v1 - The proton_controller is now the root controller for API generation - Removed objects/base.py and moved functionality to ApiBaseObject - Database table names are prefixed with API name for uniqueness - Added --api argument to cli to specify API to use - Many other improvements to code (pep8, better error checking, etc) Change-Id: I1a9aee74d2970d77a5c4eff2c2ce139dfd1355b0 Implements: blueprint multiple-api-support
This commit is contained in:
parent
5e1f8b3ede
commit
dcf1f75cb3
|
@ -1,49 +0,0 @@
|
|||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import datetime
|
||||
|
||||
import wsme
|
||||
from wsme import types as wtypes
|
||||
|
||||
|
||||
class APIBase(wtypes.Base):
|
||||
|
||||
# TBD
|
||||
created_at = wsme.wsattr(datetime.datetime, readonly=True)
|
||||
"""The time in UTC at which the object is created"""
|
||||
|
||||
# #TBD
|
||||
updated_at = wsme.wsattr(datetime.datetime, readonly=True)
|
||||
"""The time in UTC at which the object is updated"""
|
||||
|
||||
def as_dict(self):
|
||||
"""Render this object as a dict of its fields."""
|
||||
return dict((k, getattr(self, k))
|
||||
for k in self.fields
|
||||
if hasattr(self, k) and
|
||||
getattr(self, k) != wsme.Unset)
|
||||
|
||||
def unset_fields_except(self, except_list=None):
|
||||
"""Unset fields so they don't appear in the message body.
|
||||
|
||||
:param except_list: A list of fields that won't be touched.
|
||||
|
||||
"""
|
||||
if except_list is None:
|
||||
except_list = []
|
||||
|
||||
for k in self.as_dict():
|
||||
if k not in except_list:
|
||||
setattr(self, k, wsme.Unset)
|
|
@ -14,15 +14,13 @@
|
|||
|
||||
import datetime
|
||||
|
||||
from pecan import expose
|
||||
from pecan import rest
|
||||
import wsme
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from gluon.api import types
|
||||
from gluon.core.manager import get_api_manager
|
||||
from oslo_utils.uuidutils import generate_uuid
|
||||
from gluon.db import api as dbapi
|
||||
from gluon.managers.manager_base import get_api_manager
|
||||
|
||||
|
||||
class APIBase(wtypes.Base):
|
||||
|
@ -35,10 +33,14 @@ class APIBase(wtypes.Base):
|
|||
updated_at = wsme.wsattr(datetime.datetime, readonly=True)
|
||||
"""The time in UTC at which the object is updated"""
|
||||
|
||||
def get_fields(self):
|
||||
return [attr.key for attr in self._wsme_attributes]
|
||||
|
||||
def as_dict(self):
|
||||
"""Render this object as a dict of its fields."""
|
||||
fields = self.get_fields()
|
||||
return dict((k, getattr(self, k))
|
||||
for k in self.fields
|
||||
for k in fields
|
||||
if hasattr(self, k) and
|
||||
getattr(self, k) != wsme.Unset)
|
||||
|
||||
|
@ -50,7 +52,6 @@ class APIBase(wtypes.Base):
|
|||
"""
|
||||
if except_list is None:
|
||||
except_list = []
|
||||
|
||||
for k in self.as_dict():
|
||||
if k not in except_list:
|
||||
setattr(self, k, wsme.Unset)
|
||||
|
@ -58,191 +59,166 @@ class APIBase(wtypes.Base):
|
|||
|
||||
class APIBaseObject(APIBase):
|
||||
|
||||
_object_class = None
|
||||
|
||||
@classmethod
|
||||
def class_builder(base_cls, name, object_class, attributes):
|
||||
def class_builder(base_cls, name, _db_model, attributes):
|
||||
new_cls = type(name, (base_cls,), attributes)
|
||||
new_cls._object_class = object_class
|
||||
new_cls.db_model = _db_model
|
||||
new_cls.db = dbapi.get_instance()
|
||||
return new_cls
|
||||
|
||||
@classmethod
|
||||
def get_object_class(cls):
|
||||
return cls._object_class
|
||||
|
||||
@classmethod
|
||||
def build(cls, db_obj):
|
||||
obj = cls()
|
||||
fields = obj.get_fields()
|
||||
db_obj_dict = db_obj.as_dict()
|
||||
for field in cls._object_class.fields:
|
||||
for field in fields:
|
||||
# Skip fields we do not expose.
|
||||
if not hasattr(obj, field):
|
||||
continue
|
||||
setattr(obj, field, db_obj_dict.get(field, wtypes.Unset))
|
||||
return obj
|
||||
|
||||
def to_db_object(self):
|
||||
new_DB_obj = self._object_class()
|
||||
for field in self._object_class.fields:
|
||||
if not hasattr(self, field):
|
||||
continue
|
||||
attr = getattr(self, field)
|
||||
if type(attr) is wsme.types.UnsetType:
|
||||
continue
|
||||
setattr(new_DB_obj, field, attr)
|
||||
return new_DB_obj
|
||||
@classmethod
|
||||
def get_from_db(cls, key):
|
||||
db_obj = cls.db.get_by_primary_key(cls.db_model, key)
|
||||
return cls.build(db_obj)
|
||||
|
||||
@classmethod
|
||||
def create_in_db(cls, new_values):
|
||||
new_values['created_at'] = datetime.datetime.now()
|
||||
new_values['updated_at'] = new_values['created_at']
|
||||
db_object = cls.db.create(cls.db_model, new_values)
|
||||
return cls.build(db_object)
|
||||
|
||||
@classmethod
|
||||
def update_in_db(cls, key, new_values):
|
||||
new_values['updated_at'] = datetime.datetime.now()
|
||||
db_object = cls.db.get_by_primary_key(cls.db_model, key)
|
||||
db_object.update(new_values)
|
||||
db_object.save()
|
||||
return cls.build(db_object)
|
||||
|
||||
@classmethod
|
||||
def delete_from_db(cls, key):
|
||||
"""Delete a Object in the DB."""
|
||||
db_object = cls.db.get_by_primary_key(cls.db_model, key)
|
||||
db_object.delete()
|
||||
|
||||
|
||||
class APIBaseList(APIBase):
|
||||
|
||||
@classmethod
|
||||
def get_object_class(cls):
|
||||
return cls._API_object_class.get_object_class()
|
||||
|
||||
@classmethod
|
||||
def class_builder(base_cls, name, list_name, API_object_class):
|
||||
new_cls = type(name, (base_cls,), {list_name: [API_object_class]})
|
||||
new_cls._list_name = list_name
|
||||
new_cls._API_object_class = API_object_class
|
||||
def class_builder(base_cls, name, list_name, api_object_class):
|
||||
new_cls = type(name, (base_cls,), {list_name: [api_object_class]})
|
||||
new_cls.list_name = list_name
|
||||
new_cls.api_object_class = api_object_class
|
||||
return new_cls
|
||||
|
||||
@classmethod
|
||||
def build(cls, db_obj_list):
|
||||
def build(cls):
|
||||
db = dbapi.get_instance()
|
||||
db_obj_list = db.get_list(cls.api_object_class.db_model)
|
||||
obj = cls()
|
||||
setattr(obj, cls._list_name,
|
||||
[cls._API_object_class.build(db_obj)
|
||||
setattr(obj, cls.list_name,
|
||||
[cls.api_object_class.build(db_obj)
|
||||
for db_obj in db_obj_list])
|
||||
return obj
|
||||
|
||||
|
||||
class RootObjectController(rest.RestController):
|
||||
"""Controller for objects of the API which do not have a parent"""
|
||||
"""Root Objects are Objects of the API which do not have a parent"""
|
||||
|
||||
@classmethod
|
||||
def class_builder(base_cls, name, API_object_class,
|
||||
primary_key_type):
|
||||
def class_builder(base_cls, name, api_obj_class, primary_key_type,
|
||||
api_name):
|
||||
new_cls = type(name, (base_cls,), {})
|
||||
new_cls._list_object_class = APIBaseList.class_builder(
|
||||
name + 'List', name, API_object_class)
|
||||
new_cls._API_object_class = API_object_class
|
||||
new_cls._primary_key_type = primary_key_type
|
||||
new_cls.list_object_class = APIBaseList.class_builder(name + 'List',
|
||||
name,
|
||||
api_obj_class)
|
||||
new_cls.api_object_class = api_obj_class
|
||||
new_cls.primary_key_type = primary_key_type
|
||||
new_cls.api_mgr = get_api_manager(api_name)
|
||||
|
||||
@expose('json')
|
||||
@wsme_pecan.wsexpose(new_cls.list_object_class, template='json')
|
||||
def get_all(self):
|
||||
return self.call_api_manager(
|
||||
self._list_object_class, 'get_all')
|
||||
return self.list_object_class.build()
|
||||
new_cls.get_all = classmethod(get_all)
|
||||
|
||||
@expose('json')
|
||||
@wsme_pecan.wsexpose(new_cls.api_object_class,
|
||||
new_cls.primary_key_type,
|
||||
template='json')
|
||||
def get_one(self, key):
|
||||
return self.call_api_manager(
|
||||
self._API_object_class, 'get_one', key)
|
||||
return self.api_object_class.get_from_db(key)
|
||||
new_cls.get_one = classmethod(get_one)
|
||||
|
||||
@wsme_pecan.wsexpose(new_cls._API_object_class,
|
||||
body=new_cls._API_object_class,
|
||||
template='json',
|
||||
@wsme_pecan.wsexpose(new_cls.api_object_class,
|
||||
body=new_cls.api_object_class, template='json',
|
||||
status_code=201)
|
||||
def post(self, body):
|
||||
return self.call_api_manager_create(
|
||||
self._API_object_class, body.to_db_object())
|
||||
return self.api_mgr.handle_create(self, body.as_dict())
|
||||
new_cls.post = classmethod(post)
|
||||
|
||||
@wsme_pecan.wsexpose(new_cls._API_object_class,
|
||||
new_cls._primary_key_type,
|
||||
unicode,
|
||||
body=unicode,
|
||||
template='json')
|
||||
def put(self, key, operation, body):
|
||||
return self.call_api_manager(
|
||||
self._API_object_class, operation, key, body)
|
||||
@wsme_pecan.wsexpose(new_cls.api_object_class,
|
||||
new_cls.primary_key_type,
|
||||
body=new_cls.api_object_class, template='json')
|
||||
def put(self, key, body):
|
||||
return self.api_mgr.handle_update(self, key, body.as_dict())
|
||||
new_cls.put = classmethod(put)
|
||||
|
||||
@wsme_pecan.wsexpose(None, new_cls._primary_key_type,
|
||||
template='json')
|
||||
@wsme_pecan.wsexpose(None, new_cls.primary_key_type, template='json')
|
||||
def delete(self, key):
|
||||
return self.call_api_manager(
|
||||
new_cls._API_object_class, 'delete', key)
|
||||
return self.api_mgr.handle_delete(self, key)
|
||||
new_cls.delete = classmethod(delete)
|
||||
|
||||
return new_cls
|
||||
|
||||
@classmethod
|
||||
def call_api_manager_create(cls, api_class, db_object):
|
||||
objClass = cls._API_object_class.get_object_class()
|
||||
call_func = getattr(get_api_manager(),
|
||||
'create_%s' % cls.__name__, None)
|
||||
if not call_func:
|
||||
raise Exception('%s_%s is not implemented' %
|
||||
(call_func, cls.__name__))
|
||||
# If the primary key is a UUID and it is not set,
|
||||
# we generate one and set it here.
|
||||
if isinstance(cls._primary_key_type, types.UuidType):
|
||||
gen_uuid = False
|
||||
if db_object.db_model._primary_key in db_object.as_dict():
|
||||
if db_object.as_dict()[db_object.db_model._primary_key] \
|
||||
== "Unset":
|
||||
gen_uuid = True
|
||||
else:
|
||||
gen_uuid = True
|
||||
if gen_uuid:
|
||||
db_object.__setitem__(
|
||||
db_object.db_model._primary_key, generate_uuid())
|
||||
return call_func(api_class, db_object)
|
||||
|
||||
@classmethod
|
||||
def call_api_manager(cls, api_class, func, *args):
|
||||
objClass = cls._API_object_class.get_object_class()
|
||||
call_func = getattr(
|
||||
get_api_manager(), '%s_%s' % (func, cls.__name__), None)
|
||||
if not call_func:
|
||||
raise Exception('%s_%s is not implemented' % (func, cls.__name__))
|
||||
return call_func(api_class, objClass, *args)
|
||||
|
||||
|
||||
class SubObjectController(RootObjectController):
|
||||
|
||||
@classmethod
|
||||
def class_builder(base_cls, name, object_class, primary_key_type,
|
||||
parent_identifier_type,
|
||||
parent_attribute_name):
|
||||
new_cls = super(SubObjectController, base_cls).class_builder(
|
||||
name, object_class, primary_key_type)
|
||||
new_cls._parent_identifier_type = parent_identifier_type
|
||||
new_cls._parent_attribute_name = parent_attribute_name
|
||||
|
||||
@wsme_pecan.wsexpose(new_cls._list_object_class,
|
||||
new_cls._parent_identifier_type,
|
||||
template='json')
|
||||
def get_all(self, _parent_identifier):
|
||||
filters = {self._parent_attribute_name: _parent_identifier}
|
||||
return self._list_object_class.build(
|
||||
self._list_object_class.get_object_class().list(
|
||||
filters=filters))
|
||||
new_cls.get_all = classmethod(get_all)
|
||||
|
||||
@wsme_pecan.wsexpose(new_cls._API_object_class,
|
||||
new_cls._parent_identifier_type,
|
||||
new_cls._primary_key_type,
|
||||
template='json')
|
||||
def get_one(self, _parent_identifier, key):
|
||||
filters = {self._parent_attribute_name: _parent_identifier}
|
||||
return self._API_object_class.build(
|
||||
self._API_object_class.get_object_class(
|
||||
).get_by_primary_key(key, filters))
|
||||
new_cls.get_one = classmethod(get_one)
|
||||
|
||||
@wsme_pecan.wsexpose(new_cls._API_object_class,
|
||||
new_cls._parent_identifier_type,
|
||||
body=new_cls._API_object_class, template='json',
|
||||
status_code=201)
|
||||
def post(self, parent_identifier, body):
|
||||
call_func = getattr(get_api_manager(), 'create_%s' % self.__name__,
|
||||
None)
|
||||
if not call_func:
|
||||
raise Exception('create_%s is not implemented' % self.__name__)
|
||||
return self._API_object_class.build(call_func(parent_identifier,
|
||||
body.to_db_object()))
|
||||
new_cls.post = classmethod(post)
|
||||
|
||||
return new_cls
|
||||
# TODO(hambtw) Needs to be reworked
|
||||
# class SubObjectController(RootObjectController):
|
||||
#
|
||||
# @classmethod
|
||||
# def class_builder(base_cls, name, object_class, primary_key_type,
|
||||
# parent_identifier_type,
|
||||
# parent_attribute_name):
|
||||
# new_cls = super(SubObjectController, base_cls).class_builder(
|
||||
# name, object_class, primary_key_type)
|
||||
# new_cls._parent_id_type = parent_identifier_type
|
||||
# new_cls._parent_attribute_name = parent_attribute_name
|
||||
#
|
||||
# @wsme_pecan.wsexpose(new_cls._list_object_class,
|
||||
# new_cls.parent_id_type,
|
||||
# template='json')
|
||||
# def get_all(self, _parent_identifier):
|
||||
# filters = {self._parent_attribute_name: _parent_identifier}
|
||||
# return self._list_object_class.build(
|
||||
# self._list_object_class.get_object_class().list(
|
||||
# filters=filters))
|
||||
# new_cls.get_all = classmethod(get_all)
|
||||
#
|
||||
# @wsme_pecan.wsexpose(new_cls.api_object_class,
|
||||
# new_cls._parent_identifier_type,
|
||||
# new_cls.primary_key_type,
|
||||
# template='json')
|
||||
# def get_one(self, parent_identifier, key):
|
||||
# filters = {self._parent_attribute_name: parent_identifier}
|
||||
# return self.api_object_class.build(
|
||||
# self.api_object_class.get_object_class(
|
||||
# ).get_by_primary_key(key, filters))
|
||||
# new_cls.get_one = classmethod(get_one)
|
||||
#
|
||||
# @wsme_pecan.wsexpose(new_cls.api_object_class,
|
||||
# new_cls._parent_id_type,
|
||||
# body=new_cls.api_object_class, template='json',
|
||||
# status_code=201)
|
||||
# def post(self, parent_identifier, body):
|
||||
# call_func = getattr(get_api_manager(),
|
||||
# 'create_%s' % self.__name__,
|
||||
# None)
|
||||
# if not call_func:
|
||||
# raise Exception('create_%s is not implemented' %
|
||||
# self.__name__)
|
||||
# return self.api_object_class.build(call_func(
|
||||
# parent_identifier,
|
||||
# body.to_db_object()))
|
||||
# new_cls.post = classmethod(post)
|
||||
#
|
||||
# return new_cls
|
||||
|
|
|
@ -19,10 +19,15 @@ import wsmeext.pecan as wsme_pecan
|
|||
|
||||
from gluon.api.baseObject import APIBase
|
||||
from gluon.api import link
|
||||
from gluon.common.particleGenerator import generator as particle_generator
|
||||
from gluon.particleGenerator import generator as particle_generator
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
|
||||
|
||||
class V1(APIBase):
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ProtonRoot(APIBase):
|
||||
"""The representation of the version 1 of the API."""
|
||||
|
||||
id = wtypes.text
|
||||
|
@ -32,26 +37,26 @@ class V1(APIBase):
|
|||
|
||||
@staticmethod
|
||||
def convert():
|
||||
v1 = V1()
|
||||
v1.id = "v1"
|
||||
v1.links = [link.Link.make_link('self', pecan.request.host_url,
|
||||
'v1', '', bookmark=True),
|
||||
]
|
||||
return v1
|
||||
root = ProtonRoot()
|
||||
root.id = "proton"
|
||||
root.links = [link.Link.make_link('self', pecan.request.host_url,
|
||||
'proton', '', bookmark=True), ]
|
||||
return root
|
||||
|
||||
|
||||
class API(rest.RestController):
|
||||
class ProtonController(rest.RestController):
|
||||
"""Version 1 API controller root."""
|
||||
|
||||
def __init__(self):
|
||||
particle_generator.build_api(self)
|
||||
services = str(cfg.CONF.api.service_list).split(',')
|
||||
service_list = list()
|
||||
for api_name in services:
|
||||
service_list.append(api_name.strip())
|
||||
particle_generator.build_api(self, service_list)
|
||||
|
||||
@wsme_pecan.wsexpose(V1)
|
||||
@wsme_pecan.wsexpose(ProtonRoot)
|
||||
def get(self):
|
||||
# NOTE: The reason why convert() is being called for every
|
||||
# request is because we need to get the host url from
|
||||
# the request object to make the links.
|
||||
return V1.convert()
|
||||
|
||||
# Breaks autodocs
|
||||
# __all__ = (API)
|
||||
return ProtonRoot.convert()
|
|
@ -20,8 +20,8 @@ from wsme import types as wtypes
|
|||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from gluon.api.baseObject import APIBase
|
||||
from gluon.api.controller.v1.base import API as v1
|
||||
from gluon.api import link
|
||||
from gluon.api.proton_controller import ProtonController
|
||||
|
||||
|
||||
class Version(APIBase):
|
||||
|
@ -69,13 +69,13 @@ class Root(APIBase):
|
|||
|
||||
class RootController(rest.RestController):
|
||||
|
||||
_versions = ['v1']
|
||||
_versions = ['proton']
|
||||
"""All supported API versions"""
|
||||
|
||||
_default_version = 'v1'
|
||||
_default_version = 'proton'
|
||||
"""The default API version"""
|
||||
|
||||
v1 = v1()
|
||||
proton = ProtonController()
|
||||
|
||||
@wsme_pecan.wsexpose(Root)
|
||||
def get(self):
|
||||
|
|
|
@ -17,13 +17,10 @@
|
|||
# under the License.
|
||||
|
||||
import six
|
||||
import wsme
|
||||
from wsme import types as wtypes
|
||||
|
||||
from gluon.common import exception
|
||||
from oslo_log._i18n import _
|
||||
from oslo_utils import strutils
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
|
||||
class DynamicDict(wtypes.DynamicBase):
|
||||
|
@ -43,7 +40,7 @@ class NameType(wtypes.UserType):
|
|||
@staticmethod
|
||||
def validate(value):
|
||||
if not value:
|
||||
raise exception.InvalidName(name=value)
|
||||
raise ValueError(value)
|
||||
return value
|
||||
|
||||
@staticmethod
|
||||
|
@ -53,7 +50,7 @@ class NameType(wtypes.UserType):
|
|||
return NameType.validate(value)
|
||||
|
||||
|
||||
class UuidType(wtypes.UserType):
|
||||
class UuidType(wtypes.UuidType):
|
||||
"""A simple UUID type."""
|
||||
|
||||
basetype = wtypes.text
|
||||
|
@ -62,11 +59,9 @@ class UuidType(wtypes.UserType):
|
|||
@staticmethod
|
||||
def validate(value):
|
||||
if value == '':
|
||||
value = wtypes.Unset
|
||||
return value
|
||||
if not uuidutils.is_uuid_like(value):
|
||||
raise exception.InvalidUUID(uuid=value)
|
||||
return value
|
||||
if wtypes.UuidType.validate(value):
|
||||
return value
|
||||
|
||||
@staticmethod
|
||||
def frombasetype(value):
|
||||
|
@ -85,9 +80,9 @@ class BooleanType(wtypes.UserType):
|
|||
def validate(value):
|
||||
try:
|
||||
return strutils.bool_from_string(value, strict=True)
|
||||
except ValueError as e:
|
||||
except ValueError:
|
||||
# raise Invalid to return 400 (BadRequest) in the API
|
||||
raise exception.Invalid(e)
|
||||
raise ValueError("Invalid boolean value: %s" % value)
|
||||
|
||||
@staticmethod
|
||||
def frombasetype(value):
|
||||
|
@ -116,7 +111,7 @@ class MultiType(wtypes.UserType):
|
|||
for t in self.types:
|
||||
try:
|
||||
return wtypes.validate_value(t, value)
|
||||
except (exception.InvalidUUID, ValueError):
|
||||
except ValueError:
|
||||
pass
|
||||
else:
|
||||
raise ValueError(_("Expected '%(type)s', got '%(value)s'")
|
||||
|
|
|
@ -14,9 +14,11 @@
|
|||
# under the License.
|
||||
|
||||
import abc
|
||||
import json
|
||||
import six
|
||||
import stevedore
|
||||
|
||||
from gluon.backends.proton_client import Client
|
||||
from oslo_log import log as logging
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
@ -37,25 +39,50 @@ class ProviderBase(object):
|
|||
@six.add_metaclass(abc.ABCMeta)
|
||||
class Driver(object):
|
||||
|
||||
@abc.abstractmethod
|
||||
def __init__(self, backend, dummy_net, dummy_subnet):
|
||||
self._client = Client(backend)
|
||||
self._dummy_net = dummy_net
|
||||
self._dummy_subnet = dummy_subnet
|
||||
|
||||
def bind(self, port_id, device_owner, zone, device_id, host_id,
|
||||
binding_profile):
|
||||
pass
|
||||
args = {}
|
||||
args["device_owner"] = device_owner
|
||||
args["device_id"] = device_id
|
||||
args["host_id"] = host_id
|
||||
if binding_profile is not None:
|
||||
args["profile"] = json.dumps(binding_profile, indent=0)
|
||||
args["zone"] = zone
|
||||
url = self._port_url + "/" + port_id
|
||||
return self._convert_port_data(self._client.do_put(url, args))
|
||||
|
||||
@abc.abstractmethod
|
||||
def unbind(self, port):
|
||||
pass
|
||||
def unbind(self, port_id):
|
||||
args = {}
|
||||
args["device_owner"] = ''
|
||||
args["device_id"] = ''
|
||||
args["host_id"] = ''
|
||||
args["profile"] = ''
|
||||
args["zone"] = ''
|
||||
url = self._port_url + "/" + port_id
|
||||
return self._convert_port_data(self._client.do_put(url, args))
|
||||
|
||||
@abc.abstractmethod
|
||||
def port(self, port_id):
|
||||
pass
|
||||
url = self._port_url + "/" + port_id
|
||||
return self._convert_port_data(self._client.json_get(url))
|
||||
|
||||
def ports(self):
|
||||
port_list = self._client.json_get(self._port_url)
|
||||
ret_port_list = []
|
||||
for port in port_list:
|
||||
ret_port_list.append(self._convert_port_data(port))
|
||||
return ret_port_list
|
||||
|
||||
@abc.abstractmethod
|
||||
def ports(self):
|
||||
def _convert_port_data(self, port_data):
|
||||
pass
|
||||
|
||||
|
||||
class Manager(object):
|
||||
class BackendLoader(object):
|
||||
"""Class used to manage backend drivers in Gluon.
|
||||
|
||||
Drivers know how to talk to particular network services. It
|
||||
|
|
|
@ -16,27 +16,26 @@
|
|||
import json
|
||||
|
||||
from gluon.backends import backend_base
|
||||
from gluon.backends.backends.proton_client import Client
|
||||
from oslo_config import cfg
|
||||
|
||||
from oslo_log import log as logging
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
logger = LOG
|
||||
|
||||
|
||||
API_SERVICE_OPTS = [
|
||||
cfg.StrOpt('ports_name',
|
||||
default='baseports',
|
||||
help='URL to get ports'),
|
||||
]
|
||||
class MyData(object):
|
||||
pass
|
||||
|
||||
CONF = cfg.CONF
|
||||
opt_group = cfg.OptGroup(name='gluon',
|
||||
title='Options for the gluon')
|
||||
CONF.register_group(opt_group)
|
||||
CONF.register_opts(API_SERVICE_OPTS, opt_group)
|
||||
DriverData = MyData()
|
||||
DriverData.service = u'net-l3vpn'
|
||||
DriverData.proton_base = 'proton'
|
||||
DriverData.ports_name = 'baseports'
|
||||
|
||||
|
||||
class Provider(backend_base.ProviderBase):
|
||||
|
||||
def driver_for(self, backend, dummy_net, dummy_subnet):
|
||||
if backend['service'] == u'net-l3vpn':
|
||||
if backend['service'] == DriverData.service:
|
||||
return Driver(backend, dummy_net, dummy_subnet)
|
||||
else:
|
||||
return None
|
||||
|
@ -45,46 +44,18 @@ class Provider(backend_base.ProviderBase):
|
|||
class Driver(backend_base.Driver):
|
||||
|
||||
def __init__(self, backend, dummy_net, dummy_subnet):
|
||||
self._client = Client(backend)
|
||||
self._port_url = backend["url"] + "/v1/" + cfg.CONF.gluon.ports_name
|
||||
self._dummy_net = dummy_net
|
||||
self._dummy_subnet = dummy_subnet
|
||||
|
||||
def bind(self, port_id, device_owner, zone, device_id, host_id,
|
||||
binding_profile):
|
||||
args = {}
|
||||
args["device_owner"] = device_owner
|
||||
args["device_id"] = device_id
|
||||
args["host_id"] = host_id
|
||||
if binding_profile is not None:
|
||||
args["profile"] = json.dumps(binding_profile, indent=0)
|
||||
args["zone"] = zone
|
||||
url = self._port_url + "/" + port_id + "/update"
|
||||
return self._convert_port_data(self._client.do_put(url, args))
|
||||
|
||||
def unbind(self, port_id):
|
||||
args = {}
|
||||
args["device_owner"] = ''
|
||||
args["device_id"] = ''
|
||||
args["host_id"] = ''
|
||||
args["profile"] = ''
|
||||
args["zone"] = ''
|
||||
url = self._port_url + "/" + port_id + "/update"
|
||||
return self._convert_port_data(self._client.do_put(url, args))
|
||||
|
||||
def port(self, port_id):
|
||||
url = self._port_url + "/" + port_id
|
||||
return self._convert_port_data(self._client.json_get(url))
|
||||
|
||||
def ports(self):
|
||||
port_list = self._client.json_get(self._port_url)
|
||||
ret_port_list = []
|
||||
for port in port_list:
|
||||
ret_port_list.append(self._convert_port_data(port))
|
||||
return ret_port_list
|
||||
super(Driver, self).__init__(backend, dummy_net, dummy_subnet)
|
||||
self._port_url = \
|
||||
"{0:s}/{1:s}/{2:s}/{3:s}".format(backend["url"],
|
||||
DriverData.proton_base,
|
||||
DriverData.service,
|
||||
DriverData.ports_name)
|
||||
|
||||
def _convert_port_data(self, port_data):
|
||||
LOG.debug("proton port_data = %s" % port_data)
|
||||
ret_port_data = {}
|
||||
ret_port_data["created_at"] = port_data["created_at"]
|
||||
ret_port_data["updated_at"] = port_data["updated_at"]
|
||||
ret_port_data["id"] = port_data["id"]
|
||||
ret_port_data["devname"] = 'tap%s' % port_data['id'][:11]
|
||||
ret_port_data["name"] = port_data.get("name")
|
||||
|
@ -102,12 +73,18 @@ class Driver(backend_base.Driver):
|
|||
"subnet_id": self._dummy_subnet}]
|
||||
ret_port_data["security_groups"] = []
|
||||
ret_port_data["binding:host_id"] = port_data.get("host_id", '')
|
||||
ret_port_data["binding:vif_details"] = \
|
||||
json.loads(port_data.get("vif_details", '{}'))
|
||||
vif_details = port_data.get("vif_details")
|
||||
if vif_details is None:
|
||||
vif_details = '{}'
|
||||
ret_port_data["binding:vif_details"] = json.loads(vif_details)
|
||||
ret_port_data["binding:vif_type"] = port_data.get("vif_type", '')
|
||||
ret_port_data["binding:vnic_type"] = \
|
||||
port_data.get("vnic_type", 'normal')
|
||||
if port_data.get("profile", '') != '':
|
||||
ret_port_data["binding:profile"] = \
|
||||
json.loads(port_data.get("profile", '{}'))
|
||||
profile = port_data.get("profile", '{}')
|
||||
if profile is None or profile == '':
|
||||
profile = '{}'
|
||||
ret_port_data["binding:profile"] = json.loads(profile)
|
||||
for k in ret_port_data:
|
||||
if ret_port_data[k] is None:
|
||||
ret_port_data[k] = ''
|
||||
return ret_port_data
|
|
@ -23,14 +23,11 @@ from oslo_log import log as logging
|
|||
|
||||
from gluon.api import app as api_app
|
||||
import gluon.cmd.config
|
||||
from gluon.cmd.manager import ProtonManager
|
||||
from gluon.common.particleGenerator import generator as particle_generator
|
||||
from gluon.common.particleGenerator.generator import set_package
|
||||
from gluon.common import service
|
||||
from gluon.core.manager import register_api_manager
|
||||
from gluon.db.sqlalchemy import models as sql_models
|
||||
from gluon.particleGenerator import generator as particle_generator
|
||||
from gluon.sync_etcd.thread import start_sync_thread
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
#
|
||||
# Set the package name before class generation.
|
||||
|
@ -47,10 +44,14 @@ def main():
|
|||
service.prepare_service(sys.argv)
|
||||
cfg.CONF.log_opt_values(LOG, logging.DEBUG)
|
||||
# Set source of model files
|
||||
set_package("gluon", "models/proton/net-l3vpn")
|
||||
services = str(cfg.CONF.api.service_list).split(',')
|
||||
service_list = list()
|
||||
for api_name in services:
|
||||
service_list.append(api_name.strip())
|
||||
LOG.info("Service List: %s" % service_list)
|
||||
LOG.info("Generating DB Classes")
|
||||
particle_generator.build_sql_models(sql_models.Base)
|
||||
register_api_manager(ProtonManager())
|
||||
particle_generator.build_sql_models(service_list)
|
||||
|
||||
# API is generated during the setup_app phase.
|
||||
LOG.info("Generating API Classes")
|
||||
app = api_app.setup_app()
|
||||
|
@ -69,7 +70,6 @@ def main():
|
|||
else:
|
||||
LOG.info(_LI('serving on http://%(host)s:%(port)s') %
|
||||
dict(host=host, port=port))
|
||||
start_sync_thread(service_name=cfg.CONF.api.service_name,
|
||||
etcd_host=cfg.CONF.api.etcd_host,
|
||||
start_sync_thread(etcd_host=cfg.CONF.api.etcd_host,
|
||||
etcd_port=cfg.CONF.api.etcd_port)
|
||||
srv.serve_forever()
|
||||
|
|
|
@ -13,11 +13,15 @@
|
|||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import click
|
||||
import sys
|
||||
import types
|
||||
|
||||
from gluon.common.particleGenerator.cli import proc_model
|
||||
import click
|
||||
|
||||
from gluon.particleGenerator.cli import get_api_model
|
||||
from gluon.particleGenerator.cli import get_model_list
|
||||
from gluon.particleGenerator.cli import proc_model
|
||||
|
||||
|
||||
sys.tracebacklimit = 0
|
||||
|
||||
|
@ -29,9 +33,13 @@ def dummy():
|
|||
def main():
|
||||
cli = types.FunctionType(dummy.func_code, {})
|
||||
cli = click.group()(cli)
|
||||
model_list = get_model_list(package_name="gluon",
|
||||
model_dir="models/proton")
|
||||
model = get_api_model(sys.argv, model_list)
|
||||
proc_model(cli,
|
||||
package_name="gluon",
|
||||
model_dir="models/proton/net-l3vpn",
|
||||
model_dir="models/proton",
|
||||
api_model=model,
|
||||
hostenv="OS_PROTON_HOST",
|
||||
portenv="OS_PROTON_PORT",
|
||||
hostdefault="127.0.0.1",
|
||||
|
|
|
@ -21,12 +21,9 @@ API_SERVICE_OPTS = [
|
|||
cfg.StrOpt('host',
|
||||
default='127.0.0.1',
|
||||
help='The listen IP for the proton API server'),
|
||||
cfg.StrOpt('service_name',
|
||||
cfg.StrOpt('service_list',
|
||||
default='net-l3vpn',
|
||||
help='Name of the proton service'),
|
||||
cfg.StrOpt('service_type',
|
||||
default='L3VPN',
|
||||
help='Network service type provided by the proton API server'),
|
||||
help='Comma separated list of service models'),
|
||||
cfg.StrOpt('etcd_host',
|
||||
default='127.0.0.1',
|
||||
help='etcd host'),
|
||||
|
|
|
@ -1,227 +0,0 @@
|
|||
# Copyright 2016, Ericsson AB
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import etcd
|
||||
import json
|
||||
import time
|
||||
import webob.exc as exc
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
|
||||
from gluon.common import exception
|
||||
from gluon.core.manager import ApiManager
|
||||
from gluon.sync_etcd.thread import SyncData
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
logger = LOG
|
||||
|
||||
|
||||
class MyData(object):
|
||||
pass
|
||||
|
||||
ManagerData = MyData()
|
||||
ManagerData.etcd_port = 2379
|
||||
ManagerData.etcd_host = '127.0.0.1'
|
||||
|
||||
|
||||
class ProtonManager(ApiManager):
|
||||
def __init__(self):
|
||||
self.gluon_objects = {}
|
||||
host, port = cfg.CONF.api.host, cfg.CONF.api.port
|
||||
self.url = "http://%s:%d" % (host, port)
|
||||
self.service = cfg.CONF.api.service_name
|
||||
self.wait_index = 0
|
||||
self.etcd_client = etcd.Client(host=ManagerData.etcd_host,
|
||||
port=ManagerData.etcd_port,
|
||||
read_timeout=2)
|
||||
super(ProtonManager, self).__init__()
|
||||
|
||||
def setup_bind_key(self, key):
|
||||
etcd_key = "{0:s}/{1:s}/{2:s}/{3:s}".format("controller", self.service,
|
||||
"ProtonBasePort", key)
|
||||
#
|
||||
# If key does not exists, create it so we can wait on it to change.
|
||||
#
|
||||
try:
|
||||
message = self.etcd_client.read(etcd_key)
|
||||
self.wait_index = message.modifiedIndex + 1
|
||||
except etcd.EtcdKeyNotFound:
|
||||
self.wait_index = 0
|
||||
LOG.info("Key Not Found, creating it: %s" % etcd_key)
|
||||
data = dict()
|
||||
value = json.dumps(data)
|
||||
self.etcd_client.write(etcd_key, value)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def wait_for_bind(self, key):
|
||||
etcd_key = "{0:s}/{1:s}/{2:s}/{3:s}".format("controller", self.service,
|
||||
"ProtonBasePort", key)
|
||||
retry = 4
|
||||
ret_val = dict()
|
||||
while retry > 0:
|
||||
try:
|
||||
LOG.info("watching %s" % etcd_key)
|
||||
message = self.etcd_client.read(etcd_key, wait=True,
|
||||
waitIndex=self.wait_index)
|
||||
ret_val = json.loads(message.value)
|
||||
break
|
||||
except etcd.EtcdKeyNotFound:
|
||||
LOG.info("Key Not Found %s" % etcd_key)
|
||||
retry -= 1
|
||||
time.sleep(1)
|
||||
except etcd.EtcdWatchTimedOut:
|
||||
LOG.info("timeout")
|
||||
retry -= 1
|
||||
except etcd.EtcdException:
|
||||
LOG.error("Cannot connect to etcd, make sure it is running")
|
||||
retry = 0
|
||||
except Exception as e:
|
||||
LOG.error("Unknown error: %s" % str(e))
|
||||
retry -= 1
|
||||
return ret_val
|
||||
|
||||
def get_all_vpnports(self, api_class, obj_class):
|
||||
return obj_class.as_list(obj_class.list())
|
||||
|
||||
def get_one_vpnports(self, api_class, obj_class, key):
|
||||
try:
|
||||
obj = obj_class.get_by_primary_key(key)
|
||||
except Exception:
|
||||
raise exc.HTTPNotFound()
|
||||
return obj.as_dict()
|
||||
|
||||
def create_vpnports(self, api_class, port):
|
||||
#
|
||||
# Validate that the BasePort and VPN objects exists
|
||||
#
|
||||
baseport_id = port.id
|
||||
vpn_id = port.vpn_instance
|
||||
baseport_class = self.get_gluon_object('ProtonBasePort')
|
||||
baseport = baseport_class.get_by_id(baseport_id)
|
||||
if not baseport:
|
||||
raise exception.NotFound(cls="ProtonBasePort", key=baseport_id)
|
||||
vpn_class = self.get_gluon_object('VpnInstance')
|
||||
vpn = vpn_class.get_by_id(vpn_id)
|
||||
if not vpn:
|
||||
raise exception.NotFound(cls="VpnInstance", key=vpn_id)
|
||||
port.create()
|
||||
return api_class.build(port)
|
||||
|
||||
def update_vpnports(self, api_class, obj_class, key, new_values):
|
||||
return api_class.build(obj_class.update(key, new_values))
|
||||
|
||||
def delete_vpnports(self, api_class, obj_class, key):
|
||||
return obj_class.delete(key)
|
||||
|
||||
def get_all_baseports(self, api_class, obj_class):
|
||||
return obj_class.as_list(obj_class.list())
|
||||
|
||||
def get_one_baseports(self, api_class, obj_class, key):
|
||||
try:
|
||||
obj = obj_class.get_by_primary_key(key)
|
||||
except Exception:
|
||||
raise exc.HTTPNotFound()
|
||||
return obj.as_dict()
|
||||
|
||||
def create_baseports(self, api_class, port):
|
||||
port.create()
|
||||
#
|
||||
# Register port in Gluon
|
||||
#
|
||||
msg = {"port_id": port.id, "tenant_id": port.tenant_id,
|
||||
"service": self.service, "url": self.url,
|
||||
"operation": "register"}
|
||||
SyncData.sync_queue.put(msg)
|
||||
return api_class.build(port)
|
||||
|
||||
def update_baseports(self, api_class, obj_class, key, new_values):
|
||||
has_bind_attrs = ("host_id" in new_values and
|
||||
"device_id" in new_values)
|
||||
is_bind_request = (has_bind_attrs and new_values["host_id"] != "" and
|
||||
new_values["device_id"] != "")
|
||||
if is_bind_request:
|
||||
self.setup_bind_key(key)
|
||||
ret_obj = api_class.build(obj_class.update(key, new_values))
|
||||
if is_bind_request:
|
||||
# bind
|
||||
vif_dict = self.wait_for_bind(key)
|
||||
if len(vif_dict) == 0:
|
||||
LOG.error("No binding information available")
|
||||
else:
|
||||
LOG.info(vif_dict)
|
||||
new_values = dict()
|
||||
if "vif_type" in vif_dict:
|
||||
new_values["vif_type"] = vif_dict["vif_type"]
|
||||
if "vif_details" in vif_dict:
|
||||
new_values["vif_details"] = \
|
||||
json.dumps(vif_dict["vif_details"])
|
||||
if len(new_values) > 0:
|
||||
ret_obj = api_class.build(obj_class.update(key,
|
||||
new_values))
|
||||
elif has_bind_attrs: # unbind request
|
||||
new_values["vif_type"] = ""
|
||||
new_values["vif_details"] = json.dumps({})
|
||||
ret_obj = api_class.build(obj_class.update(key, new_values))
|
||||
return ret_obj
|
||||
|
||||
def delete_baseports(self, api_class, obj_class, key):
|
||||
#
|
||||
# Remove port from Gluon
|
||||
#
|
||||
msg = {"port_id": key, "operation": "deregister"}
|
||||
SyncData.sync_queue.put(msg)
|
||||
return obj_class.delete(key)
|
||||
|
||||
def get_all_vpns(self, api_class, obj_class):
|
||||
return obj_class.as_list(obj_class.list())
|
||||
|
||||
def get_one_vpns(self, api_class, obj_class, key):
|
||||
try:
|
||||
obj = obj_class.get_by_primary_key(key)
|
||||
except Exception:
|
||||
raise exc.HTTPNotFound()
|
||||
return obj.as_dict()
|
||||
|
||||
def create_vpns(self, api_class, vpn):
|
||||
vpn.create()
|
||||
return api_class.build(vpn)
|
||||
|
||||
def update_vpns(self, api_class, obj_class, key, new_values):
|
||||
return api_class.build(obj_class.update(key, new_values))
|
||||
|
||||
def delete_vpns(self, api_class, obj_class, key):
|
||||
return obj_class.delete(key)
|
||||
|
||||
def get_all_vpnafconfigs(self, api_class, obj_class):
|
||||
return obj_class.as_list(obj_class.list())
|
||||
|
||||
def get_one_vpnafconfigs(self, api_class, obj_class, key):
|
||||
try:
|
||||
obj = obj_class.get_by_primary_key(key)
|
||||
except Exception:
|
||||
raise exc.HTTPNotFound()
|
||||
return obj.as_dict()
|
||||
|
||||
def create_vpnafconfigs(self, api_class, vpnafconfig):
|
||||
vpnafconfig.create()
|
||||
return api_class.build(vpnafconfig)
|
||||
|
||||
def update_vpnafconfigs(self, api_class, obj_class, key, new_values):
|
||||
return api_class.build(obj_class.update(key, new_values))
|
||||
|
||||
def delete_vpnafconfigs(self, api_class, obj_class, key):
|
||||
return obj_class.delete(key)
|
|
@ -1,78 +0,0 @@
|
|||
# Copyright (c) 2015 Cisco Systems, Inc.
|
||||
# All Rights Reserved
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import os
|
||||
import pkg_resources
|
||||
import yaml
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_log._i18n import _LI
|
||||
from oslo_log import log as logging
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MyData(object):
|
||||
pass
|
||||
|
||||
GenData = MyData()
|
||||
GenData.DataBaseModelGeneratorInstance = None
|
||||
GenData.APIGeneratorInstance = None
|
||||
GenData.model = None
|
||||
GenData.package_name = "gluon"
|
||||
GenData.model_dir = "models/proton/net-l3vpn"
|
||||
|
||||
|
||||
def set_package(package, dir):
|
||||
GenData.package_name = package
|
||||
GenData.model_dir = dir
|
||||
|
||||
|
||||
# Singleton generator
|
||||
def load_model():
|
||||
if not GenData.model:
|
||||
GenData.model = {}
|
||||
for f in pkg_resources.resource_listdir(
|
||||
GenData.package_name, GenData.model_dir):
|
||||
f = GenData.model_dir + "/" + f
|
||||
with pkg_resources.resource_stream(GenData.package_name, f) as fd:
|
||||
GenData.model.update(yaml.safe_load(fd))
|
||||
|
||||
|
||||
def build_sql_models(base):
|
||||
from gluon.common.particleGenerator.DataBaseModelGenerator \
|
||||
import DataBaseModelProcessor
|
||||
load_model()
|
||||
if not GenData.DataBaseModelGeneratorInstance:
|
||||
GenData.DataBaseModelGeneratorInstance = DataBaseModelProcessor()
|
||||
GenData.DataBaseModelGeneratorInstance.add_model(GenData.model)
|
||||
GenData.DataBaseModelGeneratorInstance.build_sqla_models(base)
|
||||
|
||||
|
||||
def build_api(root):
|
||||
from gluon.common.particleGenerator.ApiGenerator import APIGenerator
|
||||
if not GenData.DataBaseModelGeneratorInstance:
|
||||
LOG.error("Database must be generated before API!!")
|
||||
return
|
||||
load_model()
|
||||
if not GenData.APIGeneratorInstance:
|
||||
GenData.APIGeneratorInstance = APIGenerator(
|
||||
GenData.DataBaseModelGeneratorInstance.db_models)
|
||||
GenData.APIGeneratorInstance.add_model(GenData.model)
|
||||
GenData.APIGeneratorInstance.create_api(root)
|
||||
|
||||
|
||||
def get_db_gen():
|
||||
return GenData.DataBaseModelGeneratorInstance
|
|
@ -23,13 +23,13 @@ PATH_OPTS = [
|
|||
cfg.StrOpt('pybasedir',
|
||||
default=os.path.abspath(os.path.join(os.path.dirname(__file__),
|
||||
'../')),
|
||||
help='Directory where cloudpulse python module is installed.'),
|
||||
help='Directory where gluon python module is installed.'),
|
||||
cfg.StrOpt('bindir',
|
||||
default='$pybasedir/bin',
|
||||
help='Directory where cloudpulse binaries are installed.'),
|
||||
help='Directory where gluon binaries are installed.'),
|
||||
cfg.StrOpt('state_path',
|
||||
default='$pybasedir',
|
||||
help="Top-level directory for maintaining cloudpulse's state."),
|
||||
help="Top-level directory for maintaining gluon's state."),
|
||||
]
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
|
|
@ -1,68 +0,0 @@
|
|||
# Copyright 2016, Ericsson AB
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from gluon.backends import backend_base as BackendBase
|
||||
from gluon.common import exception
|
||||
from oslo_log import log as logging
|
||||
|
||||
# This has to be done to get the Database Models
|
||||
# built before the API is built.
|
||||
# It should be done in a better way.
|
||||
from gluon.db.sqlalchemy import models
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
logger = LOG
|
||||
|
||||
|
||||
class MyData(object):
|
||||
pass
|
||||
|
||||
ManagerData = MyData()
|
||||
ManagerData.manager = None
|
||||
|
||||
|
||||
class ApiManager(object):
|
||||
"""Base class for ApiManager"""
|
||||
|
||||
def __init__(self):
|
||||
# TODO(name)
|
||||
# backend_manager = BackendBase.Manager(app.config)
|
||||
self.gluon_objects = {}
|
||||
|
||||
def get_gluon_object(self, name):
|
||||
return self.gluon_objects[name]
|
||||
|
||||
|
||||
def register_api_manager(manager):
|
||||
"""Register a given API manager
|
||||
|
||||
Each service should create a subclass from manager to handle
|
||||
the routing from the API. This manager should be registered before
|
||||
|
||||
:param manager:
|
||||
"""
|
||||
|
||||
ManagerData.manager = manager
|
||||
|
||||
|
||||
def get_api_manager():
|
||||
"""Return registered API Manager instance
|
||||
|
||||
:return:
|
||||
"""
|
||||
|
||||
if ManagerData.manager is None:
|
||||
LOG.error("No manager registered!")
|
||||
return ManagerData.manager
|
|
@ -69,7 +69,7 @@ def _paginate_query(model, limit=None, marker=None, sort_key=None,
|
|||
sort_dir=None, query=None):
|
||||
if not query:
|
||||
query = model_query(model)
|
||||
sort_keys = [model.get_primary_key_type()]
|
||||
sort_keys = [model.get_primary_key()]
|
||||
if sort_key and sort_key not in sort_keys:
|
||||
sort_keys.insert(0, sort_key)
|
||||
query = db_utils.paginate_query(query, model, limit, sort_keys,
|
||||
|
@ -131,7 +131,7 @@ class Connection(api.Connection):
|
|||
raise exception.NotFound(cls=model.__name__, key=uuid)
|
||||
|
||||
def get_by_primary_key(self, model, key):
|
||||
pk_type = model.get_primary_key_type()
|
||||
pk_type = model.get_primary_key()
|
||||
query = model_query(model)
|
||||
filter = {pk_type: key}
|
||||
query = query.filter_by(**filter)
|
||||
|
|
|
@ -12,10 +12,7 @@
|
|||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import os
|
||||
import six.moves.urllib.parse as urlparse
|
||||
from sqlalchemy import schema
|
||||
from sqlalchemy import (Column, Integer, String)
|
||||
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
|
||||
from oslo_config import cfg
|
||||
|
@ -42,9 +39,11 @@ db_options.set_defaults(cfg.CONF, _DEFAULT_SQL_CONNECTION, 'gluon.sqlite')
|
|||
|
||||
|
||||
class GluonBase(models.TimestampMixin, models.ModelBase):
|
||||
_primary_key = None
|
||||
__table__ = None
|
||||
|
||||
@classmethod
|
||||
def get_primary_key_type(cls):
|
||||
def get_primary_key(cls):
|
||||
return cls._primary_key
|
||||
|
||||
def as_dict(self):
|
||||
|
|
|
@ -0,0 +1,249 @@
|
|||
# Copyright 2016, Ericsson AB
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import abc
|
||||
import json
|
||||
import six
|
||||
import time
|
||||
|
||||
import etcd
|
||||
import stevedore
|
||||
|
||||
from gluon.api import types
|
||||
from gluon.sync_etcd.thread import SyncData
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
from oslo_utils.uuidutils import generate_uuid
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
logger = LOG
|
||||
|
||||
|
||||
class MyData(object):
|
||||
pass
|
||||
|
||||
ManagerData = MyData()
|
||||
ManagerData.managers = dict()
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class ProviderBase(object):
|
||||
|
||||
def __init__(self):
|
||||
self._drivers = {}
|
||||
|
||||
@abc.abstractmethod
|
||||
def driver_for(self, api_name, host, port, etcd_host, etcd_port):
|
||||
return None
|
||||
|
||||
|
||||
#
|
||||
# Base class for ApiManager
|
||||
#
|
||||
class ApiManager(object):
|
||||
|
||||
def __init__(self, api_name, host, port, etcd_host, etcd_port):
|
||||
self.url = "http://%s:%d" % (host, port)
|
||||
self.service = api_name
|
||||
self.wait_index = 0
|
||||
self.etcd_client = etcd.Client(host=etcd_host,
|
||||
port=etcd_port,
|
||||
read_timeout=2)
|
||||
|
||||
def setup_bind_key(self, key):
|
||||
etcd_key = "{0:s}/{1:s}/{2:s}/{3:s}".format("controller", self.service,
|
||||
"ProtonBasePort", key)
|
||||
#
|
||||
# If key does not exists, create it so we can wait on it to change.
|
||||
#
|
||||
try:
|
||||
message = self.etcd_client.read(etcd_key)
|
||||
self.wait_index = message.modifiedIndex + 1
|
||||
except etcd.EtcdKeyNotFound:
|
||||
LOG.info("Key Not Found, creating it: %s" % etcd_key)
|
||||
data = dict()
|
||||
value = json.dumps(data)
|
||||
self.etcd_client.write(etcd_key, value)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def wait_for_bind(self, key):
|
||||
etcd_key = "{0:s}/{1:s}/{2:s}/{3:s}".format("controller", self.service,
|
||||
"ProtonBasePort", key)
|
||||
retry = 4
|
||||
ret_val = dict()
|
||||
while retry > 0:
|
||||
try:
|
||||
LOG.info("watching %s" % etcd_key)
|
||||
message = self.etcd_client.read(etcd_key, wait=True,
|
||||
waitIndex=self.wait_index)
|
||||
ret_val = json.loads(message.value)
|
||||
break
|
||||
except etcd.EtcdKeyNotFound:
|
||||
LOG.info("Key Not Found %s" % etcd_key)
|
||||
retry -= 1
|
||||
time.sleep(1)
|
||||
except etcd.EtcdWatchTimedOut:
|
||||
LOG.info("timeout")
|
||||
retry -= 1
|
||||
except etcd.EtcdException:
|
||||
LOG.error("Cannot connect to etcd, make sure it is running")
|
||||
retry = 0
|
||||
except Exception as e:
|
||||
LOG.error("Unknown error: %s" % str(e))
|
||||
retry -= 1
|
||||
return ret_val
|
||||
|
||||
def create_baseports(self, api_class, values):
|
||||
ret_obj = api_class.create_in_db(values)
|
||||
#
|
||||
# Register port in Gluon
|
||||
#
|
||||
msg = {"port_id": values.get('id', ''),
|
||||
"tenant_id": values.get('tenant_id', ''),
|
||||
"service": self.service,
|
||||
"url": self.url,
|
||||
"operation": "register"}
|
||||
SyncData.sync_queue.put(msg)
|
||||
return ret_obj
|
||||
|
||||
def update_baseports(self, api_class, key, new_values):
|
||||
has_bind_attrs = (new_values.get("host_id") is not None and
|
||||
new_values.get("device_id") is not None)
|
||||
is_bind_request = (has_bind_attrs and
|
||||
new_values.get("host_id", "") != "" and
|
||||
new_values.get("device_id", "") != "")
|
||||
if is_bind_request:
|
||||
self.setup_bind_key(key)
|
||||
ret_obj = api_class.update_in_db(key, new_values)
|
||||
if is_bind_request:
|
||||
# bind
|
||||
vif_dict = self.wait_for_bind(key)
|
||||
if len(vif_dict) == 0:
|
||||
LOG.error("No binding information available")
|
||||
else:
|
||||
LOG.info(vif_dict)
|
||||
vif_values = dict()
|
||||
if "vif_type" in vif_dict:
|
||||
vif_values["vif_type"] = vif_dict["vif_type"]
|
||||
if "vif_details" in vif_dict:
|
||||
vif_values["vif_details"] = \
|
||||
json.dumps(vif_dict["vif_details"])
|
||||
if len(vif_values) > 0:
|
||||
ret_obj = api_class.update_in_db(key, vif_values)
|
||||
elif has_bind_attrs: # unbind request
|
||||
vif_dict = dict()
|
||||
vif_dict["vif_type"] = None
|
||||
vif_dict["vif_details"] = json.dumps({})
|
||||
ret_obj = api_class.update_in_db(key, vif_dict)
|
||||
return ret_obj
|
||||
|
||||
def delete_baseports(self, api_class, key):
|
||||
#
|
||||
# Remove port from Gluon
|
||||
#
|
||||
msg = {"port_id": key,
|
||||
"service": self.service,
|
||||
"operation": "deregister"}
|
||||
SyncData.sync_queue.put(msg)
|
||||
return api_class.delete_from_db(key)
|
||||
|
||||
def handle_create(self, root_class, values):
|
||||
api_class = root_class.api_object_class
|
||||
#
|
||||
# If the primary key is a UUID and it is not set, we generate
|
||||
# one and set it here.
|
||||
#
|
||||
if isinstance(root_class.primary_key_type, types.UuidType):
|
||||
primary_key = api_class.db_model.get_primary_key()
|
||||
key_value = values.get(primary_key)
|
||||
if not key_value or (key_value and key_value == ""):
|
||||
values[primary_key] = generate_uuid()
|
||||
if root_class.__name__ == 'baseports':
|
||||
return self.create_baseports(root_class.api_object_class, values)
|
||||
else:
|
||||
return api_class.create_in_db(values)
|
||||
|
||||
def handle_update(self, root_class, key, new_values):
|
||||
api_class = root_class.api_object_class
|
||||
if root_class.__name__ == 'baseports':
|
||||
return self.update_baseports(api_class, key, new_values)
|
||||
else:
|
||||
return api_class.update_in_db(key, new_values)
|
||||
|
||||
def handle_delete(self, root_class, key):
|
||||
api_class = root_class.api_object_class
|
||||
if root_class.__name__ == 'baseports':
|
||||
return self.delete_baseports(api_class, key)
|
||||
else:
|
||||
return api_class.delete_from_db(key)
|
||||
|
||||
|
||||
def load_api_manager(api_name):
|
||||
"""Register a given API manager
|
||||
|
||||
:param api_name:
|
||||
"""
|
||||
loader = ManagerLoader()
|
||||
ManagerData.managers[api_name] = loader.get_manager_driver(api_name)
|
||||
return ManagerData.managers.get(api_name)
|
||||
|
||||
|
||||
def get_api_manager(api_name):
|
||||
"""Return registered API Manager instance
|
||||
|
||||
:return:
|
||||
"""
|
||||
mgr = ManagerData.managers.get(api_name)
|
||||
if mgr is None:
|
||||
mgr = load_api_manager(api_name)
|
||||
return mgr
|
||||
|
||||
|
||||
class ManagerLoader(object):
|
||||
"""Class used to load manager drivers."""
|
||||
def __init__(self):
|
||||
|
||||
def upset(manager, entrypoint, exception):
|
||||
logger.error('Failed to load %s: %s' % (entrypoint, exception))
|
||||
|
||||
# Sort out the client drivers
|
||||
self._mgr = stevedore.ExtensionManager(
|
||||
namespace='gluon.managers',
|
||||
on_load_failure_callback=upset,
|
||||
invoke_on_load=True
|
||||
)
|
||||
for f in self._mgr:
|
||||
logger.info('Found manager %s' % f.name)
|
||||
|
||||
def get_manager_driver(self, api_name):
|
||||
|
||||
for f in self._mgr:
|
||||
x = f.obj.driver_for(api_name,
|
||||
cfg.CONF.api.host,
|
||||
cfg.CONF.api.port,
|
||||
cfg.CONF.api.etcd_host,
|
||||
cfg.CONF.api.etcd_port)
|
||||
if x is not None:
|
||||
logger.info("Using manager: %s" % api_name)
|
||||
return x
|
||||
|
||||
logger.warn('No manager driver for service, using default %s',
|
||||
api_name)
|
||||
return ApiManager(api_name,
|
||||
cfg.CONF.api.host,
|
||||
cfg.CONF.api.port,
|
||||
cfg.CONF.api.etcd_host,
|
||||
cfg.CONF.api.etcd_port)
|
|
@ -0,0 +1,42 @@
|
|||
# Copyright 2016, Ericsson AB
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from oslo_log import log as logging
|
||||
|
||||
from gluon.managers.manager_base import ApiManager
|
||||
from gluon.managers.manager_base import ProviderBase
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
logger = LOG
|
||||
|
||||
|
||||
class Provider(ProviderBase):
|
||||
|
||||
def driver_for(self, api_name, host, port, etcd_host, etcd_port):
|
||||
if api_name == u'net-l3vpn':
|
||||
return ProtonManager(api_name, host, port, etcd_host, etcd_port)
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
#
|
||||
# It is possible to add model specific functionality here if needed.
|
||||
# Otherwise, just use base class.
|
||||
#
|
||||
class ProtonManager(ApiManager):
|
||||
def __init__(self, api_name, host, port, etcd_host, etcd_port):
|
||||
super(ProtonManager, self).__init__(api_name, host, port,
|
||||
etcd_host, etcd_port)
|
|
@ -1,51 +0,0 @@
|
|||
# This is used in Gluon to remember details of bindings - who has bound and who provides
|
||||
# the port to be bound to.
|
||||
GluonInternalPort:
|
||||
api:
|
||||
name: ports
|
||||
parent:
|
||||
type: root
|
||||
#type: GluonServiceBackend
|
||||
#attribute: owner
|
||||
attributes:
|
||||
id:
|
||||
type: uuid
|
||||
required: True
|
||||
primary: True
|
||||
description: "UUID of port"
|
||||
owner:
|
||||
type: GluonServiceBackend
|
||||
required: True
|
||||
description: "Pointer to backend service instance (name)"
|
||||
device_owner:
|
||||
type: 'string'
|
||||
length: 128
|
||||
description: "Name of compute or network service (if bound)"
|
||||
device_id:
|
||||
type: 'uuid'
|
||||
description: "UUID of bound VM"
|
||||
|
||||
|
||||
|
||||
GluonServiceBackend:
|
||||
api:
|
||||
name: backends
|
||||
parent:
|
||||
type: root
|
||||
attributes:
|
||||
name:
|
||||
type: string
|
||||
length: 32
|
||||
required: True
|
||||
primary: True
|
||||
description: "Name of the backend service - no spaces"
|
||||
service_type:
|
||||
type: string
|
||||
length: 32
|
||||
required: True
|
||||
description: "Type of service provided by backend"
|
||||
url:
|
||||
type: string
|
||||
length: 32
|
||||
required: True
|
||||
description: "URL of proton endpoint"
|
|
@ -1,137 +0,0 @@
|
|||
# Copyright 2015, Ericsson AB
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from pecan import Response
|
||||
|
||||
from gluon.common import exception
|
||||
from gluon.db import api as dbapi
|
||||
from oslo_log._i18n import _LI
|
||||
from oslo_log import log as logging
|
||||
from oslo_versionedobjects import base as ovoo_base
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GluonObject(ovoo_base.VersionedObject,
|
||||
ovoo_base.VersionedObjectDictCompat):
|
||||
"""Base class and object factory."""
|
||||
|
||||
VERSION = '1.0'
|
||||
|
||||
db_instance = dbapi.get_instance()
|
||||
|
||||
@classmethod
|
||||
def class_builder(base_cls, name, db_model, fields):
|
||||
new_cls = type(name, (base_cls,), {'fields': fields})
|
||||
new_cls.db_model = db_model
|
||||
ovoo_base.VersionedObjectRegistry.register(new_cls)
|
||||
return new_cls
|
||||
|
||||
def as_dict(self):
|
||||
return dict((k, getattr(self, k))
|
||||
for k in self.fields
|
||||
if hasattr(self, k))
|
||||
|
||||
@staticmethod
|
||||
def as_list(db_obj_list):
|
||||
return [obj.as_dict() for obj in db_obj_list]
|
||||
|
||||
@classmethod
|
||||
def list(cls, limit=None, marker=None, sort_key=None,
|
||||
sort_dir=None, filters=None, failed=None, period=None):
|
||||
db_list = cls.db_instance.get_list(cls.db_model,
|
||||
filters=filters,
|
||||
limit=limit, marker=marker,
|
||||
sort_key=sort_key,
|
||||
sort_dir=sort_dir,
|
||||
failed=failed,
|
||||
period=period)
|
||||
return cls._from_db_object_list(cls, db_list)
|
||||
|
||||
@classmethod
|
||||
def get_by_filter(cls, filter):
|
||||
return cls.list(filters=filter)
|
||||
|
||||
@classmethod
|
||||
def get_by_primary_key(cls, key):
|
||||
filter = {}
|
||||
pk_type = cls.db_model.get_primary_key_type()
|
||||
filter[pk_type] = key
|
||||
obj = cls.get_by_filter(filter)
|
||||
if obj:
|
||||
return obj[0]
|
||||
else:
|
||||
raise exception.NotFound(cls=cls.db_model.__name__, key=key)
|
||||
|
||||
@classmethod
|
||||
def get_by_parent_and_primary_key(cls, parent_identifier,
|
||||
key):
|
||||
pk_type = cls.db_model.get_primary_key_type()
|
||||
pk_type = cls.db_model.get_primary_key_type()
|
||||
|
||||
@classmethod
|
||||
def get_by_uuid(cls, uuid):
|
||||
obj = cls.get_by_filter({'uuid': uuid})
|
||||
if obj:
|
||||
return obj[0]
|
||||
else:
|
||||
raise exception.NotFound(cls=cls.db_model.__name__, key=uuid)
|
||||
|
||||
@classmethod
|
||||
def get_by_id(cls, uuid):
|
||||
obj = cls.get_by_filter({'id': uuid})
|
||||
if obj:
|
||||
return obj[0]
|
||||
else:
|
||||
raise exception.NotFound(cls=cls.db_model.__name__, key=uuid)
|
||||
|
||||
@classmethod
|
||||
def get_by_name(cls, name):
|
||||
return cls.get_by_filter({'name': name})
|
||||
|
||||
@staticmethod
|
||||
def from_dict_object(cls, dict):
|
||||
"""Converts a database entity to a formal object."""
|
||||
for field in cls.fields:
|
||||
if dict[field] is not None:
|
||||
cls[field] = dict[field]
|
||||
|
||||
cls.obj_reset_changes()
|
||||
return cls
|
||||
|
||||
@staticmethod
|
||||
def _from_db_object_list(cls, db_objects):
|
||||
return [cls.from_dict_object(cls(), obj) for obj in db_objects]
|
||||
|
||||
def create(self):
|
||||
"""Create a Object in the DB."""
|
||||
values = self.obj_get_changes()
|
||||
LOG.info(_LI('Dumping CREATE port datastructure %s') % str(values))
|
||||
db_object = self.db_instance.create(self.db_model, values)
|
||||
self.from_dict_object(self, db_object)
|
||||
|
||||
@classmethod
|
||||
def update(cls, key, values):
|
||||
"""Update an Object in the DB."""
|
||||
db_object = cls.db_instance.get_by_primary_key(cls.db_model, key)
|
||||
db_object.update(values)
|
||||
db_object.save()
|
||||
return cls.from_dict_object(cls(), db_object)
|
||||
|
||||
@classmethod
|
||||
def delete(cls, key):
|
||||
"""Delete a Object in the DB."""
|
||||
db_object = cls.db_instance.get_by_primary_key(cls.db_model, key)
|
||||
db_object.delete()
|
|
@ -13,31 +13,60 @@
|
|||
# under the License.
|
||||
|
||||
import six
|
||||
import sys
|
||||
import yaml
|
||||
|
||||
from oslo_versionedobjects import fields
|
||||
from pecan import rest
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
||||
from gluon.api.baseObject import APIBase
|
||||
from gluon.api.baseObject import APIBaseObject
|
||||
from gluon.api.baseObject import RootObjectController
|
||||
from gluon.api.baseObject import SubObjectController
|
||||
from gluon.api import types
|
||||
from gluon.common.particleGenerator.DataBaseModelGenerator \
|
||||
# from gluon.api.baseObject import SubObjectController
|
||||
from gluon.particleGenerator.DataBaseModelGenerator \
|
||||
import DataBaseModelProcessor
|
||||
from gluon.core.manager import get_api_manager
|
||||
from gluon.objects import base as obj_base
|
||||
|
||||
|
||||
class ServiceRoot(APIBase):
|
||||
"""The root service URL"""
|
||||
id = wtypes.text
|
||||
|
||||
@staticmethod
|
||||
def convert(api_name):
|
||||
root = ServiceRoot()
|
||||
root.id = api_name
|
||||
return root
|
||||
|
||||
|
||||
class ServiceController(rest.RestController):
|
||||
"""Version 1 API controller root."""
|
||||
|
||||
def __init__(self, api_name):
|
||||
self.api_name = api_name
|
||||
|
||||
@wsme_pecan.wsexpose(ServiceRoot)
|
||||
def get(self):
|
||||
return ServiceRoot.convert(self.api_name)
|
||||
|
||||
|
||||
class APIGenerator(object):
|
||||
|
||||
def __init__(self, db_models):
|
||||
self.db_models = db_models
|
||||
self.objects = []
|
||||
def __init__(self):
|
||||
self.data = None
|
||||
self.api_name = None
|
||||
self.db_models = None
|
||||
|
||||
def add_model(self, model):
|
||||
self.data = model
|
||||
|
||||
def create_api(self, root):
|
||||
def create_controller(self, service_name, root):
|
||||
controller = ServiceController(service_name)
|
||||
setattr(root, service_name, controller)
|
||||
return controller
|
||||
|
||||
def create_api(self, root, service_name, db_models):
|
||||
self.db_models = db_models
|
||||
self.service_name = service_name
|
||||
controllers = {}
|
||||
if not self.data:
|
||||
raise Exception('Cannot create API from empty model.')
|
||||
|
@ -46,27 +75,16 @@ class APIGenerator(object):
|
|||
# For every entry build a (sub_)api_controller
|
||||
# an APIObject, an APIObject and an APIListObject
|
||||
# and a RealObject is created
|
||||
real_object_fields = {}
|
||||
api_object_fields = {}
|
||||
for attribute, attr_value in \
|
||||
six.iteritems(table_data['attributes']):
|
||||
api_type = self.translate_model_to_api_type(
|
||||
attr_value['type'], attr_value.get('values'))
|
||||
api_object_fields[attribute] = api_type
|
||||
real_object_fields[attribute] = \
|
||||
self.translate_model_to_real_obj_type(
|
||||
attr_value['type'], attr_value.get('values'))
|
||||
|
||||
# Real object
|
||||
object_class = obj_base.GluonObject.class_builder(
|
||||
table_name, self.db_models[table_name], real_object_fields)
|
||||
|
||||
# register in the API Manager instance
|
||||
get_api_manager().gluon_objects[table_name] = object_class
|
||||
|
||||
# API object
|
||||
api_object_class = APIBaseObject.class_builder(
|
||||
table_name, object_class, api_object_fields)
|
||||
table_name, self.db_models[table_name], api_object_fields)
|
||||
|
||||
# api_name
|
||||
api_name = table_data['api']['name']
|
||||
|
@ -81,12 +99,14 @@ class APIGenerator(object):
|
|||
parent_identifier_type = self.data[parent]['api']['name']
|
||||
parent_attribute_name =\
|
||||
table_data['api']['parent']['attribute']
|
||||
new_controller_class = SubObjectController.class_builder(
|
||||
api_name, api_object_class, primary_key_type,
|
||||
parent_identifier_type, parent_attribute_name)
|
||||
# TODO(hambtw) SubObjectController is not working!!
|
||||
# new_controller_class = SubObjectController.class_builder(
|
||||
# api_name, api_object_class, primary_key_type,
|
||||
# parent_identifier_type, parent_attribute_name)
|
||||
else:
|
||||
new_controller_class = RootObjectController.class_builder(
|
||||
api_name, api_object_class, primary_key_type)
|
||||
api_name, api_object_class, primary_key_type,
|
||||
self.service_name)
|
||||
|
||||
# The childs have to be instantized before the
|
||||
# parents so lets make a dict
|
||||
|
@ -116,25 +136,6 @@ class APIGenerator(object):
|
|||
table_data)
|
||||
return table_data['attributes'][primary_key]['type']
|
||||
|
||||
def translate_model_to_real_obj_type(self, model_type, values):
|
||||
# first make sure it is not a foreign key
|
||||
if model_type in self.data:
|
||||
# if it is we point to the primary key type type of this key
|
||||
model_type = self.get_primary_key_type(
|
||||
self.data[model_type])
|
||||
|
||||
if model_type == 'uuid':
|
||||
return fields.UUIDField(nullable=False)
|
||||
if model_type == 'string':
|
||||
return fields.StringField()
|
||||
if model_type == 'enum':
|
||||
return fields.EnumField(values)
|
||||
if model_type == 'integer':
|
||||
return fields.IntegerField()
|
||||
if model_type == 'boolean':
|
||||
return fields.BooleanField()
|
||||
raise Exception("Type %s not known." % model_type)
|
||||
|
||||
def translate_model_to_api_type(self, model_type, values):
|
||||
# first make sure it is not a foreign key
|
||||
if model_type in self.data:
|
|
@ -19,7 +19,6 @@ from __future__ import print_function
|
|||
import re
|
||||
import six
|
||||
import sys
|
||||
import yaml
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
|
@ -28,20 +27,24 @@ from sqlalchemy.ext.declarative import declarative_base
|
|||
class DataBaseModelProcessor(object):
|
||||
|
||||
def __init__(self):
|
||||
self.db_models = {}
|
||||
self.db_models = dict()
|
||||
self.data = None
|
||||
|
||||
def get_db_models(self, api_name):
|
||||
return self.db_models.get(api_name)
|
||||
|
||||
def add_model(self, model):
|
||||
self.data = model
|
||||
|
||||
def get_table_class(self, table_name):
|
||||
def get_table_class(self, api_name, table_name):
|
||||
try:
|
||||
return self.db_models[table_name]
|
||||
return self.db_models.get(api_name)[table_name]
|
||||
except ValueError:
|
||||
raise Exception('Unknown table name %s' % table_name)
|
||||
|
||||
def build_sqla_models(self, base=None):
|
||||
def build_sqla_models(self, api_name, base=None):
|
||||
"""Make SQLAlchemy classes for each of the elements in the data read"""
|
||||
|
||||
self.db_models[api_name] = dict()
|
||||
if not base:
|
||||
base = declarative_base()
|
||||
if not self.data:
|
||||
|
@ -49,7 +52,8 @@ class DataBaseModelProcessor(object):
|
|||
|
||||
def de_camel(s):
|
||||
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', s)
|
||||
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
|
||||
ret_str = re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1)
|
||||
return ret_str.lower().replace("-", "_")
|
||||
|
||||
# Make a model class that we've never thought of before
|
||||
for table_name, table_data in six.iteritems(self.data):
|
||||
|
@ -89,7 +93,8 @@ class DataBaseModelProcessor(object):
|
|||
# Set the SQLA col option to make clear what's
|
||||
# going on
|
||||
args.append(sa.ForeignKey('%s.%s' %
|
||||
(de_camel(tgt_name),
|
||||
(de_camel(api_name + "_"
|
||||
+ tgt_name),
|
||||
primary_col)))
|
||||
|
||||
# The col creation code will now duplicate the FK
|
||||
|
@ -139,10 +144,15 @@ class DataBaseModelProcessor(object):
|
|||
if '_primary_key' not in attrs:
|
||||
raise Exception("One and only one primary key has to "
|
||||
"be given to each column")
|
||||
attrs['__tablename__'] = de_camel(table_name)
|
||||
attrs['__name__'] = table_name
|
||||
attrs['__tablename__'] = de_camel(api_name + "_" + table_name)
|
||||
class_name = str(api_name + '_' +
|
||||
table_name).replace("-", "_")
|
||||
attrs['__name__'] = class_name
|
||||
attrs['__tname__'] = table_name
|
||||
attrs['_service_name'] = api_name
|
||||
|
||||
self.db_models[table_name] = type(table_name, (base,), attrs)
|
||||
self.db_models[api_name][table_name] = type(class_name,
|
||||
(base,), attrs)
|
||||
except Exception:
|
||||
print('During processing of table ', table_name,
|
||||
file=sys.stderr)
|
|
@ -13,6 +13,10 @@
|
|||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import os
|
||||
import six
|
||||
import sys
|
||||
|
||||
import click
|
||||
import json
|
||||
import pkg_resources
|
||||
|
@ -20,13 +24,58 @@ from requests import delete
|
|||
from requests import get
|
||||
from requests import post
|
||||
from requests import put
|
||||
import six
|
||||
import yaml
|
||||
|
||||
|
||||
from gluon.common import exception as exc
|
||||
|
||||
|
||||
def load_model(package_name, model_dir):
|
||||
def print_basic_usage(argv, model_list):
|
||||
print ("Usage: %s --api <api_name> [OPTIONS] COMMAND[ARGS]..." %
|
||||
os.path.basename(argv[0]))
|
||||
print("\nOptions:")
|
||||
print("--api TEXT Name of API, one of %s" % model_list)
|
||||
print("--port INTEGER Port of endpoint (OS_PROTON_PORT)")
|
||||
print("--host TEXT Host of endpoint (OS_PROTON_HOST)")
|
||||
print("--help Show this message and exit.")
|
||||
|
||||
|
||||
def get_api_model(argv, model_list):
|
||||
|
||||
try:
|
||||
arg_idx = argv.index("--api")
|
||||
val_idx = arg_idx + 1
|
||||
except ValueError:
|
||||
# If there is only one API model, --api is not needed
|
||||
if len(model_list) == 1:
|
||||
return model_list[0]
|
||||
print("--api is not specified!\n")
|
||||
print_basic_usage(argv, model_list)
|
||||
sys.exit(-1)
|
||||
try:
|
||||
api_name = argv[val_idx]
|
||||
except IndexError:
|
||||
print("API name is not specified!\n")
|
||||
print_basic_usage(argv, model_list)
|
||||
sys.exit(-1)
|
||||
if api_name not in model_list:
|
||||
print("Invalid API name!\n")
|
||||
print_basic_usage(argv, model_list)
|
||||
sys.exit(-1)
|
||||
del argv[arg_idx]
|
||||
del argv[arg_idx]
|
||||
return api_name
|
||||
|
||||
|
||||
def get_model_list(package_name, model_dir):
|
||||
model_list = list()
|
||||
for f in pkg_resources.resource_listdir(package_name, model_dir):
|
||||
model_list.append(f)
|
||||
return model_list
|
||||
|
||||
|
||||
def load_model(package_name, model_dir, model_name):
|
||||
model_dir = model_dir + "/" + model_name
|
||||
model = {}
|
||||
for f in pkg_resources.resource_listdir(package_name, model_dir):
|
||||
f = model_dir + '/' + f
|
||||
|
@ -86,24 +135,24 @@ def do_put(url, values):
|
|||
|
||||
|
||||
def make_url(host, port, *args):
|
||||
url = "http://%s:%d/v1" % (host, port)
|
||||
url = "http://%s:%d/proton" % (host, port)
|
||||
for arg in args:
|
||||
url = "%s/%s" % (url, arg)
|
||||
return url
|
||||
|
||||
|
||||
def make_list_func(tablename):
|
||||
def make_list_func(api_model, tablename):
|
||||
def list_func(**kwargs):
|
||||
url = make_url(kwargs["host"], kwargs["port"], tablename)
|
||||
url = make_url(kwargs["host"], kwargs["port"], api_model, tablename)
|
||||
result = json_get(url)
|
||||
print(json.dumps(result, indent=4))
|
||||
|
||||
return list_func
|
||||
|
||||
|
||||
def make_show_func(tablename, primary_key):
|
||||
def make_show_func(api_model, tablename, primary_key):
|
||||
def show_func(**kwargs):
|
||||
url = make_url(kwargs["host"], kwargs["port"], tablename,
|
||||
url = make_url(kwargs["host"], kwargs["port"], api_model, tablename,
|
||||
kwargs[primary_key])
|
||||
result = json_get(url)
|
||||
print(json.dumps(result, indent=4))
|
||||
|
@ -111,9 +160,9 @@ def make_show_func(tablename, primary_key):
|
|||
return show_func
|
||||
|
||||
|
||||
def make_create_func(tablename):
|
||||
def make_create_func(api_model, tablename):
|
||||
def create_func(**kwargs):
|
||||
url = make_url(kwargs["host"], kwargs["port"], tablename)
|
||||
url = make_url(kwargs["host"], kwargs["port"], api_model, tablename)
|
||||
del kwargs["host"]
|
||||
del kwargs["port"]
|
||||
data = {}
|
||||
|
@ -126,10 +175,10 @@ def make_create_func(tablename):
|
|||
return create_func
|
||||
|
||||
|
||||
def make_update_func(tablename, primary_key):
|
||||
def make_update_func(api_model, tablename, primary_key):
|
||||
def update_func(**kwargs):
|
||||
url = make_url(kwargs["host"], kwargs["port"], tablename,
|
||||
kwargs[primary_key], "update")
|
||||
url = make_url(kwargs["host"], kwargs["port"], api_model, tablename,
|
||||
kwargs[primary_key])
|
||||
del kwargs["host"]
|
||||
del kwargs["port"]
|
||||
del kwargs[primary_key]
|
||||
|
@ -143,9 +192,9 @@ def make_update_func(tablename, primary_key):
|
|||
return update_func
|
||||
|
||||
|
||||
def make_delete_func(tablename, primary_key):
|
||||
def make_delete_func(api_model, tablename, primary_key):
|
||||
def delete_func(**kwargs):
|
||||
url = make_url(kwargs["host"], kwargs["port"], tablename,
|
||||
url = make_url(kwargs["host"], kwargs["port"], api_model, tablename,
|
||||
kwargs[primary_key])
|
||||
do_delete(url)
|
||||
|
||||
|
@ -182,12 +231,13 @@ def set_type(kwargs, col_desc):
|
|||
|
||||
def proc_model(cli, package_name="unknown",
|
||||
model_dir="unknown",
|
||||
api_model="unknown",
|
||||
hostenv="unknown",
|
||||
portenv="unknown",
|
||||
hostdefault="unknown",
|
||||
portdefault=0):
|
||||
# print("loading model")
|
||||
model = load_model(package_name, model_dir)
|
||||
model = load_model(package_name, model_dir, api_model)
|
||||
for table_name, table_data in six.iteritems(model):
|
||||
get_primary_key(table_data)
|
||||
for table_name, table_data in six.iteritems(model):
|
||||
|
@ -227,7 +277,7 @@ def proc_model(cli, package_name="unknown",
|
|||
#
|
||||
hosthelp = "Host of endpoint (%s) " % hostenv
|
||||
porthelp = "Port of endpoint (%s) " % portenv
|
||||
list = make_list_func(attrs['__tablename__'])
|
||||
list = make_list_func(api_model, attrs['__tablename__'])
|
||||
list.func_name = "%s-list" % (attrs['__objname__'])
|
||||
list = click.option("--host", envvar=hostenv,
|
||||
default=hostdefault, help=hosthelp)(list)
|
||||
|
@ -235,7 +285,7 @@ def proc_model(cli, package_name="unknown",
|
|||
default=portdefault, help=porthelp)(list)
|
||||
cli.command()(list)
|
||||
|
||||
show = make_show_func(attrs['__tablename__'],
|
||||
show = make_show_func(api_model, attrs['__tablename__'],
|
||||
attrs['_primary_key'])
|
||||
show.func_name = "%s-show" % (attrs['__objname__'])
|
||||
show = click.option("--host", envvar=hostenv,
|
||||
|
@ -245,7 +295,7 @@ def proc_model(cli, package_name="unknown",
|
|||
show = click.argument(attrs['_primary_key'])(show)
|
||||
cli.command()(show)
|
||||
|
||||
create = make_create_func(attrs['__tablename__'])
|
||||
create = make_create_func(api_model, attrs['__tablename__'])
|
||||
create.func_name = "%s-create" % (attrs['__objname__'])
|
||||
create = click.option("--host", envvar=hostenv,
|
||||
default=hostdefault, help=hosthelp)(create)
|
||||
|
@ -263,7 +313,7 @@ def proc_model(cli, package_name="unknown",
|
|||
create = click.option(option_name, **kwargs)(create)
|
||||
cli.command()(create)
|
||||
|
||||
update = make_update_func(attrs['__tablename__'],
|
||||
update = make_update_func(api_model, attrs['__tablename__'],
|
||||
attrs['_primary_key'])
|
||||
update.func_name = "%s-update" % (attrs['__objname__'])
|
||||
update = click.option("--host", envvar=hostenv,
|
||||
|
@ -282,7 +332,7 @@ def proc_model(cli, package_name="unknown",
|
|||
update = click.argument(attrs['_primary_key'])(update)
|
||||
cli.command()(update)
|
||||
|
||||
del_func = make_delete_func(attrs['__tablename__'],
|
||||
del_func = make_delete_func(api_model, attrs['__tablename__'],
|
||||
attrs['_primary_key'])
|
||||
del_func.func_name = "%s-delete" % (attrs['__objname__'])
|
||||
del_func = click.option("--host", envvar=hostenv,
|
|
@ -0,0 +1,72 @@
|
|||
# Copyright (c) 2015 Cisco Systems, Inc.
|
||||
# All Rights Reserved
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import pkg_resources
|
||||
import yaml
|
||||
|
||||
from gluon.db.sqlalchemy import models as sql_models
|
||||
|
||||
from oslo_log import log as logging
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MyData(object):
|
||||
pass
|
||||
|
||||
GenData = MyData()
|
||||
GenData.DBGeneratorInstance = None
|
||||
GenData.models = dict()
|
||||
GenData.package_name = "gluon"
|
||||
GenData.model_dir = "models/proton"
|
||||
|
||||
|
||||
# Singleton generator
|
||||
def load_model(service):
|
||||
if GenData.models.get(service) is None:
|
||||
model_dir = GenData.model_dir + "/" + service
|
||||
GenData.models[service] = {}
|
||||
for f in pkg_resources.resource_listdir(
|
||||
GenData.package_name, model_dir):
|
||||
f = model_dir + "/" + f
|
||||
with pkg_resources.resource_stream(GenData.package_name, f) as fd:
|
||||
GenData.models[service].update(yaml.safe_load(fd))
|
||||
return GenData.models.get(service)
|
||||
|
||||
|
||||
def build_sql_models(service_list):
|
||||
from gluon.particleGenerator.DataBaseModelGenerator \
|
||||
import DataBaseModelProcessor
|
||||
if GenData.DBGeneratorInstance is None:
|
||||
GenData.DBGeneratorInstance = DataBaseModelProcessor()
|
||||
base = sql_models.Base
|
||||
for service in service_list:
|
||||
GenData.DBGeneratorInstance.add_model(load_model(service))
|
||||
GenData.DBGeneratorInstance.build_sqla_models(service, base)
|
||||
|
||||
|
||||
def build_api(root, service_list):
|
||||
from gluon.particleGenerator.ApiGenerator import APIGenerator
|
||||
for service in service_list:
|
||||
load_model(service)
|
||||
api_gen = APIGenerator()
|
||||
service_root = api_gen.create_controller(service, root)
|
||||
api_gen.add_model(load_model(service))
|
||||
api_gen.create_api(service_root, service,
|
||||
GenData.DBGeneratorInstance.get_db_models(service))
|
||||
|
||||
|
||||
def get_db_gen():
|
||||
return GenData.DBGeneratorInstance
|
|
@ -20,7 +20,7 @@ import os
|
|||
from oslo_log import helpers as log_helpers
|
||||
from oslo_log import log
|
||||
|
||||
from gluon.backends.backend_base import Manager
|
||||
from gluon.backends.backend_base import BackendLoader
|
||||
from neutron.plugins.ml2.plugin import Ml2Plugin
|
||||
|
||||
|
||||
|
@ -41,7 +41,7 @@ class GluonPlugin(Ml2Plugin):
|
|||
|
||||
def __init__(self):
|
||||
super(GluonPlugin, self).__init__()
|
||||
self.backend_manager = Manager()
|
||||
self.backend_manager = BackendLoader()
|
||||
self.gluon_network = None
|
||||
self.gluon_subnet = None
|
||||
self.etcd_client = etcd.Client(host=PluginData.etcd_host,
|
||||
|
@ -92,6 +92,9 @@ class GluonPlugin(Ml2Plugin):
|
|||
current_service = None
|
||||
driver = None
|
||||
for keydata in directory.children:
|
||||
if keydata.dir:
|
||||
LOG.debug("Skipping directory")
|
||||
continue
|
||||
id = os.path.basename(keydata.key)
|
||||
LOG.debug("id = %s" % id)
|
||||
meta = json.loads(keydata.value)
|
||||
|
|
|
@ -54,7 +54,7 @@ class ApiNetL3VPN(ApiModelBase):
|
|||
self.resync_mode = False
|
||||
|
||||
def bind_attributes_changed(self, attributes):
|
||||
return ("host_id" in attributes and "device_id" in attributes)
|
||||
return "host_id" in attributes and "device_id" in attributes
|
||||
|
||||
def is_bind_request(self, attributes):
|
||||
host_id = attributes.get("host_id", None)
|
||||
|
@ -63,7 +63,7 @@ class ApiNetL3VPN(ApiModelBase):
|
|||
device_id = attributes.get("device_id", None)
|
||||
if device_id == "":
|
||||
device_id = None
|
||||
return (host_id is not None and device_id is not None)
|
||||
return host_id is not None and device_id is not None
|
||||
|
||||
def get_etcd_bound_data(self, shim_data, key):
|
||||
etcd_key = "{0:s}/{1:s}/{2:s}/{3:s}".format("controller", self.name,
|
||||
|
@ -102,7 +102,7 @@ class ApiNetL3VPN(ApiModelBase):
|
|||
prev_port = self.model.ports[key]
|
||||
changes = self.model.ports[key].update_attrs(attributes)
|
||||
if self.bind_attributes_changed(changes.new):
|
||||
if self.model.ports[key]["__state"] == "Bound":
|
||||
if self.model.ports[key].get("__state") == "Bound":
|
||||
if self.is_bind_request(changes.new): # already bound?
|
||||
LOG.error("Bind request on bound port?")
|
||||
else: # Unbind
|
||||
|
@ -112,7 +112,7 @@ class ApiNetL3VPN(ApiModelBase):
|
|||
self.model.ports[key][vif_key] = ""
|
||||
self.update_etcd_unbound(shim_data, key)
|
||||
self.model.ports[key]["__state"] = "Unbound"
|
||||
elif self.model.ports[key]["__state"] == "Unbound":
|
||||
elif self.model.ports[key].get("__state") == "Unbound":
|
||||
if self.is_bind_request(changes.new):
|
||||
if changes.new["host_id"] in \
|
||||
shim_data.host_list: # On one of my hosts
|
||||
|
@ -131,13 +131,13 @@ class ApiNetL3VPN(ApiModelBase):
|
|||
"InUse" # Bound by another controller
|
||||
else:
|
||||
pass
|
||||
elif self.model.ports[key]["__state"] == "InUse":
|
||||
elif self.model.ports[key].get("__state") == "InUse":
|
||||
if self.is_bind_request(changes.new): # already bound?
|
||||
LOG.error("Bind request on InUse port: %s" % key)
|
||||
else:
|
||||
self.model.ports[key]["__state"] = "Unbound"
|
||||
else:
|
||||
if self.model.ports[key]["__state"] == "Bound":
|
||||
if self.model.ports[key].get("__state") == "Bound":
|
||||
self.backend.modify_port(key, self.model, changes)
|
||||
else:
|
||||
port = Model.Port(key, attributes)
|
||||
|
@ -157,7 +157,7 @@ class ApiNetL3VPN(ApiModelBase):
|
|||
if vpn_port["vpn_instance"] == key:
|
||||
port = self.model.ports.get(vpn_port["id"])
|
||||
changes = self.model.vpn_instances[key].update_attrs(attributes)
|
||||
if port and port["__state"] == "Bound":
|
||||
if port and port.get("__state") == "Bound":
|
||||
self.backend.modify_service(key, self.model, changes)
|
||||
else:
|
||||
obj = Model.DataObj(key, attributes)
|
||||
|
@ -165,18 +165,21 @@ class ApiNetL3VPN(ApiModelBase):
|
|||
|
||||
def handle_vpn_port_change(self, key, attributes, shim_data):
|
||||
port = self.model.ports.get(key)
|
||||
if not port:
|
||||
LOG.error("Port not found: %s" % key)
|
||||
return
|
||||
if key in self.model.vpn_ports:
|
||||
prev_binding = \
|
||||
{"id": self.model.vpn_ports[key].id,
|
||||
"vpn_instance": self.model.vpn_ports[key].vpn_instance}
|
||||
self.model.vpn_ports[key].update_attrs(attributes)
|
||||
if not self.resync_mode and port["__state"] == "Bound":
|
||||
if not self.resync_mode and port.get("__state") == "Bound":
|
||||
self.backend.modify_service_binding(key, self.model,
|
||||
prev_binding)
|
||||
else:
|
||||
obj = Model.DataObj(key, attributes)
|
||||
self.model.vpn_ports[key] = obj
|
||||
if not self.resync_mode and port["__state"] == "Bound":
|
||||
if not self.resync_mode and port.get("__state") == "Bound":
|
||||
self.backend.modify_service_binding(key, self.model, {})
|
||||
|
||||
def handle_vpnafconfig_change(self, key, attributes, shim_data):
|
||||
|
@ -193,7 +196,7 @@ class ApiNetL3VPN(ApiModelBase):
|
|||
for vpn_port in self.model.vpn_ports.itervalues():
|
||||
if vpn_port["vpn_instance"] == vpn_instance["id"]:
|
||||
port = self.model.ports.get(vpn_port["id"])
|
||||
if port and port["__state"] == "Bound":
|
||||
if port and port.get("__state") == "Bound":
|
||||
self.backend.modify_service(vpn_instance["id"],
|
||||
self.model, changes)
|
||||
else:
|
||||
|
@ -226,7 +229,7 @@ class ApiNetL3VPN(ApiModelBase):
|
|||
if vpn_port["vpn_instance"] == key:
|
||||
port = self.model.ports.get(vpn_port["id"])
|
||||
del self.model.vpn_instances[key]
|
||||
if port and port["__state"] == "Bound":
|
||||
if port and port.get("__state") == "Bound":
|
||||
self.backend.delete_service(key, self.model, deleted_obj)
|
||||
|
||||
def handle_vpn_port_delete(self, key, shim_data):
|
||||
|
@ -234,7 +237,7 @@ class ApiNetL3VPN(ApiModelBase):
|
|||
port = self.model.ports.get(key)
|
||||
deleted_obj = self.model.vpn_ports[key]
|
||||
del self.model.vpn_ports[key]
|
||||
if port and port["__state"] == "Bound":
|
||||
if port and port.get("__state") == "Bound":
|
||||
self.backend.delete_service_binding(self.model, deleted_obj)
|
||||
|
||||
def handle_vpnafconfig_delete(self, key, shim_data):
|
||||
|
@ -255,7 +258,7 @@ class ApiNetL3VPN(ApiModelBase):
|
|||
for vpn_port in self.model.vpn_ports.itervalues():
|
||||
if vpn_port["vpn_instance"] == vpn_instance["id"]:
|
||||
port = self.model.ports.get(vpn_port["id"])
|
||||
if port and port["__state"] == "Bound":
|
||||
if port and port.get("__state") == "Bound":
|
||||
self.backend.modify_service(vpn_instance["id"],
|
||||
self.model, changes)
|
||||
|
||||
|
|
|
@ -64,7 +64,7 @@ class DummyNetL3VPN(HandlerBase):
|
|||
|
||||
for afconfig_name in afconfig_name_list:
|
||||
afconfig = model.vpn_afconfigs.get(afconfig_name, None)
|
||||
if (afconfig):
|
||||
if afconfig:
|
||||
afconfig_list.append(afconfig)
|
||||
LOG.info(" afconfig(%s): %s" % (afconfig_name, afconfig))
|
||||
LOG.info(changes)
|
||||
|
@ -105,7 +105,6 @@ class DummyNetL3VPN(HandlerBase):
|
|||
|
||||
:param uuid: UUID of Port
|
||||
:param model: Model object
|
||||
:param changes: dictionary of changed attributes
|
||||
:returns: None
|
||||
"""
|
||||
pass
|
||||
|
|
|
@ -57,7 +57,6 @@ class ApiModelBase(object):
|
|||
|
||||
:param object: name of the etcd object that changed
|
||||
:param key: key of the object
|
||||
:param host_list: list of hosts managed by this shim layer server
|
||||
:param shim_data: Shim public data (name, client, host_list, etc)
|
||||
:returns: None
|
||||
"""
|
||||
|
@ -105,7 +104,6 @@ class HandlerBase(object):
|
|||
|
||||
:param uuid: UUID of Port
|
||||
:param model: Model object
|
||||
:param changes: dictionary of changed attributes
|
||||
:returns: None
|
||||
"""
|
||||
|
||||
|
|
|
@ -22,7 +22,7 @@ class ChangeData(object):
|
|||
self.prev = dict()
|
||||
|
||||
def __str__(self):
|
||||
'''returns simple dict representation of the mapping'''
|
||||
"""returns simple dict representation of the mapping"""
|
||||
return "new = " + str(self.new) + ", prev = " + str(self.prev)
|
||||
|
||||
|
||||
|
@ -36,8 +36,8 @@ class ObjBase(collections.MutableMapping):
|
|||
"""
|
||||
# ``__init__`` method required to create instance from class.
|
||||
def __init__(self, attributes=None):
|
||||
'''Use the object dict'''
|
||||
if (attributes is not None):
|
||||
"""Use the object dict"""
|
||||
if attributes is not None:
|
||||
self.__dict__.update(attributes)
|
||||
# The next five methods are requirements of the ABC.
|
||||
|
||||
|
@ -57,11 +57,11 @@ class ObjBase(collections.MutableMapping):
|
|||
return len(self.__dict__)
|
||||
|
||||
def __str__(self):
|
||||
'''returns simple dict representation of the mapping'''
|
||||
"""returns simple dict representation of the mapping"""
|
||||
return str(self.__dict__)
|
||||
|
||||
def __repr__(self):
|
||||
'''echoes class, id, & reproducible representation in the REPL'''
|
||||
"""echoes class, id, & reproducible representation in the REPL"""
|
||||
return '{}, {}'.format(super(ObjBase, self).__repr__(), self.__dict__)
|
||||
|
||||
def update_attrs(self, new_attributes):
|
||||
|
|
|
@ -18,7 +18,8 @@ from gluon.sync_etcd.thread import SyncData
|
|||
|
||||
def logupdate(f):
|
||||
def decorate(self, *args):
|
||||
record = {"table": self.__name__,
|
||||
record = {"table": self.__tname__,
|
||||
"service": self._service_name,
|
||||
"key": self.__getattribute__(self._primary_key),
|
||||
"operation": "update"}
|
||||
f(self, *args)
|
||||
|
@ -29,7 +30,8 @@ def logupdate(f):
|
|||
|
||||
def logdelete(f):
|
||||
def decorate(self, *args):
|
||||
record = {"table": self.__name__,
|
||||
record = {"table": self.__tname__,
|
||||
"service": self._service_name,
|
||||
"key": self.__getattribute__(self._primary_key),
|
||||
"operation": "delete"}
|
||||
f(self, *args)
|
||||
|
|
|
@ -18,16 +18,14 @@ import six
|
|||
from six.moves import queue
|
||||
import threading
|
||||
|
||||
import etcd
|
||||
|
||||
from gluon.db import api as dbapi
|
||||
from oslo_log._i18n import _LE
|
||||
from oslo_log._i18n import _LI
|
||||
from oslo_log._i18n import _LW
|
||||
from oslo_log import log as logging
|
||||
|
||||
import etcd
|
||||
|
||||
from gluon.common.particleGenerator.generator import get_db_gen
|
||||
from gluon.db import api as dbapi
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
@ -40,7 +38,6 @@ SyncData.sync_queue = queue.Queue()
|
|||
SyncData.etcd_port = 2379
|
||||
SyncData.etcd_host = '127.0.0.1'
|
||||
SyncData.source = "proton"
|
||||
SyncData.service = "net-l3vpn"
|
||||
|
||||
|
||||
class SyncThread(threading.Thread):
|
||||
|
@ -55,30 +52,37 @@ class SyncThread(threading.Thread):
|
|||
LOG.info("SyncThread starting")
|
||||
|
||||
def proc_sync_msg(self, msg):
|
||||
from gluon.particleGenerator import generator as particle_generator
|
||||
try:
|
||||
if msg["operation"] == "update":
|
||||
obj_key = "_".join(msg["key"].split()) # Get rid of spaces
|
||||
etcd_key = "{0:s}/{1:s}/{2:s}/{3:s}".format(
|
||||
SyncData.source, SyncData.service, msg["table"], obj_key)
|
||||
table_class = get_db_gen().get_table_class(msg["table"])
|
||||
SyncData.source, msg["service"], msg["table"], obj_key)
|
||||
db_gen = particle_generator.get_db_gen()
|
||||
table_class = db_gen.get_table_class(msg["service"],
|
||||
msg["table"])
|
||||
data = self.db_instance.get_by_primary_key(
|
||||
table_class, msg["key"])
|
||||
values = data.as_dict()
|
||||
d = {}
|
||||
for key in six.iterkeys(values):
|
||||
d[key] = str(values[key])
|
||||
if values[key] is None:
|
||||
d[key] = values[key]
|
||||
else:
|
||||
d[key] = str(values[key])
|
||||
json_str = json.dumps(d)
|
||||
self.etcd_client.write(etcd_key, json_str)
|
||||
elif msg["operation"] == "delete":
|
||||
obj_key = "_".join(msg["key"].split()) # Get rid of spaces
|
||||
etcd_key = "{0:s}/{1:s}/{2:s}/{3:s}".format(
|
||||
SyncData.source, SyncData.service, msg["table"], obj_key)
|
||||
SyncData.source, msg["service"], msg["table"], obj_key)
|
||||
self.etcd_client.delete(etcd_key)
|
||||
elif msg["operation"] == "register":
|
||||
obj_key = "_".join(msg["port_id"].split()) # Get rid of spaces
|
||||
port_key = "/gluon/port/{0:s}".format(obj_key)
|
||||
d = {"tenant_id": msg["tenant_id"],
|
||||
"service": msg["service"], "url": msg["url"]}
|
||||
"service": msg["service"],
|
||||
"url": msg["url"]}
|
||||
json_str = json.dumps(d)
|
||||
self.etcd_client.write(port_key, json_str)
|
||||
elif msg["operation"] == "deregister":
|
||||
|
@ -118,9 +122,7 @@ def start_sync_thread(**kwargs):
|
|||
|
||||
if not SyncData.sync_thread_running:
|
||||
for key, value in six.iteritems(kwargs):
|
||||
if key == "service_name":
|
||||
SyncData.service = value
|
||||
elif key == "etcd_host":
|
||||
if key == "etcd_host":
|
||||
SyncData.etcd_host = value
|
||||
elif key == "etcd_port":
|
||||
SyncData.etcd_port = value
|
||||
|
|
|
@ -15,7 +15,6 @@
|
|||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslotest import base
|
||||
|
||||
|
||||
|
|
|
@ -18,13 +18,7 @@ test_gluon
|
|||
|
||||
Tests for `gluon` module.
|
||||
"""
|
||||
|
||||
from gluon.common import exception
|
||||
from gluon.common.particleGenerator.generator import set_package
|
||||
set_package("gluon", "models/proton/net-l3vpn")
|
||||
|
||||
from gluon.tests.objects import base as objbase
|
||||
from gluon.tests.objects import utils
|
||||
|
||||
|
||||
class TestPort(objbase.ObjectTestCase):
|
||||
|
|
|
@ -34,7 +34,10 @@ console_scripts =
|
|||
shimserver = gluon.shim_example.main:main
|
||||
|
||||
gluon.backends =
|
||||
net-l3vpn = gluon.backends.backends.net_l3vpn:Provider
|
||||
net-l3vpn = gluon.backends.models.net_l3vpn:Provider
|
||||
|
||||
gluon.managers =
|
||||
net-l3vpn = gluon.managers.models.net_l3vpn:Provider
|
||||
|
||||
[upload_sphinx]
|
||||
upload-dir = doc/build/html
|
||||
|
|
Loading…
Reference in New Issue