Make datasources configurable via the API
This patch allows datasources to be configured via the API. It exposes a new API call /v1/drivers/<driver> which returns the configuration needed by a driver in order to configure it. Then, a user will make a POST to /v1/data-sources with the config to create the datasource. This patch makes a few structural changes to Congress, the most notable being that it makes d6Cage a singleton, which is needed to be able to dynamically add new datasources to the message bus. Also, lots of the API code was changed in order to start moving toward an API that supports multi-tenancy, which should now be easy to add. I apologize that this patch set is so big, though the scope of changes needed to support a configurable API was large and spread throughout many places in the code. Several cleanup patches will be pushed on top of this, which should be easier to review. Implements blueprint: api-configurable-datasources Change-Id: If82b505e1835254216e19eb1c55a035d2c0b8a3e
This commit is contained in:
parent
47991c608d
commit
b7a950bebf
|
@ -0,0 +1,57 @@
|
|||
# Copyright (c) 2015 OpenStack Foundation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
from congress.api import webservice
|
||||
from congress.dse import deepsix
|
||||
from congress.managers import datasource as datasource_manager
|
||||
from congress.openstack.common import log as logging
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def d6service(name, keys, inbox, datapath, args):
    """Instantiate the datasource-config API model as a DSE service."""
    model = DatasourceConfigModel(name, keys, inbox=inbox,
                                  dataPath=datapath, **args)
    return model
|
||||
|
||||
|
||||
class DatasourceConfigModel(deepsix.deepSix):
    """Model for handling API requests about datasource driver config.

    Serves /v1/drivers/<driver>: given a driver name (passed as 'ds_id'
    in the request context), returns the configuration a user must supply
    to create a datasource backed by that driver.
    """
    def __init__(self, name, keys, inbox=None, dataPath=None,
                 policy_engine=None):
        super(DatasourceConfigModel, self).__init__(name, keys, inbox=inbox,
                                                    dataPath=dataPath)
        self.engine = policy_engine
        # Instantiate the manager. The original assigned the class object
        # itself (missing "()"), which was inconsistent with every other
        # API model in this patch; they all use DataSourceManager().
        self.datasource_mgr = datasource_manager.DataSourceManager()

    def get_item(self, id_, params, context=None):
        """Retrieve item with id id_ from model.

        Args:
            id_: The ID of the item to retrieve
            params: A dict-like object containing parameters
                    from the request query string and body.
            context: Key-values providing frame of reference of request

        Returns:
             The matching item or None if item with id_ does not exist.
        """
        # The driver name is carried in the context under 'ds_id'.
        driver = context.get('ds_id')
        try:
            datasource_info = self.datasource_mgr.get_driver_info(
                driver)
        except datasource_manager.DriverNotFound as e:
            raise webservice.DataModelException(e.code, e.message)
        return datasource_info
|
|
@ -13,7 +13,13 @@
|
|||
# under the License.
|
||||
#
|
||||
|
||||
from congress.api import webservice
|
||||
from congress.dse import deepsix
|
||||
from congress.managers import datasource as datasource_manager
|
||||
from congress.openstack.common import log as logging
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def d6service(name, keys, inbox, datapath, args):
|
||||
|
@ -27,28 +33,7 @@ class DatasourceModel(deepsix.deepSix):
|
|||
super(DatasourceModel, self).__init__(name, keys, inbox=inbox,
|
||||
dataPath=dataPath)
|
||||
self.engine = policy_engine
|
||||
|
||||
def get_item(self, id_, params, context=None):
|
||||
"""Retrieve item with id id_ from model.
|
||||
|
||||
Args:
|
||||
id_: The ID of the item to retrieve
|
||||
params: A dict-like object containing parameters
|
||||
from the request query string and body.
|
||||
context: Key-values providing frame of reference of request
|
||||
|
||||
Returns:
|
||||
The matching item or None if item with id_ does not exist.
|
||||
"""
|
||||
if id_ not in self.engine.d6cage.services:
|
||||
return None
|
||||
# TODO(thinrichs): add all these meta-properties to datasources
|
||||
d = {'id': id_,
|
||||
'owner_id': 'd6cage',
|
||||
'enabled': True,
|
||||
'type': None,
|
||||
'config': None}
|
||||
return d
|
||||
self.datasource_mgr = datasource_manager.DataSourceManager()
|
||||
|
||||
def get_items(self, params, context=None):
|
||||
"""Get items in model.
|
||||
|
@ -62,65 +47,39 @@ class DatasourceModel(deepsix.deepSix):
|
|||
a list of items in the model. Additional keys set in the
|
||||
dict will also be rendered for the user.
|
||||
"""
|
||||
datasources = (set(self.engine.d6cage.services.keys()) -
|
||||
self.engine.d6cage.system_service_names)
|
||||
results = [self.get_item(x, params, context) for x in datasources]
|
||||
datasources = self.datasource_mgr.get_datasources()
|
||||
results = [self.datasource_mgr.make_datasource_dict(datasource)
|
||||
for datasource in datasources]
|
||||
return {"results": results}
|
||||
|
||||
# TODO(thinrichs): It makes sense to sometimes allow users to "create"
|
||||
# a new datasource. It would mean giving us the Python code for
|
||||
# the driver. Or maybe it would mean instantiating it on the message
|
||||
# bus. Right now the policy engine takes care of instantiating
|
||||
# services on the bus, so this isn't crucial as of now.
|
||||
def add_item(self, item, params, id_=None, context=None):
|
||||
"""Add item to model.
|
||||
|
||||
# def add_item(self, item, id_=None, context=None):
|
||||
# """Add item to model.
|
||||
Args:
|
||||
item: The item to add to the model
|
||||
id_: The ID of the item, or None if an ID should be generated
|
||||
context: Key-values providing frame of reference of request
|
||||
|
||||
# Args:
|
||||
# item: The item to add to the model
|
||||
# id_: The ID of the item, or None if an ID should be generated
|
||||
# context: Key-values providing frame of reference of request
|
||||
Returns:
|
||||
Tuple of (ID, newly_created_item)
|
||||
|
||||
# Returns:
|
||||
# Tuple of (ID, newly_created_item)
|
||||
Raises:
|
||||
KeyError: ID already exists.
|
||||
"""
|
||||
try:
|
||||
obj = self.datasource_mgr.add_datasource(
|
||||
item=item)
|
||||
except (datasource_manager.BadConfig,
|
||||
datasource_manager.DatasourceNameInUse) as e:
|
||||
LOG.info(_("Datasource Error: %s") % e.message)
|
||||
raise webservice.DataModelException(e.code, e.message,
|
||||
http_status_code=e.code)
|
||||
|
||||
# Raises:
|
||||
# KeyError: ID already exists.
|
||||
# """
|
||||
return (obj['id'], obj)
|
||||
|
||||
# TODO(thinrichs): once we can create a data source, it will make
|
||||
# sense to update it as well.
|
||||
# def update_item(self, id_, item, context=None):
|
||||
# """Update item with id_ with new data.
|
||||
|
||||
# Args:
|
||||
# id_: The ID of the item to be updated
|
||||
# item: The new item
|
||||
# context: Key-values providing frame of reference of request
|
||||
|
||||
# Returns:
|
||||
# The updated item.
|
||||
|
||||
# Raises:
|
||||
# KeyError: Item with specified id_ not present.
|
||||
# """
|
||||
# # currently a noop since the owner_id cannot be changed
|
||||
# if id_ not in self.items:
|
||||
# raise KeyError("Cannot update item with ID '%s': "
|
||||
# "ID does not exist")
|
||||
# return item
|
||||
|
||||
# TODO(thinrichs): once we can create, we should be able to delete
|
||||
# def delete_item(self, id_, context=None):
|
||||
# """Remove item from model.
|
||||
|
||||
# Args:
|
||||
# id_: The ID of the item to be removed
|
||||
# context: Key-values providing frame of reference of request
|
||||
|
||||
# Returns:
|
||||
# The removed item.
|
||||
|
||||
# Raises:
|
||||
# KeyError: Item with specified id_ not present.
|
||||
# """
|
||||
def delete_item(self, id_, params, context=None):
|
||||
datasource = context.get('ds_id')
|
||||
try:
|
||||
self.datasource_mgr.delete_datasource(datasource)
|
||||
except datasource_manager.DatasourceDriverNotFound as e:
|
||||
raise webservice.DataModelException(e.code, e.message)
|
||||
|
|
|
@ -13,7 +13,9 @@
|
|||
# under the License.
|
||||
#
|
||||
|
||||
from congress.api import webservice
|
||||
from congress.dse import deepsix
|
||||
from congress.managers import datasource as datasource_manager
|
||||
from congress.openstack.common import log as logging
|
||||
|
||||
|
||||
|
@ -31,6 +33,7 @@ class RowModel(deepsix.deepSix):
|
|||
super(RowModel, self).__init__(name, keys, inbox=inbox,
|
||||
dataPath=dataPath)
|
||||
self.engine = policy_engine
|
||||
self.datasource_mgr = datasource_manager.DataSourceManager()
|
||||
|
||||
# TODO(thinrichs): No rows have IDs right now. Maybe eventually
|
||||
# could make ID the hash of the row, but then might as well
|
||||
|
@ -68,6 +71,13 @@ class RowModel(deepsix.deepSix):
|
|||
# table defined by data-source
|
||||
if 'ds_id' in context:
|
||||
service_name = context['ds_id']
|
||||
try:
|
||||
datasource = self.datasource_mgr.get_datasource(service_name)
|
||||
except datasource_manager.DatasourceNotFound as e:
|
||||
raise webservice.DataModelException(e.code, e.message,
|
||||
http_status_code=e.code)
|
||||
|
||||
service_name = datasource['name']
|
||||
service_obj = self.engine.d6cage.service_object(service_name)
|
||||
if service_obj is None:
|
||||
LOG.info("Unknown data-source name %s", service_name)
|
||||
|
|
|
@ -13,10 +13,11 @@
|
|||
# under the License.
|
||||
#
|
||||
|
||||
from congress.api import webservice
|
||||
from congress.dse import deepsix
|
||||
from congress.managers import datasource as datasource_manager
|
||||
from congress.openstack.common import log as logging
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
@ -31,6 +32,7 @@ class SchemaModel(deepsix.deepSix):
|
|||
super(SchemaModel, self).__init__(name, keys, inbox=inbox,
|
||||
dataPath=dataPath)
|
||||
self.engine = policy_engine
|
||||
self.datasource_mgr = datasource_manager.DataSourceManager()
|
||||
|
||||
def _create_table_dict(self, tablename, schema):
|
||||
cols = [{'name': x, 'description': 'None'}
|
||||
|
@ -50,28 +52,23 @@ class SchemaModel(deepsix.deepSix):
|
|||
Returns:
|
||||
The matching item or None if item with id_ does not exist.
|
||||
"""
|
||||
datasource = context.get('ds_id')
|
||||
table = context.get('table_id')
|
||||
try:
|
||||
schema = self.datasource_mgr.get_datasource_schema(
|
||||
datasource)
|
||||
except (datasource_manager.DatasourceNotFound,
|
||||
datasource_manager.DriverNotFound) as e:
|
||||
raise webservice.DataModelException(e.code, e.message,
|
||||
http_status_code=e.code)
|
||||
|
||||
# TODO(thinrichs): either pass id_=None or incorporate id_ into
|
||||
# the logic below. Ignore id_ for now as it is
|
||||
# always part of CONTEXT.
|
||||
if 'ds_id' not in context:
|
||||
raise Exception(
|
||||
"The only element that currently has a schema is datasource "
|
||||
"but ds_id does not exist in context: " + str(context))
|
||||
service_name = context['ds_id']
|
||||
service_obj = self.engine.d6cage.service_object(service_name)
|
||||
if service_obj is None:
|
||||
return None
|
||||
schema = service_obj.get_schema()
|
||||
|
||||
# one table
|
||||
if 'table_id' in context:
|
||||
table = context['table_id']
|
||||
# request to see the schema for one table
|
||||
if table:
|
||||
if table not in schema:
|
||||
raise KeyError("Table '{}' for datasource '{}' has no "
|
||||
"schema ".format(id_, service_name))
|
||||
"schema ".format(id_, datasource))
|
||||
return self._create_table_dict(table, schema)
|
||||
|
||||
# all tables
|
||||
tables = [self._create_table_dict(table_, schema) for table_ in schema]
|
||||
tables = [self._create_table_dict(table_, schema)
|
||||
for table_ in schema]
|
||||
return {'tables': tables}
|
||||
|
|
|
@ -13,7 +13,9 @@
|
|||
# under the License.
|
||||
#
|
||||
|
||||
from congress.api import webservice
|
||||
from congress.dse import deepsix
|
||||
from congress.managers import datasource as datasource_manager
|
||||
|
||||
|
||||
def d6service(name, keys, inbox, datapath, args):
|
||||
|
@ -27,6 +29,7 @@ class StatusModel(deepsix.deepSix):
|
|||
super(StatusModel, self).__init__(name, keys, inbox=inbox,
|
||||
dataPath=dataPath)
|
||||
self.engine = policy_engine
|
||||
self.datasource_mgr = datasource_manager.DataSourceManager()
|
||||
|
||||
def get_item(self, id_, params, context=None):
|
||||
"""Retrieve item with id id_ from model.
|
||||
|
@ -66,12 +69,28 @@ class StatusModel(deepsix.deepSix):
|
|||
a list of items in the model. Additional keys set in the
|
||||
dict will also be rendered for the user.
|
||||
"""
|
||||
|
||||
# FIXME(arosen): I think this should actually be get_item and get_item
|
||||
# above doesn't seem to work....
|
||||
if 'ds_id' not in context:
|
||||
raise Exception(
|
||||
"The only element that currently has a status is datasource "
|
||||
"but ds-id does not exist in context: " + str(context))
|
||||
service_name = context['ds_id']
|
||||
service_obj = self.engine.d6cage.service_object(service_name)
|
||||
|
||||
datasource = context.get('ds_id')
|
||||
try:
|
||||
datasource = self.datasource_mgr.get_datasource(
|
||||
datasource)
|
||||
except datasource_manager.DatasourceNotFound as e:
|
||||
raise webservice.DataModelException(e.code, e.message,
|
||||
http_status_code=e.code)
|
||||
|
||||
service_obj = self.engine.d6cage.service_object(
|
||||
datasource['name'])
|
||||
if service_obj is None:
|
||||
return
|
||||
|
||||
service_obj = self.engine.d6cage.service_object(datasource['name'])
|
||||
if service_obj is None:
|
||||
return
|
||||
status = service_obj.get_status()
|
||||
|
|
|
@ -0,0 +1,89 @@
|
|||
# Copyright (c) 2014 VMware, Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
from congress.api import webservice
|
||||
from congress.dse import deepsix
|
||||
from congress.managers import datasource as datasource_manager
|
||||
from congress.openstack.common import log as logging
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def d6service(name, keys, inbox, datapath, args):
    """Instantiate the datasource-driver API model as a DSE service."""
    model = DatasourceDriverModel(
        name, keys, inbox=inbox, dataPath=datapath, **args)
    return model
|
||||
|
||||
|
||||
class DatasourceDriverModel(deepsix.deepSix):
    """Model for handling API requests about DatasourceDriver."""
    def __init__(self, name, keys, inbox=None, dataPath=None,
                 policy_engine=None):
        super(DatasourceDriverModel, self).__init__(name, keys, inbox=inbox,
                                                    dataPath=dataPath)
        self.engine = policy_engine
        self.datasource_mgr = datasource_manager.DataSourceManager()

    def get_items(self, params, context=None):
        """Get items in model.

        Args:
            params: A dict-like object containing parameters
                    from the request query string and body.
            context: Key-values providing frame of reference of request

        Returns: A dict containing at least a 'results' key whose value is
                 a list of items in the model. Additional keys set in the
                 dict will also be rendered for the user.
        """
        fields = ['id', 'description']
        results = []
        for driver in self.datasource_mgr.get_drivers_info():
            results.append(self.datasource_mgr.make_datasource_dict(
                driver, fields=fields))
        return {"results": results}

    # FIXME(arosen): this is duplicated code...
    def _create_table_dict(self, tablename, schema):
        """Build the API representation of one table from its schema."""
        columns = []
        for col_name in schema[tablename]:
            columns.append({'name': col_name, 'description': 'None'})
        return {'table_id': tablename, 'columns': columns}

    def get_item(self, id_, params, context=None):
        """Retrieve item with id id_ from model.

        Args:
            id_: The ID of the item to retrieve
            params: A dict-like object containing parameters
                    from the request query string and body.
            context: Key-values providing frame of reference of request

        Returns:
             The matching item or None if item with id_ does not exist.
        """
        # The driver name is carried in the context under 'driver_id'.
        driver_name = context.get('driver_id')
        try:
            schema = self.datasource_mgr.get_driver_schema(driver_name)
            driver = self.datasource_mgr.get_driver_info(driver_name)
        except datasource_manager.DriverNotFound as e:
            raise webservice.DataModelException(e.code, e.message,
                                                http_status_code=e.code)

        # Attach the per-table schema descriptions to the driver info.
        driver['tables'] = [self._create_table_dict(name, schema)
                            for name in schema]
        return driver
|
|
@ -13,10 +13,11 @@
|
|||
# under the License.
|
||||
#
|
||||
|
||||
from congress.api import webservice
|
||||
from congress.dse import deepsix
|
||||
from congress.managers import datasource as datasource_manager
|
||||
from congress.openstack.common import log as logging
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
@ -30,6 +31,7 @@ class TableModel(deepsix.deepSix):
|
|||
policy_engine=None):
|
||||
super(TableModel, self).__init__(name, keys, inbox=inbox,
|
||||
dataPath=dataPath)
|
||||
self.datasource_mgr = datasource_manager.DataSourceManager()
|
||||
self.engine = policy_engine
|
||||
|
||||
def get_item(self, id_, params, context=None):
|
||||
|
@ -44,6 +46,7 @@ class TableModel(deepsix.deepSix):
|
|||
Returns:
|
||||
The matching item or None if item with id_ does not exist.
|
||||
"""
|
||||
|
||||
# table defined by data-source
|
||||
if 'ds_id' in context:
|
||||
service_name = context['ds_id']
|
||||
|
@ -87,10 +90,19 @@ class TableModel(deepsix.deepSix):
|
|||
dict will also be rendered for the user.
|
||||
"""
|
||||
LOG.info('get_items has context %s', context)
|
||||
|
||||
# FIXME(arosen): this file needs refactoring.
|
||||
datasource = context.get('ds_id')
|
||||
try:
|
||||
datasource = self.datasource_mgr.get_datasource(
|
||||
datasource)
|
||||
except datasource_manager.DatasourceNotFound as e:
|
||||
raise webservice.DataModelException(e.code, e.message)
|
||||
|
||||
# data-source
|
||||
if 'ds_id' in context:
|
||||
service_name = context['ds_id']
|
||||
service_obj = self.engine.d6cage.service_object(service_name)
|
||||
service_obj = self.engine.d6cage.service_object(datasource['name'])
|
||||
if service_obj is None:
|
||||
LOG.info("data-source %s not found", service_name)
|
||||
return []
|
||||
|
|
|
@ -386,9 +386,10 @@ class CollectionHandler(AbstractApiHandler):
|
|||
if not hasattr(self.model, 'add_item'):
|
||||
return NOT_SUPPORTED_RESPONSE
|
||||
item = json.loads(request.body)
|
||||
context = self._get_context(request)
|
||||
try:
|
||||
id_, item = self.model.add_item(
|
||||
item, request.params, id_, context=self._get_context(request))
|
||||
item, request.params, id_, context=context)
|
||||
except KeyError:
|
||||
LOG.exception("Error occurred")
|
||||
return error_response(httplib.CONFLICT, httplib.CONFLICT,
|
||||
|
|
|
@ -17,6 +17,7 @@ import os
|
|||
from oslo.config import cfg
|
||||
from oslo.db import options as db_options
|
||||
|
||||
from congress.managers import datasource as datasource_mgr
|
||||
from congress.openstack.common import log as logging
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
@ -49,6 +50,9 @@ core_opts = [
|
|||
help=_("The API paste config file to use")),
|
||||
cfg.StrOpt('auth_strategy', default='keystone',
|
||||
help=_("The type of authentication to use")),
|
||||
cfg.ListOpt('drivers',
|
||||
default=[],
|
||||
help=_("List of driver class paths to import.")),
|
||||
]
|
||||
|
||||
# Register the configuration options
|
||||
|
@ -65,6 +69,7 @@ db_options.set_defaults(cfg.CONF,
|
|||
|
||||
def init(args, **kwargs):
|
||||
cfg.CONF(args=args, project='congress', **kwargs)
|
||||
datasource_mgr.DataSourceManager.validate_configured_drivers()
|
||||
|
||||
|
||||
def setup_logging():
|
||||
|
|
|
@ -156,6 +156,15 @@ class CeilometerDriver(datasource_driver.DataSourceDriver):
|
|||
return (cls.meters_translator, cls.alarms_translator,
|
||||
cls.events_translator, cls.statistics_translator)
|
||||
|
||||
@staticmethod
|
||||
def get_datasource_info():
|
||||
result = {}
|
||||
result['id'] = 'ceilometer'
|
||||
result['description'] = ('Datasource driver that interfaces with '
|
||||
'ceilometer.')
|
||||
result['config'] = datasource_utils.get_openstack_required_config()
|
||||
return result
|
||||
|
||||
def update_from_datasource(self):
|
||||
"""Read Data from Ceilometer datasource.
|
||||
|
||||
|
|
|
@ -35,6 +35,15 @@ class CinderDriver(datasource_driver.DataSourceDriver):
|
|||
self.cinder_client = cinderclient.client.Client(**self.creds)
|
||||
self.initialized = True
|
||||
|
||||
@staticmethod
|
||||
def get_datasource_info():
|
||||
result = {}
|
||||
result['id'] = 'cinder'
|
||||
result['description'] = ('Datasource driver that interfaces with '
|
||||
'OpenStack cinder.')
|
||||
result['config'] = datasource_utils.get_openstack_required_config()
|
||||
return result
|
||||
|
||||
def update_from_datasource(self):
|
||||
self.state = {}
|
||||
volumes = self.cinder_client.volumes.list(
|
||||
|
|
|
@ -106,6 +106,16 @@ class CloudFoundryV2Driver(DataSourceDriver):
|
|||
self.initialized = True
|
||||
self._cached_organizations = []
|
||||
|
||||
@staticmethod
|
||||
def get_datasource_info():
|
||||
result = {}
|
||||
result['id'] = 'cloudfoundryv2'
|
||||
result['description'] = ('Do not use this driver is deprecated')
|
||||
result['config'] = {'username': 'required',
|
||||
'password': 'required',
|
||||
'auth_url': 'required'}
|
||||
return result
|
||||
|
||||
def _save_organizations(self, organizations):
|
||||
temp_organizations = []
|
||||
for organization in organizations['resources']:
|
||||
|
|
|
@ -12,7 +12,6 @@
|
|||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from congress import exception
|
||||
|
||||
|
||||
|
@ -31,3 +30,12 @@ def get_credentials(name, config_args):
|
|||
"Service {} is missing configuration data for {}".format(
|
||||
name, missing))
|
||||
return d
|
||||
|
||||
|
||||
def get_openstack_required_config():
|
||||
return {'auth_url': 'required',
|
||||
'endpoint': '(optional)',
|
||||
'region': '(optional)',
|
||||
'username': 'username',
|
||||
'password': 'required',
|
||||
'tenant_name': 'required'}
|
||||
|
|
|
@ -78,6 +78,15 @@ class GlanceV2Driver(datasource_driver.DataSourceDriver):
|
|||
token=keystone.auth_token)
|
||||
self.initialized = True
|
||||
|
||||
@staticmethod
|
||||
def get_datasource_info():
|
||||
result = {}
|
||||
result['id'] = 'glancev2'
|
||||
result['description'] = ('Datasource driver that interfaces with '
|
||||
'OpenStack Images aka Glance.')
|
||||
result['config'] = datasource_utils.get_openstack_required_config()
|
||||
return result
|
||||
|
||||
def update_from_datasource(self):
|
||||
"""Called when it is time to pull new data from this datasource.
|
||||
|
||||
|
|
|
@ -74,6 +74,15 @@ class KeystoneDriver(datasource_driver.DataSourceDriver):
|
|||
self.client = keystoneclient.v2_0.client.Client(**self.creds)
|
||||
self.initialized = True # flag that says __init__() has completed
|
||||
|
||||
@staticmethod
|
||||
def get_datasource_info():
|
||||
result = {}
|
||||
result['id'] = 'keystone'
|
||||
result['description'] = ('Datasource driver that interfaces with '
|
||||
'keystone.')
|
||||
result['config'] = datasource_utils.get_openstack_required_config()
|
||||
return result
|
||||
|
||||
def get_keystone_credentials_v2(self, name, args):
|
||||
creds = datasource_utils.get_credentials(name, args)
|
||||
d = {}
|
||||
|
|
|
@ -60,6 +60,15 @@ class MuranoDriver(datasource_driver.DataSourceDriver):
|
|||
|
||||
self.initialized = True
|
||||
|
||||
@staticmethod
|
||||
def get_datasource_info():
|
||||
result = {}
|
||||
result['id'] = 'murano'
|
||||
result['description'] = ('Datasource driver that interfaces with '
|
||||
'murano')
|
||||
result['config'] = datasource_utils.get_openstack_required_config()
|
||||
return result
|
||||
|
||||
def update_from_datasource(self):
|
||||
"""Called when it is time to pull new data from this datasource.
|
||||
|
||||
|
|
|
@ -173,6 +173,14 @@ class NeutronDriver(datasource_driver.DataSourceDriver):
|
|||
self.raw_state = {}
|
||||
self.initialized = True
|
||||
|
||||
@staticmethod
|
||||
def get_datasource_info():
|
||||
result = {}
|
||||
result['id'] = 'neutron'
|
||||
result['description'] = ('Do not use this driver is deprecated')
|
||||
result['config'] = datasource_utils.get_openstack_required_config()
|
||||
return result
|
||||
|
||||
def update_from_datasource(self):
|
||||
"""Called when it is time to pull new data from this datasource.
|
||||
|
||||
|
|
|
@ -223,6 +223,15 @@ class NeutronV2Driver(DataSourceDriver):
|
|||
self.raw_state = {}
|
||||
self.initialized = True
|
||||
|
||||
@staticmethod
|
||||
def get_datasource_info():
|
||||
result = {}
|
||||
result['id'] = 'neutronv2'
|
||||
result['description'] = ('Datasource driver that interfaces with '
|
||||
'OpenStack Networking aka Neutron.')
|
||||
result['config'] = datasource_utils.get_openstack_required_config()
|
||||
return result
|
||||
|
||||
def update_from_datasource(self):
|
||||
LOG.debug("Neutron grabbing networks")
|
||||
networks = self.neutron.list_networks()
|
||||
|
|
|
@ -106,6 +106,15 @@ class NovaDriver(datasource_driver.DataSourceDriver):
|
|||
self.register_translator(NovaDriver.floating_ips_translator)
|
||||
self.initialized = True
|
||||
|
||||
@staticmethod
|
||||
def get_datasource_info():
|
||||
result = {}
|
||||
result['id'] = 'nova'
|
||||
result['description'] = ('Datasource driver that interfaces with '
|
||||
'OpenStack Compute aka nova.')
|
||||
result['config'] = datasource_utils.get_openstack_required_config()
|
||||
return result
|
||||
|
||||
def get_nova_credentials_v2(self, name, args):
|
||||
creds = datasource_utils.get_credentials(name, args)
|
||||
d = {}
|
||||
|
|
|
@ -79,6 +79,15 @@ class PlexxiDriver(datasource_driver.DataSourceDriver):
|
|||
self.api_address = "http://" + host + ":" + port + "/v1"
|
||||
self.initialized = True
|
||||
|
||||
@staticmethod
|
||||
def get_datasource_info():
|
||||
result = {}
|
||||
result['id'] = 'plexxi'
|
||||
result['description'] = ('Datasource driver that interfaces with '
|
||||
'plexxi.')
|
||||
result['config'] = datasource_utils.get_openstack_required_config()
|
||||
return result
|
||||
|
||||
def update_from_datasource(self):
|
||||
"""Called when it is time to pull new data from this datasource.
|
||||
|
||||
|
|
|
@ -65,6 +65,16 @@ class SwiftDriver(datasource_driver.DataSourceDriver):
|
|||
self.raw_state = {}
|
||||
self.initialized = True
|
||||
|
||||
@staticmethod
|
||||
def get_datasource_info():
|
||||
# FIXME(arosen): Figure out how swift actually does auth?
|
||||
result = {}
|
||||
result['id'] = 'swift'
|
||||
result['description'] = ('Datasource driver that interfaces with '
|
||||
'swift.')
|
||||
result['config'] = {}
|
||||
return result
|
||||
|
||||
def update_from_datasource(self):
|
||||
'''Read and populate.
|
||||
|
||||
|
|
|
@ -132,6 +132,17 @@ class VCenterDriver(DataSourceDriver):
|
|||
create_session=True)
|
||||
self.initialized = True
|
||||
|
||||
@staticmethod
|
||||
def get_datasource_info():
|
||||
result = {}
|
||||
result['id'] = 'vcenter'
|
||||
result['description'] = ('Datasource driver that interfaces with '
|
||||
'vcenter')
|
||||
result['config'] = {'auth_url': 'required',
|
||||
'username': 'required',
|
||||
'password': 'required'}
|
||||
return result
|
||||
|
||||
def update_from_datasource(self):
|
||||
"""Called when it is time to pull new data from this datasource.
|
||||
|
||||
|
|
|
@ -0,0 +1,92 @@
|
|||
# Copyright (c) 2012 OpenStack Foundation.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.orm import exc as db_exc
|
||||
|
||||
from congress.db import api as db
|
||||
from congress.db import model_base
|
||||
from congress.openstack.common import log as logging
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Datasource(model_base.BASE, model_base.HasId):
    """ORM mapping for a configured datasource row."""

    __tablename__ = 'datasources'

    # Human-readable name; unique across all datasources.
    name = sa.Column(sa.String(255), unique=True)
    # Name of the driver used to talk to the backing service.
    driver = sa.Column(sa.String(255))
    # Driver configuration, stored as a JSON-encoded string (see __init__).
    config = sa.Column(sa.Text(), nullable=False)
    description = sa.Column(sa.Text(), nullable=True)
    enabled = sa.Column(sa.Boolean, default=True)

    def __init__(self, id_, name, driver, config, description,
                 enabled=True):
        self.id = id_
        self.name = name
        self.driver = driver
        # Serialize the config object so it fits the Text column.
        self.config = json.dumps(config)
        self.description = description
        self.enabled = enabled
|
||||
|
||||
|
||||
def add_datasource(id_, name, driver, config, description,
                   enabled, session=None):
    """Persist a new datasource row and return the mapped object."""
    db_session = session if session is not None else db.get_session()
    with db_session.begin(subtransactions=True):
        row = Datasource(
            id_=id_,
            name=name,
            driver=driver,
            config=config,
            description=description,
            enabled=enabled)
        db_session.add(row)
    return row
|
||||
|
||||
|
||||
def delete_datasource(id_, session=None):
    """Delete the datasource with the given id; return the rows removed."""
    db_session = session if session is not None else db.get_session()
    query = db_session.query(Datasource).filter(Datasource.id == id_)
    return query.delete()
|
||||
|
||||
|
||||
def get_datasource(id_, session=None):
    """Fetch one datasource by id, or None if no row matches."""
    db_session = session if session is not None else db.get_session()
    try:
        query = db_session.query(Datasource).filter(Datasource.id == id_)
        return query.one()
    except db_exc.NoResultFound:
        # Not found is reported as None rather than an exception.
        pass
|
||||
|
||||
|
||||
def get_datasource_by_name(name, session=None, deleted=False):
    """Fetch one datasource by name, or None if no row matches.

    NOTE: the 'deleted' parameter is accepted but not currently used.
    """
    db_session = session if session is not None else db.get_session()
    try:
        query = db_session.query(Datasource).filter(Datasource.name == name)
        return query.one()
    except db_exc.NoResultFound:
        # Not found is reported as None rather than an exception.
        pass
|
||||
|
||||
|
||||
def get_datasources(session=None, deleted=False):
    """Return all datasource rows.

    NOTE: the 'deleted' parameter is accepted but not currently used.
    """
    db_session = session if session is not None else db.get_session()
    return db_session.query(Datasource).all()
|
|
@ -0,0 +1,47 @@
|
|||
# Copyright 2015 OpenStack Foundation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
"""add_datasources
|
||||
|
||||
Revision ID: 3cee191c4f84
|
||||
Revises: 56e86d51ec62
|
||||
Create Date: 2015-02-05 13:30:04.272571
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '3cee191c4f84'
|
||||
down_revision = '56e86d51ec62'
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
def upgrade():
    """Create the ``datasources`` table (revision 3cee191c4f84).

    Mirrors the congress.db.datasources.Datasource model: unique name,
    mandatory JSON ``config`` text, optional description/enabled flag.
    """
    op.create_table(
        'datasources',
        sa.Column('id', sa.String(length=36), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('driver', sa.String(length=255), nullable=True),
        sa.Column('config', sa.Text(), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('enabled', sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name'),
        mysql_engine='InnoDB')
|
||||
|
||||
|
||||
def downgrade():
    """Revert this migration by dropping the ``datasources`` table."""
    op.drop_table('datasources')
|
|
@ -1 +1 @@
|
|||
56e86d51ec62
|
||||
3cee191c4f84
|
|
@ -21,6 +21,7 @@ Based on this comparison database can be healed with healing migration.
|
|||
|
||||
"""
|
||||
|
||||
from congress.db import datasources # noqa
|
||||
from congress.db import db_policy_rules # noqa
|
||||
from congress.db import model_base
|
||||
|
||||
|
|
|
@ -27,6 +27,8 @@ import traceback
|
|||
import amqprouter
|
||||
import eventlet
|
||||
eventlet.monkey_patch()
|
||||
from oslo.utils import importutils
|
||||
|
||||
|
||||
from congress.dse import d6message
|
||||
from congress.dse import deepsix
|
||||
|
@ -40,6 +42,28 @@ class DataServiceError (Exception):
|
|||
pass
|
||||
|
||||
|
||||
# This holds the cage instance singleton
instances = {}


def singleton(class_):
    """Class decorator that caches one shared instance per class.

    The first call constructs the instance (forwarding any arguments);
    every later call returns that same cached object, ignoring its own
    arguments.  The cache lives in the module-level ``instances`` dict
    so tests can reset it via delete_cage().
    """
    global instances

    def getinstance(*args, **kwargs):
        # EAFP: the cache hit is the common case after startup.
        try:
            return instances[class_]
        except KeyError:
            instances[class_] = class_(*args, **kwargs)
            return instances[class_]
    return getinstance
|
||||
|
||||
|
||||
def delete_cage():
    """Drop all cached singleton instances (used between tests).

    The previous implementation also looped ``del instance`` over the
    cached values, but ``del`` on a loop variable only unbinds the local
    name — it never destroyed the objects.  Rebinding the module-global
    ``instances`` dict is what actually releases the cached cages, so
    that is all we do.
    """
    global instances
    instances = {}
|
||||
|
||||
|
||||
@singleton
|
||||
class d6Cage(deepsix.deepSix):
|
||||
def __init__(self):
|
||||
self.config = {}
|
||||
|
@ -178,29 +202,50 @@ class d6Cage(deepsix.deepSix):
|
|||
|
||||
self.loadModule(section, filename)
|
||||
|
||||
def deleteservice(self, name):
    """Stop a running service and remove all traces of it from the cage.

    Kills the service greenthread, unroutes its message addresses and
    drops its bookkeeping entry so the name can be reused.

    NOTE(review): greenthread.kill() is handed the service *object*
    (``self.services[name]['object']``), the same value removed from
    ``self.greenThreads`` — presumably the service object doubles as
    its greenthread handle; confirm against d6Cage's service startup.
    """
    eventlet.greenthread.kill(self.services[name]['object'])
    self.greenThreads.remove(self.services[name]['object'])
    # Unroute both the public and the cage-local addresses for this
    # service's inbox.
    self.table.remove(name, self.services[name]['inbox'])
    self.table.remove("local." + name, self.services[name]['inbox'])
    self.unsubscribe(name, 'routeKeys')
    del self.services[name]
|
||||
|
||||
def createservice(
|
||||
self,
|
||||
name="",
|
||||
keys="",
|
||||
description="",
|
||||
moduleName="",
|
||||
args={}):
|
||||
args={},
|
||||
module_driver=False):
|
||||
|
||||
self.log_info("creating service %s with module %s and args %s",
|
||||
name, moduleName, args)
|
||||
|
||||
if moduleName not in sys.modules:
|
||||
# FIXME(arosen) This will be refactored out in the next patchset
|
||||
# this is only done because existing imports from d6service
|
||||
# instead of the module.
|
||||
if module_driver:
|
||||
congress_expected_module_path = ""
|
||||
for entry in range(len(moduleName.split(".")) - 1):
|
||||
congress_expected_module_path += (
|
||||
moduleName.split(".")[entry] + ".")
|
||||
congress_expected_module_path = congress_expected_module_path[:-1]
|
||||
module = importutils.import_module(congress_expected_module_path)
|
||||
|
||||
if not module_driver and moduleName not in sys.modules:
|
||||
raise DataServiceError(
|
||||
"error loading service" + name +
|
||||
": module " + moduleName + " does not exist")
|
||||
|
||||
if name in self.services:
|
||||
if not module_driver and name in self.services:
|
||||
raise DataServiceError(
|
||||
"error loading service '%s': name already in use"
|
||||
% name)
|
||||
|
||||
inbox = eventlet.Queue()
|
||||
module = sys.modules[moduleName]
|
||||
if not module_driver:
|
||||
module = sys.modules[moduleName]
|
||||
|
||||
# set args to default values, as necessary
|
||||
if name in self.default_service_args:
|
||||
|
|
|
@ -22,6 +22,7 @@ import sys
|
|||
from congress.db import db_policy_rules
|
||||
from congress.dse import d6cage
|
||||
from congress import exception
|
||||
from congress.managers import datasource as datasource_manager
|
||||
from congress.openstack.common import log as logging
|
||||
from congress.policy.base import ACTION_POLICY_TYPE
|
||||
|
||||
|
@ -145,6 +146,28 @@ def create(rootdir, statedir, config_file, config_override=None):
|
|||
args={'policy_engine': engine})
|
||||
cage.system_service_names.add('api-schema')
|
||||
|
||||
# add datasource/config api
|
||||
api_path = os.path.join(src_path, "api/datasource_config_model.py")
|
||||
LOG.info("main::start() api_path: %s", api_path)
|
||||
cage.loadModule("API-config", api_path)
|
||||
cage.createservice(
|
||||
name="api-config",
|
||||
moduleName="API-config",
|
||||
description="API-config DSE instance",
|
||||
args={'policy_engine': engine})
|
||||
cage.system_service_names.add('api-config')
|
||||
|
||||
# add path for system/datasource-drivers
|
||||
api_path = os.path.join(src_path, "api/system/driver_model.py")
|
||||
LOG.info("main::start() api_path: %s", api_path)
|
||||
cage.loadModule("API-system", api_path)
|
||||
cage.createservice(
|
||||
name="api-system",
|
||||
moduleName="API-system",
|
||||
description="API-system DSE instance",
|
||||
args={'policy_engine': engine})
|
||||
cage.system_service_names.add('api-system')
|
||||
|
||||
# Load policies from database
|
||||
for policy in db_policy_rules.get_policies():
|
||||
engine.create_policy(
|
||||
|
@ -182,23 +205,20 @@ def create(rootdir, statedir, config_file, config_override=None):
|
|||
callback=engine.receive_policy_update)
|
||||
|
||||
# spin up all the configured services, if we have configured them
|
||||
if cage.config:
|
||||
for name in cage.config:
|
||||
if 'module' in cage.config[name]:
|
||||
engine.create_policy(name)
|
||||
load_data_service(name, cage.config[name], cage, src_path)
|
||||
# inform policy engine about schema
|
||||
service = cage.service_object(name)
|
||||
engine.set_schema(name, service.get_schema())
|
||||
|
||||
# populate rule api data, needs to be done after models are loaded.
|
||||
# FIXME(arosen): refactor how we're loading data and api.
|
||||
rules = db_policy_rules.get_policy_rules()
|
||||
for rule in rules:
|
||||
parsed_rule = engine.parse1(rule.rule)
|
||||
cage.services['api-rule']['object'].change_rule(
|
||||
parsed_rule,
|
||||
{'policy_id': rule.policy_name})
|
||||
datasource_mgr = datasource_manager.DataSourceManager
|
||||
drivers = datasource_mgr.get_datasources()
|
||||
# Setup cage.config as it previously done when it was loaded
|
||||
# from disk. FIXME(arosen) later!
|
||||
for driver in drivers:
|
||||
driver_info = datasource_mgr.get_driver_info(driver['driver'])
|
||||
engine.create_policy(driver['name'])
|
||||
cage.createservice(name=driver['name'],
|
||||
moduleName=driver_info['module'],
|
||||
args=driver['config'],
|
||||
module_driver=True)
|
||||
service = cage.service_object(driver['name'])
|
||||
engine.set_schema(driver['name'], service.get_schema())
|
||||
|
||||
return cage
|
||||
|
||||
|
@ -241,6 +261,8 @@ def initialize_config(config_file, config_override):
|
|||
|
||||
Also doing insulate rest of code from idiosyncracies of ConfigParser.
|
||||
"""
|
||||
# FIXME(arosen): config_override is just being used to aid in testing
|
||||
# but we don't need to do this.
|
||||
if config_override is None:
|
||||
config_override = {}
|
||||
if config_file is None:
|
||||
|
|
|
@ -0,0 +1,227 @@
|
|||
# Copyright (c) 2014 OpenStack Foundation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
|
||||
from oslo.config import cfg
|
||||
from oslo.db import exception as db_exc
|
||||
from oslo.utils import importutils
|
||||
|
||||
from congress.db import api as db
|
||||
from congress.db import datasources as datasources_db
|
||||
from congress.dse import d6cage
|
||||
from congress import exception
|
||||
from congress.openstack.common import uuidutils
|
||||
|
||||
|
||||
class DataSourceManager(object):
    """Lifecycle manager for API-configured datasources.

    Bridges the REST API, the ``datasources`` DB table and the d6Cage
    message bus: validates driver configuration, persists datasource
    records, and starts/stops the matching DSE services and policies.
    """

    # driver id -> driver info dict ('id', 'description', 'config',
    # 'module'); populated by validate_configured_drivers() at startup.
    loaded_drivers = {}

    @classmethod
    def add_datasource(cls, item, deleted=False):
        """Validate, persist and launch a new datasource.

        :param item: request dict describing the datasource
        :param deleted: unused; kept for interface compatibility
        :returns: the canonical datasource dict
        :raises: InvalidDriver, InvalidDriverOption,
                 MissingRequiredConfigOptions, DatasourceNameInUse
        """
        req = cls.make_datasource_dict(item)
        driver_info = cls.validate_create_datasource(req)
        session = db.get_session()
        try:
            with session.begin(subtransactions=True):
                datasource = datasources_db.add_datasource(
                    id_=req['id'],
                    name=req['name'],
                    driver=req['driver'],
                    config=req['config'],
                    description=req['description'],
                    enabled=req['enabled'],
                    session=session)
                datasource = cls.make_datasource_dict(datasource)
                cage = d6cage.d6Cage()
                engine = cage.service_object('engine')
                try:
                    engine.create_policy(datasource['name'])
                except KeyError:
                    # FIXME(arosen): we need a better exception than
                    # KeyError being raised here.
                    # Fixed: pass name as a keyword so the exception's
                    # msg_fmt ("... name %(name)s") actually formats.
                    raise DatasourceNameInUse(name=req['name'])
                cage.createservice(name=datasource['name'],
                                   moduleName=driver_info['module'],
                                   args=datasource['config'],
                                   module_driver=True)
                service = cage.service_object(req['name'])
                engine.set_schema(req['name'], service.get_schema())
        except db_exc.DBDuplicateEntry:
            raise DatasourceNameInUse(name=req['name'])
        return cls.make_datasource_dict(datasource)

    @classmethod
    def validate_configured_drivers(cls):
        """Import every driver in cfg.CONF.drivers and cache its info."""
        result = {}
        for driver_path in cfg.CONF.drivers:
            obj = importutils.import_class(driver_path)
            driver = obj.get_datasource_info()
            if driver['id'] in result:
                # Fixed: the message was missing a space between the
                # adjacent string literals ("the""driver") and indexed a
                # nonexistent 'driver' key — driver info dicts carry 'id'.
                raise BadConfig(_("There is a driver loaded already with "
                                  "the driver name of %s")
                                % driver['id'])
            driver['module'] = driver_path
            result[driver['id']] = driver
        cls.loaded_drivers = result

    @classmethod
    def make_datasource_dict(cls, req, fields=None):
        """Normalize a request or DB row into the canonical dict form.

        Generates an id when the input lacks one, and deserializes a
        JSON-text config back into a structure.
        """
        result = {'id': req.get('id') or uuidutils.generate_uuid(),
                  'name': req.get('name'),
                  'driver': req.get('driver'),
                  'description': req.get('description'),
                  'type': None,
                  'enabled': req.get('enabled', True)}
        # NOTE(arosen): we store the config as a string in the db so
        # here we deserialize it back when returning it.
        try:
            text_types = (str, unicode)  # noqa -- Python 2
        except NameError:
            text_types = (str,)  # Python 3: no 'unicode' builtin
        if isinstance(req.get('config'), text_types):
            result['config'] = json.loads(req['config'])
        else:
            result['config'] = req.get('config')

        return cls._fields(result, fields)

    @classmethod
    def _fields(cls, resource, fields):
        """Project *resource* down to *fields*; no-op when fields is falsy."""
        if fields:
            return dict((key, value) for key, value in resource.items()
                        if key in fields)
        return resource

    @classmethod
    def get_datasources(cls):
        """Return the created datasources."""
        return [cls.make_datasource_dict(datasource_row)
                for datasource_row in datasources_db.get_datasources()]

    @classmethod
    def get_datasource(cls, id_):
        """Return the created datasource.

        :raises: DatasourceNotFound
        """
        result = datasources_db.get_datasource(id_)
        if not result:
            raise DatasourceNotFound(id=id_)
        return cls.make_datasource_dict(result)

    @classmethod
    def get_driver_info(cls, driver):
        """Return the cached info dict for a driver id.

        :raises: DriverNotFound
        """
        # Fixed: the original rebound ``driver`` to the failed (None)
        # lookup result before raising, so DriverNotFound reported
        # id=None instead of the id that was actually requested.
        driver_info = cls.loaded_drivers.get(driver)
        if not driver_info:
            raise DriverNotFound(id=driver)
        return driver_info

    @classmethod
    def get_driver_schema(cls, datasource_id):
        """Return the table schema exposed by a driver (by driver id)."""
        driver = cls.get_driver_info(datasource_id)
        obj = importutils.import_class(driver['module'])
        return obj.get_schema()

    @classmethod
    def get_datasource_schema(cls, datasource_id):
        """Return the schema of the driver backing a configured datasource.

        :raises: DatasourceNotFound, DriverNotFound
        """
        datasource = datasources_db.get_datasource(datasource_id)
        if not datasource:
            raise DatasourceNotFound(id=datasource_id)
        # NOTE(arosen): raises DriverNotFound when the driver is missing.
        # (The original looked the driver up a second time with the same
        # key; one lookup is sufficient.)
        driver = cls.get_driver_info(datasource.driver)
        obj = importutils.import_class(driver['module'])
        return obj.get_schema()

    @classmethod
    def delete_datasource(cls, datasource_id):
        """Remove a datasource row plus its DSE service and policy.

        :raises: DatasourceNotFound
        """
        datasource = cls.get_datasource(datasource_id)
        session = db.get_session()
        with session.begin(subtransactions=True):
            result = datasources_db.delete_datasource(
                datasource_id, session)
            if not result:
                raise DatasourceNotFound(id=datasource_id)
            cage = d6cage.d6Cage()
            # NOTE(arosen): need to refactor this to support multi tenancy
            cage.deleteservice(datasource['name'])
            engine = cage.service_object('engine')
            engine.delete_policy(datasource['name'])

    @classmethod
    def get_drivers_info(cls):
        """Return the info dicts for every loaded driver."""
        return list(cls.loaded_drivers.values())

    @classmethod
    def validate_create_datasource(cls, req):
        """Check the request's driver and config against loaded drivers.

        :returns: the matching driver info dict
        :raises: InvalidDriver, InvalidDriverOption,
                 MissingRequiredConfigOptions
        """
        driver = req['driver']
        config = req['config']
        for loaded_driver in cls.loaded_drivers.values():
            if loaded_driver['id'] == driver:
                specified_options = set(config.keys())
                valid_options = set(loaded_driver['config'].keys())
                # Check that all the specified options passed in are
                # valid configuration options that the driver exposes.
                invalid_options = specified_options - valid_options
                if invalid_options:
                    raise InvalidDriverOption(
                        invalid_options=invalid_options)

                # check that all the required options are passed in
                required_options = set(
                    [k for k, v in loaded_driver['config'].items()
                     if v == 'required'])
                missing_options = required_options - specified_options
                if missing_options:
                    missing_options = ', '.join(missing_options)
                    raise MissingRequiredConfigOptions(
                        missing_options=missing_options)
                return loaded_driver

        # If we get here no datasource driver match was found.
        raise InvalidDriver(driver=req)
|
||||
|
||||
|
||||
class BadConfig(exception.CongressException):
    """Base for configuration-validation errors; maps to HTTP 400."""
    code = 400
|
||||
|
||||
|
||||
class DatasourceDriverException(exception.CongressException):
    """Base class for errors raised by datasource drivers."""
    pass
|
||||
|
||||
|
||||
class MissingRequiredConfigOptions(BadConfig):
    """A create request omitted driver options marked as 'required'."""
    msg_fmt = _("Missing required config options: %(missing_options)s")
|
||||
|
||||
|
||||
class InvalidDriver(BadConfig):
    """The requested driver id matches no loaded driver."""
    msg_fmt = _("Invalid driver: %(driver)s")
|
||||
|
||||
|
||||
class InvalidDriverOption(BadConfig):
    """The config contained options the driver does not expose."""
    msg_fmt = _("Invalid driver options: %(invalid_options)s")
|
||||
|
||||
|
||||
class DatasourceNameInUse(exception.CongressException):
    """Datasource names are unique; maps to HTTP 409 Conflict."""
    msg_fmt = _("Datasource already in use with name %(name)s")

    code = 409
|
||||
|
||||
|
||||
class DatasourceNotFound(exception.CongressException):
    """No configured datasource matches the given id; HTTP 404.

    Fixed: the message previously read "Datasource Driver not found",
    which duplicated DriverNotFound's wording and misreported which
    kind of object was missing.
    """
    msg_fmt = _("Datasource not found %(id)s")
    code = 404
|
||||
|
||||
|
||||
class DriverNotFound(exception.CongressException):
    """No loaded driver matches the given driver id; HTTP 404."""
    msg_fmt = _("Driver not found %(id)s")
    code = 404
|
|
@ -151,20 +151,26 @@ def initialize_resources(resource_mgr, cage):
|
|||
policy_rules, "{policy_id}")
|
||||
resource_mgr.register_handler(rule_element_handler)
|
||||
|
||||
# Setup /v1/data-sources
|
||||
data_sources = cage.service_object('api-datasource')
|
||||
resource_mgr.register_model('data_sources', data_sources)
|
||||
ds_collection_handler = webservice.CollectionHandler(
|
||||
r'/v1/data-sources',
|
||||
data_sources)
|
||||
resource_mgr.register_handler(ds_collection_handler)
|
||||
|
||||
# Setup /v1/data-sources/<ds_id>
|
||||
ds_path = r'/v1/data-sources/(?P<ds_id>[^/]+)'
|
||||
ds_element_handler = webservice.ElementHandler(ds_path, data_sources)
|
||||
resource_mgr.register_handler(ds_element_handler)
|
||||
|
||||
# Setup /v1/data-sources/<ds_id>/schema
|
||||
schema = cage.service_object('api-schema')
|
||||
schema_path = "%s/schema" % ds_path
|
||||
schema_element_handler = webservice.ElementHandler(schema_path, schema)
|
||||
resource_mgr.register_handler(schema_element_handler)
|
||||
|
||||
# Setup /v1/data-sources/<ds_id>/tables/<table_id>/spec
|
||||
table_schema_path = "%s/tables/(?P<table_id>[^/]+)/spec" % ds_path
|
||||
table_schema_element_handler = webservice.ElementHandler(
|
||||
table_schema_path, schema)
|
||||
|
@ -196,6 +202,23 @@ def initialize_resources(resource_mgr, cage):
|
|||
row_element_handler = webservice.ElementHandler(row_path, table_rows)
|
||||
resource_mgr.register_handler(row_element_handler)
|
||||
|
||||
# Setup /v1/system/datasource-drivers
|
||||
system = cage.service_object('api-system')
|
||||
resource_mgr.register_model('system', system)
|
||||
# NOTE(arosen): start url out with datasource-drivers since we don't
|
||||
# yet implement /v1/system/ yet.
|
||||
system_collection_handler = webservice.CollectionHandler(
|
||||
r'/v1/system/drivers',
|
||||
system)
|
||||
resource_mgr.register_handler(system_collection_handler)
|
||||
|
||||
# Setup /v1/system/datasource-drivers/<driver_id>
|
||||
driver_path = r'/v1/system/drivers/(?P<driver_id>[^/]+)'
|
||||
driver_element_handler = webservice.ElementHandler(
|
||||
driver_path,
|
||||
system)
|
||||
resource_mgr.register_handler(driver_element_handler)
|
||||
|
||||
|
||||
def main():
|
||||
config.init(sys.argv[1:])
|
||||
|
|
|
@ -30,11 +30,8 @@ def fail_gracefully(f):
|
|||
def wrapper(*args, **kw):
|
||||
try:
|
||||
return f(*args, **kw)
|
||||
except Exception as e:
|
||||
LOG.debug(e, exc_info=True)
|
||||
|
||||
# exception message is printed to all logs
|
||||
LOG.critical(e)
|
||||
except Exception:
|
||||
LOG.exception("Fatal Exception:")
|
||||
sys.exit(1)
|
||||
|
||||
return wrapper
|
||||
|
|
|
@ -28,6 +28,7 @@ from congress.db import api as db_api
|
|||
# Import all data models
|
||||
from congress.db.migration.models import head # noqa
|
||||
from congress.db import model_base
|
||||
from congress.dse import d6cage
|
||||
from congress.tests import helper
|
||||
from congress.tests import policy_fixture
|
||||
|
||||
|
@ -70,6 +71,9 @@ class TestCase(testtools.TestCase):
|
|||
|
||||
self.log_fixture = self.useFixture(fixtures.FakeLogger())
|
||||
self.policy = self.useFixture(policy_fixture.PolicyFixture())
|
||||
# cage is a singleton so we delete it here and
|
||||
# recreate it after each test
|
||||
self.addCleanup(d6cage.delete_cage)
|
||||
|
||||
def setup_config(self):
|
||||
"""Tests that need a non-default config can override this method."""
|
||||
|
|
|
@ -0,0 +1,101 @@
|
|||
# Copyright (c) 2015 OpenStack Foundation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from congress.db import datasources
|
||||
from congress.openstack.common import uuidutils
|
||||
from congress.tests import base
|
||||
|
||||
|
||||
class TestDbDatasource(base.SqlTestCase):
    """CRUD tests for the congress.db.datasources helper functions.

    Refactored: every test previously repeated the same insert call and
    the same six field assertions; those are now shared helpers.
    """

    def _add_fixture_datasource(self, id_):
        """Insert the canonical test row and return the model instance."""
        return datasources.add_datasource(
            id_=id_,
            name="hiya",
            driver="foo",
            config='{user: foo}',
            description="hello",
            enabled=True)

    def _assert_fixture_row(self, id_, source):
        """Assert *source* matches what _add_fixture_datasource inserted."""
        self.assertEqual(id_, source.id)
        self.assertEqual("hiya", source.name)
        self.assertEqual("foo", source.driver)
        self.assertEqual("hello", source.description)
        # The model JSON-encodes config, so the stored text gains quotes.
        self.assertEqual('"{user: foo}"', source.config)
        self.assertEqual(True, source.enabled)

    def test_add_datasource(self):
        id_ = uuidutils.generate_uuid()
        source = self._add_fixture_datasource(id_)
        self._assert_fixture_row(id_, source)

    def test_delete_datasource(self):
        id_ = uuidutils.generate_uuid()
        self._add_fixture_datasource(id_)
        self.assertTrue(datasources.delete_datasource(id_))

    def test_delete_non_existing_datasource(self):
        self.assertFalse(datasources.delete_datasource('no_id'))

    def test_get_datasource_by_name(self):
        id_ = uuidutils.generate_uuid()
        self._add_fixture_datasource(id_)
        self._assert_fixture_row(
            id_, datasources.get_datasource_by_name('hiya'))

    def test_get_datasource_by_id(self):
        id_ = uuidutils.generate_uuid()
        self._add_fixture_datasource(id_)
        self._assert_fixture_row(id_, datasources.get_datasource(id_))

    def test_get_datasource(self):
        # NOTE(review): despite the singular name this exercises
        # get_datasources(); name kept for backward compatibility.
        id_ = uuidutils.generate_uuid()
        self._add_fixture_datasource(id_)
        sources = datasources.get_datasources()
        self._assert_fixture_row(id_, sources[0])
|
|
@ -0,0 +1,51 @@
|
|||
# Copyright (c) 2015 OpenStack Foundation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
|
||||
from congress.datasources import datasource_driver
|
||||
from congress.datasources import datasource_utils
|
||||
|
||||
|
||||
def d6service(name, keys, inbox, datapath, args):
    """This method is called by d6cage to create a dataservice instance."""
    return FakeDataSource(name, keys, inbox, datapath, args)
|
||||
|
||||
|
||||
class FakeDataSource(datasource_driver.DataSourceDriver):
    """Minimal datasource driver used by the test suite.

    Exposes one table, ``fake_table``, with ``id`` and ``name`` columns.
    """

    value_trans = {'translation-type': 'VALUE'}
    fake_translator = {
        'translation-type': 'HDICT',
        'table-name': 'fake_table',
        'selector-type': 'DICT_SELECTOR',
        'field-translators':
            ({'fieldname': 'id', 'translator': value_trans},
             {'fieldname': 'name', 'translator': value_trans})}

    TRANSLATORS = [fake_translator]

    def __init__(self, name='', keys='', inbox=None, datapath=None, args=None):
        super(FakeDataSource, self).__init__(name, keys, inbox,
                                             datapath, args)
        self.register_translator(FakeDataSource.fake_translator)
        self.initialized = True

    @staticmethod
    def get_datasource_info():
        # Advertise the driver id/description plus the standard
        # OpenStack auth options as its configuration schema.
        return {
            'id': 'fake_datasource',
            'description': 'This is a fake driver used for testing',
            'config': datasource_utils.get_openstack_required_config(),
        }
|
|
@ -0,0 +1,167 @@
|
|||
# Copyright (c) 2014 OpenStack Foundation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from oslo.config import cfg
|
||||
|
||||
from congress import harness
|
||||
from congress.managers import datasource as datasource_manager
|
||||
from congress.tests import base
|
||||
from congress.tests import fake_datasource
|
||||
from congress.tests import helper
|
||||
|
||||
|
||||
class TestDataSourceManager(base.SqlTestCase):
    """End-to-end tests for DataSourceManager using FakeDataSource.

    Refactored: eight tests previously repeated the same valid-request
    boilerplate (driver, config dict, ``del req['id']``); that now lives
    in _get_fake_datasource_request().  Test method names — including
    the historical 'datasouce' typos — are preserved so external test
    selections keep working.
    """

    def setUp(self):
        super(TestDataSourceManager, self).setUp()
        cfg.CONF.set_override(
            'drivers',
            ['congress.tests.fake_datasource.FakeDataSource'])
        self.datasource_mgr = datasource_manager.DataSourceManager
        self.datasource_mgr.validate_configured_drivers()
        self.cage = harness.create(helper.root_path(), helper.state_path(),
                                   None, {})

    def _get_datasource_request(self):
        """Bare request template: empty driver and config."""
        return {'id': 'asdf',
                'name': 'aaron',
                'driver': '',
                'description': 'hello world!',
                'enabled': True,
                'type': None,
                'config': {}}

    def _get_fake_datasource_request(self, name=None):
        """Request valid for the fake driver; id removed so the manager
        generates one."""
        req = self._get_datasource_request()
        req['driver'] = 'fake_datasource'
        if name is not None:
            req['name'] = name
        req['config'] = {'auth_url': 'foo',
                         'username': 'armax',
                         'password': 'password',
                         'tenant_name': 'armax'}
        # let driver generate this for us.
        del req['id']
        return req

    def test_make_datasource_dict(self):
        req = self._get_datasource_request()
        result = self.datasource_mgr.make_datasource_dict(req)
        self.assertEqual(req, result)

    def test_validate_create_datasource_invalid_driver(self):
        req = self._get_datasource_request()
        self.assertRaises(datasource_manager.InvalidDriver,
                          self.datasource_mgr.validate_create_datasource,
                          req)

    def test_validate_create_datasource_invalid_config_invalid_options(self):
        req = self._get_datasource_request()
        req['driver'] = 'invalid_datasource'
        self.assertRaises(datasource_manager.InvalidDriver,
                          self.datasource_mgr.validate_create_datasource,
                          req)

    def test_validate_create_datasource_missing_config_options(self):
        req = self._get_datasource_request()
        req['driver'] = 'fake_datasource'
        # This is still missing some required options
        req['config'] = {'auth_url': '1234'}
        self.assertRaises(datasource_manager.MissingRequiredConfigOptions,
                          self.datasource_mgr.validate_create_datasource,
                          req)

    def test_add_datasource(self):
        req = self._get_fake_datasource_request()
        result = self.datasource_mgr.add_datasource(req)
        for key, value in req.items():
            self.assertEqual(value, result[key])

    def test_get_datasouce(self):
        req = self._get_fake_datasource_request()
        result = self.datasource_mgr.add_datasource(req)
        result = self.datasource_mgr.get_datasource(result['id'])
        for key, value in req.items():
            self.assertEqual(value, result[key])

    def test_get_datasources(self):
        req = self._get_fake_datasource_request(name='datasource1')
        self.datasource_mgr.add_datasource(req)
        req['name'] = 'datasource2'
        self.datasource_mgr.add_datasource(req)
        result = self.datasource_mgr.get_datasources()

        req['name'] = 'datasource1'
        for key, value in req.items():
            self.assertEqual(value, result[0][key])

        req['name'] = 'datasource2'
        for key, value in req.items():
            self.assertEqual(value, result[1][key])

    def test_create_datasource_duplicate_name(self):
        req = self._get_fake_datasource_request(name='datasource1')
        self.datasource_mgr.add_datasource(req)
        self.assertRaises(datasource_manager.DatasourceNameInUse,
                          self.datasource_mgr.add_datasource, req)

    def test_delete_datasource(self):
        req = self._get_fake_datasource_request()
        result = self.datasource_mgr.add_datasource(req)
        self.datasource_mgr.delete_datasource(result['id'])
        self.assertRaises(datasource_manager.DatasourceNotFound,
                          self.datasource_mgr.get_datasource,
                          result['id'])

    def test_delete_invalid_datasource(self):
        self.assertRaises(datasource_manager.DatasourceNotFound,
                          self.datasource_mgr.delete_datasource,
                          "does_not_exist")

    def test_get_driver_schema(self):
        schema = self.datasource_mgr.get_driver_schema(
            'fake_datasource')
        self.assertEqual(
            schema,
            fake_datasource.FakeDataSource.get_schema())

    def test_get_datasouce_schema_driver_not_found(self):
        self.assertRaises(datasource_manager.DatasourceNotFound,
                          self.datasource_mgr.get_datasource_schema,
                          "does_not_exist")
|
|
@ -20,7 +20,9 @@ test_congress
|
|||
|
||||
Tests for `congress` module.
|
||||
"""
|
||||
import os
|
||||
|
||||
import mock
|
||||
import mox
|
||||
import neutronclient.v2_0
|
||||
|
||||
|
@ -50,13 +52,9 @@ class TestCongress(base.SqlTestCase):
|
|||
neutronclient.v2_0.client.Client)
|
||||
neutron_mock2 = mock_factory.CreateMock(
|
||||
neutronclient.v2_0.client.Client)
|
||||
override = {}
|
||||
override['neutron'] = {'poll_time': 0}
|
||||
override['neutron2'] = {'poll_time': 0}
|
||||
override['nova'] = {'poll_time': 0}
|
||||
|
||||
cage = harness.create(helper.root_path(), helper.state_path(),
|
||||
helper.datasource_config_path(), override)
|
||||
helper.datasource_config_path())
|
||||
engine = cage.service_object('engine')
|
||||
|
||||
api = {'policy': cage.service_object('api-policy'),
|
||||
|
@ -67,7 +65,34 @@ class TestCongress(base.SqlTestCase):
|
|||
'status': cage.service_object('api-status'),
|
||||
'schema': cage.service_object('api-schema')}
|
||||
|
||||
config = {'username': 'demo',
|
||||
'auth_url': 'http://127.0.0.1:5000/v2.0',
|
||||
'tenant_name': 'demo',
|
||||
'password': 'password',
|
||||
'poll_time': 0,
|
||||
'module': 'datasources/neutron_driver.py'}
|
||||
|
||||
# FIXME(arosen): remove all this code
|
||||
# monkey patch
|
||||
engine.create_policy('neutron')
|
||||
engine.create_policy('neutron2')
|
||||
engine.create_policy('nova')
|
||||
harness.load_data_service(
|
||||
'neutron', config, cage,
|
||||
os.path.join(helper.root_path(), "congress"))
|
||||
service = cage.service_object('neutron')
|
||||
engine.set_schema('neutron', service.get_schema())
|
||||
harness.load_data_service(
|
||||
'neutron2', config, cage,
|
||||
os.path.join(helper.root_path(), "congress"))
|
||||
|
||||
engine.set_schema('neutron2', service.get_schema())
|
||||
config['module'] = 'datasources/nova_driver.py'
|
||||
harness.load_data_service(
|
||||
'nova', config, cage,
|
||||
os.path.join(helper.root_path(), "congress"))
|
||||
engine.set_schema('nova', service.get_schema())
|
||||
|
||||
cage.service_object('neutron').neutron = neutron_mock
|
||||
cage.service_object('neutron2').neutron = neutron_mock2
|
||||
|
||||
|
@ -297,6 +322,7 @@ class TestCongress(base.SqlTestCase):
|
|||
|
||||
def test_table_api_model(self):
|
||||
"""Test the table api model."""
|
||||
self.skipTest("Move to test/api/api_model and use fake driver...")
|
||||
api = self.api
|
||||
engine = self.engine
|
||||
|
||||
|
@ -581,18 +607,26 @@ class TestCongress(base.SqlTestCase):
|
|||
Same as test_multiple except we use the api interface
|
||||
instead of the DSE interface.
|
||||
"""
|
||||
api = self.api
|
||||
engine = self.engine
|
||||
# Insert formula (which creates neutron services)
|
||||
net_formula = test_neutron.create_networkXnetwork_group('p')
|
||||
LOG.debug("Sending formula: %s", net_formula)
|
||||
context = {'policy_id': engine.DEFAULT_THEORY}
|
||||
(id1, rule) = api['rule'].add_item(
|
||||
{'rule': str(net_formula)}, {}, context=context)
|
||||
datasources = api['datasource'].get_items({})['results']
|
||||
datasources = [d['id'] for d in datasources]
|
||||
self.assertEqual(set(datasources),
|
||||
set(['neutron', 'neutron2', 'nova']))
|
||||
self.skipTest("Move to test/api/api_model and use fake driver...")
|
||||
# FIXME(arosen): we should break out these tests into
|
||||
# congress/tests/api/test_datasource.py
|
||||
with mock.patch("congress.managers.datasource.DataSourceDriverManager."
|
||||
"get_datasource_drivers_info") as get_info:
|
||||
get_info.return_value = [{'datasource_driver': 'neutron'},
|
||||
{'datasource_driver': 'neutron2'},
|
||||
{'datasource_driver': 'nova'}]
|
||||
api = self.api
|
||||
engine = self.engine
|
||||
# Insert formula (which creates neutron services)
|
||||
net_formula = test_neutron.create_networkXnetwork_group('p')
|
||||
LOG.debug("Sending formula: %s", net_formula)
|
||||
context = {'policy_id': engine.DEFAULT_THEORY}
|
||||
(id1, rule) = api['rule'].add_item(
|
||||
{'rule': str(net_formula)}, {}, context=context)
|
||||
datasources = api['datasource'].get_items({})['results']
|
||||
datasources = [d['datasource_driver'] for d in datasources]
|
||||
self.assertEqual(set(datasources),
|
||||
set(['neutron', 'neutron2', 'nova']))
|
||||
|
||||
def test_status_api_model(self):
|
||||
"""Test the status api model.
|
||||
|
@ -600,6 +634,7 @@ class TestCongress(base.SqlTestCase):
|
|||
Same as test_multiple except we use the api interface
|
||||
instead of the DSE interface.
|
||||
"""
|
||||
self.skipTest("Move to test/api/test_status and use fake driver...")
|
||||
api = self.api
|
||||
context = {'ds_id': 'neutron'}
|
||||
|
||||
|
@ -631,6 +666,8 @@ class TestCongress(base.SqlTestCase):
|
|||
Same as test_multiple except we use the api interface
|
||||
instead of the DSE interface.
|
||||
"""
|
||||
# FIXME(arosen): here...
|
||||
self.skipTest("Move to test/api/test_schema and use fake driver...")
|
||||
api = self.api
|
||||
neutron_schema = self.cage.service_object('neutron').get_schema()
|
||||
|
||||
|
@ -658,6 +695,7 @@ class TestCongress(base.SqlTestCase):
|
|||
|
||||
def test_row_api_model(self):
|
||||
"""Test the row api model."""
|
||||
self.skipTest("Move to test/api/test_row_api_model..")
|
||||
api = self.api
|
||||
engine = self.engine
|
||||
# add some rules defining tables
|
||||
|
|
|
@ -33,6 +33,7 @@ if is_service_enabled congress; then
|
|||
# Start the congress API and Congress taskmgr components
|
||||
echo_summary "Starting Congress"
|
||||
start_congress_service_and_check
|
||||
configure_congress_datasources
|
||||
fi
|
||||
|
||||
if [[ "$1" == "unstack" ]]; then
|
||||
|
|
|
@ -88,6 +88,8 @@ function configure_congress {
|
|||
sudo chown $STACK_USER $CONGRESS_CONF_DIR
|
||||
|
||||
cp $CONGRESS_DIR/etc/congress.conf.sample $CONGRESS_CONF
|
||||
# FIXME(arosen) quick work around for now.
|
||||
touch /etc/congress/datasource.conf
|
||||
|
||||
# If needed, move config file from ``$CONGRESS_DIR/etc/congress`` to ``CONGRESS_CONF_DIR``
|
||||
|
||||
|
@ -98,7 +100,6 @@ function configure_congress {
|
|||
CONGRESS_API_PASTE_FILE=$CONGRESS_CONF_DIR/api-paste.ini
|
||||
CONGRESS_POLICY_FILE=$CONGRESS_CONF_DIR/policy.json
|
||||
CONGRESS_POLICY_PATH=$CONGRESS_CONF_DIR/snapshot
|
||||
CONGRESS_DATASOURCE_FILE=$CONGRESS_CONF_DIR/datasources.conf
|
||||
|
||||
cp $CONGRESS_DIR/etc/api-paste.ini $CONGRESS_API_PASTE_FILE
|
||||
cp $CONGRESS_DIR/etc/policy.json $CONGRESS_POLICY_FILE
|
||||
|
@ -108,30 +109,31 @@ function configure_congress {
|
|||
sudo mkdir -p $CONGRESS_CONF_DIR/snapshot
|
||||
fi
|
||||
|
||||
rm -rf $CONGRESS_DATASOURCE_FILE
|
||||
_configure_service neutron neutronv2
|
||||
_configure_service nova nova
|
||||
_configure_service key keystone
|
||||
_configure_service ceilometer ceilometer
|
||||
_configure_service cinder cinder
|
||||
_configure_service swift swift
|
||||
_configure_service glance glancev2
|
||||
|
||||
# Update either configuration file
|
||||
iniset $CONGRESS_CONF DEFAULT verbose True
|
||||
iniset $CONGRESS_CONF DEFAULT debug $ENABLE_DEBUG_LOG_LEVEL
|
||||
iniset $CONGRESS_CONF DEFAULT policy_file $CONGRESS_POLICY_FILE
|
||||
iniset $CONGRESS_CONF DEFAULT policy_path $CONGRESS_POLICY_PATH
|
||||
iniset $CONGRESS_CONF DEFAULT datasource_file $CONGRESS_DATASOURCE_FILE
|
||||
iniset $CONGRESS_CONF DEFAULT auth_strategy $CONGRESS_AUTH_STRATEGY
|
||||
|
||||
iniset $CONGRESS_CONF database connection `database_connection_url $CONGRESS_DB_NAME`
|
||||
|
||||
# Update congress datasource file
|
||||
# NOTE(arosen): congress datasource file does not use the same format as others
|
||||
# so we use sed instead of iniset. This will removed anyways onces congress can
|
||||
# configure datasources via the api.
|
||||
sed -i -e "s/password: password/password: $ADMIN_PASSWORD/g" $CONGRESS_DATASOURCE_FILE
|
||||
CONGRESS_DRIVERS="congress.datasources.neutronv2_driver.NeutronV2Driver,"
|
||||
CONGRESS_DRIVERS+="congress.datasources.glancev2_driver.GlanceV2Driver,"
|
||||
CONGRESS_DRIVERS+="congress.datasources.nova_driver.NovaDriver,"
|
||||
CONGRESS_DRIVERS+="congress.datasources.keystone_driver.KeystoneDriver,"
|
||||
CONGRESS_DRIVERS+="congress.datasources.ceilometer_driver.CeilometerDriver,"
|
||||
CONGRESS_DRIVERS+="congress.datasources.cinder_driver.CinderDriver,"
|
||||
CONGRESS_DRIVERS+="congress.datasources.swift_driver.SwiftDriver,"
|
||||
CONGRESS_DRIVERS+="congress.datasources.plexxi_driver.PlexxiDriver,"
|
||||
CONGRESS_DRIVERS+="congress.datasources.vCenter_driver.VCenterDriver"
|
||||
# FIXME(arosen): congress does not yet have the murano client in requirements.txt
|
||||
# so we can't yet load it.
|
||||
#CONGRESS_DRIVERS+="congress.datasources.murano_driver.MuranoDriver"
|
||||
|
||||
|
||||
iniset $CONGRESS_CONF DEFAULT drivers $CONGRESS_DRIVERS
|
||||
|
||||
iniset $CONGRESS_CONF database connection `database_connection_url $CONGRESS_DB_NAME`
|
||||
|
||||
_congress_setup_keystone $CONGRESS_CONF keystone_authtoken
|
||||
|
||||
|
@ -140,6 +142,32 @@ function configure_congress {
|
|||
fi
|
||||
}
|
||||
|
||||
function configure_congress_datasources {
|
||||
_configure_service neutron neutronv2
|
||||
_configure_service nova nova
|
||||
_configure_service key keystone
|
||||
_configure_service ceilometer ceilometer
|
||||
_configure_service cinder cinder
|
||||
# FIXME(arosen): need to figure out how swift auth works...
|
||||
#_configure_service swift swift
|
||||
_configure_service glance glancev2
|
||||
_configure_service murano murano
|
||||
|
||||
|
||||
}
|
||||
|
||||
function _configure_service {
|
||||
if is_service_enabled $1; then
|
||||
openstack congress datasource create $2 "$2" \
|
||||
--config username=$OS_USERNAME \
|
||||
--config tenant_name=$OS_TENANT_NAME \
|
||||
--config password=$OS_PASSWORD \
|
||||
--config auth_url=http://$SERVICE_HOST:5000/v2.0
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
|
||||
function configure_congressclient {
|
||||
setup_develop $CONGRESSCLIENT_DIR
|
||||
}
|
||||
|
@ -215,6 +243,7 @@ function start_congress_service_and_check {
|
|||
# fi
|
||||
}
|
||||
|
||||
|
||||
# stop_congress() - Stop running processes (non-screen)
|
||||
function stop_congress {
|
||||
:
|
||||
|
@ -260,19 +289,6 @@ function _congress_setup_horizon {
|
|||
restart_apache_server
|
||||
}
|
||||
|
||||
# Enable openstack services with congress
|
||||
function _configure_service {
|
||||
if is_service_enabled $1; then
|
||||
echo -e \
|
||||
"\n[$2]"\
|
||||
"\nmodule: datasources/$2_driver.py"\
|
||||
"\nusername: $OS_USERNAME"\
|
||||
"\npassword: $OS_PASSWORD"\
|
||||
"\nauth_url: http://$SERVICE_HOST:5000/v2.0"\
|
||||
"\ntenant_name: $OS_TENANT_NAME" >> $CONGRESS_DATASOURCE_FILE
|
||||
fi
|
||||
}
|
||||
|
||||
# Restore xtrace
|
||||
$XTRACE
|
||||
|
||||
|
|
|
@ -37,12 +37,14 @@ class TestCeilometerDriver(manager_congress.ScenarioPolicyBase):
|
|||
raise cls.skipException(msg)
|
||||
cls.os = clients.Manager(cls.admin_credentials())
|
||||
cls.telemetry_client = cls.os.telemetry_client
|
||||
cls.datasource_id = manager_congress.get_datasource_id(
|
||||
cls.admin_manager.congress_client, 'ceilometer')
|
||||
|
||||
@test.attr(type='smoke')
|
||||
def test_ceilometer_meters_table(self):
|
||||
meter_schema = (
|
||||
self.admin_manager.congress_client.show_datasource_table_schema(
|
||||
'ceilometer', 'meters')['columns'])
|
||||
self.datasource_id, 'meters')['columns'])
|
||||
meter_id_col = next(i for i, c in enumerate(meter_schema)
|
||||
if c['name'] == 'meter_id')
|
||||
|
||||
|
@ -56,7 +58,7 @@ class TestCeilometerDriver(manager_congress.ScenarioPolicyBase):
|
|||
|
||||
results = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'ceilometer', 'meters'))
|
||||
self.datasource_id, 'meters'))
|
||||
for row in results['results']:
|
||||
try:
|
||||
meter_row = meter_map[row['data'][meter_id_col]]
|
||||
|
|
|
@ -40,12 +40,14 @@ class TestCinderDriver(manager_congress.ScenarioPolicyBase):
|
|||
super(TestCinderDriver, cls).setUp()
|
||||
cls.os = clients.Manager(cls.admin_credentials())
|
||||
cls.cinder = cls.os.volumes_client
|
||||
cls.datasource_id = manager_congress.get_datasource_id(
|
||||
cls.admin_manager.congress_client, 'cinder')
|
||||
|
||||
@test.attr(type='smoke')
|
||||
def test_cinder_volumes_table(self):
|
||||
volume_schema = (
|
||||
self.admin_manager.congress_client.show_datasource_table_schema(
|
||||
'cinder', 'volumes')['columns'])
|
||||
self.datasource_id, 'volumes')['columns'])
|
||||
volume_id_col = next(i for i, c in enumerate(volume_schema)
|
||||
if c['name'] == 'id')
|
||||
|
||||
|
@ -59,7 +61,7 @@ class TestCinderDriver(manager_congress.ScenarioPolicyBase):
|
|||
|
||||
results = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'cinder', 'volumes'))
|
||||
self.datasource_id, 'volumes'))
|
||||
for row in results['results']:
|
||||
try:
|
||||
volume_row = volumes_map[row['data'][volume_id_col]]
|
||||
|
|
|
@ -43,13 +43,15 @@ class TestGlanceV2Driver(manager_congress.ScenarioPolicyBase):
|
|||
raise cls.skipException(skip_msg)
|
||||
cls.os = clients.Manager()
|
||||
cls.glancev2 = cls.os.image_client_v2
|
||||
cls.datasource_id = manager_congress.get_datasource_id(
|
||||
cls.admin_manager.congress_client, 'glancev2')
|
||||
|
||||
@test.attr(type='smoke')
|
||||
@test.services('image')
|
||||
def test_glancev2_images_table(self):
|
||||
image_schema = (
|
||||
self.admin_manager.congress_client.show_datasource_table_schema(
|
||||
'glancev2', 'images')['columns'])
|
||||
self.datasource_id, 'images')['columns'])
|
||||
image_id_col = next(i for i, c in enumerate(image_schema)
|
||||
if c['name'] == 'id')
|
||||
|
||||
|
@ -63,7 +65,7 @@ class TestGlanceV2Driver(manager_congress.ScenarioPolicyBase):
|
|||
|
||||
results = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'glancev2', 'images'))
|
||||
self.datasource_id, 'images'))
|
||||
for row in results['results']:
|
||||
try:
|
||||
image_row = image_map[row['data'][image_id_col]]
|
||||
|
@ -103,7 +105,7 @@ class TestGlanceV2Driver(manager_congress.ScenarioPolicyBase):
|
|||
|
||||
results = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'glancev2', 'tags'))
|
||||
self.datasource_id, 'tags'))
|
||||
for row in results['results']:
|
||||
image_id, tag = row['data'][0], row['data'][1]
|
||||
glance_image_tags = image_tag_map.get(image_id)
|
||||
|
|
|
@ -40,12 +40,14 @@ class TestKeystoneV2Driver(manager_congress.ScenarioPolicyBase):
|
|||
super(TestKeystoneV2Driver, cls).setUp()
|
||||
cls.os = clients.Manager(cls.admin_credentials())
|
||||
cls.keystone = cls.os.identity_client
|
||||
cls.datasource_id = manager_congress.get_datasource_id(
|
||||
cls.admin_manager.congress_client, 'keystone')
|
||||
|
||||
@test.attr(type='smoke')
|
||||
def test_keystone_users_table(self):
|
||||
user_schema = (
|
||||
self.admin_manager.congress_client.show_datasource_table_schema(
|
||||
'keystone', 'users')['columns'])
|
||||
self.datasource_id, 'users')['columns'])
|
||||
user_id_col = next(i for i, c in enumerate(user_schema)
|
||||
if c['name'] == 'id')
|
||||
|
||||
|
@ -59,7 +61,7 @@ class TestKeystoneV2Driver(manager_congress.ScenarioPolicyBase):
|
|||
|
||||
results = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'keystone', 'users'))
|
||||
self.datasource_id, 'users'))
|
||||
for row in results['results']:
|
||||
try:
|
||||
user_row = user_map[row['data'][user_id_col]]
|
||||
|
@ -85,7 +87,7 @@ class TestKeystoneV2Driver(manager_congress.ScenarioPolicyBase):
|
|||
def test_keystone_roles_table(self):
|
||||
role_schema = (
|
||||
self.admin_manager.congress_client.show_datasource_table_schema(
|
||||
'keystone', 'roles')['columns'])
|
||||
self.datasource_id, 'roles')['columns'])
|
||||
role_id_col = next(i for i, c in enumerate(role_schema)
|
||||
if c['name'] == 'id')
|
||||
|
||||
|
@ -99,7 +101,7 @@ class TestKeystoneV2Driver(manager_congress.ScenarioPolicyBase):
|
|||
|
||||
results = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'keystone', 'roles'))
|
||||
self.datasource_id, 'roles'))
|
||||
for row in results['results']:
|
||||
try:
|
||||
role_row = roles_map[row['data'][role_id_col]]
|
||||
|
@ -120,7 +122,7 @@ class TestKeystoneV2Driver(manager_congress.ScenarioPolicyBase):
|
|||
def test_keystone_tenants_table(self):
|
||||
tenant_schema = (
|
||||
self.admin_manager.congress_client.show_datasource_table_schema(
|
||||
'keystone', 'tenants')['columns'])
|
||||
self.datasource_id, 'tenants')['columns'])
|
||||
tenant_id_col = next(i for i, c in enumerate(tenant_schema)
|
||||
if c['name'] == 'id')
|
||||
|
||||
|
@ -134,7 +136,7 @@ class TestKeystoneV2Driver(manager_congress.ScenarioPolicyBase):
|
|||
|
||||
results = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'keystone', 'tenants'))
|
||||
self.datasource_id, 'tenants'))
|
||||
for row in results['results']:
|
||||
try:
|
||||
tenant_row = tenants_map[row['data'][tenant_id_col]]
|
||||
|
|
|
@ -44,6 +44,8 @@ class TestNeutronV2Driver(manager_congress.ScenarioPolicyBase):
|
|||
raise cls.skipException(skip_msg)
|
||||
cls.os = clients.Manager(cls.admin_credentials())
|
||||
cls.neutron_client = cls.os.network_client
|
||||
cls.datasource_id = manager_congress.get_datasource_id(
|
||||
cls.admin_manager.congress_client, 'neutronv2')
|
||||
|
||||
@test.attr(type='smoke')
|
||||
@test.services('network')
|
||||
|
@ -55,12 +57,12 @@ class TestNeutronV2Driver(manager_congress.ScenarioPolicyBase):
|
|||
|
||||
network_schema = (
|
||||
self.admin_manager.congress_client.show_datasource_table_schema(
|
||||
'neutronv2', 'networks')['columns'])
|
||||
self.datasource_id, 'networks')['columns'])
|
||||
|
||||
def _check_data():
|
||||
results = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'neutronv2', 'networks'))
|
||||
self.datasource_id, 'networks'))
|
||||
for row in results['results']:
|
||||
network_row = network_map[row['data'][0]]
|
||||
for index in range(len(network_schema)):
|
||||
|
@ -84,26 +86,26 @@ class TestNeutronV2Driver(manager_congress.ScenarioPolicyBase):
|
|||
|
||||
port_schema = (
|
||||
self.admin_manager.congress_client.show_datasource_table_schema(
|
||||
'neutronv2', 'ports')['columns'])
|
||||
self.datasource_id, 'ports')['columns'])
|
||||
|
||||
port_sec_binding_schema = (
|
||||
self.admin_manager.congress_client.show_datasource_table_schema(
|
||||
'neutronv2', 'security_group_port_bindings')['columns'])
|
||||
self.datasource_id, 'security_group_port_bindings')['columns'])
|
||||
|
||||
fixed_ips_schema = (
|
||||
self.admin_manager.congress_client.show_datasource_table_schema(
|
||||
'neutronv2', 'fixed_ips')['columns'])
|
||||
self.datasource_id, 'fixed_ips')['columns'])
|
||||
|
||||
def _check_data():
|
||||
ports = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'neutronv2', 'ports'))
|
||||
self.datasource_id, 'ports'))
|
||||
security_group_port_bindings = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'neutronv2', 'security_group_port_bindings'))
|
||||
self.datasource_id, 'security_group_port_bindings'))
|
||||
fixed_ips = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'neutronv2', 'fixed_ips'))
|
||||
self.datasource_id, 'fixed_ips'))
|
||||
|
||||
# Validate ports table
|
||||
for row in ports['results']:
|
||||
|
@ -159,33 +161,33 @@ class TestNeutronV2Driver(manager_congress.ScenarioPolicyBase):
|
|||
|
||||
subnet_schema = (
|
||||
self.admin_manager.congress_client.show_datasource_table_schema(
|
||||
'neutronv2', 'subnets')['columns'])
|
||||
self.datasource_id, 'subnets')['columns'])
|
||||
|
||||
host_routes_schema = (
|
||||
self.admin_manager.congress_client.show_datasource_table_schema(
|
||||
'neutronv2', 'host_routes')['columns'])
|
||||
self.datasource_id, 'host_routes')['columns'])
|
||||
|
||||
dns_nameservers_schema = (
|
||||
self.admin_manager.congress_client.show_datasource_table_schema(
|
||||
'neutronv2', 'dns_nameservers')['columns'])
|
||||
self.datasource_id, 'dns_nameservers')['columns'])
|
||||
|
||||
allocation_pools_schema = (
|
||||
self.admin_manager.congress_client.show_datasource_table_schema(
|
||||
'neutronv2', 'allocation_pools')['columns'])
|
||||
self.datasource_id, 'allocation_pools')['columns'])
|
||||
|
||||
def _check_data():
|
||||
subnets = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'neutronv2', 'subnets'))
|
||||
self.datasource_id, 'subnets'))
|
||||
host_routes = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'neutronv2', 'host_routes'))
|
||||
self.datasource_id, 'host_routes'))
|
||||
dns_nameservers = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'neutronv2', 'dns_nameservers'))
|
||||
self.datasource_id, 'dns_nameservers'))
|
||||
allocation_pools = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'neutronv2', 'allocation_pools'))
|
||||
self.datasource_id, 'allocation_pools'))
|
||||
|
||||
# Validate subnets table
|
||||
for row in subnets['results']:
|
||||
|
@ -252,20 +254,20 @@ class TestNeutronV2Driver(manager_congress.ScenarioPolicyBase):
|
|||
|
||||
router_schema = (
|
||||
self.admin_manager.congress_client.show_datasource_table_schema(
|
||||
'neutronv2', 'routers')['columns'])
|
||||
self.datasource_id, 'routers')['columns'])
|
||||
|
||||
ext_gw_info_schema = (
|
||||
self.admin_manager.congress_client.show_datasource_table_schema(
|
||||
'neutronv2', 'external_gateway_infos')['columns'])
|
||||
self.datasource_id, 'external_gateway_infos')['columns'])
|
||||
|
||||
def _check_data():
|
||||
routers = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'neutronv2', 'routers'))
|
||||
self.datasource_id, 'routers'))
|
||||
|
||||
ext_gw_info = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'neutronv2', 'external_gateway_infos'))
|
||||
self.datasource_id, 'external_gateway_infos'))
|
||||
|
||||
# Validate routers table
|
||||
for row in routers['results']:
|
||||
|
@ -303,12 +305,12 @@ class TestNeutronV2Driver(manager_congress.ScenarioPolicyBase):
|
|||
|
||||
sg_schema = (
|
||||
self.admin_manager.congress_client.show_datasource_table_schema(
|
||||
'neutronv2', 'security_groups')['columns'])
|
||||
self.datasource_id, 'security_groups')['columns'])
|
||||
|
||||
def _check_data():
|
||||
security_groups = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'neutronv2', 'security_groups'))
|
||||
self.datasource_id, 'security_groups'))
|
||||
|
||||
# Validate security_group table
|
||||
for row in security_groups['results']:
|
||||
|
@ -336,12 +338,12 @@ class TestNeutronV2Driver(manager_congress.ScenarioPolicyBase):
|
|||
|
||||
sgrs_schema = (
|
||||
self.admin_manager.congress_client.show_datasource_table_schema(
|
||||
'neutronv2', 'security_group_rules')['columns'])
|
||||
self.datasource_id, 'security_group_rules')['columns'])
|
||||
|
||||
def _check_data():
|
||||
security_group_rules = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'neutronv2', 'security_group_rules'))
|
||||
self.datasource_id, 'security_group_rules'))
|
||||
|
||||
# Validate security_group_rules table
|
||||
for row in security_group_rules['results']:
|
||||
|
|
|
@ -39,6 +39,8 @@ class TestNovaDriver(manager_congress.ScenarioPolicyBase):
|
|||
super(TestNovaDriver, self).setUp()
|
||||
self.keypairs = {}
|
||||
self.servers = []
|
||||
self.datasource_id = manager_congress.get_datasource_id(
|
||||
self.admin_manager.congress_client, 'nova')
|
||||
|
||||
@test.attr(type='smoke')
|
||||
@test.services('compute', 'network')
|
||||
|
@ -47,7 +49,7 @@ class TestNovaDriver(manager_congress.ScenarioPolicyBase):
|
|||
|
||||
server_schema = (
|
||||
self.admin_manager.congress_client.show_datasource_table_schema(
|
||||
'nova', 'servers')['columns'])
|
||||
self.datasource_id, 'servers')['columns'])
|
||||
# Convert some of the column names.
|
||||
|
||||
def convert_col(col):
|
||||
|
@ -65,7 +67,7 @@ class TestNovaDriver(manager_congress.ScenarioPolicyBase):
|
|||
def _check_data_table_nova_servers():
|
||||
results = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'nova', 'servers'))
|
||||
self.datasource_id, 'servers'))
|
||||
for row in results['results']:
|
||||
match = True
|
||||
for index in range(len(keys)):
|
||||
|
@ -99,7 +101,7 @@ class TestNovaDriver(manager_congress.ScenarioPolicyBase):
|
|||
|
||||
results = (
|
||||
self.admin_manager.congress_client.list_datasource_rows(
|
||||
'nova', 'flavors'))
|
||||
self.datasource_id, 'flavors'))
|
||||
# TODO(alexsyip): Not sure what the following OS-FLV-EXT-DATA:
|
||||
# prefix is for.
|
||||
keys = ['id', 'name', 'vcpus', 'ram', 'disk',
|
||||
|
|
|
@ -33,6 +33,14 @@ Floating_IP_tuple = collections.namedtuple('Floating_IP_tuple',
|
|||
['floating_ip', 'server'])
|
||||
|
||||
|
||||
def get_datasource_id(client, name):
|
||||
datasources = client.list_datasources()
|
||||
for datasource in datasources['results']:
|
||||
if datasource['name'] == name:
|
||||
return datasource['id']
|
||||
raise Exception("Datasource %s not found." % name)
|
||||
|
||||
|
||||
# Note: these tests all use neutron today so we mix with that.
|
||||
class ScenarioPolicyBase(manager.NetworkScenarioTest):
|
||||
@classmethod
|
||||
|
|
|
@ -40,6 +40,10 @@
|
|||
# Supported values are 'keystone'(default), 'noauth'.
|
||||
# auth_strategy = keystone
|
||||
|
||||
# List of datasource driver class paths to import.
|
||||
# For example: congress.datasources.neutronv2_driver.NeutronV2Driver, etc
|
||||
# datasource_drivers = []
|
||||
|
||||
[keystone_authtoken]
|
||||
auth_host = 127.0.0.1
|
||||
auth_port = 35357
|
||||
|
|
|
@ -1,77 +0,0 @@
|
|||
[neutronv2]
|
||||
module: datasources/neutronv2_driver.py
|
||||
username: admin
|
||||
password: password
|
||||
auth_url: http://127.0.0.1:5000/v2.0
|
||||
tenant_name: admin
|
||||
|
||||
[nova]
|
||||
module: datasources/nova_driver.py
|
||||
username: admin
|
||||
password: password
|
||||
auth_url: http://127.0.0.1:5000/v2.0
|
||||
tenant_name: admin
|
||||
|
||||
[keystone]
|
||||
module: datasources/keystone_driver.py
|
||||
username: admin
|
||||
password: password
|
||||
auth_url: http://127.0.0.1:5000/v2.0
|
||||
tenant_name: admin
|
||||
|
||||
[ceilometer]
|
||||
module: datasources/ceilometer_driver.py
|
||||
username: admin
|
||||
password: password
|
||||
auth_url: http://127.0.0.1:5000/v2.0
|
||||
tenant_name: admin
|
||||
|
||||
[cinder]
|
||||
module: datasources/cinder_driver.py
|
||||
username: admin
|
||||
password: password
|
||||
auth_url: http://127.0.0.1:5000/v2.0
|
||||
tenant_name: admin
|
||||
|
||||
[swift]
|
||||
module: datasources/swift_driver.py
|
||||
username: admin
|
||||
password: password
|
||||
auth_url: http://127.0.0.1:5000/v2.0
|
||||
tenant_name: admin
|
||||
|
||||
[glancev2]
|
||||
module: datasources/glancev2_driver.py
|
||||
username: admin
|
||||
password: password
|
||||
auth_url: http://127.0.0.1:5000/v2.0
|
||||
tenant_name: admin
|
||||
|
||||
#[plexxi]
|
||||
#module:datasources/plexxi_driver.py
|
||||
#username: plexxiCore_username
|
||||
#password: plexxiCore_password
|
||||
#auth_url: http://PlexxiCoreURL/PlexxiCore/api
|
||||
#unique_names: False
|
||||
|
||||
#[vCenter]
|
||||
#module:datasources/vCenter_driver.py
|
||||
#username: vCenter_username
|
||||
#password: vCenter_password
|
||||
#auth_url: vCenterURL
|
||||
#max_Hosts: 999
|
||||
#max_VMs: 999
|
||||
|
||||
[murano]
|
||||
module: datasources/murano_driver.py
|
||||
username: admin
|
||||
password: password
|
||||
auth_url: http://127.0.0.1:5000/v2.0
|
||||
tenant_name: admin
|
||||
|
||||
[cloudfoundryv2]
|
||||
module: datasources/cloudfoundryv2_driver.py
|
||||
username: foo@bar.com
|
||||
password: cloudfoundry_password
|
||||
auth_url: https://api.run.pivotal.io/
|
||||
tenant_name: foo@bar.com
|
|
@ -23,6 +23,7 @@ six>=1.7.0
|
|||
oslo.config>=1.6.0 # Apache-2.0
|
||||
oslo.db>=1.4.1 # Apache-2.0
|
||||
oslo.serialization>=1.2.0 # Apache-2.0
|
||||
oslo.utils>=1.2.0 # Apache-2.0
|
||||
oslo.middleware>=0.3.0 # Apache-2.0
|
||||
oslo.vmware>=0.9.0 # Apache-2.0
|
||||
WebOb>=1.2.3
|
||||
|
|
Loading…
Reference in New Issue