Merge "Supplementing accelerator CRUD api"

This commit is contained in:
Zuul 2018-01-17 15:47:59 +00:00 committed by Gerrit Code Review
commit 52e31bb6bb
19 changed files with 791 additions and 55 deletions

View File

@ -25,3 +25,9 @@ class APIBase(wtypes.Base):
updated_at = wsme.wsattr(datetime.datetime, readonly=True)
"""The time in UTC at which the object is updated"""
def as_dict(self):
    """Return a dict of this object's set fields.

    Fields that are absent on the instance or still wsme.Unset are
    omitted from the result.
    """
    present = {}
    for name in self.fields:
        if not hasattr(self, name):
            continue
        value = getattr(self, name)
        if value != wsme.Unset:
            present[name] = value
    return present

View File

@ -22,7 +22,9 @@ from wsme import types as wtypes
from cyborg.api.controllers import base
from cyborg.api.controllers import link
from cyborg.api.controllers.v1 import types
from cyborg.api.controllers.v1 import utils as api_utils
from cyborg.api import expose
from cyborg.common import exception
from cyborg.common import policy
from cyborg import objects
@ -36,41 +38,90 @@ class Accelerator(base.APIBase):
"""
uuid = types.uuid
"""The UUID of the accelerator"""
name = wtypes.text
"""The name of the accelerator"""
description = wtypes.text
"""The description of the accelerator"""
project_id = types.uuid
"""The project UUID of the accelerator"""
user_id = types.uuid
"""The user UUID of the accelerator"""
device_type = wtypes.text
"""The device type of the accelerator"""
acc_type = wtypes.text
"""The type of the accelerator"""
acc_capability = wtypes.text
"""The capability of the accelerator"""
vendor_id = wtypes.text
"""The vendor id of the accelerator"""
product_id = wtypes.text
"""The product id of the accelerator"""
remotable = wtypes.IntegerType()
"""Whether the accelerator is remotable"""
links = wsme.wsattr([link.Link], readonly=True)
"""A list containing a self link"""
def __init__(self, **kwargs):
    """Mirror every objects.Accelerator field onto this API object.

    Fields not supplied in kwargs are set to wtypes.Unset so that
    as_dict() skips them.
    """
    super(Accelerator, self).__init__(**kwargs)
    self.fields = []
    for field in objects.Accelerator.fields:
        self.fields.append(field)
        setattr(self, field, kwargs.get(field, wtypes.Unset))
@classmethod
def convert_with_links(cls, acc_obj):
accelerator = Accelerator(**acc_obj.as_dict())
def convert_with_links(cls, obj_acc):
api_acc = cls(**obj_acc.as_dict())
url = pecan.request.public_url
accelerator.links = [
link.Link.make_link('self', url, 'accelerators',
accelerator.uuid),
link.Link.make_link('bookmark', url, 'accelerators',
accelerator.uuid, bookmark=True)
api_acc.links = [
link.Link.make_link('self', url, 'accelerators', api_acc.uuid),
link.Link.make_link('bookmark', url, 'accelerators', api_acc.uuid,
bookmark=True)
]
return api_acc
return accelerator
class AcceleratorCollection(base.APIBase):
    """API representation of a collection of accelerators."""

    accelerators = [Accelerator]
    """A list containing accelerator objects"""

    @classmethod
    def convert_with_links(cls, obj_accs):
        """Build a collection whose members carry self/bookmark links."""
        collection = cls()
        collection.accelerators = []
        for obj_acc in obj_accs:
            collection.accelerators.append(
                Accelerator.convert_with_links(obj_acc))
        return collection
class AcceleratorPatchType(types.JsonPatchType):
    """JSON patch type customized for Accelerator resources."""

    _api_base = Accelerator

    @staticmethod
    def internal_attrs():
        # Extend the common internal attributes with accelerator fields
        # that must never be modified through the PATCH API.
        defaults = types.JsonPatchType.internal_attrs()
        return defaults + ['/project_id', '/user_id', '/device_type',
                           '/acc_type', '/acc_capability', '/vendor_id',
                           '/product_id', '/remotable']
class AcceleratorsControllerBase(rest.RestController):
    """Shared plumbing for accelerator REST controllers."""

    # Cached accelerator object for the current request, populated by
    # _get_resource() so handlers can reuse a lookup already done.
    _resource = None

    def _get_resource(self, uuid):
        """Load the accelerator identified by uuid and cache it."""
        self._resource = objects.Accelerator.get(pecan.request.context, uuid)
        return self._resource
@ -82,16 +133,100 @@ class AcceleratorsController(AcceleratorsControllerBase):
@policy.authorize_wsgi("cyborg:accelerator", "create", False)
@expose.expose(Accelerator, body=types.jsontype,
status_code=http_client.CREATED)
def post(self, accelerator):
def post(self, acc):
"""Create a new accelerator.
:param accelerator: an accelerator within the request body.
:param acc: an accelerator within the request body.
"""
context = pecan.request.context
rpc_acc = objects.Accelerator(context, **accelerator)
new_acc = pecan.request.conductor_api.accelerator_create(
context, rpc_acc)
obj_acc = objects.Accelerator(context, **acc)
new_acc = pecan.request.conductor_api.accelerator_create(context,
obj_acc)
# Set the HTTP Location Header
pecan.response.location = link.build_url('accelerators',
new_acc.uuid)
pecan.response.location = link.build_url('accelerators', new_acc.uuid)
return Accelerator.convert_with_links(new_acc)
@policy.authorize_wsgi("cyborg:accelerator", "get")
@expose.expose(Accelerator, types.uuid)
def get_one(self, uuid):
    """Retrieve information about the given accelerator.

    :param uuid: UUID of an accelerator.
    :returns: the accelerator rendered with self/bookmark links.
    """
    # _resource may already be populated if the policy layer loaded the
    # object to authorize the request; avoid a second DB hit.
    obj_acc = self._resource or self._get_resource(uuid)
    return Accelerator.convert_with_links(obj_acc)
# NOTE(review): unlike the other handlers, get_all carries no
# policy.authorize_wsgi decorator — confirm listing is intentionally
# unprotected.
@expose.expose(AcceleratorCollection, int, types.uuid, wtypes.text,
               wtypes.text, types.boolean)
def get_all(self, limit=None, marker=None, sort_key='id', sort_dir='asc',
            all_tenants=None):
    """Retrieve a list of accelerators.

    :param limit: Optional, to determinate the maximum number of
        accelerators to return.
    :param marker: Optional, to display a list of accelerators after this
        marker.
    :param sort_key: Optional, to sort the returned accelerators list by
        this specified key value.
    :param sort_dir: Optional, to return a list of accelerators with this
        sort direction.
    :param all_tenants: Optional, allows administrators to see the
        accelerators owned by all tenants, otherwise only the
        accelerators associated with the calling tenant are included in
        the response.
    """
    context = pecan.request.context
    # Only admins may request an unscoped listing; everyone else is
    # restricted to their own project.
    project_only = True
    if context.is_admin and all_tenants:
        project_only = False
    marker_obj = None
    if marker:
        marker_obj = objects.Accelerator.get(context, marker)
    obj_accs = objects.Accelerator.list(context, limit, marker_obj,
                                        sort_key, sort_dir, project_only)
    return AcceleratorCollection.convert_with_links(obj_accs)
@policy.authorize_wsgi("cyborg:accelerator", "update")
@expose.expose(Accelerator, types.uuid, body=[AcceleratorPatchType])
def patch(self, uuid, patch):
    """Update an accelerator.

    :param uuid: UUID of an accelerator.
    :param patch: a json PATCH document to apply to this accelerator.
    :raises: PatchError if the patch document cannot be applied.
    :returns: the updated accelerator with links.
    """
    obj_acc = self._resource or self._get_resource(uuid)
    try:
        api_acc = Accelerator(
            **api_utils.apply_jsonpatch(obj_acc.as_dict(), patch))
    except api_utils.JSONPATCH_EXCEPTIONS as e:
        raise exception.PatchError(patch=patch, reason=e)
    # Update only the fields that have changed
    for field in objects.Accelerator.fields:
        try:
            patch_val = getattr(api_acc, field)
        except AttributeError:
            # Ignore fields that aren't exposed in the API
            continue
        # A removed attribute comes back as Unset; persist it as None.
        if patch_val == wtypes.Unset:
            patch_val = None
        if obj_acc[field] != patch_val:
            obj_acc[field] = patch_val
    context = pecan.request.context
    new_acc = pecan.request.conductor_api.accelerator_update(context,
                                                             obj_acc)
    return Accelerator.convert_with_links(new_acc)
@policy.authorize_wsgi("cyborg:accelerator", "delete")
@expose.expose(None, types.uuid, status_code=http_client.NO_CONTENT)
def delete(self, uuid):
    """Delete an accelerator.

    :param uuid: UUID of an accelerator.
    """
    obj_acc = self._resource or self._get_resource(uuid)
    context = pecan.request.context
    # Deletion goes through the conductor so the API service never
    # touches the DB directly.
    pecan.request.conductor_api.accelerator_delete(context, obj_acc)

View File

@ -13,12 +13,16 @@
# License for the specific language governing permissions and limitations
# under the License.
import inspect
import json
from oslo_utils import strutils
from oslo_utils import uuidutils
import wsme
from wsme import types as wtypes
from cyborg.common import exception
from cyborg.common.i18n import _
class UUIDType(wtypes.UserType):
@ -60,5 +64,98 @@ class JsonType(wtypes.UserType):
return JsonType.validate(value)
class BooleanType(wtypes.UserType):
    """A simple boolean type."""

    basetype = wtypes.text
    name = 'boolean'

    @staticmethod
    def validate(value):
        """Coerce value to a bool, accepting the usual string spellings."""
        try:
            return strutils.bool_from_string(value, strict=True)
        except ValueError as e:
            # raise Invalid to return 400 (BadRequest) in the API
            raise exception.Invalid(e)

    @staticmethod
    def frombasetype(value):
        """Convert from the wire type; None passes through unchanged."""
        if value is None:
            return None
        return BooleanType.validate(value)
# Singleton type instances used by the API controllers as wsme
# attribute/parameter types.
uuid = UUIDType()
jsontype = JsonType()
boolean = BooleanType()
class JsonPatchType(wtypes.Base):
    """A complex type that represents a single json-patch operation."""

    # Raw string literal: '\w' is an invalid escape sequence in a plain
    # string and is deprecated (SyntaxWarning) in Python 3.
    path = wtypes.wsattr(wtypes.StringType(pattern=r'^(/[\w-]+)+$'),
                         mandatory=True)
    op = wtypes.wsattr(wtypes.Enum(str, 'add', 'replace', 'remove'),
                       mandatory=True)
    value = wtypes.wsattr(jsontype, default=wtypes.Unset)

    # The class of the objects being patched. Override this in subclasses.
    # Should probably be a subclass of cyborg.api.controllers.base.APIBase.
    _api_base = None

    # Attributes that are not required for construction, but which may not be
    # removed if set. Override in subclasses if needed.
    _extra_non_removable_attrs = set()

    # Set of non-removable attributes, calculated lazily.
    _non_removable_attrs = None

    @staticmethod
    def internal_attrs():
        """Returns a list of internal attributes.

        Internal attributes can't be added, replaced or removed. This
        method may be overwritten by derived class.
        """
        return ['/created_at', '/id', '/links', '/updated_at', '/uuid']

    @classmethod
    def non_removable_attrs(cls):
        """Returns a set of names of attributes that may not be removed.

        Attributes whose 'mandatory' property is True are automatically added
        to this set. To add additional attributes to the set, override the
        field _extra_non_removable_attrs in subclasses, with a set of the form
        {'/foo', '/bar'}.
        """
        if cls._non_removable_attrs is None:
            cls._non_removable_attrs = cls._extra_non_removable_attrs.copy()
            if cls._api_base:
                fields = inspect.getmembers(cls._api_base,
                                            lambda a: not inspect.isroutine(a))
                for name, field in fields:
                    if getattr(field, 'mandatory', False):
                        cls._non_removable_attrs.add('/%s' % name)
        return cls._non_removable_attrs

    @staticmethod
    def validate(patch):
        """Validate one patch operation and return it as a plain dict.

        :raises: wsme.exc.ClientSideError if the operation targets an
            internal attribute, removes a mandatory attribute, or is an
            add/replace with no value.
        """
        # Only the first path segment decides whether this is internal.
        _path = '/' + patch.path.split('/')[1]
        if _path in patch.internal_attrs():
            msg = _("'%s' is an internal attribute and can not be updated")
            raise wsme.exc.ClientSideError(msg % patch.path)

        if patch.path in patch.non_removable_attrs() and patch.op == 'remove':
            msg = _("'%s' is a mandatory attribute and can not be removed")
            raise wsme.exc.ClientSideError(msg % patch.path)

        if patch.op != 'remove':
            if patch.value is wsme.Unset:
                msg = _("'add' and 'replace' operations need a value")
                raise wsme.exc.ClientSideError(msg)

        ret = {'path': patch.path, 'op': patch.op}
        if patch.value is not wsme.Unset:
            ret['value'] = patch.value
        return ret

View File

@ -0,0 +1,35 @@
# Copyright 2017 Huawei Technologies Co.,LTD.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import jsonpatch
import wsme
from cyborg.common.i18n import _
# Exceptions jsonpatch may raise for a malformed or inapplicable patch;
# the API controller catches these and wraps them in PatchError.
JSONPATCH_EXCEPTIONS = (jsonpatch.JsonPatchException,
                        jsonpatch.JsonPointerException,
                        KeyError)
def apply_jsonpatch(doc, patch):
    """Apply a JSON patch to doc, rejecting root-level attribute additions.

    :param doc: resource dict to patch.
    :param patch: list of json-patch operation dicts.
    :returns: the patched document.
    :raises: wsme.exc.ClientSideError if an 'add' operation targets an
        attribute that does not already exist at the document root.
    """
    for p in patch:
        if p['op'] == 'add' and p['path'].count('/') == 1:
            if p['path'].lstrip('/') not in doc:
                # Fix: the concatenated fragments previously rendered a
                # double space ("of  the") in the error message.
                msg = _('Adding a new attribute (%s) to the root of '
                        'the resource is not allowed')
                raise wsme.exc.ClientSideError(msg % p['path'])
    return jsonpatch.apply_patch(doc, jsonpatch.JsonPatch(patch))

View File

@ -111,6 +111,16 @@ class InvalidJsonType(Invalid):
_msg_fmt = _("%(value)s is not JSON serializable.")
# Cannot be templated as the error syntax varies.
# msg needs to be constructed when raised.
class InvalidParameterValue(Invalid):
    # Message text is supplied entirely by the caller via the 'err' kwarg.
    _msg_fmt = _("%(err)s")
class PatchError(Invalid):
    # Raised when a JSON patch document cannot be applied to a resource.
    _msg_fmt = _("Couldn't apply patch '%(patch)s'. Reason: %(reason)s")
class NotAuthorized(CyborgException):
    # Maps to HTTP 403 Forbidden.
    _msg_fmt = _("Not authorized.")
    code = http_client.FORBIDDEN
@ -118,3 +128,21 @@ class NotAuthorized(CyborgException):
class HTTPForbidden(NotAuthorized):
    # 403 variant that names the resource access was denied to.
    _msg_fmt = _("Access was denied to the following resource: %(resource)s")
class NotFound(CyborgException):
    # Base class for lookup failures; maps to HTTP 404 Not Found.
    _msg_fmt = _("Resource could not be found.")
    code = http_client.NOT_FOUND
class AcceleratorNotFound(NotFound):
    # 404 for a missing accelerator, keyed by its UUID.
    _msg_fmt = _("Accelerator %(uuid)s could not be found.")
class Conflict(CyborgException):
    # Base class for state conflicts; maps to HTTP 409 Conflict.
    _msg_fmt = _('Conflict.')
    code = http_client.CONFLICT
class DuplicateName(Conflict):
    # 409 raised when an accelerator name collides with an existing one.
    _msg_fmt = _("An accelerator with name %(name)s already exists.")

View File

@ -16,6 +16,7 @@
import oslo_messaging as messaging
from cyborg.conf import CONF
from cyborg import objects
class ConductorManager(object):
@ -32,12 +33,35 @@ class ConductorManager(object):
def periodic_tasks(self, context, raise_on_error=False):
    """Periodic task hook; currently a no-op."""
    pass
def accelerator_create(self, context, acc_obj):
def accelerator_create(self, context, obj_acc):
"""Create a new accelerator.
:param context: request context.
:param acc_obj: a changed (but not saved) accelerator object.
:param obj_acc: a changed (but not saved) accelerator object.
:returns: created accelerator object.
"""
acc_obj.create()
return acc_obj
base_options = {
'project_id': context.tenant,
'user_id': context.user
}
obj_acc.update(base_options)
obj_acc.create(context)
return obj_acc
def accelerator_update(self, context, obj_acc):
    """Update an accelerator.

    :param context: request context.
    :param obj_acc: an accelerator object to update.
    :returns: updated accelerator object.
    """
    # save() persists the object's pending changes through the DB API.
    obj_acc.save(context)
    return obj_acc
def accelerator_delete(self, context, obj_acc):
    """Delete an accelerator.

    :param context: request context.
    :param obj_acc: an accelerator object to delete.
    """
    # destroy() removes the DB row identified by the object's uuid.
    obj_acc.destroy(context)

View File

@ -47,12 +47,31 @@ class ConductorAPI(object):
version_cap=self.RPC_API_VERSION,
serializer=serializer)
def accelerator_create(self, context, acc_obj):
def accelerator_create(self, context, obj_acc):
"""Signal to conductor service to create an accelerator.
:param context: request context.
:param acc_obj: a created (but not saved) accelerator object.
:param obj_acc: a created (but not saved) accelerator object.
:returns: created accelerator object.
"""
cctxt = self.client.prepare(topic=self.topic, server=CONF.host)
return cctxt.call(context, 'accelerator_create', acc_obj=acc_obj)
return cctxt.call(context, 'accelerator_create', obj_acc=obj_acc)
def accelerator_update(self, context, obj_acc):
    """Signal to conductor service to update an accelerator.

    :param context: request context.
    :param obj_acc: an accelerator object to update.
    :returns: updated accelerator object.
    """
    cctxt = self.client.prepare(topic=self.topic, server=CONF.host)
    # Synchronous RPC call: blocks until the conductor returns the
    # updated object.
    return cctxt.call(context, 'accelerator_update', obj_acc=obj_acc)
def accelerator_delete(self, context, obj_acc):
    """Signal to conductor service to delete an accelerator.

    :param context: request context.
    :param obj_acc: an accelerator object to delete.
    """
    cctxt = self.client.prepare(topic=self.topic, server=CONF.host)
    # Synchronous call (not cast) so failures propagate to the API layer.
    cctxt.call(context, 'accelerator_delete', obj_acc=obj_acc)

View File

@ -44,4 +44,21 @@ class Connection(object):
# accelerator
@abc.abstractmethod
def accelerator_create(self, context, values):
"""Create a new server type."""
"""Create a new accelerator."""
@abc.abstractmethod
def accelerator_get(self, context, uuid):
    """Get requested accelerator.

    :param uuid: UUID of the accelerator to fetch.
    """
@abc.abstractmethod
def accelerator_list(self, context, limit, marker, sort_key, sort_dir,
                     project_only):
    """Get requested list of accelerators.

    :param project_only: when True, restrict results to the caller's
        project.
    """
@abc.abstractmethod
def accelerator_update(self, context, uuid, values):
    """Update an accelerator.

    :param values: dict of fields to change.
    """
@abc.abstractmethod
def accelerator_destroy(self, context, uuid):
    """Delete an accelerator.

    :param uuid: UUID of the accelerator to remove.
    """

View File

@ -17,14 +17,17 @@
import threading
from oslo_db import api as oslo_db_api
from oslo_db import exception as db_exc
from oslo_db.sqlalchemy import enginefacade
from oslo_db.sqlalchemy import utils as sqlalchemyutils
from oslo_log import log
from oslo_utils import strutils
from oslo_utils import uuidutils
from sqlalchemy.orm.exc import NoResultFound
from cyborg.common import exception
from cyborg.common.i18n import _
from cyborg.db import api
from cyborg.db.sqlalchemy import models
@ -89,6 +92,21 @@ def add_identity_filter(query, value):
raise exception.InvalidIdentity(identity=value)
def _paginate_query(context, model, limit, marker, sort_key, sort_dir, query):
    """Apply sorting/pagination to query and return the result rows.

    :param limit: maximum number of rows, or None for no limit.
    :param marker: DB model instance to start after, or None.
    :param sort_key: column to sort by; 'id' is always used as the final
        tie-breaker.
    :param sort_dir: 'asc' or 'desc'.
    :raises: InvalidParameterValue if sort_key is not a sortable column.
    """
    sort_keys = ['id']
    if sort_key and sort_key not in sort_keys:
        sort_keys.insert(0, sort_key)
    try:
        query = sqlalchemyutils.paginate_query(query, model, limit, sort_keys,
                                               marker=marker,
                                               sort_dir=sort_dir)
    except db_exc.InvalidSortKey:
        raise exception.InvalidParameterValue(
            _('The sort_key value "%(key)s" is an invalid field for sorting')
            % {'key': sort_key})
    return query.all()
class Connection(api.Connection):
"""SqlAlchemy connection."""
@ -99,9 +117,6 @@ class Connection(api.Connection):
if not values.get('uuid'):
values['uuid'] = uuidutils.generate_uuid()
if not values.get('description'):
values['description'] = ''
accelerator = models.Accelerator()
accelerator.update(values)
@ -112,3 +127,52 @@ class Connection(api.Connection):
except db_exc.DBDuplicateEntry:
raise exception.AcceleratorAlreadyExists(uuid=values['uuid'])
return accelerator
def accelerator_get(self, context, uuid):
    """Return the accelerator row matching uuid.

    :raises: AcceleratorNotFound if no row matches.
    """
    query = model_query(context, models.Accelerator)
    query = query.filter_by(uuid=uuid)
    try:
        return query.one()
    except NoResultFound:
        raise exception.AcceleratorNotFound(uuid=uuid)
def accelerator_list(self, context, limit, marker, sort_key, sort_dir,
                     project_only):
    """Return a paginated, sorted list of accelerator rows.

    :param project_only: when True, scope the query to the context's
        project.
    """
    query = model_query(context, models.Accelerator,
                        project_only=project_only)
    return _paginate_query(context, models.Accelerator, limit, marker,
                           sort_key, sort_dir, query)
def accelerator_update(self, context, uuid, values):
    """Update fields of the accelerator identified by uuid.

    :param values: dict of column updates; must not contain 'uuid'.
    :raises: InvalidParameterValue if 'uuid' is present in values.
    :raises: DuplicateName if the new name collides with another row.
    """
    if 'uuid' in values:
        msg = _("Cannot overwrite UUID for an existing Accelerator.")
        raise exception.InvalidParameterValue(err=msg)

    try:
        return self._do_update_accelerator(context, uuid, values)
    except db_exc.DBDuplicateEntry as e:
        if 'name' in e.columns:
            raise exception.DuplicateName(name=values['name'])
        # Fix: previously a duplicate-entry error on any other column was
        # silently swallowed and None was returned; re-raise it instead.
        raise
@oslo_db_api.retry_on_deadlock
def _do_update_accelerator(self, context, uuid, values):
    """Apply values to the accelerator row under a write session.

    :raises: AcceleratorNotFound if no row matches uuid.
    """
    with _session_for_write():
        query = model_query(context, models.Accelerator)
        query = add_identity_filter(query, uuid)
        try:
            # NOTE(review): with_lockmode() is deprecated in newer
            # SQLAlchemy in favor of with_for_update() — confirm against
            # the pinned SQLAlchemy version.
            ref = query.with_lockmode('update').one()
        except NoResultFound:
            raise exception.AcceleratorNotFound(uuid=uuid)

        ref.update(values)
    return ref
@oslo_db_api.retry_on_deadlock
def accelerator_destroy(self, context, uuid):
    """Delete the accelerator row identified by uuid.

    :raises: AcceleratorNotFound if no row was deleted.
    """
    with _session_for_write():
        query = model_query(context, models.Accelerator)
        query = add_identity_filter(query, uuid)
        count = query.delete()
        if count != 1:
            raise exception.AcceleratorNotFound(uuid=uuid)

View File

@ -13,6 +13,7 @@
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from oslo_versionedobjects import base as object_base
from cyborg.db import api as dbapi
@ -20,6 +21,9 @@ from cyborg.objects import base
from cyborg.objects import fields as object_fields
LOG = logging.getLogger(__name__)
@base.CyborgObjectRegistry.register
class Accelerator(base.CyborgObject, object_base.VersionedObjectDictCompat):
# Version 1.0: Initial version
@ -41,11 +45,33 @@ class Accelerator(base.CyborgObject, object_base.VersionedObjectDictCompat):
'remotable': object_fields.IntegerField(nullable=False),
}
def __init__(self, *args, **kwargs):
    # No accelerator-specific initialization; delegate to the base object.
    super(Accelerator, self).__init__(*args, **kwargs)
def create(self, context=None):
def create(self, context):
"""Create an Accelerator record in the DB."""
values = self.obj_get_changes()
db_accelerator = self.dbapi.accelerator_create(context, values)
self._from_db_object(context, self, db_accelerator)
db_acc = self.dbapi.accelerator_create(context, values)
self._from_db_object(self, db_acc)
@classmethod
def get(cls, context, uuid):
    """Find a DB Accelerator and return an Obj Accelerator.

    :raises: AcceleratorNotFound (from the DB layer) if uuid is unknown.
    """
    db_acc = cls.dbapi.accelerator_get(context, uuid)
    obj_acc = cls._from_db_object(cls(context), db_acc)
    return obj_acc
@classmethod
def list(cls, context, limit, marker, sort_key, sort_dir, project_only):
    """Return a list of Accelerator objects.

    :param project_only: when True, restrict results to the context's
        project.
    """
    db_accs = cls.dbapi.accelerator_list(context, limit, marker, sort_key,
                                         sort_dir, project_only)
    return cls._from_db_object_list(db_accs, context)
def save(self, context):
    """Update an Accelerator record in the DB.

    Only fields reported changed by obj_get_changes() are written.
    """
    updates = self.obj_get_changes()
    db_acc = self.dbapi.accelerator_update(context, self.uuid, updates)
    self._from_db_object(self, db_acc)
def destroy(self, context):
    """Delete the Accelerator from the DB."""
    self.dbapi.accelerator_destroy(context, self.uuid)
    # Clear change tracking: the object no longer has pending updates.
    self.obj_reset_changes()

View File

@ -62,21 +62,28 @@ class CyborgObject(object_base.VersionedObject):
if hasattr(self, k))
@staticmethod
def _from_db_object(context, obj, db_object):
def _from_db_object(obj, db_obj):
"""Converts a database entity to a formal object.
:param context: security context
:param obj: An object of the class.
:param db_object: A DB model of the object
:param db_obj: A DB model of the object
:return: The object of the class with the database entity added
"""
for field in obj.fields:
obj[field] = db_object[field]
obj[field] = db_obj[field]
obj.obj_reset_changes()
return obj
@classmethod
def _from_db_object_list(cls, db_objs, context):
    """Converts a list of database entities to a list of formal objects."""
    return [cls._from_db_object(cls(context), db_obj)
            for db_obj in db_objs]
class CyborgObjectSerializer(object_base.VersionedObjectSerializer):
# Base class to use for object hydration

View File

@ -24,6 +24,7 @@ from oslotest import base
import pecan
from cyborg.common import config as cyborg_config
from cyborg.tests.unit import policy_fixture
CONF = cfg.CONF
@ -40,13 +41,8 @@ class TestCase(base.BaseTestCase):
def setUp(self):
super(TestCase, self).setUp()
self.context = context.get_admin_context()
self._set_config()
def reset_pecan():
pecan.set_config({}, overwrite=True)
self.addCleanup(reset_pecan)
self.policy = self.useFixture(policy_fixture.PolicyFixture())
def _set_config(self):
self.cfg_fixture = self.useFixture(config_fixture.Config(cfg.CONF))

View File

@ -135,3 +135,80 @@ class BaseApiTest(base.DbTestCase):
}
return headers
def get_json(self, path, expect_errors=False, headers=None,
             extra_environ=None, q=None, **params):
    """Sends simulated HTTP GET request to Pecan test app.

    :param path: url path of target service
    :param expect_errors: Boolean value; whether an error is expected based
        on request
    :param headers: a dictionary of headers to send along with the request
    :param extra_environ: a dictionary of environ variables to send along
        with the request
    :param q: list of queries consisting of: field, value, op, and type
        keys
    :param params: content for wsgi.input of request
    :returns: the parsed JSON body on success, or the raw response when
        expect_errors is True.
    """
    full_path = self.PATH_PREFIX + path
    q = q or []
    query_params = {
        'q.field': [],
        'q.value': [],
        'q.op': [],
    }
    # Flatten the structured query list into the q.* form the API expects.
    for query in q:
        for name in ['field', 'op', 'value']:
            query_params['q.%s' % name].append(query.get(name, ''))
    all_params = {}
    all_params.update(params)
    if q:
        all_params.update(query_params)
    response = self.app.get(full_path,
                            params=all_params,
                            headers=headers,
                            extra_environ=extra_environ,
                            expect_errors=expect_errors)
    if not expect_errors:
        response = response.json
    return response
def patch_json(self, path, params, expect_errors=False, headers=None,
               extra_environ=None, status=None):
    """Sends simulated HTTP PATCH request to Pecan test app.

    :param path: url path of target service
    :param params: content for wsgi.input of request
    :param expect_errors: Boolean value; whether an error is expected based
        on request
    :param headers: a dictionary of headers to send along with the request
    :param extra_environ: a dictionary of environ variables to send along
        with the request
    :param status: expected status code of response
    :returns: the response from _request_json with method="patch".
    """
    full_path = self.PATH_PREFIX + path
    return self._request_json(path=full_path, params=params,
                              expect_errors=expect_errors,
                              headers=headers, extra_environ=extra_environ,
                              status=status, method="patch")
def delete(self, path, expect_errors=False, headers=None,
           extra_environ=None, status=None):
    """Sends simulated HTTP DELETE request to Pecan test app.

    :param path: url path of target service
    :param expect_errors: Boolean value; whether an error is expected based
        on request
    :param headers: a dictionary of headers to send along with the request
    :param extra_environ: a dictionary of environ variables to send along
        with the request
    :param status: expected status code of response
    :returns: the webtest response object.
    """
    full_path = self.PATH_PREFIX + path
    response = self.app.delete(full_path,
                               headers=headers,
                               status=status,
                               extra_environ=extra_environ,
                               expect_errors=expect_errors)
    return response

View File

@ -13,46 +13,162 @@
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import mock
from oslo_utils import timeutils
from six.moves import http_client
from cyborg.conductor import rpcapi
from cyborg.tests.unit.api.controllers.v1 import base as v1_test
from cyborg.tests.unit.db import utils
from cyborg.tests.unit.db import utils as db_utils
from cyborg.tests.unit.objects import utils as obj_utils
def gen_post_body(**kw):
return utils.get_test_accelerator(**kw)
return db_utils.get_test_accelerator(**kw)
def _rpcapi_accelerator_create(self, context, acc_obj):
def _rpcapi_accelerator_create(context, obj_acc):
"""Fake used to mock out the conductor RPCAPI's accelerator_create method.
Performs creation of the accelerator object and returns the created
accelerator as-per the real method.
"""
acc_obj.create()
return acc_obj
obj_acc.create(context)
return obj_acc
@mock.patch.object(rpcapi.ConductorAPI, 'accelerator_create', autospec=True,
side_effect=_rpcapi_accelerator_create)
class TestPost(v1_test.APITestV1):
ACCELERATOR_UUID = '10efe63d-dfea-4a37-ad94-4116fba50981'
def setUp(self):
super(TestPost, self).setUp()
self.headers = self.gen_headers(self.context)
p = mock.patch.object(rpcapi.ConductorAPI, 'accelerator_create')
self.mock_create = p.start()
self.mock_create.side_effect = _rpcapi_accelerator_create
self.addCleanup(p.stop)
@mock.patch('oslo_utils.uuidutils.generate_uuid')
def test_accelerator_post(self, mock_uuid, mock_create):
def test_post(self, mock_uuid):
mock_uuid.return_value = self.ACCELERATOR_UUID
body = gen_post_body(name='test_accelerator')
headers = self.gen_headers(self.context)
response = self.post_json('/accelerators', body, headers=headers)
body = gen_post_body(name='post_accelerator')
response = self.post_json('/accelerators', body, headers=self.headers)
self.assertEqual(http_client.CREATED, response.status_int)
response = response.json
self.assertEqual(self.ACCELERATOR_UUID, response['uuid'])
self.assertEqual(body['name'], response['name'])
mock_create.assert_called_once_with(mock.ANY, mock.ANY, mock.ANY)
self.mock_create.assert_called_once_with(mock.ANY, mock.ANY)
class TestList(v1_test.APITestV1):
    """Tests for GET /accelerators and GET /accelerators/<uuid>."""

    def setUp(self):
        super(TestList, self).setUp()
        self.accs = []
        for i in range(3):
            acc = obj_utils.create_test_accelerator(self.context)
            self.accs.append(acc)
        self.acc = self.accs[0]
        # Scope the request context to the accelerators' project so the
        # project-scoped listing includes them.
        self.context.tenant = self.acc.project_id
        self.headers = self.gen_headers(self.context)

    def test_get_one(self):
        data = self.get_json('/accelerators/%s' % self.acc.uuid,
                             headers=self.headers)
        self.assertEqual(self.acc.uuid, data['uuid'])
        self.assertIn('acc_capability', data)
        self.assertIn('acc_type', data)
        self.assertIn('created_at', data)
        self.assertIn('description', data)
        self.assertIn('device_type', data)
        self.assertIn('links', data)
        self.assertIn('name', data)
        self.assertIn('product_id', data)
        self.assertIn('project_id', data)
        self.assertIn('remotable', data)
        self.assertIn('updated_at', data)
        self.assertIn('user_id', data)
        self.assertIn('vendor_id', data)

    def test_get_all(self):
        data = self.get_json('/accelerators', headers=self.headers)
        self.assertEqual(3, len(data['accelerators']))
        data_uuids = [d['uuid'] for d in data['accelerators']]
        acc_uuids = [acc.uuid for acc in self.accs]
        self.assertItemsEqual(acc_uuids, data_uuids)
def _rpcapi_accelerator_update(context, obj_acc):
    """Fake used to mock out the conductor RPCAPI's accelerator_update method.

    Performs update of the accelerator object and returns the updated
    accelerator as-per the real method.
    """
    obj_acc.save(context)
    return obj_acc
class TestPatch(v1_test.APITestV1):
    """Tests for PATCH /accelerators/<uuid>."""

    def setUp(self):
        super(TestPatch, self).setUp()
        self.acc = obj_utils.create_test_accelerator(self.context)
        self.context.tenant = self.acc.project_id
        self.headers = self.gen_headers(self.context)
        # Route conductor RPC updates straight to the object layer.
        p = mock.patch.object(rpcapi.ConductorAPI, 'accelerator_update')
        self.mock_update = p.start()
        self.mock_update.side_effect = _rpcapi_accelerator_update
        self.addCleanup(p.stop)

    @mock.patch.object(timeutils, 'utcnow')
    def test_patch(self, mock_utcnow):
        # Freeze time so updated_at is predictable.
        test_time = datetime.datetime(2012, 12, 12, 12, 12)
        mock_utcnow.return_value = test_time
        description = 'new-description'
        response = self.patch_json('/accelerators/%s' % self.acc.uuid,
                                   [{'path': '/description',
                                     'value': description,
                                     'op': 'replace'}],
                                   headers=self.headers)
        self.assertEqual(http_client.OK, response.status_code)
        data = self.get_json('/accelerators/%s' % self.acc.uuid,
                             headers=self.headers)
        self.assertEqual(description, data['description'])
        return_updated_at = timeutils.parse_isotime(
            data['updated_at']).replace(tzinfo=None)
        self.assertEqual(test_time, return_updated_at)
        self.mock_update.assert_called_once_with(mock.ANY, mock.ANY)
def _rpcapi_accelerator_delete(context, obj_acc):
    """Fake used to mock out the conductor RPCAPI's accelerator_delete method.

    Performs deletion of the accelerator object as-per the real method.
    """
    obj_acc.destroy(context)
class TestDelete(v1_test.APITestV1):
    """Tests for DELETE /accelerators/<uuid>."""

    def setUp(self):
        super(TestDelete, self).setUp()
        self.acc = obj_utils.create_test_accelerator(self.context)
        self.context.tenant = self.acc.project_id
        self.headers = self.gen_headers(self.context)
        # Route conductor RPC deletes straight to the object layer.
        p = mock.patch.object(rpcapi.ConductorAPI, 'accelerator_delete')
        self.mock_delete = p.start()
        self.mock_delete.side_effect = _rpcapi_accelerator_delete
        self.addCleanup(p.stop)

    def test_delete(self):
        response = self.delete('/accelerators/%s' % self.acc.uuid,
                               headers=self.headers)
        self.assertEqual(http_client.NO_CONTENT, response.status_code)
        self.mock_delete.assert_called_once_with(mock.ANY, mock.ANY)

View File

@ -13,7 +13,7 @@
# License for the specific language governing permissions and limitations
# under the License.
"""Cyborg test utilities."""
"""Cyborg db test utilities."""
def get_test_accelerator(**kw):
@ -26,4 +26,6 @@ def get_test_accelerator(**kw):
'vendor_id': kw.get('vendor_id', 'vendor_id'),
'product_id': kw.get('product_id', 'product_id'),
'remotable': kw.get('remotable', 1),
'project_id': kw.get('project_id', 'b492a6fb12964ae3bd291ce585107d48'),
'user_id': kw.get('user_id', '7009409e21614d1db1ef7a8c5ee101d8'),
}

View File

View File

@ -0,0 +1,41 @@
# Copyright 2017 Huawei Technologies Co.,LTD.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Cyborg object test utilities."""
from cyborg import objects
from cyborg.tests.unit.db import utils as db_utils
def get_test_accelerator(ctxt, **kw):
    """Return an Accelerator object with appropriate attributes.

    NOTE: The object leaves the attributes marked as changed, such
    that a create() could be used to commit it to the DB.

    :param ctxt: request context used to build the object.
    :param kw: field overrides passed through to the db test utility.
    """
    test_acc = db_utils.get_test_accelerator(**kw)
    obj_acc = objects.Accelerator(ctxt, **test_acc)
    return obj_acc
def create_test_accelerator(ctxt, **kw):
    """Create and return a test accelerator object.

    Create an accelerator in the DB and return an Accelerator object with
    appropriate attributes.

    :param ctxt: request context used for both object build and create().
    :param kw: field overrides passed through to get_test_accelerator.
    """
    acc = get_test_accelerator(ctxt, **kw)
    acc.create(ctxt)
    return acc

View File

@ -0,0 +1,45 @@
# Copyright 2017 Huawei Technologies Co.,LTD.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import fixtures
from oslo_config import cfg
from oslo_policy import opts as policy_opts
from cyborg.common import policy as cyborg_policy
CONF = cfg.CONF
policy_data = """
{
}
"""
class PolicyFixture(fixtures.Fixture):
    """Point oslo.policy at an empty policy file for the test run."""

    def setUp(self):
        super(PolicyFixture, self).setUp()
        self.policy_dir = self.useFixture(fixtures.TempDir())
        self.policy_file_name = os.path.join(self.policy_dir.path,
                                             'policy.json')
        with open(self.policy_file_name, 'w') as policy_file:
            policy_file.write(policy_data)
        policy_opts.set_defaults(CONF)
        CONF.set_override('policy_file', self.policy_file_name, 'oslo_policy')
        # Drop any cached enforcer so it is rebuilt with the overridden
        # policy file, and clear it again on cleanup.
        cyborg_policy._ENFORCER = None
        self.addCleanup(cyborg_policy.get_enforcer().clear)

View File

@ -22,3 +22,4 @@ SQLAlchemy!=1.1.5,!=1.1.6,!=1.1.7,!=1.1.8,>=1.0.10 # MIT
alembic>=0.8.10 # MIT
stevedore>=1.20.0 # Apache-2.0
keystonemiddleware>=4.17.0 # Apache-2.0
jsonpatch!=1.20,>=1.16 # BSD