Merge "explicit data types - part I"

This commit is contained in:
Zuul 2018-07-26 18:21:48 +00:00 committed by Gerrit Code Review
commit 2d1595e17b
8 changed files with 484 additions and 50 deletions

240
congress/data_types.py Normal file
View File

@ -0,0 +1,240 @@
# Copyright (c) 2018 VMware, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import collections
import functools
import ipaddress
import json

import six
# (type, nullable) pair describing one schema column: `type` is a
# CongressDataType subclass, `nullable` is a bool.
TypeNullabilityTuple = collections.namedtuple(
    'TypeNullabilityTuple', 'type nullable')
def nullable(marshal):
    '''Decorator to make a marshal function accept a None value.

    Wraps ``marshal`` so that a None input is passed through unchanged
    rather than validated; any other value is delegated to the wrapped
    function.

    :param marshal: a classmethod-style function taking (cls, value)
    :returns: the wrapped function
    '''
    # functools.wraps preserves the wrapped function's name and
    # docstring, so decorated marshal methods remain introspectable.
    @functools.wraps(marshal)
    def func(cls, value):
        if value is None:
            return None
        else:
            return marshal(cls, value)
    return func
class UnqualifiedNameStr(abc.ABCMeta):
    '''Metaclass making str(Type) == Type.__name__.

    Classes using this metaclass stringify to their bare class name
    (e.g. 'Str') instead of the default "<class 'module.Str'>" form,
    so type names can be used directly as schema identifiers.
    '''
    def __str__(self):
        # self here is the class object; __name__ is its bare name.
        return self.__name__
@six.add_metaclass(UnqualifiedNameStr)
class CongressDataType(object):
    '''Abstract base class for a Congress data type.

    Subclasses are expected to form a single-inheritance hierarchy
    (with respect to CongressDataType bases) which is walked by
    least_ancestor/_get_parent to find common types between data
    producers and consumers.
    '''

    @classmethod
    @abc.abstractmethod
    def marshal(cls, value):
        '''Validate a value as valid for this type.

        :Raises ValueError: if the value is not valid for this type
        '''
        raise NotImplementedError

    @classmethod
    def least_ancestor(cls, target_types):
        '''Find this type's least ancestor among target_types

        This method helps a data consumer find the least common ancestor of
        this type among the types the data consumer supports.

        :param target_types: iterable collection of types
        :returns: the subclass of CongressDataType which is the least
            ancestor, or None if no ancestor is found in target_types
        '''
        target_types = frozenset(target_types)
        current_class = cls
        try:
            # Walk up the single-inheritance chain until a supported
            # type is found or the root is passed (raising NoParent).
            while current_class not in target_types:
                current_class = current_class._get_parent()
            return current_class
        except cls.CongressDataTypeNoParent:
            return None

    @classmethod
    def convert_to_ancestor(cls, value, ancestor_type):
        '''Convert this type's exchange value to ancestor_type's exchange value

        Generally there is no actual conversion because a descendant type
        value is directly interpretable as an ancestor type value. The only
        exception is the conversion from non-string descendants to string,
        which is needed because the Agnostic engine does not support
        boolean.

        .. warning:: undefined behavior if ancestor_type is not an ancestor of
          this type.
        '''
        if ancestor_type == Str:
            # JSON-encode so non-string values (e.g. bool) become strings.
            return json.dumps(value)
        else:
            if cls.least_ancestor([ancestor_type]) is None:
                raise cls.CongressDataTypeHierarchyError
            else:
                return value

    @classmethod
    def _get_parent(cls):
        '''Return the unique CongressDataType base of this class.

        :Raises CongressDataTypeNoParent: if no base is a CongressDataType
        :Raises CongressDataTypeHierarchyError: if more than one base is
            a CongressDataType (the hierarchy must be single-inheritance)
        '''
        congress_parents = [parent for parent in cls.__bases__
                            if issubclass(parent, CongressDataType)]
        if len(congress_parents) == 1:
            return congress_parents[0]
        elif len(congress_parents) == 0:
            raise cls.CongressDataTypeNoParent(
                'No parent type found for {0}'.format(cls))
        else:
            raise cls.CongressDataTypeHierarchyError(
                'More than one parent type found for {0}: {1}'
                .format(cls, congress_parents))

    class CongressDataTypeNoParent(TypeError):
        # Raised when walking past the root of the type hierarchy.
        pass

    class CongressDataTypeHierarchyError(TypeError):
        # Raised on a malformed (non single-inheritance) hierarchy.
        pass
class Scalar(CongressDataType):
    '''Most general type, encompassing all JSON scalar values'''

    ACCEPTED_VALUE_TYPES = [
        six.string_types, six.text_type, six.integer_types, float, bool]

    @classmethod
    @nullable
    def marshal(cls, value):
        '''Accept any scalar value; raise ValueError otherwise.

        :Raises ValueError: if value is not one of ACCEPTED_VALUE_TYPES
        '''
        # Use a non-shadowing loop variable: the original looped with
        # `for type in ...`, which shadowed the builtin `type` and made
        # the error message below call bool(value) instead of
        # type(value), producing a wrong diagnostic.
        for accepted_type in cls.ACCEPTED_VALUE_TYPES:
            if isinstance(value, accepted_type):
                return value
        raise ValueError('Input value (%s) is of %s instead of one of the '
                         'expected types %s'
                         % (value, type(value), cls.ACCEPTED_VALUE_TYPES))
class Str(Scalar):
    '''String type; accepts any six.string_types value.'''

    @classmethod
    @nullable
    def marshal(cls, value):
        # Guard clause: accept strings, reject everything else.
        if isinstance(value, six.string_types):
            return value
        raise ValueError('Input value (%s) is of %s instead of expected %s'
                         % (value, type(value), six.string_types))
class Bool(Scalar):
    '''Boolean type.'''

    @classmethod
    @nullable
    def marshal(cls, value):
        # Guard clause: accept bools, reject everything else.
        if isinstance(value, bool):
            return value
        raise ValueError('Input value (%s) is of %s instead of expected %s'
                         % (value, type(value), bool))
class Int(Scalar):
    '''Integer type; whole-number floats (e.g. 2.0) are normalized to int.'''

    @classmethod
    @nullable
    def marshal(cls, value):
        # The two accepted cases are disjoint (a float is never an
        # int instance), so checking the float case first is
        # equivalent to the int-first ordering.
        if isinstance(value, float) and value.is_integer():
            return int(value)
        if isinstance(value, int):
            return value
        raise ValueError('Input value (%s) is of %s instead of expected %s'
                         ' or %s' % (value, type(value), int, float))
class Float(Scalar):
    '''Floating-point type; int inputs are converted to float.'''

    @classmethod
    @nullable
    def marshal(cls, value):
        # float(x) is the identity (by value) for floats and the
        # required conversion for ints, so one branch covers both.
        if isinstance(value, (float, int)):
            return float(value)
        raise ValueError('Input value (%s) is of %s instead of expected %s'
                         ' or %s' % (value, type(value), int, float))
class IPAddress(Str):
    '''IP address type, exchanged as a canonical string.

    Accepts IPv4 and IPv6 inputs; an IPv4-mapped IPv6 address
    (e.g. ::ffff:1.2.3.4) is normalized to its IPv4 form so that
    equivalent addresses compare equal.
    '''

    @classmethod
    @nullable
    def marshal(cls, value):
        '''Return the canonical string form of an IP address.

        :Raises ValueError: if value is not a valid IPv4/IPv6 address
        '''
        try:
            return str(ipaddress.IPv4Address(value))
        except ipaddress.AddressValueError:
            try:
                ipv6 = ipaddress.IPv6Address(value)
                if ipv6.ipv4_mapped:
                    # normalize ::ffff:a.b.c.d to its IPv4 form
                    return str(ipv6.ipv4_mapped)
                else:
                    return str(ipv6)
            except ipaddress.AddressValueError:
                # fix: error message previously misspelled
                # "interpretable" as "interprable"
                raise ValueError('Input value (%s) is not interpretable '
                                 'as an IP address' % value)
@six.add_metaclass(abc.ABCMeta)
class CongressTypeFiniteDomain(object):
    '''Abstract base class for a Congress type of bounded domain.

    Each type inheriting from this class must have a class variable DOMAIN
    which is a frozenset of the set of values allowed in the type.
    '''
    # Marker base class only; concrete validation lives in the
    # subclasses created by create_congress_str_enum_type.
    pass
def create_congress_str_enum_type(class_name, enum_items):
    '''Return a sub-type of Str

    representing a string from a fixed, finite domain.

    :param class_name: name assigned to the generated type class
    :param enum_items: iterable of strings forming the allowed domain
    :returns: a new subclass of Str and CongressTypeFiniteDomain
    :Raises ValueError: if any enum item is not a string
    '''
    for item in enum_items:
        if not isinstance(item, six.string_types):
            # fix: the original raised a bare ValueError with no
            # message, giving the caller no hint which item was bad
            raise ValueError(
                'Enum item (%s) is of %s instead of expected %s'
                % (item, type(item), six.string_types))

    class NewType(Str, CongressTypeFiniteDomain):
        DOMAIN = frozenset(enum_items)

        @classmethod
        @nullable
        def marshal(cls, value):
            if value not in cls.DOMAIN:
                raise ValueError(
                    'Input value (%s) is not in the expected domain of values '
                    '%s' % (value, cls.DOMAIN))
            return value

    NewType.__name__ = class_name
    return NewType
# Pre-defined enum type for network traffic direction.
NetworkDirection = create_congress_str_enum_type(
    'NetworkDirection', ('ingress', 'egress'))

# Built-in types exposed to schemas, indexed by bare class name
# (str(Type) yields the class name via the UnqualifiedNameStr metaclass).
TYPES = [Scalar, Str, Bool, Int, Float, IPAddress]
TYPE_NAME_TO_TYPE_CLASS = {str(type_obj): type_obj for type_obj in TYPES}

View File

@ -27,6 +27,7 @@ from six.moves import range
from oslo_log import log as logging
from oslo_utils import uuidutils
from congress import data_types
from congress.datalog import analysis
from congress.datalog import base
from congress.datalog import builtin
@ -82,8 +83,8 @@ class Schema(object):
def __contains__(self, tablename):
return tablename in self.map
@classmethod
def col(self, cols):
@staticmethod
def _col(cols):
# For Datasource tables, columns would be in the format -
# {'name': 'colname', 'desc': 'description'}
if len(cols) and isinstance(cols[0], dict):
@ -91,6 +92,20 @@ class Schema(object):
else:
return [x for x in cols]
@staticmethod
def _type(cols):
    # Returns one TypeNullabilityTuple per column.
    # For Datasource tables, columns would be in the format -
    # {'name': 'colname', 'desc': 'description',
    #  'type': 'typename', 'nullable': True/False}
    # A missing 'type' defaults to Scalar; a missing 'nullable' to True.
    if len(cols) and isinstance(cols[0], dict):
        # NOTE(review): an unrecognized type name makes .get() return
        # None as the tuple's type -- confirm callers tolerate that.
        return [data_types.TypeNullabilityTuple(
            data_types.TYPE_NAME_TO_TYPE_CLASS.get(
                x.get('type', str(data_types.Scalar))),
            x.get('nullable', True)) for x in cols]
    else:
        # Non-dict columns carry no type info: most general type, nullable.
        return [data_types.TypeNullabilityTuple(data_types.Scalar, True)
                for x in cols]
def columns(self, tablename):
"""Returns the list of column names for the given TABLENAME.
@ -99,7 +114,17 @@ class Schema(object):
if tablename not in self.map.keys():
return
cols = self.map[tablename]
return Schema.col(cols)
return Schema._col(cols)
def types(self, tablename):
    """Returns the list of column types for the given TABLENAME.

    Each entry is a data_types.TypeNullabilityTuple.
    Return None if the tablename's columns are unknown.
    """
    if tablename not in self.map.keys():
        return
    cols = self.map[tablename]
    return Schema._type(cols)
def arity(self, tablename):
"""Returns the number of columns for the given TABLENAME.

View File

@ -113,7 +113,7 @@ class DataSourceDriver(data_service.DataService):
specified with a sub-translator, that value is included as a column
in the top-level translator's table.
Using both parent-key and id-col at the same time is redudant, so
Using both parent-key and id-col at the same time is redundant, so
DataSourceDriver will reject that configuration.
The example translator expects an object such as:
@ -268,6 +268,8 @@ class DataSourceDriver(data_service.DataService):
IN_LIST = 'in-list'
OBJECTS_EXTRACT_FN = 'objects-extract-fn'
DESCRIPTION = 'desc'
DATA_TYPE = 'data-type'
NULLABLE = 'nullable'
# Name of the column name and desc when using a parent key.
PARENT_KEY_COL_NAME = 'parent_key'
@ -284,7 +286,7 @@ class DataSourceDriver(data_service.DataService):
LIST_PARAMS = (TRANSLATION_TYPE, TABLE_NAME, PARENT_KEY, ID_COL, VAL_COL,
TRANSLATOR, PARENT_COL_NAME, OBJECTS_EXTRACT_FN,
PARENT_KEY_DESC, VAL_COL_DESC)
VALUE_PARAMS = (TRANSLATION_TYPE, EXTRACT_FN)
VALUE_PARAMS = (TRANSLATION_TYPE, EXTRACT_FN, DATA_TYPE, NULLABLE)
TRANSLATION_TYPE_PARAMS = (TRANSLATION_TYPE,)
VALID_TRANSLATION_TYPES = (HDICT, VDICT, LIST, VALUE)
@ -463,7 +465,7 @@ class DataSourceDriver(data_service.DataService):
self._table_deps[translator[self.TABLE_NAME]] = related_tables
self._validate_translator(translator, related_tables)
self._translators.append(translator)
self._schema.update(self._get_schema(translator, {}))
self._schema.update(self._get_schema(translator, {}).schema)
def get_translator(self, translator_name):
"""Get a translator.
@ -488,12 +490,16 @@ class DataSourceDriver(data_service.DataService):
"""
return self._translators
SCHEMA_RETURN_TUPLE = collections.namedtuple('SchemaReturnTuple',
'schema id_type')
@classmethod
def _get_schema_hdict(cls, translator, schema):
def _get_schema_hdict(cls, translator, schema, parent_key_type=None):
tablename = translator[cls.TABLE_NAME]
parent_key = translator.get(cls.PARENT_KEY, None)
id_col = translator.get(cls.ID_COL, None)
field_translators = translator[cls.FIELD_TRANSLATORS]
parent_col_name = None
columns = []
# columns here would be list of dictionaries.
@ -504,25 +510,61 @@ class DataSourceDriver(data_service.DataService):
parent_col_name = translator.get(cls.PARENT_COL_NAME,
cls.PARENT_KEY_COL_NAME)
desc = translator.get(cls.PARENT_KEY_DESC)
columns.append(ds_utils.add_column(parent_col_name, desc))
columns.append(ds_utils.add_column(
parent_col_name, desc, type=parent_key_type))
for field_translator in field_translators:
# Sort with fields lacking parent-key coming first so that the
# subtranslators that need a parent field will be able to get them
# from the fields processed first
field_translators_with_order = [
(index, trans) for index, trans in enumerate(field_translators)]
field_translators_sorted = sorted(
field_translators_with_order, key=cmp_to_key(
cls._compare_tuple_by_subtranslator))
columns_indexed = {}
def get_current_table_col_type(name):
if parent_col_name and parent_col_name == name:
return parent_key_type
elif name == cls._id_col_name(id_col):
return None # FIXME(ekcs): return type for ID col
else:
[type] = [column_schema.get('type') for column_schema in
columns_indexed.values()
if column_schema.get('name') == name]
return type
for (index, field_translator) in field_translators_sorted:
col = field_translator.get(
cls.COL, field_translator[cls.FIELDNAME])
desc = field_translator.get(cls.DESCRIPTION)
subtranslator = field_translator[cls.TRANSLATOR]
if cls.PARENT_KEY not in subtranslator:
columns.append(ds_utils.add_column(col, desc))
cls._get_schema(subtranslator, schema)
if cls.PARENT_KEY in subtranslator:
# TODO(ekcs): disallow nullable parent key
cls._get_schema(subtranslator, schema,
parent_key_type=get_current_table_col_type(
subtranslator[cls.PARENT_KEY]))
else:
field_type = subtranslator.get(cls.DATA_TYPE)
nullable = subtranslator.get(cls.NULLABLE, True)
columns_indexed[index] = ds_utils.add_column(
col, desc, field_type, nullable)
cls._get_schema(subtranslator, schema)
for index in range(0, len(field_translators)):
if index in columns_indexed:
columns.append(columns_indexed[index])
if tablename in schema:
raise exception.InvalidParamException(
"table %s already in schema" % tablename)
schema[tablename] = tuple(columns)
return schema
return cls.SCHEMA_RETURN_TUPLE(schema, None)
@classmethod
def _get_schema_vdict(cls, translator, schema):
def _get_schema_vdict(cls, translator, schema, parent_key_type=None):
tablename = translator[cls.TABLE_NAME]
parent_key = translator.get(cls.PARENT_KEY, None)
id_col = translator.get(cls.ID_COL, None)
@ -546,10 +588,10 @@ class DataSourceDriver(data_service.DataService):
new_schema = new_schema + (value_col,)
schema[tablename] = new_schema
return schema
return cls.SCHEMA_RETURN_TUPLE(schema, None)
@classmethod
def _get_schema_list(cls, translator, schema):
def _get_schema_list(cls, translator, schema, parent_key_type=None):
tablename = translator[cls.TABLE_NAME]
parent_key = translator.get(cls.PARENT_KEY, None)
id_col = translator.get(cls.ID_COL, None)
@ -572,31 +614,36 @@ class DataSourceDriver(data_service.DataService):
ds_utils.add_column(value_col, val_desc))
else:
schema[tablename] = (ds_utils.add_column(value_col, val_desc), )
return schema
return cls.SCHEMA_RETURN_TUPLE(schema, None)
@classmethod
def _get_schema(cls, translator, schema):
"""Returns the schema of a translator.
def _get_schema(cls, translator, schema, parent_key_type=None):
"""Returns named tuple with values:
schema: the schema of a translator,
id_type: the data type of the id-col, or None of absent
Note: this method uses the argument schema to store
data in since this method works recursively. It might
be worthwhile in the future to refactor this code so this
is not required.
:param parent_key_type: passes down the column data type which the
translator refers to as parent-key
"""
cls.check_translation_type(translator.keys())
translation_type = translator[cls.TRANSLATION_TYPE]
if translation_type == cls.HDICT:
cls._get_schema_hdict(translator, schema)
return cls._get_schema_hdict(translator, schema, parent_key_type)
elif translation_type == cls.VDICT:
cls._get_schema_vdict(translator, schema)
return cls._get_schema_vdict(translator, schema, parent_key_type)
elif translation_type == cls.LIST:
cls._get_schema_list(translator, schema)
return cls._get_schema_list(translator, schema, parent_key_type)
elif translation_type == cls.VALUE:
pass
return cls.SCHEMA_RETURN_TUPLE(schema, None)
else:
raise AssertionError('Unexpected translator type %s' %
translation_type)
return schema
@classmethod
def get_schema(cls):
@ -745,17 +792,25 @@ class DataSourceDriver(data_service.DataService):
return h
@classmethod
def _extract_value(cls, obj, extract_fn):
def _extract_value(cls, obj, extract_fn, data_type, nullable=True):
# Reads a VALUE object and returns (result_rows, h)
if extract_fn is None:
extract_fn = lambda x: x
value = extract_fn(obj)
# preserve type if possible; convert to str if not Hashable
if isinstance(value, collections.Hashable):
return value
else:
return str(value)
if not isinstance(value, collections.Hashable):
value = str(value)
# check that data type matches if specified in translator
if data_type is not None and value is not None:
value = data_type.marshal(value)
return value
@classmethod
def _compare_tuple_by_subtranslator(cls, x, y):
return cls._compare_subtranslator(x[1], y[1])
@classmethod
def _compare_subtranslator(cls, x, y):
@ -779,8 +834,9 @@ class DataSourceDriver(data_service.DataService):
if subtrans[cls.TRANSLATION_TYPE] == cls.VALUE:
extract_fn = subtrans.get(cls.EXTRACT_FN, None)
converted_values = tuple([cls._extract_value(o, extract_fn)
for o in obj])
data_type = subtrans.get(cls.DATA_TYPE)
converted_values = tuple(
[cls._extract_value(o, extract_fn, data_type) for o in obj])
if id_col:
h = cls._compute_id(id_col, obj, converted_values)
new_tuples = [(table, (h, v)) for v in converted_values]
@ -844,8 +900,10 @@ class DataSourceDriver(data_service.DataService):
if subtrans[cls.TRANSLATION_TYPE] == cls.VALUE:
extract_fn = subtrans.get(cls.EXTRACT_FN, None)
converted_items = tuple([(k, cls._extract_value(v, extract_fn))
for k, v in obj.items()])
data_type = subtrans.get(cls.DATA_TYPE)
converted_items = tuple(
[(k, cls._extract_value(v, extract_fn, data_type))
for k, v in obj.items()])
if id_col:
h = cls._compute_id(id_col, obj, converted_items)
new_tuples = [(table, (h,) + i) for i in converted_items]
@ -932,9 +990,18 @@ class DataSourceDriver(data_service.DataService):
subtranslator = field_translator[cls.TRANSLATOR]
if subtranslator[cls.TRANSLATION_TYPE] == cls.VALUE:
extract_fn = subtranslator.get(cls.EXTRACT_FN)
v = cls._extract_value(
cls._get_value(obj, field, selector), extract_fn)
hdict_row[col_name] = v
data_type = subtranslator.get(cls.DATA_TYPE)
nullable = subtranslator.get(cls.NULLABLE, True)
try:
v = cls._extract_value(
cls._get_value(obj, field, selector),
extract_fn, data_type, nullable)
hdict_row[col_name] = v
except TypeError as exc:
arg0 = "While translating field: %s, column: %s; " \
"%s" % (field, col_name, exc.args[0])
exc.args = tuple([arg0]) + exc.args[1:]
raise
else:
assert translator[cls.TRANSLATION_TYPE] in (cls.HDICT,
cls.VDICT,

View File

@ -63,9 +63,14 @@ def update_state_on_changed(root_table_name):
return outer
def add_column(colname, desc=None):
def add_column(colname, desc=None, type=None, nullable=True):
    """Build a schema column description dict.

    :param colname: the column name
    :param desc: optional human-readable description
    :param type: optional data type; stored under 'type' as str(type)
    :param nullable: stored under 'nullable' only when False, since
        True is the implied default
    :returns: dict describing the column
    """
    col = {'name': colname, 'desc': desc}
    if type is not None:
        col['type'] = str(type)
    if not nullable:
        col['nullable'] = False
    return col
def inspect_methods(client, api_prefix):

View File

@ -19,6 +19,7 @@ from __future__ import absolute_import
import copy
from congress import data_types
from congress.datalog import analysis
from congress.datalog import base as datalogbase
from congress.datalog import compile
@ -972,3 +973,22 @@ class TestDependencyGraph(base.TestCase):
self.assertEqual(set(g.tables_with_modal('execute')), set())
g.undo_changes(chgs)
self.assertEqual(set(g.tables_with_modal('execute')), set(['p']))
class TestSchema(base.TestCase):
def test_schema_columns(self):
test_schema = compile.Schema({
'p': (1, 2, 3),
'q': ({'name': 'a', 'type': 'Str'},
{'name': 'b', 'nullable': False})},
complete=True)
self.assertEqual(test_schema.columns('p'),
[1, 2, 3])
self.assertEqual(test_schema.columns('q'),
['a', 'b'])
self.assertEqual([(data_types.Scalar, True), (data_types.Scalar, True),
(data_types.Scalar, True)],
test_schema.types('p'))
self.assertEqual([(data_types.Str, True), (data_types.Scalar, False)],
test_schema.types('q'))

View File

@ -24,6 +24,7 @@ import eventlet
import mock
from oslo_utils import uuidutils
from congress import data_types
from congress.datasources import datasource_driver
from congress.datasources import datasource_utils
from congress.db import db_ds_table_data
@ -40,6 +41,9 @@ class TestDatasourceDriver(base.TestCase):
super(TestDatasourceDriver, self).setUp()
self.val_trans = {'translation-type': 'VALUE'}
def typed_value_trans(self, type):
return {'translation-type': 'VALUE', 'data-type': type}
def compute_hash(self, obj):
s = json.dumps(sorted(obj, key=(lambda x: str(type(x)) + repr(x))),
sort_keys=True)
@ -72,6 +76,16 @@ class TestDatasourceDriver(base.TestCase):
self.assertEqual(params, expected)
def test_in_list_results_hdict_hdict(self):
class Type1(object):
@classmethod
def marshal(cls, value):
return value
class Type2(object):
@classmethod
def marshal(cls, value):
return value
ports_fixed_ips_translator = {
'translation-type': 'HDICT',
'table-name': 'fixed-ips',
@ -79,7 +93,8 @@ class TestDatasourceDriver(base.TestCase):
'selector-type': 'DICT_SELECTOR',
'in-list': True,
'field-translators':
({'fieldname': 'ip_address', 'translator': self.val_trans},
({'fieldname': 'ip_address',
'translator': self.typed_value_trans(Type2)},
{'fieldname': 'subnet_id', 'translator': self.val_trans})}
ports_translator = {
@ -87,7 +102,8 @@ class TestDatasourceDriver(base.TestCase):
'table-name': 'ports',
'selector-type': 'DICT_SELECTOR',
'field-translators':
({'fieldname': 'id', 'translator': self.val_trans},
({'fieldname': 'id',
'translator': self.typed_value_trans(Type2)},
{'fieldname': 'fixed_ips',
'translator': ports_fixed_ips_translator})}
@ -1093,14 +1109,17 @@ class TestDatasourceDriver(base.TestCase):
'field-translators': (
{'fieldname': 'a',
'col': 'a1',
'translator': self.val_trans},
'translator':
self.typed_value_trans(
data_types.Bool)},
{'fieldname': 'b',
'col': 'b1',
'translator': self.val_trans})}},
{'fieldname': 'testfield2',
'translator': {'translation-type': 'HDICT',
'table-name': 'subtable2',
'id-col': 'id2',
'parent-key': 'zparent_col3',
'parent-col-name': 'id2',
'field-translators': (
{'fieldname': 'c',
'col': 'c1',
@ -1109,7 +1128,7 @@ class TestDatasourceDriver(base.TestCase):
'col': 'd1',
'translator': self.val_trans})}},
{'fieldname': 'ztestfield3', 'col': 'zparent_col3',
'translator': self.val_trans},
'translator': self.typed_value_trans(data_types.Str)},
{'fieldname': 'testfield4', 'col': 'parent_col4',
'translator': {'translation-type': 'VALUE',
'extract-fn': lambda x: x.id}},
@ -1143,9 +1162,9 @@ class TestDatasourceDriver(base.TestCase):
self.assertEqual(7, len(schema))
self.assertEqual(({'name': 'id1', 'desc': None},
{'name': 'a1', 'desc': None},
{'name': 'a1', 'desc': None, 'type': 'Bool'},
{'name': 'b1', 'desc': None}), schema['subtable1'])
self.assertEqual(({'name': 'id2', 'desc': None},
self.assertEqual(({'name': 'id2', 'desc': None, 'type': 'Str'},
{'name': 'c1', 'desc': None},
{'name': 'd1', 'desc': None}), schema['subtable2'])
self.assertEqual(('id3', 'key3', 'value3'), schema['subtable3'])
@ -1157,8 +1176,7 @@ class TestDatasourceDriver(base.TestCase):
'desc': None},), schema['subtable6'])
self.assertEqual(
({'name': 'parent_col1', 'desc': None},
{'name': 'testfield2', 'desc': None},
{'name': 'zparent_col3', 'desc': None},
{'name': 'zparent_col3', 'desc': None, 'type': 'Str'},
{'name': 'parent_col4', 'desc': None},
{'name': 'parent_col5', 'desc': None},
{'name': 'parent_col6', 'desc': None},
@ -1218,6 +1236,36 @@ class TestDatasourceDriver(base.TestCase):
({'name': 'parent_key', 'desc': None},
{'name': 'val', 'desc': None}), schema['subtable'])
def test_get_schema_with_hdict_parent_and_id_col_in_subtranslator(self):
class TestDriver(datasource_driver.DataSourceDriver):
subtranslator = {'translation-type': 'LIST',
'table-name': 'subtable',
'id-col': 'id', 'val-col': 'val',
'translator': self.val_trans}
translator = {'translation-type': 'HDICT',
'table-name': 'testtable',
'selector-type': 'DICT_SELECTOR',
'field-translators': ({'fieldname': 'unique_key',
'translator': self.val_trans},
{'fieldname': 'sublist',
'translator': subtranslator})}
TRANSLATORS = [translator]
def __init__(self):
super(TestDriver, self).__init__('', None)
schema = TestDriver().get_schema()
self.assertEqual(2, len(schema))
self.assertEqual(
({'desc': None, 'name': 'unique_key'},
{'desc': None, 'name': 'sublist'}), schema['testtable'])
self.assertEqual(
({'desc': None, 'name': 'id'},
{'desc': None, 'name': 'val'}), schema['subtable'])
def test_get_schema_with_hdict_id_function(self):
class TestDriver(datasource_driver.DataSourceDriver):
translator = {

View File

@ -48,9 +48,7 @@ class TestDseRuntime(base.SqlTestCase):
node.invoke_service_rpc = mock.MagicMock()
node.invoke_service_rpc.return_value = [
['id1', 'name1', 'status1'],
['id2', 'name2', 'status2'],
]
0, [['id1', 'name1', 'status1'], ['id2', 'name2', 'status2']]]
# loaded rule is disabled
subscriptions = engine2.subscription_list()

View File

@ -0,0 +1,31 @@
# Copyright (c) 2018 VMware
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import testtools
from congress import data_types
class TestDataTypes(testtools.TestCase):
def test_congress_str_nullable(self):
self.assertEqual(data_types.Str.marshal('test-str-value'),
'test-str-value')
self.assertIsNone(data_types.Str.marshal(None))
self.assertRaises(ValueError, data_types.Str.marshal, True)