Support python3.5 for monasca-analytics

This patch implements the following changes for py35 support, using six and 2to3.

- Python 3 map/filter returns an iterator, which must be converted to a list
  where list semantics are required
- use six.string_types instead of basestring
- use six.iteritems() instead of dict.iteritems()
- use six.moves for using cPickle and SocketServer packages
- use six.assertCountEqual instead of assertItemsEqual
- remove relative imports
- update BytecodeAssembler (monasca_analytics/
  banana/bytecode/assembler.py) for python3

Can be tested with:
  tox -e py35

Change-Id: If1b92d0ffc56492950f6a02ebdbe1596d0dce368
This commit is contained in:
Daisuke Fujita 2018-11-10 20:54:52 -08:00
parent 8a1dff0bbb
commit 6c7316ebbd
58 changed files with 360 additions and 222 deletions

View File

@ -20,10 +20,10 @@
#
# It has been adapted to match the requirements of monasca_analytics
import six
import sys
from array import array
from decorators import decorate_assignment
from dis import cmp_op
from dis import EXTENDED_ARG
from dis import hasfree
@ -33,10 +33,11 @@ from dis import haslocal
from dis import hasname
from dis import HAVE_ARGUMENT
from dis import opname
from symbols import Symbol
from types import CodeType
from types import FunctionType
from monasca_analytics.banana.bytecode.decorators import decorate_assignment
from monasca_analytics.banana.bytecode.symbols import Symbol
opcode = {}
for op in range(256):
@ -79,7 +80,12 @@ BUILD_LIST = opcode["BUILD_LIST"]
UNPACK_SEQUENCE = opcode["UNPACK_SEQUENCE"]
RETURN_VALUE = opcode["RETURN_VALUE"]
BUILD_SLICE = opcode["BUILD_SLICE"]
DUP_TOPX = opcode["DUP_TOPX"]
if six.PY2:
DUP_TOPX = opcode["DUP_TOPX"]
else:
# DUP_TOPX no longer in use from python3.3
DUP_TOP_TWO = opcode["DUP_TOP_TWO"]
RAISE_VARARGS = opcode["RAISE_VARARGS"]
MAKE_FUNCTION = opcode["MAKE_FUNCTION"]
MAKE_CLOSURE = opcode["MAKE_CLOSURE"]
@ -460,9 +466,14 @@ class Code(object):
self.stackchange(count, 1)
self.emit_arg(BUILD_SLICE, count)
def DUP_TOPX(self, count):
self.stackchange(count, count * 2)
self.emit_arg(DUP_TOPX, count)
if six.PY2:
def DUP_TOPX(self, count):
self.stackchange(count, count * 2)
self.emit_arg(DUP_TOPX, count)
else:
def DUP_TOP_TWO(self, count):
self.stackchange(count, count * 2)
self.emit_arg(DUP_TOP_TWO, count)
def RAISE_VARARGS(self, argc):
assert 0 <= argc <= 3, "Invalid number of arguments for RAISE_VARARGS"
@ -711,13 +722,13 @@ class Code(object):
def tuple_arg(args):
self.UNPACK_SEQUENCE(len(args))
for arg in args:
if not isinstance(arg, basestring):
if not isinstance(arg, six.string_types):
tuple_arg(arg)
else:
self.STORE_FAST(arg)
for narg, arg in enumerate(args):
if not isinstance(arg, basestring):
if not isinstance(arg, six.string_types):
dummy_name = '.' + str(narg)
self.co_varnames[narg] = dummy_name
self.LOAD_FAST(dummy_name)
@ -836,14 +847,25 @@ class Code(object):
elif parent is not None and self.co_freevars:
parent.makecells(self.co_freevars)
return CodeType(
self.co_argcount, len(self.co_varnames),
self.co_stacksize, flags, self.co_code.tostring(),
tuple(self.co_consts), tuple(self.co_names),
tuple(self.co_varnames),
self.co_filename, self.co_name, self.co_firstlineno,
self.co_lnotab.tostring(), self.co_freevars, self.co_cellvars
)
if six.PY2:
return CodeType(
self.co_argcount, len(self.co_varnames),
self.co_stacksize, flags, self.co_code.tostring(),
tuple(self.co_consts), tuple(self.co_names),
tuple(self.co_varnames),
self.co_filename, self.co_name, self.co_firstlineno,
self.co_lnotab.tostring(), self.co_freevars, self.co_cellvars
)
else:
kwonlyargcount = 0
return CodeType(
self.co_argcount, kwonlyargcount, len(self.co_varnames),
self.co_stacksize, flags, self.co_code.tobytes(),
tuple(self.co_consts), tuple(self.co_names),
tuple(self.co_varnames),
self.co_filename, self.co_name, self.co_firstlineno,
self.co_lnotab.tobytes(), self.co_freevars, self.co_cellvars
)
for op in hasfree:
if not hasattr(Code, opname[op]):
@ -921,11 +943,10 @@ def gen_list(code, ob):
generate_types = {
int: Code.LOAD_CONST,
long: Code.LOAD_CONST,
bool: Code.LOAD_CONST,
CodeType: Code.LOAD_CONST,
str: Code.LOAD_CONST,
unicode: Code.LOAD_CONST,
six.text_type: Code.LOAD_CONST,
complex: Code.LOAD_CONST,
float: Code.LOAD_CONST,
type(None): Code.LOAD_CONST,
@ -933,6 +954,8 @@ generate_types = {
list: gen_list,
dict: gen_map,
}
if six.PY2:
generate_types[long] = Code.LOAD_CONST
class _se(object):

View File

@ -19,6 +19,7 @@ import json
import logging
import os
import six
import voluptuous
from monasca_analytics.config import const
@ -307,7 +308,8 @@ class MonanasDSL(object):
:returns: True if the component is connected to another component
according to the configuration, False otherwise
"""
for origin_id, dest_ids in self._config[const.CONNECTIONS].iteritems():
cons = self._config[const.CONNECTIONS]
for origin_id, dest_ids in six.iteritems(cons):
if dest_ids == []:
continue
if origin_id == component_id:

View File

@ -21,6 +21,9 @@ import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.exception.banana as exception
import six
def deadpathck(banana_file, type_table, emitter=emit.PrintEmitter()):
"""
Perform dead path elimination on the provided AST.
@ -67,7 +70,7 @@ def deadpathck(banana_file, type_table, emitter=emit.PrintEmitter()):
# We can now remove all the components that are "dead"
# from the list of connections
for ident, node in dag_nodes.iteritems():
for ident, node in six.iteritems(dag_nodes):
if not node.is_alive():
emitter.emit_warning(
ident.span,
@ -75,10 +78,9 @@ def deadpathck(banana_file, type_table, emitter=emit.PrintEmitter()):
"starting from a 'Source' and ending with a 'Sink'."
)
banana_file.components.pop(ident)
connections.connections = filter(
lambda edge: edge[0] != ident and edge[1] != ident,
connections.connections
)
connections.connections = [edge for edge in connections.connections
if edge[0] != ident and
edge[1] != ident]
# TODO(Joan): We could also remove them from the statements.
# TODO(Joan): But for this we need a dependency graph between
@ -107,8 +109,8 @@ def contains_at_least_one_path_to_a_sink(banana_file, type_table):
return isinstance(type_comp, type_util.Source)
comp_vars = banana_file.components.keys()
at_least_one_sink = len(filter(is_sink, comp_vars)) > 0
at_least_one_source = len(filter(is_src, comp_vars)) > 0
at_least_one_sink = len(list(filter(is_sink, comp_vars))) > 0
at_least_one_source = len(list(filter(is_src, comp_vars))) > 0
if not at_least_one_sink:
raise exception.BananaNoFullPath("Sink")

View File

@ -26,6 +26,9 @@ import monasca_analytics.config.const as conf_const
import monasca_analytics.exception.banana as exception
import monasca_analytics.util.common_util as introspect
import six
logger = logging.getLogger(__name__)
@ -186,7 +189,7 @@ def eval_comp(context, comp, expected_type):
# Get default config for the component
conf = component_type.get_default_config()
# Update modified params
for k, val in arguments.iteritems():
for k, val in six.iteritems(arguments):
conf[k] = val
# Delay evaluation until we do the assign
return component_type, conf
@ -204,7 +207,7 @@ def eval_object(context, obj, expected_type):
:return: Returns the computed value
"""
result = expected_type.default_value()
for name, val in obj.props.iteritems():
for name, val in six.iteritems(obj.props):
subtype = expected_type[name]
ctx.set_property(result, name, eval_rhs(context, val, subtype))
return result
@ -239,7 +242,7 @@ def eval_expr(context, expr, expected_type):
)
current_operator = operator.add
for el in expr.expr_tree:
if isinstance(el, basestring) and el in ['+', '-', '*', '/']:
if isinstance(el, six.string_types) and el in ['+', '-', '*', '/']:
current_operator = get_op_func(el)
else:
value = eval_rhs(context, el, expected_type)

View File

@ -23,6 +23,9 @@ import monasca_analytics.source.base as source
import monasca_analytics.voter.base as voter
import six
def into_old_conf_dict(components):
"""
Convert the provided dict of components
@ -37,20 +40,20 @@ def into_old_conf_dict(components):
return {
conf_const.INGESTORS:
dict(filter(lambda x: isinstance(x[1], ingestor.BaseIngestor),
components.iteritems())),
six.iteritems(components))),
conf_const.VOTERS:
dict(filter(lambda x: isinstance(x[1], voter.BaseVoter),
components.iteritems())),
six.iteritems(components))),
conf_const.SINKS:
dict(filter(lambda x: isinstance(x[1], sink.BaseSink),
components.iteritems())),
six.iteritems(components))),
conf_const.LDPS:
dict(filter(lambda x: isinstance(x[1], ldp.BaseLDP),
components.iteritems())),
six.iteritems(components))),
conf_const.SOURCES:
dict(filter(lambda x: isinstance(x[1], source.BaseSource),
components.iteritems())),
six.iteritems(components))),
conf_const.SMLS:
dict(filter(lambda x: isinstance(x[1], sml.BaseSML),
components.iteritems())),
six.iteritems(components))),
}

View File

@ -22,6 +22,9 @@ import monasca_analytics.util.string_util as strut
import pyparsing as p
import six
ASTNode = base.ASTNode
Span = base.Span
@ -112,7 +115,7 @@ class BananaFile(object):
def statements_to_str(self):
return "{ " + ', '.join(
map(lambda x: '{} = {}'.format(x[0], x[1]), self.statements)
['{} = {}'.format(x[0], x[1]) for x in self.statements]
) + ' }'
def __str__(self):
@ -134,7 +137,7 @@ def make_span(s, l, t):
for tok in tokens:
if isinstance(tok, ASTNode):
hi = max(hi, tok.span.hi)
elif isinstance(tok, basestring):
elif isinstance(tok, six.string_types):
hi += len(tok)
elif isinstance(tok, p.ParseResults):
hi = max(hi, compute_hi(init_loc, tok))
@ -266,12 +269,12 @@ class DotPath(ASTNode):
def into_unmodified_str(self):
arr = [self.varname.into_unmodified_str()]
arr.extend(map(lambda x: x.into_unmodified_str(), self.properties))
arr.extend([x.into_unmodified_str() for x in self.properties])
return '.'.join(arr)
def __str__(self):
arr = [str(self.varname)]
arr.extend(map(lambda x: str(x), self.properties))
arr.extend([str(x) for x in self.properties])
return 'DotPath< {} >'.format('.'.join(arr))
def __key(self):
@ -489,7 +492,8 @@ class Connection(ASTNode):
"""
def extend(into, iterable, what):
for other_thing in iterable:
if len(filter(lambda x: x.val == other_thing.val, into)) > 0:
if len(list(filter(lambda x: x.val == other_thing.val,
into))) > 0:
emitter.emit_warning(
other_thing.span,
"{} {} already present".format(
@ -561,8 +565,8 @@ class Connection(ASTNode):
def __str__(self):
res = "Connection<"
res += " {} ".format(map(lambda x: (str(x[0]), str(x[1])),
self.connections))
res += " {} ".format([(str(x[0]), str(x[1]))
for x in self.connections])
res += ">"
return res

View File

@ -87,7 +87,7 @@ class Span(object):
Returns a string that start at self and stops at to_span.
:type to_span: Span
:param to_span: Span to stop at.
:rtype: basestring
:rtype: six.string_types
:return: Returns the string encapsulating both
"""
return self._text[self.lo:to_span.hi]

View File

@ -122,7 +122,7 @@ def banana_grammar(emitter=emit.PrintEmitter()):
# TODO(Joan): Remove once it is no longer needed
def print_stmt(s, l, t):
print("\nPRINT AST")
print(l, map(lambda x: str(x), t))
print((l, [str(x) for x in t]))
print("END PRINT AST\n")
def action_unimplemented(s, l, t):

View File

@ -30,6 +30,9 @@ import monasca_analytics.exception.monanas as exception_monanas
import monasca_analytics.util.common_util as introspect
import six
def typeck(banana_file):
"""
Type-check the provided BananaFile instance.
@ -93,7 +96,7 @@ def typeck_jsonobj(json_obj, type_table):
"""
root_type = u.Object(strict_checking=False)
for k, v in json_obj.props.iteritems():
for k, v in six.iteritems(json_obj.props):
sub_type = u.create_object_tree(k, typeck_rhs(v, type_table))
u.attach_to_root(root_type, sub_type, json_obj.span)
@ -175,7 +178,7 @@ def typeck_expr(expr, type_table):
_type = check_type(_type, dotpath_type)
elif isinstance(el, ast.Expr):
_type = check_type(_type, typeck_expr(el, type_table))
elif isinstance(el, basestring):
elif isinstance(el, six.string_types):
if el not in allowed_symbol(_type):
raise exception.BananaUnknownOperator(expr.span, el, _type)
if el in ['-', '*', '/']:
@ -252,8 +255,9 @@ def typeck_component(component, type_table):
if all_named == 1:
for arg in component.args:
param = filter(lambda x: x.param_name == arg.arg_name.inner_val(),
comp_params)
param = list(filter(lambda x:
x.param_name == arg.arg_name.inner_val(),
comp_params))
if len(param) != 1:
raise exception.BananaComponentIncorrectParamName(
component=component.type_name,

View File

@ -22,6 +22,9 @@ import monasca_analytics.exception.banana as exception
import monasca_analytics.util.string_util as strut
import six
class TypeTable(object):
"""
Type table. Support lookup for JsonLike object.
@ -173,7 +176,7 @@ class TypeTable(object):
:return: Returns this type table as a dict.
"""
res = {}
for key, val in self._variables.iteritems():
for key, val in six.iteritems(self._variables):
res[key.inner_val()] = val.to_json()
return res
@ -182,12 +185,12 @@ class TypeTable(object):
Test if the type table contains or not the provided
path. This function is more permissive than the other two.
It will never raise any exception (or should aim not to).
:type key: basestring | ast.Ident | ast.DothPath
:type key: six.string_types | ast.Ident | ast.DothPath
:param key: The key to test.
:return: Returns True if the TypeTable contains a type for the
given path or identifier.
"""
if isinstance(key, basestring):
if isinstance(key, six.string_types):
return key in self._variables
if isinstance(key, ast.Ident):

View File

@ -188,7 +188,7 @@ def attach_to_root(root_obj, obj1, span, erase_existing=False):
:param erase_existing: Set to true if the root type should
always be erased.
"""
for key, child_type in obj1.props.iteritems():
for key, child_type in six.iteritems(obj1.props):
if key in root_obj.props:
root_sub_type = root_obj.props[key]
# Both are object -> recurse
@ -311,13 +311,13 @@ class Object(String):
def default_value(self):
default_value = {}
for key, val in self.props.iteritems():
for key, val in six.iteritems(self.props):
default_value[key] = val.default_value()
return default_value
def to_json(self):
res = {"id": "object", "props": {}}
for key, val in self.props.iteritems():
for key, val in six.iteritems(self.props):
res["props"][key] = val.to_json()
return res
@ -617,7 +617,7 @@ def can_be_cast_to(_type1, _type2):
if not _type2.strict_checking:
return True
else:
for prop_name, prop_type in _type2.props.iteritems():
for prop_name, prop_type in six.iteritems(_type2.props):
if prop_name not in _type1.props:
return False
if not can_be_cast_to(_type1.props[prop_name], prop_type):

View File

@ -19,6 +19,9 @@ import logging
from monasca_analytics.config import const
from monasca_analytics.util import common_util
import six
logger = logging.getLogger(__name__)
@ -51,7 +54,7 @@ def _create_comps_by_module(comp_type, _config):
"""
logger.debug("Creating components of type : " + comp_type)
ret = {}
for comp_id, comp_config in _config[comp_type].iteritems():
for comp_id, comp_config in six.iteritems(_config[comp_type]):
comp = _create_component_by_module(
comp_id, comp_config, comp_type)
ret[comp_id] = comp

View File

@ -18,6 +18,7 @@
import logging
import six
import voluptuous
from monasca_analytics.config import const
@ -94,7 +95,7 @@ def _validate_schema(config):
"""
config_schema = voluptuous.Schema({
"spark_config": {
"appName": basestring,
"appName": six.string_types[0],
"streaming": {
"batch_interval": voluptuous.And(int, voluptuous.Range(min=1))
}
@ -104,28 +105,34 @@ def _validate_schema(config):
"debug": bool
},
"sources": {
voluptuous.Optional(basestring): {basestring: object}
voluptuous.Optional(six.string_types[0]): {six.string_types[0]:
object}
},
"ingestors": {
voluptuous.Optional(basestring): {basestring: object}
voluptuous.Optional(six.string_types[0]): {six.string_types[0]:
object}
},
"smls": {
voluptuous.Optional(basestring): {basestring: object}
voluptuous.Optional(six.string_types[0]): {six.string_types[0]:
object}
},
"voters": {
voluptuous.Optional(basestring): {basestring: object}
voluptuous.Optional(six.string_types[0]): {six.string_types[0]:
object}
},
"sinks": {
voluptuous.Optional(basestring): {basestring: object}
voluptuous.Optional(six.string_types[0]): {six.string_types[0]:
object}
},
"ldps": {
voluptuous.Optional(basestring): {basestring: object}
voluptuous.Optional(six.string_types[0]): {six.string_types[0]:
object}
},
"connections": {
voluptuous.Optional(basestring): [basestring]
voluptuous.Optional(six.string_types[0]): [six.string_types[0]]
},
"feedback": {
voluptuous.Optional(basestring): [basestring]
voluptuous.Optional(six.string_types[0]): [six.string_types[0]]
}
}, required=True)
return config_schema(config)

View File

@ -17,6 +17,7 @@
import logging
import numpy as np
import six
import voluptuous
from monasca_analytics.ingestor import base
@ -35,7 +36,8 @@ class CloudIngestor(base.BaseIngestor):
@staticmethod
def validate_config(_config):
cloud_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter())
"module": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter())
}, required=True)
return cloud_schema(_config)

View File

@ -17,6 +17,7 @@
import logging
import numpy as np
import six
import voluptuous
from monasca_analytics.ingestor import base
@ -45,7 +46,8 @@ class IptablesIngestor(base.BaseIngestor):
@staticmethod
def validate_config(_config):
iptables_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter())
"module": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter())
}, required=True)
return iptables_schema(_config)

View File

@ -16,6 +16,7 @@
import logging
import six
import voluptuous
import monasca_analytics.ldp.base as bt
@ -32,7 +33,8 @@ class CloudCausalityLDP(bt.BaseLDP):
@staticmethod
def validate_config(_config):
cloud_causality_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter())
"module": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter())
}, required=True)
return cloud_causality_schema(_config)

View File

@ -16,6 +16,7 @@
import logging
import six
import voluptuous
import monasca_analytics.ingestor.iptables as ip_ing
@ -36,7 +37,8 @@ class IptablesLDP(bt.BaseLDP):
@staticmethod
def validate_config(_config):
iptables_ldp_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter())
"module": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter())
}, required=True)
return iptables_ldp_schema(_config)

View File

@ -19,6 +19,7 @@ import voluptuous
import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.component.params as params
import six
import monasca_analytics.ldp.base as bt
import monasca_analytics.ldp.monasca.helpers as helpers
@ -40,7 +41,8 @@ class MonascaAggregateLDP(bt.BaseLDP):
@staticmethod
def validate_config(_config):
monasca_ag_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"module": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter()),
"period": voluptuous.Or(float, int),
"func": voluptuous.Or(
"avg",
@ -123,25 +125,23 @@ class MonascaAggregateLDP(bt.BaseLDP):
# Collect all dimensions
dims = {}
for metric_dims in separated_metrics.keys():
for prop, val in dict(metric_dims).iteritems():
for prop, val in six.iteritems(dict(metric_dims)):
if prop in dims:
dims[prop].add(val)
else:
dims[prop] = set(val)
# Sort each metric
for _, metric in separated_metrics.iteritems():
for _, metric in six.iteritems(separated_metrics):
metric.sort(key=lambda v: v["metric"]["timestamp"])
separated_metrics = sorted(separated_metrics.values(), key=len)
separated_metrics = sorted(list(separated_metrics.values()), key=len)
separated_metrics.reverse()
# Compute the new values
new_values = []
all_timestamps = map(
lambda l: map(
lambda x: x["metric"]["timestamp"], l),
separated_metrics)
all_timestamps = [[x["metric"]["timestamp"] for x in l]
for l in separated_metrics]
metric_count = len(separated_metrics)
for index in range(0, len(separated_metrics[0])):
new_value = reducer[0](

View File

@ -20,6 +20,7 @@ import voluptuous
import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.component.params as params
import six
import monasca_analytics.ldp.base as bt
import monasca_analytics.ldp.monasca.helpers as helpers
@ -100,7 +101,7 @@ class MonascaCombineLDP(bt.BaseLDP):
if len(separated_metrics.keys()) != nb_of_metrics:
return []
separated_metrics = sorted(list(separated_metrics.iteritems()),
separated_metrics = sorted(list(six.iteritems(separated_metrics)),
key=lambda x: len(x[1]))
separated_metrics = separated_metrics # type: list[(str, list[dict])]
@ -109,10 +110,8 @@ class MonascaCombineLDP(bt.BaseLDP):
metric[1].sort(key=lambda v: v["metric"]["timestamp"])
temp_values = []
all_timestamp = map(
lambda l: map(
lambda x: x["metric"]["timestamp"], l[1]),
separated_metrics)
all_timestamp = [[x["metric"]["timestamp"] for x in l[1]]
for l in separated_metrics]
for index in range(0, len(separated_metrics[0][1])):
current_env = {
separated_metrics[0][0]:
@ -145,14 +144,15 @@ class MonascaCombineLDP(bt.BaseLDP):
@staticmethod
def validate_config(_config):
monasca_comb_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"metric": basestring,
"module": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter()),
"metric": six.string_types[0],
"period": voluptuous.And(
voluptuous.Or(float, int),
lambda i: i >= 0 and math.floor(i) == math.ceil(i)),
"lambda": basestring,
"lambda": six.string_types[0],
"bindings": {
basestring: voluptuous.Or(
six.string_types[0]: voluptuous.Or(
"apache.net.kbytes_sec",
"apache.net.requests_sec",
"apache.performance.cpu_load_perc",

View File

@ -20,6 +20,7 @@ import voluptuous
import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.component.params as params
import six
import monasca_analytics.ldp.base as bt
import monasca_analytics.ldp.monasca.helpers as helpers
@ -47,7 +48,8 @@ class MonascaDerivativeLDP(bt.BaseLDP):
@staticmethod
def validate_config(_config):
monasca_der_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"module": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter()),
# Derivative period in multiple of batch interval
"period": voluptuous.And(
voluptuous.Or(float, int),
@ -81,7 +83,7 @@ class MonascaDerivativeLDP(bt.BaseLDP):
return dstream.map(fn.from_json) \
.window(period, period) \
.map(lambda m: ((frozenset(
m["metric"]["dimensions"].items()),
list(m["metric"]["dimensions"].items())),
m["metric"]["name"]),
m)) \
.groupByKey() \
@ -106,8 +108,8 @@ class MonascaDerivativeLDP(bt.BaseLDP):
meta = metric_values.data[0]["meta"]
dims = metric_values.data[0]["metric"]["dimensions"]
# All values
timestamps = map(lambda m: m["metric"]["timestamp"], metric_values)
all_values = map(lambda m: m["metric"]["value"], metric_values)
timestamps = [m["metric"]["timestamp"] for m in metric_values]
all_values = [m["metric"]["value"] for m in metric_values]
# Sort values
all_values = [y for (_, y) in
sorted(zip(timestamps, all_values), key=lambda x: x[0])]

View File

@ -22,6 +22,9 @@ import monasca_analytics.banana.bytecode.assembler as asbl
import monasca_analytics.exception.banana as exception
import monasca_analytics.parsing.private as priv
import six
logger = logging.getLogger(__name__)
@ -44,7 +47,7 @@ class ExpressionParser(object):
def parse(self, string, code=asbl.Code()):
"""
Parse a given string and construct an Evaluator
:type string: basestring
:type string: six.string_types
:param string: String to parse.
:type code: ass.Code
:param code: Generated code will be written here.
@ -76,7 +79,7 @@ class ExpressionParser(object):
current_operator = None
pushed_one_stack_value = False
for child in subtree:
if isinstance(child, basestring):
if isinstance(child, six.string_types):
if priv.is_op(child):
current_operator = child
else:
@ -125,7 +128,7 @@ def create_fn_with_config(env, expr_string):
code(asbl.Local('__monanas__env'))
code.co_argcount = 1
# Create local variables
for key, value in env.iteritems():
for key, value in six.iteritems(env):
code(asbl.Call(
asbl.Getattr(
asbl.Local('__monanas__env'), 'get'),
@ -147,12 +150,12 @@ def validate_environment(env):
is expecting.
:param env: Environment spec
"""
for key, val in env.iteritems():
if not isinstance(key, basestring):
for key, val in six.iteritems(env):
if not isinstance(key, six.string_types):
raise exception.BananaEnvironmentError(
"{} is not a valid key (only string are)".format(key)
)
if not isinstance(val, basestring):
if not isinstance(val, six.string_types):
raise exception.BananaEnvironmentError(
"{} is not a valid value (only string are)".format(val)
)
@ -167,9 +170,9 @@ def validate_expression(expr_string):
name usage against an environment.
:raises: exception.BananaInvalidExpression
"""
if not isinstance(expr_string, basestring):
if not isinstance(expr_string, six.string_types):
raise exception.BananaArgumentTypeError(
expected_type=basestring,
expected_type=six.string_types[0],
received_type=type(expr_string)
)
parser = ExpressionParser()
@ -202,7 +205,7 @@ def validate_name_binding(expr_handle, environment):
:param subtree: subtree
"""
for child in subtree:
if isinstance(child, basestring):
if isinstance(child, six.string_types):
if priv.is_not_op(child):
names.add(child)
else:
@ -210,7 +213,7 @@ def validate_name_binding(expr_handle, environment):
names = set()
collect_names(expr_handle.tree)
for name in names:
if name not in environment.keys():
if name not in list(environment.keys()):
raise exception.BananaInvalidExpression(
"The expression '{}' can't be used with the provided "
"environment: '{}'. Reason: '{}' is not defined.".format(

View File

@ -17,10 +17,10 @@
import abc
import logging
import six
from six.moves import cPickle
import sqlite3
import time
import cPickle
from monasca_analytics.sink import base

View File

@ -26,6 +26,9 @@ import monasca_analytics.sink.base as base
from monasca_analytics.util import validation_utils as vu
import six
class FileSink(base.BaseSink):
"""Sink that prints the dstream to a file in the driver
@ -75,9 +78,10 @@ class FileSink(base.BaseSink):
@staticmethod
def validate_config(_config):
file_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"module": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter()),
"path": voluptuous.Or(
voluptuous.And(basestring, vu.ExistingPath()),
voluptuous.And(six.string_types[0], vu.ExistingPath()),
None)
}, required=True)
return file_schema(_config)

View File

@ -22,6 +22,9 @@ import monasca_analytics.sink.base_sqlite as base
from monasca_analytics.util import validation_utils as vu
import six
class IptablesSQLiteSink(base.BaseSQLiteSink):
"""IPTables SQLite Sink implementation."""
@ -52,8 +55,9 @@ class IptablesSQLiteSink(base.BaseSQLiteSink):
@staticmethod
def validate_config(_config):
iptables_sql_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"module": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter()),
voluptuous.Optional("db_name"): voluptuous.And(
basestring, vu.NoSpaceCharacter()),
six.string_types[0], vu.NoSpaceCharacter()),
}, required=True)
return iptables_sql_schema(_config)

View File

@ -22,17 +22,20 @@ import voluptuous
from monasca_analytics.util import validation_utils as vu
import six
def validate_kafka_sink_config(config):
"""Validates the KafkaSink configuration"""
config_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.AvailableSink()),
"module": voluptuous.And(six.string_types[0], vu.AvailableSink()),
"host": voluptuous.And(
basestring, vu.NoSpaceCharacter()),
six.string_types[0], vu.NoSpaceCharacter()),
"port": voluptuous.And(
voluptuous.Or(float, int),
lambda i: i >= 0 and math.floor(i) == math.ceil(i)),
"topic": voluptuous.And(
basestring, vu.NoSpaceCharacter())
six.string_types[0], vu.NoSpaceCharacter())
}, required=True)
return config_schema(config)

View File

@ -20,6 +20,9 @@ from monasca_analytics.sink import base
from monasca_analytics.util import validation_utils as vu
import six
class StdoutSink(base.BaseSink):
"""Sink that prints the dstream to stdout, using pprint command"""
@ -36,7 +39,8 @@ class StdoutSink(base.BaseSink):
@staticmethod
def validate_config(_config):
stdout_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter())
"module": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter())
}, required=True)
stdout_schema(_config)

View File

@ -17,6 +17,7 @@
import logging
import numpy as np
import six
from sklearn.metrics import classification_report
from sklearn import tree
import voluptuous
@ -41,8 +42,8 @@ class DecisionTreeClassifier(BaseSML):
@staticmethod
def validate_config(_config):
decisiontree_schema = voluptuous.Schema({
'module': voluptuous.And(
basestring, NoSpaceCharacter()),
'module': voluptuous.And(six.string_types[0],
NoSpaceCharacter()),
'nb_samples': voluptuous.Or(float, int)
}, required=True)
return decisiontree_schema(_config)

View File

@ -17,6 +17,7 @@
import logging
import numpy as np
import six
from sklearn import covariance
import voluptuous
@ -40,8 +41,8 @@ class EllipticEnvelope(BaseSML):
@staticmethod
def validate_config(_config):
elliptic_schema = voluptuous.Schema({
'module': voluptuous.And(
basestring, NoSpaceCharacter()),
'module': voluptuous.And(six.string_types[0],
NoSpaceCharacter()),
'nb_samples': voluptuous.Or(float, int)
}, required=True)
return elliptic_schema(_config)

View File

@ -17,6 +17,7 @@
import logging
import numpy as np
import six
from sklearn import ensemble
import voluptuous
@ -40,8 +41,8 @@ class IsolationForest(BaseSML):
@staticmethod
def validate_config(_config):
isolation_schema = voluptuous.Schema({
'module': voluptuous.And(
basestring, NoSpaceCharacter()),
'module': voluptuous.And(six.string_types[0],
NoSpaceCharacter()),
'nb_samples': voluptuous.Or(float, int)
}, required=True)
return isolation_schema(_config)

View File

@ -18,6 +18,7 @@ import logging
import math
import numpy as np
import six
from sklearn import decomposition
import voluptuous
@ -41,7 +42,8 @@ class LiNGAM(base.BaseSML):
@staticmethod
def validate_config(_config):
lingam_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"module": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter()),
"threshold": float
}, required=True)
return lingam_schema(_config)

View File

@ -17,6 +17,7 @@
import logging
import numpy as np
import six
from sklearn import linear_model
from sklearn.metrics import classification_report
import voluptuous
@ -41,8 +42,8 @@ class LogisticRegression(BaseSML):
@staticmethod
def validate_config(_config):
log_reg_schema = voluptuous.Schema({
'module': voluptuous.And(
basestring, NoSpaceCharacter()),
'module': voluptuous.And(six.string_types[0],
NoSpaceCharacter()),
'nb_samples': voluptuous.Or(float, int)
}, required=True)
return log_reg_schema(_config)

View File

@ -17,6 +17,7 @@
import logging
import numpy as np
import six
from sklearn import ensemble
from sklearn.metrics import classification_report
import voluptuous
@ -41,8 +42,8 @@ class RandomForestClassifier(BaseSML):
@staticmethod
def validate_config(_config):
randomforest_schema = voluptuous.Schema({
'module': voluptuous.And(
basestring, NoSpaceCharacter()),
'module': voluptuous.And(six.string_types[0],
NoSpaceCharacter()),
'nb_samples': voluptuous.Or(float, int)
}, required=True)
return randomforest_schema(_config)

View File

@ -17,6 +17,7 @@
import logging
import numpy as np
import six
from sklearn.metrics import classification_report
from sklearn import svm
import voluptuous
@ -41,8 +42,8 @@ class Svc(BaseSML):
@staticmethod
def validate_config(_config):
svc_schema = voluptuous.Schema({
'module': voluptuous.And(
basestring, NoSpaceCharacter()),
'module': voluptuous.And(six.string_types[0],
NoSpaceCharacter()),
'nb_samples': voluptuous.Or(float, int)
}, required=True)
return svc_schema(_config)

View File

@ -17,6 +17,7 @@
import logging
import numpy as np
import six
from sklearn import svm
import voluptuous
@ -43,7 +44,8 @@ class SvmOneClass(base.BaseSML):
@staticmethod
def validate_config(_config):
svm_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"module": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter()),
"nb_samples": voluptuous.Or(float, int)
}, required=True)
return svm_schema(_config)

View File

@ -28,6 +28,9 @@ import monasca_analytics.source.markov_chain.state_check as dck
import monasca_analytics.source.markov_chain.transition as tr
from monasca_analytics.util import validation_utils as vu
import six
logger = logging.getLogger(__name__)
@ -36,8 +39,8 @@ class CloudMarkovChainSource(base.MarkovChainSource):
@staticmethod
def validate_config(_config):
source_schema = voluptuous.Schema({
"module": voluptuous.And(
basestring, vu.NoSpaceCharacter()),
"module": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter()),
"min_event_per_burst": voluptuous.Or(float, int),
"sleep": voluptuous.And(
float, voluptuous.Range(
@ -81,7 +84,8 @@ class CloudMarkovChainSource(base.MarkovChainSource):
},
},
"graph": {
voluptuous.And(basestring, vu.ValidMarkovGraph()): [basestring]
voluptuous.And(six.string_types[0],
vu.ValidMarkovGraph()): [six.string_types[0]]
}
}, required=True)
return source_schema(_config)
@ -205,7 +209,7 @@ class CloudMarkovChainSource(base.MarkovChainSource):
support_node.dependencies.append(webs)
nodes[node_name] = webs
for k, v in graph.iteritems():
for k, v in six.iteritems(graph):
node_name, _ = k.split(":")
for depend_on in v:

View File

@ -28,6 +28,9 @@ import monasca_analytics.source.markov_chain.state_check as dck
import monasca_analytics.source.markov_chain.transition as tr
from monasca_analytics.util import validation_utils as vu
import six
logger = logging.getLogger(__name__)
STATE_STOP = "stop"
@ -67,7 +70,8 @@ class IPTablesSource(base.MarkovChainSource):
@staticmethod
def validate_config(_config):
source_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"module": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter()),
"sleep": voluptuous.And(
float,
voluptuous.Range(
@ -149,7 +153,7 @@ class IPTablesSource(base.MarkovChainSource):
and a little bit of ssh traffic
"""
tr = []
for iptable, feature in iptables.iteritems():
for iptable, feature in six.iteritems(iptables):
if feature.startswith("ssh"):
tr.append(self._create_trigger(0.1, STATE_NORMAL, iptable))
elif feature.startswith("http"):
@ -165,7 +169,7 @@ class IPTablesSource(base.MarkovChainSource):
but the ping traffic is dramatically increased
"""
tr = []
for iptable, feature in iptables.iteritems():
for iptable, feature in six.iteritems(iptables):
if feature.startswith("ssh"):
tr.append(self._create_trigger(0.1, STATE_ATTACK, iptable))
elif feature.startswith("http"):

View File

@ -25,6 +25,9 @@ import monasca_analytics.component.params as params
from monasca_analytics.source import base
from monasca_analytics.util import validation_utils as vu
import six
logger = logging.getLogger(__name__)
@ -34,13 +37,17 @@ class KafkaSource(base.BaseSource):
@staticmethod
def validate_config(_config):
source_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"module": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter()),
"params": {
"zk_host": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"zk_host": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter()),
"zk_port": int,
"group_id": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"group_id": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter()),
"topics": {
voluptuous.And(basestring, vu.NoSpaceCharacter()):
voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter()):
voluptuous.And(int, voluptuous.Range(min=1))
}
}

View File

@ -20,7 +20,7 @@ import itertools
import json
import logging
import six
import SocketServer
from six.moves import socketserver
import threading
import time
@ -74,7 +74,7 @@ class MarkovChainSource(base.BaseSource):
self._server_thread.join()
def _start_thread(self, system):
self._server = SocketServer.ThreadingTCPServer(
self._server = socketserver.ThreadingTCPServer(
("", 0), # Let the OS pick a port for us
FMSTCPHandler, # Handler of the requests
False)
@ -229,7 +229,7 @@ class StateNode(object):
dep.collect_events(hour_of_day, fake_date, request)
class FMSTCPHandler(SocketServer.BaseRequestHandler):
class FMSTCPHandler(socketserver.BaseRequestHandler):
"""A TCP server handler for the alert generation."""
def handle(self):

View File

@ -36,7 +36,7 @@ class ProbCheck(object):
success associated with it.
"""
if isinstance(prob, dict):
self._prob = prob.items()
self._prob = list(prob.items())
else:
self._prob = prob

View File

@ -20,6 +20,7 @@ import voluptuous
import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.component.params as params
import six
import monasca_analytics.source.markov_chain.base as base
import monasca_analytics.source.markov_chain.events as ev
@ -38,7 +39,8 @@ class MonascaMarkovChainSource(base.MarkovChainSource):
@staticmethod
def validate_config(_config):
markov_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"module": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter()),
"sleep": voluptuous.And(
float, voluptuous.Range(
min=0, max=1, min_included=False, max_included=False)),

View File

@ -20,7 +20,7 @@ import logging
import numpy as np
import random
import six
import SocketServer
from six.moves import socketserver
import threading as th
import time
import uuid
@ -54,7 +54,7 @@ class RandomSource(base.BaseSource):
The server object is configured according to
the configuration of this source module
"""
self._server = SocketServer.ThreadingTCPServer(
self._server = socketserver.ThreadingTCPServer(
(self._config["params"]["host"],
self._config["params"]["port"]),
MonanasTCPHandler, False)
@ -77,17 +77,21 @@ class RandomSource(base.BaseSource):
@staticmethod
def validate_config(_config):
source_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"module": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter()),
"params": {
"host": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"host": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter()),
"port": int,
"model": {
"name": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"name": voluptuous.And(six.string_types[0],
vu.NoSpaceCharacter()),
"params": {
"origin_types": voluptuous.And([
{
"origin_type": voluptuous.And(
basestring, vu.NoSpaceCharacter()),
six.string_types[0],
vu.NoSpaceCharacter()),
"weight": voluptuous.And(
voluptuous.Or(int, float),
voluptuous.Range(
@ -379,7 +383,7 @@ class UncorrelatedDataSourceGenerator(BaseDataSourceGenerator):
return i
class MonanasTCPHandler(SocketServer.BaseRequestHandler):
class MonanasTCPHandler(socketserver.BaseRequestHandler):
"""A TCP server handler for the alert generation."""
def handle(self):

View File

@ -168,12 +168,12 @@ def get_class_by_name(class_name, class_type=None):
"""
classes = get_available_classes(class_type)
if class_type:
clazz = filter(lambda t_class: t_class.__name__ == class_name,
classes[class_type])
clazz = list(filter(lambda t_class: t_class.__name__ == class_name,
classes[class_type]))
else:
for c_type in classes.keys():
clazz = filter(lambda t_class: t_class.__name__ == class_name,
classes[c_type])
clazz = list(filter(lambda t_class: t_class.__name__ == class_name,
classes[c_type]))
if clazz:
break
if not clazz:

View File

@ -14,6 +14,8 @@
# License for the specific language governing permissions and limitations
# under the License.
import six
def array_to_str(array, multiline=False, indent=None):
"""
@ -77,7 +79,7 @@ def dict_to_str(dictionary, multiline=False, indent=None):
if multiline:
res += "\n"
multiline = multiline or indent is not None
for k, v in sorted(dictionary.iteritems(), key=lambda ke: str(ke[0])):
for k, v in sorted(six.iteritems(dictionary), key=lambda ke: str(ke[0])):
if indent is not None:
res += " " * indent
if isinstance(v, dict):
@ -115,7 +117,7 @@ def stable_repr(obj):
res = "{"
for k, v in sorted(obj.iteritems(), key=lambda ke: str(ke[0])):
for k, v in sorted(six.iteritems(obj), key=lambda ke: str(ke[0])):
res += "{}: {}, ".format(repr(k), stable_repr(v))
res = res[0:-2]

View File

@ -22,6 +22,9 @@ import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.component.params as params
from monasca_analytics.voter import base
import six
logger = logging.getLogger(__name__)
@ -35,7 +38,8 @@ class PickIndexVoter(base.BaseVoter):
def validate_config(_config):
pick_schema = voluptuous.Schema({
"module": voluptuous.And(
basestring, lambda i: not any(c.isspace() for c in i)),
six.string_types[0],
lambda i: not any(c.isspace() for c in i)),
"index": voluptuous.And(
voluptuous.Or(float, int),
lambda i: i >= 0 and math.ceil(i) == math.floor(i)

View File

@ -19,6 +19,7 @@ import logging
import sys
import traceback
import six
from tornado import web
import voluptuous
@ -132,7 +133,7 @@ class BananaMetaDataHandler(web.RequestHandler):
def get(self):
all_components = introspect.get_available_classes()
result = {"components": []}
for kind, components in all_components.iteritems():
for kind, components in six.iteritems(all_components):
for component in components:
result["components"].append({
"name": component.__name__,

View File

@ -19,10 +19,14 @@
import voluptuous
import six
def action_model(value):
"""Validates the data against action_model schema."""
action_model_schema = voluptuous.Schema({
"action": voluptuous.And(basestring, lambda o: not o.startswith("_"))
"action": voluptuous.And(six.string_types[0],
lambda o: not o.startswith("_"))
}, required=True)
return action_model_schema(value)
@ -31,7 +35,7 @@ def action_model(value):
def banana_model(value):
"""Validates the data against the banana_model schema."""
banana_model_schema = voluptuous.Schema({
"content": basestring
"content": six.string_types[0]
}, required=True)
return banana_model_schema(value)

View File

@ -52,7 +52,7 @@ class DeadPathTestCase(MonanasTestCase):
self.assertEqual(emitter.nb_errors, 0)
self.assertEqual(emitter.nb_warnings, 4)
self.assertEqual(len(ast.components), 0)
self.assertEqual(len(ast.connections.connections), 0)
self.assertEqual(len(list(ast.connections.connections)), 0)
def test_banana_should_remove_one(self):
banana_str = "" +\
@ -72,7 +72,7 @@ class DeadPathTestCase(MonanasTestCase):
self.assertEqual(emitter.nb_errors, 0)
self.assertEqual(emitter.nb_warnings, 1)
self.assertEqual(len(ast.components), 3)
self.assertEqual(len(ast.connections.connections), 2)
self.assertEqual(len(list(ast.connections.connections)), 2)
def test_banana_should_not_remove_anything(self):
banana_str = "" +\
@ -92,7 +92,7 @@ class DeadPathTestCase(MonanasTestCase):
self.assertEqual(emitter.nb_errors, 0)
self.assertEqual(emitter.nb_warnings, 0)
self.assertEqual(len(ast.components), 4)
self.assertEqual(len(ast.connections.connections), 3)
self.assertEqual(len(list(ast.connections.connections)), 3)
class CustomEmitter(emit.Emitter):

View File

@ -17,6 +17,7 @@
import json
import logging.config
import os
import six
import unittest
from monasca_analytics.banana.cli import const
@ -74,50 +75,50 @@ class TestMonanasDSL(unittest.TestCase):
def test_parse_create(self):
info = parser.get_parser().parseString("A = my_module")
self.assertEqual(1, len(info))
self.assertItemsEqual([const.CREATE], info[0].keys())
six.assertCountEqual(self, [const.CREATE], info[0].keys())
self.assert_create(info[0], "A", "my_module")
def test_parse_modify(self):
info = parser.get_parser().parseString("_A1.params.123._inf- = my.val")
self.assertEqual(1, len(info))
self.assertItemsEqual([const.MODIFY], info[0].keys())
six.assertCountEqual(self, [const.MODIFY], info[0].keys())
self.assert_modify(info[0], "_A1", ["params", "123", "_inf-"],
"my.val")
def test_parse_connect(self):
info = parser.get_parser().parseString("_A1->B")
self.assertEqual(1, len(info))
self.assertItemsEqual([const.CONNECT], info[0].keys())
six.assertCountEqual(self, [const.CONNECT], info[0].keys())
self.assert_connect(info[0], "_A1", "B")
def test_parse_disconnect(self):
info = parser.get_parser().parseString("_A1!->B")
self.assertEqual(1, len(info))
self.assertItemsEqual([const.DISCONNECT], info[0].keys())
six.assertCountEqual(self, [const.DISCONNECT], info[0].keys())
self.assert_disconnect(info[0], "_A1", "B")
def test_parse_remove(self):
info = parser.get_parser().parseString("rM(A)")
self.assertEqual(1, len(info))
self.assertItemsEqual([const.REMOVE], info[0].keys())
six.assertCountEqual(self, [const.REMOVE], info[0].keys())
self.assert_remove(info[0], "A")
def test_parse_load(self):
info = parser.get_parser().parseString("LoAd(_some/path/123.json)")
self.assertEqual(1, len(info))
self.assertItemsEqual([const.LOAD], info[0].keys())
six.assertCountEqual(self, [const.LOAD], info[0].keys())
self.assert_load(info[0], "_some/path/123.json")
def test_parse_save(self):
info = parser.get_parser().parseString("sAVe()")
self.assertEqual(1, len(info))
self.assertItemsEqual([const.SAVE], info[0].keys())
six.assertCountEqual(self, [const.SAVE], info[0].keys())
self.assert_save(info[0])
def test_parse_save_as(self):
info = parser.get_parser().parseString("sAVE(/root/0/path_/f.conf)")
self.assertEqual(1, len(info))
self.assertItemsEqual([const.SAVE_AS], info[0].keys())
six.assertCountEqual(self, [const.SAVE_AS], info[0].keys())
self.assert_save_as(info[0], "/root/0/path_/f.conf")
def test_parse_multiline(self):

View File

@ -36,7 +36,7 @@ class TestIptablesLDP(MonanasTestCase):
"id": "1"
}
}]
self.raw_events = map(lambda x: x["event"], self.rdd_entry)
self.raw_events = [x["event"] for x in self.rdd_entry]
self.ip_ldp = iptables_ldp.IptablesLDP("fake_id",
{"module": "fake_config"})

View File

@ -84,29 +84,29 @@ class TestMonascaAggregateLDP(MonanasTestCase):
def test_aggregate_with_avg(self):
reducer = MonascaAggregateLDP.select_reducer(self._conf("avg"))
res = MonascaAggregateLDP.aggregate(self.all_metrics, reducer, "_avg")
res = map(lambda m: m["metric"]["value"], res)
res = [m["metric"]["value"] for m in res]
self.assertEqual(res, [1.25, 2.5])
def test_aggregate_with_min(self):
reducer = MonascaAggregateLDP.select_reducer(self._conf("min"))
res = MonascaAggregateLDP.aggregate(self.all_metrics, reducer, "_min")
res = map(lambda m: m["metric"]["value"], res)
res = [m["metric"]["value"] for m in res]
self.assertEqual(res, [1.0, 2.0])
def test_aggregate_with_max(self):
reducer = MonascaAggregateLDP.select_reducer(self._conf("max"))
res = MonascaAggregateLDP.aggregate(self.all_metrics, reducer, "_max")
res = map(lambda m: m["metric"]["value"], res)
res = [m["metric"]["value"] for m in res]
self.assertEqual(res, [1.5, 3.0])
def test_aggregate_with_sum(self):
reducer = MonascaAggregateLDP.select_reducer(self._conf("sum"))
res = MonascaAggregateLDP.aggregate(self.all_metrics, reducer, "_sum")
res = map(lambda m: m["metric"]["value"], res)
res = [m["metric"]["value"] for m in res]
self.assertEqual(res, [2.5, 5.0])
def test_aggregate_with_cnt(self):
reducer = MonascaAggregateLDP.select_reducer(self._conf("cnt"))
res = MonascaAggregateLDP.aggregate(self.all_metrics, reducer, "_cnt")
res = map(lambda m: m["metric"]["value"], res)
res = [m["metric"]["value"] for m in res]
self.assertEqual(res, [2, 2])

View File

@ -37,19 +37,19 @@ class TestMonascaAggregateLDP(MonanasTestCase):
def test_combine_for_two_metric_product(self):
fn = create_fn_with_config({"a": "nb_cores", "b": "idl_perc"}, "a * b")
res = MonascaCombineLDP.combine(self.all_metrics, fn, "cpu_usage", 2)
res = map(lambda m: m["metric"]["value"], res)
res = [m["metric"]["value"] for m in res]
self.assertEqual(res, [0.24, 1.6])
def test_combine_for_two_metric_sum(self):
fn = create_fn_with_config({"a": "nb_cores", "b": "idl_perc"},
"b - a")
res = MonascaCombineLDP.combine(self.all_metrics, fn, "cpu_usage", 2)
res = map(lambda m: m["metric"]["value"], res)
res = [m["metric"]["value"] for m in res]
self.assertEqual(res, [-1.0, -1.2])
def test_combine_for_two_metric_some_expr(self):
fn = create_fn_with_config({"a": "nb_cores", "b": "idl_perc"},
"a * b - a + b")
res = MonascaCombineLDP.combine(self.all_metrics, fn, "cpu_usage", 2)
res = map(lambda m: m["metric"]["value"], res)
res = [m["metric"]["value"] for m in res]
self.assertEqual(res, [-0.76, 0.40000000000000013])

View File

@ -36,7 +36,7 @@ class TestMonascaAggregateLDP(MonanasTestCase):
]
def _values(self, values):
return map(lambda m: m["metric"]["value"], values)
return [m["metric"]["value"] for m in values]
def tearDown(self):
super(TestMonascaAggregateLDP, self).tearDown()

View File

@ -18,8 +18,9 @@ import os
import sqlite3
import unittest
import cPickle
import numpy as np
import six
from six.moves import cPickle
import voluptuous
import monasca_analytics.banana.typeck.type_util as type_util
@ -61,9 +62,11 @@ class BaseSQLiteSinkDummyExtension(bsql.BaseSQLiteSink):
def validate_config(_config):
base_schema = voluptuous.Schema({
"module": voluptuous.And(
basestring, lambda i: not any(c.isspace() for c in i)),
six.string_types[0],
lambda i: not any(c.isspace() for c in i)),
voluptuous.Optional("db_name"): voluptuous.And(
basestring, lambda i: not any(c.isspace() for c in i)),
six.string_types[0],
lambda i: not any(c.isspace() for c in i)),
}, required=True)
return base_schema(_config)
@ -103,7 +106,10 @@ class TestSQLiteSink(unittest.TestCase):
c.execute('SELECT sml FROM smls WHERE voter_id = "' +
voter_id + '"')
fetched_sml = c.fetchone()
fetched_sml = cPickle.loads(str(fetched_sml[0]))
if six.PY2:
fetched_sml = cPickle.loads(str(fetched_sml[0]))
else:
fetched_sml = cPickle.loads(fetched_sml[0])
self.assertEqual(len(sml), len(fetched_sml))
self.assertTrue((sml == fetched_sml).all())

View File

@ -20,6 +20,9 @@ from monasca_analytics.source import iptables_markov_chain
from test.util_for_testing import MonanasTestCase
import six
class TestIPTablesSource(MonanasTestCase):
def setUp(self):
@ -77,7 +80,7 @@ class TestIPTablesSource(MonanasTestCase):
self.assertEqual(len(node._triggers), num_triggers)
self.assertEqual(len(node._markov_chain._transitions),
len(states_transitions.keys()))
for state, num_transitions in states_transitions.iteritems():
for state, num_transitions in six.iteritems(states_transitions):
self.assertEqual(len(node._markov_chain._transitions[state]),
num_transitions)

View File

@ -22,7 +22,7 @@ import monasca_analytics.spark.driver as driver
import monasca_analytics.util.common_util as cu
from test.mocks import sml_mocks
from test.mocks import spark_mocks
from util_for_testing import MonanasTestCase
from test.util_for_testing import MonanasTestCase
class MonanasTest(MonanasTestCase):

View File

@ -15,7 +15,7 @@
# under the License.
import run
from util_for_testing import MonanasTestCase
from test.util_for_testing import MonanasTestCase
class ParserTest(MonanasTestCase):

View File

@ -15,6 +15,7 @@
# under the License.
import os
import six
import unittest
from monasca_analytics.config import const
@ -34,37 +35,37 @@ class CommonUtilTest(unittest.TestCase):
test_json_file = os.path.join(current_dir,
"../resources/test_json.json")
parsed_json = common_util.parse_json_file(test_json_file)
self.assertItemsEqual(parsed_json["sources"]["src1"],
{"module": "src_module1",
"params": {
"param1": "val1",
"param2": "val2",
"model_id": 3}
})
self.assertItemsEqual(parsed_json["ingestors"]["ing1"],
{"module": "ingestor_module"})
self.assertItemsEqual(parsed_json["smls"]["sml1"],
{"module": "sml_module"})
six.assertCountEqual(self, parsed_json["sources"]["src1"],
{"module": "src_module1",
"params": {
"param1": "val1",
"param2": "val2",
"model_id": 3}
})
six.assertCountEqual(self, parsed_json["ingestors"]["ing1"],
{"module": "ingestor_module"})
six.assertCountEqual(self, parsed_json["smls"]["sml1"],
{"module": "sml_module"})
self.assertEqual(parsed_json["voters"]["vot1"],
{"module": "voter_module"})
self.assertItemsEqual(parsed_json["sinks"]["snk1"],
{"module": "sink_module1"})
self.assertItemsEqual(parsed_json["sinks"]["snk2"],
{"module": "sink_module2"})
self.assertItemsEqual(parsed_json["ldps"]["ldp1"],
{"module": "ldps_module1"})
self.assertItemsEqual(parsed_json["connections"],
{"src1": ["ing1"],
"src2": ["ing1"],
"ing1": ["aggr1", "ldp1", "sin1"],
"snk1": [],
"snk2": [],
"sml1": ["vot1", "snk1"],
"vot1": ["ldp1", "snk1"],
"ldp1": ["snk2"]})
self.assertItemsEqual(parsed_json["feedback"],
{"snk1": ["sml1"],
"snk2": ["vot1"]})
six.assertCountEqual(self, parsed_json["sinks"]["snk1"],
{"module": "sink_module1"})
six.assertCountEqual(self, parsed_json["sinks"]["snk2"],
{"module": "sink_module2"})
six.assertCountEqual(self, parsed_json["ldps"]["ldp1"],
{"module": "ldps_module1"})
six.assertCountEqual(self, parsed_json["connections"],
{"src1": ["ing1"],
"src2": ["ing1"],
"ing1": ["aggr1", "ldp1", "sin1"],
"snk1": [],
"snk2": [],
"sml1": ["vot1", "snk1"],
"vot1": ["ldp1", "snk1"],
"ldp1": ["snk2"]})
six.assertCountEqual(self, parsed_json["feedback"],
{"snk1": ["sml1"],
"snk2": ["vot1"]})
def test_get_class_by_name(self):
common_util.get_class_by_name("RandomSource", const.SOURCES)
@ -79,9 +80,9 @@ class CommonUtilTest(unittest.TestCase):
children = common_util.get_available_inherited_classes(util,
inh.Baseclass)
classes = [source_class.__name__ for source_class in children]
self.assertItemsEqual(classes,
["Extended_1_1", "Extended_1_2",
"Extended_1_3", "Extended_2_1", "Extended_3_1"])
six.assertCountEqual(self, classes,
["Extended_1_1", "Extended_1_2",
"Extended_1_3", "Extended_2_1", "Extended_3_1"])
def test_get_source_class_by_name(self):
clazz = common_util.get_source_class_by_name("KafkaSource")
@ -89,7 +90,8 @@ class CommonUtilTest(unittest.TestCase):
def test_get_available_source_class_names(self):
names = common_util.get_available_source_class_names()
self.assertItemsEqual(
six.assertCountEqual(
self,
['RandomSource', 'KafkaSource',
'CloudMarkovChainSource', 'IPTablesSource',
'MonascaMarkovChainSource'],
@ -97,7 +99,8 @@ class CommonUtilTest(unittest.TestCase):
def test_get_available_ingestor_class_names(self):
names = common_util.get_available_ingestor_class_names()
self.assertItemsEqual(
six.assertCountEqual(
self,
['CloudIngestor', 'IptablesIngestor'],
names)
@ -108,7 +111,8 @@ class CommonUtilTest(unittest.TestCase):
def test_get_available_sml_class_names(self):
names = common_util.get_available_sml_class_names()
self.assertItemsEqual(
six.assertCountEqual(
self,
['LiNGAM',
'SvmOneClass',
'IsolationForest',
@ -126,11 +130,11 @@ class CommonUtilTest(unittest.TestCase):
def test_get_available_voter_class_names(self):
names = common_util.get_available_voter_class_names()
self.assertItemsEqual(["PickIndexVoter"], names)
six.assertCountEqual(self, ["PickIndexVoter"], names)
def test_get_available_ldp_class_names(self):
names = common_util.get_available_ldp_class_names()
self.assertItemsEqual([
six.assertCountEqual(self, [
"CloudCausalityLDP", "IptablesLDP",
'MonascaDerivativeLDP', 'MonascaAggregateLDP',
'MonascaCombineLDP'

View File

@ -59,7 +59,7 @@ class TestConfigModel(MonanasTestCase):
self.config = self.get_config()
def test_validate_config_missing_spark_key(self):
for key in self.config["spark_config"].keys():
for key in list(self.config["spark_config"].keys()):
del self.config["spark_config"][key]
self.assertRaises(voluptuous.Invalid,
validation.validate_config, self.config)