This commit introduces the first version of Banana configuration language.

As of this commit, to change the configuration using Banana, we
need to make an HTTP POST request to `/banana` REST API. This API is
temporary and is likely to be changed later.

The implementation is done entirely in the `banana` module. Under this
module there are:

 * `typeck` module contains the type checker,
 * `grammar` module contains the parser and the AST and,
 * `eval` module contains the interpreter.

Additionally, a test framework has been created to make it easier to test
particular behaviors of the language.

Within the banana module, there is a README.md file for each associated
sub-module explaining further the details of the language.

Once this commit is merged, there's still a lot that can be improved:

 - All components should be tested in Banana.
 - The 'deadpathck' pass could be improved (see TODO)
 - We don't support generated JSON ingestors yet.
 - Imports will be key for reusability (not implemented).

Change-Id: I1305bdfa0606f30619b31404afbe0acf111c029f
This commit is contained in:
Joan Varvenne 2016-07-14 17:15:55 +01:00
parent 754e90cf09
commit 5812bd8429
147 changed files with 5818 additions and 489 deletions

View File

@ -0,0 +1,16 @@
##############################
# IP Tables anomalies
#
# Shared setting: sleep delay (seconds) reused by the source below.
sleep = 0.01
# Pipeline components.
src = IPTablesSource(sleep=sleep)
ing1 = IptablesIngestor()
svm = SvmOneClass()
voter = PickIndexVoter(0)
ldp1 = IptablesLDP()
stdout = StdoutSink()
sqlite = IptablesSQLiteSink()
# Connections: the source feeds both the ingestor and the live data
# processor; curated data flows through the SML algorithm and voter
# back into the LDP, which writes to both sinks.
src -> [ing1, ldp1]
ing1 -> svm -> voter -> ldp1
ldp1 -> [sqlite, stdout]

View File

@ -12,9 +12,7 @@
"sources": {
"src1": {
"module": "IPTablesSource",
"params": {
"server_sleep_in_seconds": 0.01
}
"sleep": 0.01
}
},
"ingestors": {
@ -30,9 +28,7 @@
"voters": {
"vot1": {
"module": "PickIndexVoter",
"params": {
"index": 0
}
"index": 0
}
},
"sinks": {

View File

@ -0,0 +1,57 @@
##############################
# Markov source config example
# (cloud-like data model)
#
src = CloudMarkovChainSource(sleep=0.01)
src.transitions.web_service = {
"run=>slow": {
"0": 0.001,
"8": 0.02,
"12": 0.07,
"14": 0.07,
"22": 0.03,
"24": 0.00
},
"slow=>run": {
"0": 0.99,
"8": 0.7,
"12": 0.1,
"14": 0.1,
"22": 0.8,
"24": 0.9
},
"stop=>run": 0.7
}
src.transitions.host = {
"on=>off": 0.005,
"off=>on": 0.5
}
src.transitions.switch = {
"on=>off": 0.01,
"off=>on": 0.7
}
src.triggers.support = {
"get_called" : {
"0": 0.1,
"8": 0.2,
"12": 0.8,
"14": 0.8,
"22": 0.5,
"24": 0.0
}
}
ing1 = CloudIngestor()
ling = LiNGAM(threshold=0.5)
voter = PickIndexVoter(0)
sink = KafkaSink(host="localhost", port=9092, topic="transformed_alerts")
ldp = CloudCausalityLDP()
# Connections
src -> [ing1 -> ling, ldp]
ling -> voter -> ldp -> sink

View File

@ -12,9 +12,7 @@
"sources": {
"src1": {
"module": "CloudMarkovChainSource",
"params": {
"server_sleep_in_seconds": 0.01
},
"sleep": 0.01,
"transitions": {
"web_service": {
"run=>slow": {
@ -74,27 +72,21 @@
"smls": {
"sml1": {
"module": "LiNGAM",
"params": {
"threshold": 0.5
}
"threshold": 0.5
}
},
"voters": {
"vot1": {
"module": "PickIndexVoter",
"params": {
"index": 0
}
"index": 0
}
},
"sinks": {
"snk1": {
"module": "KafkaSink",
"params": {
"host": "localhost",
"port": 9092,
"topic": "transformed_alerts"
}
"host": "localhost",
"port": 9092,
"topic": "transformed_alerts"
}
},
"ldps": {

View File

@ -0,0 +1,30 @@
#######################
# Metric experiments
#
# Sources
src = MonascaMarkovChainSource(sleep=0.01)
# Sinks
stdout = StdoutSink()
file = FileSink(path="~/monasca-aggregate.log")
# Live data processors
period = 0.1 * 2
aggregator = MonascaAggregateLDP(func="cnt", period=period)
combiner = MonascaCombineLDP(
metric= "cpu.logical_cores_actives",
bindings= {
a: "cpu.idle_perc",
b: "cpu.total_logical_cores",
},
lambda= "a * b",
period= period
)
derivator = MonascaDerivativeLDP(period=period)
# Connections
src -> aggregator -> stdout
src -> [combiner, derivator] -> stdout
[combiner, derivator] -> file

View File

@ -12,9 +12,7 @@
"sources": {
"src1": {
"module": "MonascaMarkovChainSource",
"params": {
"server_sleep_in_seconds": 0.01
}
"sleep": 0.01
}
},
"ingestors": {},
@ -26,36 +24,28 @@
},
"snk3": {
"module": "FileSink",
"params": {
"path": "~/monasca-aggregate.log"
}
"path": "~/monasca-aggregate.log"
}
},
"ldps": {
"ldp3": {
"module": "MonascaAggregateLDP",
"params": {
"aggregation_period": 2,
"aggregation_function": "cnt"
}
"period": 2,
"func": "cnt"
},
"ldp4": {
"module": "MonascaCombineLDP",
"params": {
"metric_name": "cpu.logical_cores_actives",
"combine_period": 1,
"lambda": "a * b",
"metric_names_binding": {
"a": "cpu.idle_perc",
"b": "cpu.total_logical_cores"
}
"metric": "cpu.logical_cores_actives",
"period": 1,
"lambda": "a * b",
"bindings": {
"a": "cpu.idle_perc",
"b": "cpu.total_logical_cores"
}
},
"ldp5": {
"module": "MonascaDerivativeLDP",
"params": {
"derivative_period": 1
}
"period": 1
}
},
"connections": {

View File

@ -0,0 +1,13 @@
##############################
# Monasca aggregate all functions
src = MonascaMarkovChainSource(sleep=0.01)
snk = KafkaSink(host="localhost", port=9092, topic="experiments")
stdout = StdoutSink()
ldp1 = MonascaAggregateLDP(func="cnt")
ldp2 = MonascaAggregateLDP(func="max")
ldp3 = MonascaAggregateLDP(func="min")
ldp4 = MonascaAggregateLDP(func="avg")
src -> [ldp1, ldp2, ldp3, ldp4] -> [snk, stdout]

View File

@ -12,9 +12,7 @@
"sources": {
"src1": {
"module": "MonascaMarkovChainSource",
"params": {
"server_sleep_in_seconds": 0.01
}
"sleep": 0.01
}
},
"ingestors": {},
@ -23,11 +21,9 @@
"sinks": {
"snk1": {
"module": "KafkaSink",
"params": {
"host": "localhost",
"port": 9092,
"topic": "monasca_experiments"
}
"host": "localhost",
"port": 9092,
"topic": "monasca_experiments"
},
"snk2": {
"module": "StdoutSink"
@ -36,31 +32,23 @@
"ldps": {
"ldp1": {
"module": "MonascaAggregateLDP",
"params": {
"aggregation_period": 2,
"aggregation_function": "max"
}
"period": 2,
"func": "max"
},
"ldp2": {
"module": "MonascaAggregateLDP",
"params": {
"aggregation_period": 2,
"aggregation_function": "min"
}
"period": 2,
"func": "min"
},
"ldp3": {
"module": "MonascaAggregateLDP",
"params": {
"aggregation_period": 2,
"aggregation_function": "avg"
}
"period": 2,
"func": "avg"
},
"ldp4": {
"module": "MonascaAggregateLDP",
"params": {
"aggregation_period": 2,
"aggregation_function": "sum"
}
"period": 2,
"func": "sum"
}
},
"connections": {

View File

@ -15,11 +15,11 @@
# under the License.
import json
from logging import config as log_conf
import os
import six
from logging import config as log_conf
from monasca_analytics.dsl import interpreter
from monasca_analytics.banana.cli import interpreter
DEFAULT_LOGGING_CONFIG_FILE = "config/logging.json"
@ -35,11 +35,14 @@ def setup_logging():
def main():
setup_logging()
print(">>>>> DEPRECATED TOOL <<<<<")
print(">>>>> Use the banana language instead <<<<<")
print("")
print("Welcome to Monanas config command line")
print("Type help for help about commands")
inter = interpreter.DSLInterpreter()
cmd = ""
while("exit" != cmd.lower()):
while "exit" != cmd.lower():
cmd = six.moves.input(">> ")
if cmd != "":
try:

141
doc/banana.md Normal file
View File

@ -0,0 +1,141 @@
# Banana: a configuration language for Monanas
Welcome to Banana, a configuration language for Monanas. The language is the
key ingredient of "recipes": reusable descriptions that users can tailor to
their use cases. In other words, Banana allows us to write recipes that will
be ingested by Monanas.
The language is fully typed. It uses type inference to avoid having to add any
typing annotation. It is still in its early stages, so more features will be
added to solve common problems.
> Note: a valid `banana` recipe (or file) might still contain errors that
> can only be discovered at run-time. The type system will remove
> most of them, though.
To get you started, we provide an example below, which would allow us to
understand most parts of the language.
> TODO: Once we have a specified interface, we should use it instead as it will
> provide syntax highlighting, in-editor error indication as well as other
> features such as autocompletion.
## Part 1: Creating components
Here is how we create a component:
```python
# Create an IPTablesSource with sleep parameter set to 0.01
src = IPTablesSource(sleep=0.01)
```
We could also create a component without any parameter. In that case, each
parameter is initialized with a default value.
In order to get something interesting we first create the following components:
```python
src = IPTablesSource(sleep=0.01)
ing1 = IptablesIngestor()
svm = SvmOneClass()
voter = PickIndexVoter(0)
ldp1 = IptablesLDP()
stdout = StdoutSink()
sqlite = IptablesSQLiteSink()
```
## Part 2: Connecting components
Connections can be placed anywhere in the file. They will always be processed
after everything else.
We have created seven components so far, but note that some components can only
be connected to certain types of components. For instance, a source can only
be connected to an ingestor or a live data processor. However, you can't
connect it to a statistical or machine learning algorithm, as those need to get
curated data only. Try to add the following line:
```py
src -> alg
```
You should see an error, saying that this is not possible:
```
Error: Can't connect src to alg, src can only be connected to Ingestor.
```
What we want is to have those connections:
```
+---------+ +---------+ +---------+ +---------+ +---------+ +------------+
| src +-----> | ing1 +------> | alg +-----> | vot +---> | ldp +-----> | stdout |
+----+----+ +---------+ +---------+ +---------+ +----+----+ +------------+
| ^
| |
+----------------------------------------------------------------------+
```
Here is how we can achieve that:
```
src -> [ing1 -> alg -> vot -> ldp, ldp]
ldp -> stdout
```
We could also write it like this:
```
src -> ing1 -> alg -> vot -> ldp
src -> ldp -> stdout
```
Or like this:
```
src -> ing1
src -> ldp
ing1 -> alg
alg -> vot
vot -> ldp
ldp -> stdout
```
The main difference is readability and this is subjective. Use the version that
you think is more readable.
Banana will treat all of them as being semantically identical.
## Part 3: Sharing settings between components
From what we described above, it is possible that we could end up with many
similar parameters across components. It would be great if we could share them.
In Banana we can declare a variable not only for components, but also for
`string`, `number` and json-like `object`.
For instance, this is valid in banana:
```python
sleep = 0.01
# ...
```
You can also make use of arithmetic operators to perform any computation you
might require with previously declared variables or anything else.
Some global variables, defined by the execution environment are also available.
For instance, you could define `sleep` like this:
```python
sleep = 0.01 / $spark.BATCH_INTERVAL
# ...
```
> TODO: the above feature has not yet been implemented.
Finally, Banana supports string interpolation to mix many types together:
```python
port = 9645
host = "localhost:" + port
```

187
doc/cli.md Normal file
View File

@ -0,0 +1,187 @@
# Command line interface to generate a JSON configuration
A simple command line tool has been implemented in order to manage
configurations in an easier way. It is not expected to be maintained in the
long term. It is introduced to experiment with the creation of `banana`, the
configuration language of Monanas.
This section explains what operations are currently available and how to use
them.
> NOTE: Please refer to [Monasca/Configuration](configuration.md) for the
> structure of the JSON configuration.
> NOTE: This tool is DEPRECATED, use [BANANA](banana.md) configuration language
> instead.
## Usage
* Start the Monanas cli
```bash
python $MONANAS_HOME/config_dsl.py
```
After running this command, a simple empty configuration will be created. You
can then modify it by running the commands listed below:
## Available commands
You can run the following operations from the CLI in order to create, remove, connect,
disconnect and modify components from the configuration.
### Create component
Create a component and assign it to a variable:
```python
>> A = IPTablesSource
```
This command adds a new source of type `IPTablesSource` to the configuration,
assigning it with a default configuration. It links the source component to
variable A and returns a unique ID. You can either use the variable or the ID
to refer to the instance of the `IPTablesSource` created.
### Remove component
Remove a component using an ID or a variable name:
```python
>> rm A
```
This command removes the component referenced by `A` from the configuration.
The parameter can either be a variable or an ID associated to a component
in the configuration. The component will only be removed if it is not connected
to any other component.
### Connect components
Connect two components in the configuration:
```python
>> A -> B
```
This command connects the component referenced by `A` with the component
referenced by `B`. Both `A` and `B` can be variables or IDs, and the connection
is directional from `A` to `B`. The connection is valid and considered only if
the associated components exist and allowed for connection. For example,
connecting a source with an ingestor is allowed, but connecting a source with
a voter is not.
### Disconnect components
Disconnect two components in the configuration:
```python
>> A !-> B
```
This command disconnects the component `A` from component `B`. Both `A` and `B`
can be variables or IDs and the connection is directional from `A` to `B`. If
the connection doesn't exist, nothing will happen.
### Modify component
Modify values of the configuration of a component:
```python
>> A.params.subparam1.subparam2 = value
```
This command modifies the value of the configuration parameter at the end of
the path defined by a dot notation. The configuration is validated before being
modified; hence, if the modification results in an invalid configuration, it
will not be executed. `A` can either be a variable or an ID.
## Config presentation operations
The following operations can be run using the tool in order to view the
current configuration, sub-configurations, and available components that can be
instantiated.
### Print
Print the full configuration:
```python
>> print
```
This command displays the full configuration in JSON format on the screen.
Print component type sub-configuration:
```python
>> print connections
```
If a parameter is passed to the print command that corresponds to a component
type, or in general, a first level key of the configuration, only the relevant
sub-configuration that is selected will be displayed on the screen.
Print a particular component sub-configuration:
```python
>> print A
```
If a parameter is passed to the print command that corresponds to a variable
or an ID associated to a particular component, only its configuration will be
displayed on the screen.
### List
Print all available components:
```python
>> list
```
This command displays all available components that can be added to the
configuration, organized by type.
Print all available components of a particular type:
```python
>> list smls
```
If a type is passed as a parameter to the list command, only the available
components of that type will be listed.
## Config storage operations
The following operations can be run from the tool in order to load and save
configurations from/to files.
### Load
Load a configuration from a file:
```python
>> load filename
```
This command loads the configuration stored in the file 'filename', overriding
the configuration currently being manipulated in memory.
### Save
Save a configuration to a file:
```python
>> save filename
```
This command saves the configuration being currently handled to the file
'filename', overriding the file if it existed previously.
Save a configuration to the last file:
```python
>> save
```
If no parameter is provided, the save operation saves the current
configuration being handled to the last file loaded from or saved to.

View File

@ -1,129 +0,0 @@
# Domain Specific Language (DSL) for configuration handling
A simple DSL language, accessible from a command line tool, has been implemented
in order to manage configurations in an easier way. This section explains what
operations are available, and how to use them.
> Note: Please refer to [Monasca/Configuration](configuration.md) to understand the structure of the configuration.
## Usage
* Start Mananas DSL tool
```bash
python $MONANAS_HOME/config_dsl.py
```
After running this command, a simple empty orchestration configuration will be created,
which you can then modify running operations according to your needs.
## Config manipulation operations
You can run the following operations from the DSL command line in order to create, remove, connect, disconnect and modify components in the configuration.
###Create component
* Create a component and assign it to a variable
```bash
>> A = IPTablesSource
```
This command adds a new source of type IPTablesSource to the configuration, assigning it
its default configuration. It links the source component to variable A, and returns a unique ID.
You can either use the variable or the ID in order to reference the instance of IPTablesSource
you just created.
### Remove component
* Remove a component using its ID or variable name
```bash
>> rm A
```
This command removes the component referenced by A from the configuration.
The parameter can either be a variable or an ID associated to a component
in the configuration.
The component will only be removed if it is not connected to any other component.
### Connect components
* Connect two components in the configuration
```bash
>> A -> B
```
This command connects the component referenced by A with the component referenced
by B. Both A and B can be variables or IDs, and the connection is directional from A to B.
The connection will only be performed if the components exist and their connection is allowed.
For example, connecting a source with an ingestor is allowed, but connecting a source with a voter is not.
### Disconnect components
* Disconnect two components in the configuration
```bash
>> A !-> B
```
This command disconnects the component A from component B. Both A and B can be variables or IDs, and the connection is directional from A to B.
If the connection didn't exist, nothing will happen.
### Modify component
* Modify values of the configuration of a component
```bash
>> A.params.subparam1.subparam2 = value
```
This command modifies the value of the configuration parameter at the end of the path defined by the dot notation.
The configuration is validated before being modified, hence if the modification results in an invalid configuration,
it will not be performed.
A can either be a variable or an ID.
## Config presentation operations
You can run the following operations from the DSL command line in order to view the current configuration, sub-configurations, and available components that you can instantiate.
### Print
* Print the full configuration
```bash
>> print
```
This command displays the full configuration in json format to your screen.
* Print component type sub-configuration
```bash
>> print connections
```
If you pass a parameter to the print command that corresponds to a component type, or, in general, a first level key of the configuration, only the relevant sub-configuration that you selected will be displayed to your screen.
* Print a particular component sub-configuration
```bash
>> print A
```
If you pass a parameter to the print command that corresponds to a variable or an ID associated to a particular component, only its configuration will be displayed to your screen.
### List
* Print all available components
```bash
>> list
```
This command displays all available components that you can add to your configuration, organized by type.
* Print all available components of a particular type
```bash
>> list smls
```
If you pass the type as a parameter to the list command, only the available components of that type will be listed.
## Config storage operations
You can run the following operations from the DSL command line in order to load and save configurations from/to files.
### Load
* Load a configuration from a file
```bash
>> load filename
```
This command loads the configuration stored in the file 'filename', overriding the existing configuration you were handling before.
### Save
* Save a configuration to a file
```bash
>> save filename
```
This command saves the configuration being currently handled to the file 'filename', overriding the file if it existed before.
* Save a configuration to the last file
```bash
>> save
```
If no parameter is provided, the save operation saves the current configuration being handled to the last file you loaded from or saved to.

View File

@ -61,7 +61,7 @@ python $HOME/monanas/run.py -p $HOME/spark -c $HOME/monanas/config/config.json \
MoNanas consumes two configuration files, one for orchestrating data execution
and the other for logging. A Domain Specific Language (DSL) has been implemented in order to manipulate configurations.
Please, see [MoNanas/Configuration](configuration.md) for more details on MoNanas configuration;
and [MoNanas/Dsl](dsl.md) for more details on MoNanas DSL.
and [MoNanas/Banana](banana.md) for more details on `banana` the language to write recipes.
## Examples

View File

@ -0,0 +1,40 @@
## Banana configuration language
This module contains everything related to Banana. In each
sub-module (sub-folder) you will find a `README.md` file
that describes:
* Purpose of the module.
* The current status of the implementation.
* How testing is done.
The compiler is split in passes. Each pass performs some
transformations and / or generates more data. Only the last
step has side-effects on the Monanas instance.
Each sub-module roughly maps to one pass run by the compiler.
### Passes
The Banana compiler runs the following passes:
* `parse`, parse the input and build an [AST](./grammar/README.md).
* `typeck`, type check the input.
* `deadpathck`, remove dead paths in the connections.
* `eval`, evaluate the AST generated.
Each pass makes some assumptions about the state of
the data, and in particular that the previous passes
have run successfully. While this is made obvious by
the arguments required to run some passes, it is less
so for others.
Generally, things to remember:
* Changing the ordering of passes is more likely to
break things.
* New passes are free to modify the AST / TypeTable.
* New passes should not break invariants.
For more information on passes, have a look in their
specific `README.md` file.

View File

@ -31,6 +31,10 @@ logger = logging.getLogger(__name__)
MODULE = "module"
"""
DEPRECATED: Preferred way is to now use the Banana language instead.
"""
class MonanasDSL(object):

View File

@ -18,10 +18,10 @@ import copy
import json
import logging
from monasca_analytics.banana.cli import const as dsl_const
from monasca_analytics.banana.cli import dsl
from monasca_analytics.banana.cli import parser
from monasca_analytics.config import const as config_const
from monasca_analytics.dsl import const as dsl_const
from monasca_analytics.dsl import dsl
from monasca_analytics.dsl import parser
from monasca_analytics.exception import dsl as err
import monasca_analytics.util.common_util as cu

View File

@ -16,8 +16,7 @@
import pyparsing as p
from monasca_analytics.dsl import const
from monasca_analytics.banana.cli import const
EQUALS = p.Literal("=").suppress()
CONNECT = p.Literal("->").suppress()
@ -51,10 +50,10 @@ cmd_list = (LIST + p.Optional(LPAREN) + p.Optional(VARNAME) +
p.Optional(RPAREN))
cmd_help = (HELP + p.Optional(LPAREN) + p.Optional(RPAREN))
bnfLine = cmd_create(const.CREATE) | cmd_connect(const.CONNECT) |\
cmd_disconnect(const.DISCONNECT) | cmd_load(const.LOAD) |\
cmd_save_as(const.SAVE_AS) | cmd_save(const.SAVE) |\
cmd_remove(const.REMOVE) | cmd_modify(const.MODIFY) |\
bnfLine = cmd_create(const.CREATE) | cmd_connect(const.CONNECT) | \
cmd_disconnect(const.DISCONNECT) | cmd_load(const.LOAD) | \
cmd_save_as(const.SAVE_AS) | cmd_save(const.SAVE) | \
cmd_remove(const.REMOVE) | cmd_modify(const.MODIFY) | \
cmd_print(const.PRINT) | cmd_list(const.LIST) | cmd_help(const.HELP)
bnfComment = "#" + p.restOfLine

View File

@ -0,0 +1,22 @@
## Dead path checker
Dead path checking is about removing paths in the pipeline that
lead to nothing. For instance, if there's no source or no sink
in a path. This pass is the only one that modifies the AST.
This is the third step of the pipeline:
```
+---------------------+ +---------------------+
| | | |
---> | AST & TypeTable | ---- deadpathck ---> | AST' & TypeTable' | --->
| | | |
+---------------------+ +---------------------+
```
### Current status:
* [x] Remove branches that are dead from the list of connections.
* [x] Remove the components from the collected list of components.
* [ ] Remove statements that are dead code:
- [ ] Do not instantiate components.
- [ ] Do not compute expressions for unused variables.

View File

@ -0,0 +1,109 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import monasca_analytics.banana.deadpathck.dag as dag
import monasca_analytics.banana.emitter as emit
import monasca_analytics.banana.grammar.ast as ast
import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.exception.banana as exception
def deadpathck(banana_file, type_table, emitter=None):
    """
    Perform dead path elimination on the provided AST.
    This removes branches and components that
    are not connected to a Sink.
    :type banana_file: ast.BananaFile
    :param banana_file: The AST tree we will clean.
    :type type_table: monasca_analytics.banana.typeck.type_table.TypeTable
    :param type_table: The TypeTable of the provided AST.
    :type emitter: emit.Emitter
    :param emitter: Emitter for reporting warnings. Defaults to a fresh
        PrintEmitter per call (a default-argument instance would be a
        single shared mutable object across all calls).
    """
    if emitter is None:
        emitter = emit.PrintEmitter()
    # Check that first argument is a banana file. Mainly
    # an excuse to remove the F401 warning.
    if not isinstance(banana_file, ast.BananaFile):
        raise Exception("Expected BananaFile as first argument.")
    # Look first for all branch that are "dead"
    connections = banana_file.connections  # type: ast.Connection
    # Collect the nodes and connect them.
    dag_nodes = {}
    # Create all the nodes
    for ident in banana_file.components.keys():
        dag_nodes[ident] = dag.DagNode(type_table.get_type(ident))
    # Connect them
    for ident_from, ident_to in connections.connections:
        dag_from = dag_nodes[ident_from]
        dag_to = dag_nodes[ident_to]
        dag_from.children.append(dag_to)
        dag_to.parents.append(dag_from)
    # Start from every source and for each, check if the path is dead.
    # Visiting marks reachable nodes; reaching a Sink back-propagates
    # the "alive" flag to the node's ancestors.
    for node in dag_nodes.values():
        if isinstance(node.typec, type_util.Source):
            node.visit()
    # We can now remove all the components that are "dead"
    # from the list of connections.
    # list(...items()) works on both Python 2 and Python 3 (iteritems()
    # is Python-2-only), and the list comprehension below is evaluated
    # eagerly — a bare filter() would be lazy on Python 3 and its
    # late-binding capture of `ident` would make stacked filters wrong.
    for ident, node in list(dag_nodes.items()):
        if not node.is_alive():
            emitter.emit_warning(
                ident.span,
                "Dead code found, this component is not in a path "
                "starting from a 'Source' and ending with a 'Sink'."
            )
            banana_file.components.pop(ident)
            connections.connections = [
                edge for edge in connections.connections
                if edge[0] != ident and edge[1] != ident
            ]
    # TODO(Joan): We could also remove them from the statements.
    # TODO(Joan): But for this we need a dependency graph between
    # TODO(Joan): statements to make sure we don't break the code.
def contains_at_least_one_path_to_a_sink(banana_file, type_table):
    """
    Check that there's at least one path to a sink in the list
    of components.
    To run this pass, you need to make sure you
    have eliminated all dead paths first (see `deadpathck`), so any
    surviving Source/Sink pair is known to be connected.
    :type banana_file: ast.BananaFile
    :param banana_file: The AST to check.
    :type type_table: monasca_analytics.banana.typeck.type_table.TypeTable
    :param type_table: The TypeTable of the provided AST.
    :raise: Raise an exception if there's no Sink or no Source.
    """
    def is_sink(comp):
        # `comp` is a component identifier; its role comes from its type.
        type_comp = type_table.get_type(comp)
        return isinstance(type_comp, type_util.Sink)

    def is_src(comp):
        type_comp = type_table.get_type(comp)
        return isinstance(type_comp, type_util.Source)

    comp_vars = banana_file.components.keys()
    # any() short-circuits and works on both Python 2 and Python 3;
    # the previous len(filter(...)) raised a TypeError on Python 3,
    # where filter() returns a lazy object without a length.
    if not any(is_sink(comp) for comp in comp_vars):
        raise exception.BananaNoFullPath("Sink")
    if not any(is_src(comp) for comp in comp_vars):
        raise exception.BananaNoFullPath("Source")

View File

@ -0,0 +1,56 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import monasca_analytics.banana.typeck.type_util as type_util
class DagNode(object):
    """
    A node of the component graph built for dead-path elimination.

    Each node wraps the type of one pipeline component and records
    two flags: whether a traversal starting from a Source reached it,
    and whether a Sink was found downstream of it.
    """

    def __init__(self, typec):
        """
        Create a DAGNode.
        :param typec: The type of the node.
        """
        # Edges to neighbouring components in the pipeline.
        self.parents = []
        self.children = []
        self.typec = typec
        # True once this node has been reached by a visit().
        self._visited = False
        # True once a Sink was seen downstream of this node.
        self._seen_sink = False

    def visit(self):
        """
        Walk downward from this node, marking everything reached.
        Hitting a Sink flags this node's ancestors via visit_parents().
        """
        if self._visited:
            return
        self._visited = True
        if isinstance(self.typec, type_util.Sink):
            # Reaching a sink makes the whole path up to here alive.
            self.visit_parents()
            return
        for successor in self.children:
            successor.visit()

    def visit_parents(self):
        """
        Propagate the "a Sink was seen downstream" flag upward.
        """
        if self._seen_sink:
            return
        self._seen_sink = True
        for ancestor in self.parents:
            ancestor.visit_parents()

    def is_alive(self):
        """
        :returns: True when the node was both reached from a Source
            and has a Sink somewhere downstream.
        """
        return self._visited and self._seen_sink

View File

@ -0,0 +1,90 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class Emitter(object):
    """
    Abstract base class for reporting errors and warnings.

    Typically messages are collected and then sent over the network
    as an HTTP response, but for tests and debugging a `PrintEmitter`
    can be used instead.
    """

    @abc.abstractmethod
    def emit_warning(self, span, message):
        """
        Report a warning-level message.
        :type span: monasca_analytics.banana.grammar.base_ast.Span
        :param span: Span associated with the message.
        :type message: str
        :param message: Text of the warning.
        """
        pass

    @abc.abstractmethod
    def emit_error(self, span, message):
        """
        Report an error-level message.
        :type span: monasca_analytics.banana.grammar.base_ast.Span
        :param span: Span associated with the message.
        :type message: str
        :param message: Text of the error.
        """
        pass
class PrintEmitter(Emitter):
    """
    Emitter that writes every message to the console, one severity
    line followed by one text line.
    """

    def emit_warning(self, span, message):
        # Location first, then the warning text.
        lineno = span.get_lineno()
        print("WARNING at line:{}".format(lineno))
        print("WARNING: {}".format(message))

    def emit_error(self, span, message):
        # Same two-line layout as warnings, with the ERROR prefix.
        lineno = span.get_lineno()
        print("ERROR at line:{}".format(lineno))
        print("ERROR: {}".format(message))
class JsonEmitter(Emitter):
    """
    Emitter implementation that accumulates warnings and errors
    into a JSON-serializable dictionary (`self.result`).
    """

    def __init__(self):
        # Diagnostics are bucketed by severity.
        self.result = {
            "errors": [],
            "warnings": [],
        }

    def _record(self, bucket, span, message):
        # Normalize one diagnostic into the wire format and store it.
        self.result[bucket].append({
            "line": span.get_lineno(),
            "col": 0,
            "byteRange": [span.lo, span.hi],
            "message": message
        })

    def emit_error(self, span, message):
        self._record("errors", span, message)

    def emit_warning(self, span, message):
        self._record("warnings", span, message)

View File

@ -0,0 +1,50 @@
## Interpreter / Evaluator
This folder contains everything related to the evaluation
of banana files.
This pass makes some assumptions: it is valid to create
all the components, and connecting them won't throw any errors.
Some components might need to be created in order to
check if they are valid. For instance, when a DNS lookup is
involved. In such cases, an error will be thrown during
the interpretation. However, the general intention is to move
the checks out of the evaluation as much as possible. We want to
avoid at all costs a half-working pipeline, as it could have
side-effects on external data sources by corrupting them or
feeding them with incorrect data.
The execution environment (e.g Spark) might also reject the
pipeline during an evaluation for some reason. However, this is
less likely to happen as the `deadpathck` pass removes
components and paths that would lead to errors.
This is the last step of the pipeline:
```
+---------------------+
| |
---> | AST & TypeTable | ---- interpret ---> Done
| |
+---------------------+
```
### Current status
* [x] Evaluate expressions
* [x] Create components
* [x] Connect components
* [x] Restart the pipeline
### Tests
All tests are located in `test/banana/eval`. We only try
to evaluate valid files, so for this pass there's only a
`should_pass` directory.
#### Available instructions
* `# LHS_EQ <string-version-of-the-value>`: This instruction
compares the evaluation of the left hand side of the previous
expression with the provided string. If they are not equal,
the test will fail.

View File

@ -0,0 +1,262 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import operator
import monasca_analytics.banana.eval.ctx as ctx
import monasca_analytics.banana.eval.old_style as into
import monasca_analytics.banana.grammar.ast as ast
import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.config.connection as connection
import monasca_analytics.config.const as conf_const
import monasca_analytics.exception.banana as exception
import monasca_analytics.util.common_util as introspect
logger = logging.getLogger(__name__)
def eval_ast(ast_root, type_table, driver):
    """
    Evaluate the provided AST by instantiating
    the appropriate components and connecting them
    together using the Driver interface.

    :type ast_root: ast.BananaFile
    :param ast_root: AST to evaluate.
    :type type_table: monasca_analytics.banana.typeck.type_table.TypeTable
    :param type_table: the TypeTable (used to create configurations)
    :type driver: monasca_analytics.spark.driver.DriverExecutor
    :param driver: Driver that will manage the created
                   components and connect them together.
    """
    logger.debug("Creating the config dictionary from the AST...")
    base_config = conf_const.get_default_base_config()
    try:
        logger.debug("Creating components according to banana config...")
        new_components = eval_create_components(
            ast_root.statements, type_table)
        convert_connections(ast_root.connections, base_config)
        logger.debug("Done creating components. Creating link data...")
        # The link-building code still consumes the old-style
        # configuration layout, so convert before connecting.
        old_style_components = into.into_old_conf_dict(new_components)
        links = connection.connect_components(
            old_style_components, base_config)
        logger.debug("Done connecting components. Successful instantiation")
    except Exception as ex:
        # Abort on any failure: the running pipeline is never swapped
        # for a partially-built set of components.
        logger.error("Failed to instantiate components")
        logger.error("Reason : " + str(ex))
        return
    # Everything was built successfully: restart the pipeline
    # with the new links.
    logger.debug("Stop pipeline")
    driver.stop_pipeline()
    logger.debug("Set new links")
    driver.set_links(links)
    logger.debug("Start pipeline")
    driver.start_pipeline()
def convert_connections(connections, output_config):
    """
    Augment the output_config object with the list of
    connections.

    The connections are written under the `conf_const.CONNECTIONS`
    key, using the cache (node name -> list of destination names)
    built by the Connection AST node.

    :type connections: ast.Connection
    :param connections: The list of connections.
    :type output_config: dict
    :param output_config: Config where the links will be written.
    """
    output_config[conf_const.CONNECTIONS] = connections.connections_cache
def eval_create_components(statements, type_table):
    """
    Evaluate all statements and return the components they created.

    :type statements: list[(ast.ASTNode, ast.ASTNode)]
    :param statements: The AST to process
    :type type_table: monasca_analytics.banana.typeck.type_table.TypeTable
    :param type_table: the type table.
    :rtype: dict[str, Component]
    :return: Returns the component keyed by name.
    """
    evaluation_context = ctx.EvaluationContext()
    eval_statements_generic(statements, type_table, evaluation_context)
    return evaluation_context.get_components()
def eval_statements_generic(
        statements,
        type_table,
        context,
        cb=lambda *a, **k: None):
    """
    Eval the list of statements, and call the cb after evaluating
    each statement providing it with the type of the value, the
    left hand side ast node, and the computed value.

    :type statements: list[(ast.ASTNode, ast.ASTNode)]
    :param statements: The AST to process
    :type type_table: monasca_analytics.banana.typeck.type_table.TypeTable
    :param type_table: the type table.
    :type context: ctx.EvaluationContext
    :param context: evaluation context that will collect
                    all intermediary results.
    :type cb: (type_util.IsType, ast.ASTNode, object) -> None
    :param cb: Callback called after each statement evaluation.
    """
    # The type table indexes statements starting at 1.
    for position, (lhs, rhs) in enumerate(statements, 1):
        expected_type = type_table.get_type(lhs, position)
        # Evaluate the right hand side against the expected type.
        value = eval_rhs(context, rhs, expected_type)
        # Let the caller observe each evaluated statement.
        cb(expected_type, lhs, value)
        # Store the result in case it is referenced later.
        context.set_variable(lhs, value)
def eval_rhs(context, ast_node, expected_type):
    """
    Eval the right hand side node.

    :type context: ctx.EvaluationContext
    :param context: Evaluation context.
    :type ast_node: ast.ASTNode
    :param ast_node: the node to evaluate.
    :type expected_type: type_util.IsType
    :param expected_type: The expected type of this computation.
    :return: Returns the result of this evaluation.
    :raises Exception: for AST node types with no evaluation rule.
    """
    if isinstance(ast_node, ast.StringLit):
        return ast_node.inner_val()
    elif isinstance(ast_node, ast.Ident):
        return context.get_variable(ast_node.inner_val())
    elif isinstance(ast_node, ast.JsonObj):
        return eval_object(context, ast_node, expected_type)
    elif isinstance(ast_node, ast.Number):
        return ast_node.val
    elif isinstance(ast_node, ast.DotPath):
        # Walk the property path starting from the named variable.
        var_name = ast_node.varname.inner_val()
        path = [prop.inner_val() for prop in ast_node.properties]
        return context.get_prop_of_variable(var_name, path)
    elif isinstance(ast_node, ast.Expr):
        return eval_expr(context, ast_node, expected_type)
    raise Exception("Unhandled ast value type {}!!".format(ast_node))
def eval_comp(context, comp, expected_type):
    """
    Instantiate the given component, computing
    the required config.

    :type context: ctx.EvaluationContext
    :param context: Evaluation context.
    :type comp: ast.Component
    :param comp: the node to evaluate.
    :type expected_type: type_util.IsType
    :param expected_type: The expected type of this computation.
    :return: Returns a (component class, config) pair; actual
             instantiation is delayed until the assignment happens.
    """
    arguments = {}
    # Evaluate each constructor argument against its expected type.
    for arg in comp.args:
        # Wrap the arg name into a DotPath so it can index expected_type.
        arg_path = ast.DotPath(arg.arg_name.span, arg.arg_name, [])
        arguments[arg.arg_name.inner_val()] = eval_rhs(
            context, arg.value, expected_type[arg_path])
    # Resolve the component class by name.
    component_type = introspect.get_class_by_name(comp.type_name.val)
    # Start from the component's defaults and overlay the arguments.
    conf = component_type.get_default_config()
    conf.update(arguments)
    return component_type, conf
def eval_object(context, obj, expected_type):
    """
    Evaluate the provided JSON-like object literal.

    :type context: ctx.EvaluationContext
    :param context: Evaluation context.
    :type obj: ast.JsonObj
    :param obj: The expression to evaluate
    :type expected_type: type_util.IsType
    :param expected_type: The expected type of this computation.
    :return: Returns the computed value
    """
    # Start from the type's default value and fill in each property.
    result = expected_type.default_value()
    for prop_name, prop_value in obj.props.iteritems():
        prop_type = expected_type[prop_name]
        ctx.set_property(
            result, prop_name, eval_rhs(context, prop_value, prop_type))
    return result
def eval_expr(context, expr, expected_type):
    """
    Eval the provided expression.

    :type context: ctx.EvaluationContext
    :param context: Evaluation context.
    :type expr: ast.Expr
    :param expr: The expression to evaluate
    :type expected_type: type_util.IsType
    :param expected_type: The expected type of this computation.
    :rtype: str | float
    :return: Returns the computed value
    :raises exception.BananaEvalBug: for non Number/String types.
    """
    tree = expr.expr_tree
    # Trivial expression: a single operand, no operators.
    if len(tree) == 1:
        return eval_rhs(context, tree[0], expected_type)
    if isinstance(expected_type, type_util.Number):
        accumulator = 0
        coerce_operand = float
    elif isinstance(expected_type, type_util.String):
        accumulator = ""
        coerce_operand = str
    else:
        raise exception.BananaEvalBug(
            "Expected type for an expression can only be "
            "'TypeNumber' or 'TypeString', got '{}'".format(
                str(expected_type))
        )
    # Fold the flat token stream left to right: operator tokens set
    # the pending operation, operand tokens are evaluated, coerced
    # and folded into the accumulator.
    pending_op = operator.add
    for node in tree:
        if isinstance(node, basestring) and node in ['+', '-', '*', '/']:
            pending_op = get_op_func(node)
        else:
            operand = coerce_operand(eval_rhs(context, node, expected_type))
            accumulator = pending_op(accumulator, operand)
    return accumulator
def get_op_func(op_str):
    """
    Return the binary function implementing the given operator symbol.

    :type op_str: str
    :param op_str: one of '+', '-', '*', '/'.
    :return: the matching function from the `operator` module.
    :raises exception.BananaEvalBug: for any other symbol.
    """
    # The operator attribute is looked up lazily, only for the symbol
    # that actually matched.
    for symbol, func_name in (('+', 'add'), ('-', 'sub'),
                              ('*', 'mul'), ('/', 'div')):
        if op_str == symbol:
            return getattr(operator, func_name)
    raise exception.BananaEvalBug(
        "Unknown operator '{}'".format(op_str)
    )

View File

@ -0,0 +1,119 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import monasca_analytics.banana.grammar.ast as ast
import monasca_analytics.exception.banana as exception
class EvaluationContext(object):
    """
    Holds all intermediary results produced while evaluating an AST:
    plain variables on one side, instantiated components on the other.
    """

    def __init__(self):
        """
        Create an empty evaluation context.
        """
        # Variable name -> evaluated value (or component config).
        self._variables = {}
        # Component name -> instantiated component object.
        self._components = {}

    def set_variable(self, name, value):
        """
        Bind `value` to the variable designated by `name`.

        A (component class, config) pair is instantiated immediately
        and recorded as a component; any other value is stored through
        `set_property`.
        """
        if isinstance(value, tuple) and len(value) == 2:
            component_class, component_config = value
            component_name = name.varname.inner_val()
            self._components[component_name] = component_class(
                component_name,
                component_config
            )
            self._variables[component_name] = component_config
        else:
            if not set_property(self._variables, name, value):
                raise exception.BananaEvalBug(
                    "set_variable can only be used with DotPath or Ident."
                )

    def get_components(self):
        """Return the components instantiated so far, keyed by name."""
        return self._components

    def get_variable(self, name):
        """Return the value bound to the given variable name."""
        return self._variables[name]

    def get_prop_of_variable(self, name, prop):
        """Walk the `prop` path into variable `name` and return the value."""
        current = self._variables[name]
        for key in prop:
            current = current[key]
        return current
def set_property(root_object, name, value):
    """
    Set the property `name` of `root_object` to `value`.

    :type root_object: dict
    :param root_object: The root object to modify.
    :type name: ast.DotPath | ast.Ident | ast.StringLit
    :param name: Name (or dotted path) of the property to set.
    :param value: Value to assign.
    :return: Returns True if the assignment was performed.
    """
    if isinstance(name, (ast.Ident, ast.StringLit)):
        root_object[name.inner_val()] = value
        return True
    if isinstance(name, ast.DotPath):
        # A dotted path may require intermediate dictionaries.
        _create_as_many_object_as_needed(root_object, name, value)
        return True
    return False
def _create_as_many_object_as_needed(root_object, dot_path, value):
"""
Create as many object as needed to be able to access the
nested property.
:type root_object: dict
:param root_object: The root object
:type dot_path: ast.DotPath
:param dot_path: Dot Path to use.
:type value: object
:param value: Any value to set.
"""
name = dot_path.varname.inner_val()
if len(dot_path.properties) == 0:
root_object[name] = value
return
if name in root_object:
variable = root_object[name]
else:
variable = {}
current_var = variable
last_index = len(dot_path.properties) - 1
for index, subpath in enumerate(dot_path.properties):
subpath_name = subpath.inner_val()
if index != last_index:
if subpath_name in current_var:
current_var = current_var[subpath_name]
else:
new_object = {}
current_var[subpath_name] = new_object
current_var = new_object
else:
current_var[subpath_name] = value
root_object[name] = variable

View File

@ -0,0 +1,56 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import monasca_analytics.config.const as conf_const
import monasca_analytics.ingestor.base as ingestor
import monasca_analytics.ldp.base as ldp
import monasca_analytics.sink.base as sink
import monasca_analytics.sml.base as sml
import monasca_analytics.source.base as source
import monasca_analytics.voter.base as voter
def into_old_conf_dict(components):
    """
    Convert the provided dict of components
    keyed by ids into a dict keyed by component
    type. This is the data structure used to do
    the validation of JSON configuration (the old format).

    :type components: dict[str, object]
    :param components: The dictionary of components.
    :return: Returns the old conf dictionary.
    """
    def select_instances_of(base_class):
        # Keep only the components that are instances of base_class.
        return dict(filter(lambda item: isinstance(item[1], base_class),
                           components.iteritems()))

    return {
        conf_const.INGESTORS: select_instances_of(ingestor.BaseIngestor),
        conf_const.VOTERS: select_instances_of(voter.BaseVoter),
        conf_const.SINKS: select_instances_of(sink.BaseSink),
        conf_const.LDPS: select_instances_of(ldp.BaseLDP),
        conf_const.SOURCES: select_instances_of(source.BaseSource),
        conf_const.SMLS: select_instances_of(sml.BaseSML),
    }

View File

@ -0,0 +1,63 @@
## Grammar
This folder is all about the definition of the `banana` grammar.
The grammar purpose is to convert the input, text, into an
abstract syntax tree (AST).
This is the first step of the pipeline:
```
+--------+ +---------+
| | | |
| Text | --- grammar ---> | AST | --->
| | | |
+--------+ +---------+
```
The module `ast.py` contains all the possible `ASTNode` which
itself is defined in `base_ast.py`.
### Current status
* [x] Parsing connections such as `a -> b`, `a -> [b, c]`,
`[a, b] -> [c, d]`
* [x] Parsing numbers
* [x] Parsing string literals
* [ ] Parsing booleans
* [x] Parsing assignments where the left hand side can be a property
or an identifier.
* [x] Parsing assignments where the right hand side is a number, a
string literal, a property or an identifier.
* [x] Parsing components arguments using a constructor-like syntax.
* [ ] Parsing ingestors generators (for JSON dialect)
* [ ] Parsing imports such as `from ldp.monasca import *`
* [ ] Parsing disconnections such as `a !-> b` *(requires imports)*
### Tests
All tests regarding the grammar (i.e. the syntax and the way
the AST is built) are defined in `test/banana/grammar`.
This folder looks like this:
```
test/banana/grammar
├── should_fail
│   ├── ...
│   └── file.banana
├── should_pass
│   ├── ...
│   └── file.banana
└── test_config.py
```
The `test_config` generates one test for each file in the
`should_pass` and `should_fail` directories.
Test can assert various things using instructions below.
#### Available instructions
* `# RAISE <exception-name>`: Check that `exception-name` is raised.
* `# STMT_EQ <ast-of-statements>` Check the AST of statements.
* `# AST_EQ <full-ast>` Check the full AST.
* `# CONN_EQ <ast-of-connections>` Check the AST of connections.

View File

@ -0,0 +1,674 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import monasca_analytics.banana.emitter as emit
import monasca_analytics.banana.grammar.base_ast as base
import monasca_analytics.exception.banana as exception
import monasca_analytics.util.string_util as strut
import pyparsing as p
ASTNode = base.ASTNode
Span = base.Span
class BananaFile(object):
    """
    Top-level result of parsing a banana file: the components,
    the statements and the connections collected by the grammar.
    """

    def __init__(self, emitter=emit.PrintEmitter()):
        # NOTE(review): the default emitter is a single instance created
        # at import time and shared by every BananaFile that does not
        # pass its own emitter — confirm this sharing is intended.
        self.imports = []
        self._emitter = emitter
        # Components is a dict where keys
        # are the name of the var and
        # values are of type Component
        self.components = dict()
        # Statement are component
        # creation or variable creation
        self.statements = []
        self.connections = None

    def add_component_ctor(self, dot_path, component):
        """
        Add a component if the component
        does not already exists. If it does exists,
        then this raises an Error.

        :type dot_path: DotPath
        :param dot_path: Name of the variable or property path
        :type component: Component
        :param component: AST part that will contains all properties
        """
        # Components may only be assigned to plain identifiers,
        # never to property paths such as `a.b`.
        if not isinstance(dot_path, DotPath):
            raise exception.BananaAssignmentError(
                dot_path.span, component.span)
        if not len(dot_path.properties) == 0:
            raise exception.BananaAssignmentError(
                dot_path.span, component.span)
        if dot_path.varname in self.components:
            # Re-declaring a component is a no-op: warn about both the
            # ignored statement and the earlier colliding definition.
            # (Relies on Python 2 `filter` returning a list; the key
            # lookup works across Ident/StringLit via their __eq__.)
            other_dot_path = filter(
                lambda x: x == dot_path,
                self.components.keys())[0]
            no_effect_str = dot_path.span.str_from_to(component.span)
            collision_str = other_dot_path.span.str_from_to(
                self.components[other_dot_path].span)
            self._emitter.emit_warning(
                dot_path.span,
                "Statement has no effect: '{}'".format(no_effect_str)
            )
            self._emitter.emit_warning(
                other_dot_path.span,
                "It collides with: '{}'".format(collision_str)
            )
        else:
            self.components[dot_path.varname] = component
            self.statements.append((dot_path, component))

    def add_assignment(self, dot_path, ast_value):
        """
        Add an assignment to a property or a variable.
        We don't check at this point whether or not the variable
        has collision with a component.
        This will be done during the name resolution pass.

        :type dot_path: DotPath
        :param dot_path: Name of the variable or property
        :type ast_value: Ident | JsonObj | StringLit | Number | DotPath
        :param ast_value: Ast node this variable is assigned to.
        """
        if not isinstance(dot_path, DotPath):
            raise exception.BananaAssignmentError(
                dot_path.span,
                ast_value.span
            )
        self.statements.append((dot_path, ast_value))

    def add_connections(self, connections):
        """
        Add a new set of connections between components
        This function performs the same checks as the one
        performs when components are connected. It warns
        on redundant connections.

        :type connections: Connection
        :param connections: AST node that contains the collected connections.
        """
        if self.connections is None:
            self.connections = connections
        else:
            # Subsequent connection statements are folded into the first
            # one; their inputs/outputs are no longer needed afterwards.
            self.connections.merge_and_reset_inputs_outputs(
                connections,
                self._emitter
            )

    def statements_to_str(self):
        # Render all statements as "{ lhs = rhs, ... }".
        return "{ " + ', '.join(
            map(lambda x: '{} = {}'.format(x[0], x[1]), self.statements)
        ) + ' }'

    def __str__(self):
        res = "BananaFile { "
        res += 'components: { '
        res += strut.dict_to_str(self.components)
        res += ' }, '
        res += 'statements: ' + self.statements_to_str()
        res += 'connections: { '
        res += str(self.connections)
        res += " } }"
        return res
def make_span(s, l, t):
    """
    Build a Span from a pyparsing parse-action callback.

    :param s: Original string being parsed.
    :param l: Location of the match.
    :param t: Matched tokens.
    :rtype: Span
    :return: Span covering the first matched token.
    """
    if len(t) == 0:
        # Nothing matched: fall back to a fixed-width span.
        return Span(s, l, 2)
    first_tok = t[0]
    if isinstance(first_tok, ASTNode):
        # AST nodes already know where they end.
        return Span(s, l, first_tok.span.hi)
    return Span(s, l, l + len(first_tok))
class Number(ASTNode):
    """AST leaf for a numeric literal (stored as a float)."""

    def __init__(self, span, val):
        """
        Construct a Number ast node.

        :type span: Span
        :param span: Span for this number
        :type val: str
        :param val: Value for this number
        """
        super(Number, self).__init__(span)
        self.val = float(val)

    def into_unmodified_str(self):
        # NOTE(review): this is str(float(original)), not the original
        # source text (e.g. "1" becomes "1.0").
        return str(self.val)

    def __str__(self):
        return "Number< {} >".format(self.val)
class StringLit(ASTNode):
    """AST leaf for a quoted string literal (quotes kept in `val`)."""

    def __init__(self, span, val):
        """
        Construct a StringLit ast node.

        :type span: Span
        :param span: Span for this string
        :type val: str
        :param val: Value for this string, including the quotes.
        """
        super(StringLit, self).__init__(span)
        self.val = val

    def inner_val(self):
        """Return the string content without the surrounding quotes."""
        return self.val.strip()[1:-1]

    def into_unmodified_str(self):
        return self.val

    def __hash__(self):
        # Hash on the unquoted content so a StringLit and an Ident with
        # the same name collide as dictionary keys.
        return hash(self.inner_val())

    def __eq__(self, other):
        if not isinstance(other, (StringLit, Ident)):
            return False
        return self.inner_val() == other.inner_val()

    def __str__(self):
        return "StringLit< {} >".format(self.val)
class Ident(ASTNode):
    """AST leaf for a bare identifier."""

    def __init__(self, span, val):
        """
        Construct an Ident ast node.

        :type span: Span
        :param span: Span for this identifier
        :type val: str
        :param val: Value of this identifier
        """
        super(Ident, self).__init__(span)
        self.val = val

    def inner_val(self):
        return self.val

    def into_unmodified_str(self):
        return self.val

    def __hash__(self):
        return hash(self.val)

    def __eq__(self, other):
        # An Ident also equals a StringLit carrying the same
        # (unquoted) content — both hash on that content.
        if not isinstance(other, (StringLit, Ident)):
            return False
        return self.val == other.inner_val()

    def __str__(self):
        return "Ident< {} >".format(self.val)
class DotPath(ASTNode):
    """AST node for a dotted property path such as `a.b.c`."""

    def __init__(self, span, varname, properties):
        """
        :type span: Span
        :param span: Span for this dotpath.
        :type varname: Ident | StringLit
        :param varname: Name of the variable being changed.
        :type properties: list[StringLit | Ident]
        :param properties: Properties being accessed.
        """
        super(DotPath, self).__init__(span)
        self.varname = varname
        self.properties = properties

    def next_dot_path(self):
        """
        Assuming the properties length is more than zero,
        return the dot path with the varname dropped: for `a.b.c`
        the returned dot path is `b.c`.

        :rtype: DotPath
        :return: Returns the next dot path.
        """
        offset = len(self.varname.val)
        return DotPath(
            self.span.new_with_offset(offset),
            self.properties[0],
            self.properties[1:]
        )

    def into_unmodified_str(self):
        parts = [self.varname.into_unmodified_str()]
        parts.extend(prop.into_unmodified_str() for prop in self.properties)
        return '.'.join(parts)

    def __str__(self):
        parts = [str(self.varname)]
        parts.extend(str(prop) for prop in self.properties)
        return 'DotPath< {} >'.format('.'.join(parts))

    def __key(self):
        # Canonical form: the unquoted dotted string, so `a."b"` and
        # `a.b` compare equal.
        return self.into_unmodified_str().replace('"', '')

    def __eq__(self, other):
        return self.__key() == other.__key()

    def __hash__(self):
        return hash(self.__key())
class Expr(ASTNode):
    """AST node holding a (possibly nested) expression tree."""

    def __init__(self, span, expr_tree):
        """
        Construct an expression.

        :type span: Span
        :param span: Span for the expression.
        :type expr_tree: p.ParseResults
        :param expr_tree: The tree generated by pyparsing.infixNotation.
        """
        super(Expr, self).__init__(span)
        # The tree is stored as-is: the typechecker verifies that the
        # expression can evaluate, and the interpreter computes the
        # final result.
        if isinstance(expr_tree, p.ParseResults):
            expr_tree = expr_tree.asList()
        if isinstance(expr_tree, list):
            # Wrap nested lists into sub-expressions, in place.
            for index, element in enumerate(expr_tree):
                if isinstance(element, list):
                    expr_tree[index] = Expr(span, element)
            self.expr_tree = expr_tree
        else:
            self.expr_tree = [expr_tree]

    def into_unmodified_str(self):
        # TODO(Joan): reconstruct the original expression
        return 'expression'

    def __str__(self):
        return "Expr< {} >".format(strut.array_to_str(self.expr_tree))
class JsonObj(ASTNode):
    """AST node for a JSON-like object literal."""

    def __init__(self, span, tokens):
        super(JsonObj, self).__init__(span)
        self.props = {}
        # Tokens alternate between property names and values:
        # `pending_key` holds a name until its value shows up.
        pending_key = None
        if len(tokens) > 0:
            for toks in tokens:
                for tok in toks:
                    if isinstance(tok, (DotPath, StringLit)):
                        # Either a key (if none is pending) or a value.
                        if pending_key is None:
                            pending_key = tok
                        else:
                            self._set_new_prop(pending_key, tok)
                            pending_key = None
                    elif isinstance(tok, list):
                        # A list can only ever be a value.
                        if pending_key is None:
                            raise p.ParseFatalException(
                                "Bug Found in JsonObj!"
                            )
                        self._set_new_prop(
                            pending_key,
                            JsonObj.dictify_array(tok)
                        )
                        pending_key = None
                    else:
                        # Any other token is also a value.
                        if pending_key is None:
                            raise p.ParseFatalException(
                                "Bug Found in JsonObj!"
                            )
                        self._set_new_prop(pending_key, tok)
                        pending_key = None

    def _set_new_prop(self, prop, token):
        # A duplicate key would silently shadow an earlier value.
        if prop in self.props:
            raise exception.BananaJsonObjShadowingError(self.span, prop.span)
        else:
            self.props[prop] = token

    def into_unmodified_str(self):
        # TODO(Joan): improve this for error reporting
        return str(self.props)

    def __str__(self):
        return "JsonObj< {} >".format(strut.dict_to_str(self.props))

    @staticmethod
    def dictify_array(tok):
        """Recursively convert nested token lists into plain lists."""
        return [
            JsonObj.dictify_array(el) if isinstance(el, list) else el
            for el in tok
        ]
def into_connection(ast_node):
    """
    Convert an ast node into a Connection node.

    :type ast_node: Connection | Ident
    :param ast_node: The ast node to convert.
    :rtype: Connection
    :return: Returns a Connection node
    """
    if isinstance(ast_node, Connection):
        return ast_node
    if isinstance(ast_node, Ident):
        # A lone identifier acts as both input and output.
        return Connection(
            ast_node.span,
            [ast_node],
            [ast_node]
        )
    raise p.ParseFatalException("Bug found!")
class Connection(ASTNode):
    """
    AST node aggregating the directed edges between components,
    together with the current inputs/outputs of the aggregate.
    """

    def __init__(self, span, inputs=None, outputs=None, connections=None):
        """
        Create a connection object.

        :type span: Span
        :param span: Span for this connection
        :type inputs: list[Ident]
        :param inputs: Input ast nodes of the connection
        :type outputs: list[Ident]
        :param outputs: Outputs nodes
        :type connections: list[(Ident, Ident)]
        :param connections: The list of connections aggregated so far.
        """
        super(Connection, self).__init__(span)
        if inputs is None:
            inputs = []
        if outputs is None:
            outputs = []
        if connections is None:
            connections = []
        self.inputs = inputs
        self.outputs = outputs
        self.connections = connections
        # Maps a node name to the list of names it points to.
        # Kept in sync with self.connections for fast duplicate checks.
        self.connections_cache = {}
        self._build_connection_cache()

    def connect_to(self, other_con, emitter):
        """
        Connect this connection to the other one.
        After this function has been executed, the other_con
        object can be dropped.

        :type other_con: Connection
        :param other_con: Other connection to connect to.
        :type emitter: emit.Emitter
        :param emitter: Emitter.
        """
        old_outputs = self.outputs
        self.outputs = other_con.outputs
        # Create an edge from each of our old outputs to each of the
        # other connection's inputs.
        for old_output in old_outputs:
            for other_input in other_con.inputs:
                self._check_and_connect(old_output, other_input, emitter)
        # Preserve the edges other_con had already collected.
        self._merge_connections(other_con, emitter)

    def merge_all(self, tokens, emitter):
        """
        Merge all the tokens with this class.

        :type tokens: list[list[Connection | Ident]]
        :param tokens: List of list of tokens
        :type emitter: emit.Emitter
        :param emitter: Emitter to report errors
        """
        if len(tokens) == 1:
            if len(tokens[0]) > 0:
                for tok in tokens[0]:
                    other_con = into_connection(tok)
                    self.merge_with(other_con, emitter)

    def merge_and_reset_inputs_outputs(self, other_con, emitter):
        """
        Merge this connection with other_con and reset inputs / outputs
        as they're no longer necessary.

        :type other_con: Connection
        :param other_con: the other connection we are gonna merge with.
        :type emitter: emit.Emitter
        :param emitter: Emitter to report errors
        """
        self.inputs = []
        self.outputs = []
        self._merge_connections(other_con, emitter)

    def merge_with(self, other_con, emitter):
        """
        Merge the provided connection with this one.

        :type other_con: Connection
        :param other_con: Connection to merge with self.
        :type emitter: emit.Emitter
        :param emitter: Emitter to report errors
        """
        def extend(into, iterable, what):
            # Append each element unless a node with the same name is
            # already present, in which case only a warning is emitted.
            for other_thing in iterable:
                if any(x.val == other_thing.val for x in into):
                    emitter.emit_warning(
                        other_thing.span,
                        "{} {} already present".format(
                            what, other_thing.val
                        )
                    )
                else:
                    into.append(other_thing)

        extend(self.inputs, other_con.inputs, 'Input')
        extend(self.outputs, other_con.outputs, 'Output')
        self._merge_connections(other_con, emitter)

    def _merge_connections(self, other_con, emitter):
        """
        Merge only the connections field from other_con into self.

        :type other_con: Connection
        :param other_con: Connection to merge with self.
        :type emitter: emit.Emitter
        :param emitter: Emitter to report errors
        """
        for ident_from, ident_to in other_con.connections:
            self._check_and_connect(ident_from, ident_to, emitter)

    def _check_and_connect(self, ident_from, ident_to, emitter):
        """
        Check if the connection does not already exist and if it does
        not, add it to the list of connections. Otherwise report a
        warning and do nothing.

        :type ident_from: Ident
        :param ident_from: The 'From' node of the directed edge.
        :type ident_to: Ident
        :param ident_to: The 'To' node of the directed edge we are creating.
        :type emitter: emit.Emitter
        :param emitter: Emitter to report errors.
        """
        if ident_from.val in self.connections_cache:
            if ident_to.val in self.connections_cache[ident_from.val]:
                emitter.emit_warning(
                    ident_to.span,
                    "Connection from '{}' to '{}'"
                    " is already present"
                    .format(ident_from.val, ident_to.val)
                )
                return
            self.connections_cache[ident_from.val].append(ident_to.val)
        else:
            self.connections_cache[ident_from.val] = [ident_to.val]
        self.connections.append((ident_from, ident_to))

    def _build_connection_cache(self):
        """
        Build a cache of connections keyed by where they start from.

        Raises a ParseFatalException when a duplicate edge is found.
        """
        for ident_from, ident_to in self.connections:
            if ident_from.val not in self.connections_cache:
                self.connections_cache[ident_from.val] = []
            if ident_to.val not in self.connections_cache:
                self.connections_cache[ident_to.val] = []
            self.connections_cache[ident_from.val].append(ident_to.val)
        # Sanity check: no duplicate edges.
        # Bug fix: iterating a dict directly yields only its KEYS, so
        # the previous `for _, vals in self.connections_cache` tried to
        # tuple-unpack each key string — raising ValueError for any
        # name whose length != 2 and checking garbage otherwise.
        for vals in self.connections_cache.values():
            if len(set(vals)) != len(vals):
                raise p.ParseFatalException("Bug found in Connection!!")

    def into_unmodified_str(self):
        # TODO(Joan): improve this
        return "connection"

    def __str__(self):
        # Bug fix: the previous `map(lambda (x, y): ...)` relied on
        # Python-2-only tuple parameter unpacking; the comprehension
        # below renders the same list of string pairs.
        edges = [(str(ident_from), str(ident_to))
                 for ident_from, ident_to in self.connections]
        return "Connection< {} >".format(edges)
class Assignment(ASTNode):
    """AST node for `lhs = rhs` statements."""

    def __init__(self, span, dot_path, value):
        """
        Construct an assignment AST node.

        :type span: Span
        :param span: the span of the assignment.
        :type dot_path: DotPath
        :param dot_path: the left hand side of the assignment.
        :type value: Component | Number | StringLit | JsonObj | DotPath | Expr
        :param value: the right hand side of the assignment.
        """
        super(Assignment, self).__init__(span)
        # Only these node kinds may appear on the right hand side.
        allowed_rhs = (Component, JsonObj, Number, StringLit,
                       Ident, DotPath, Expr)
        if isinstance(value, allowed_rhs) and isinstance(dot_path, DotPath):
            self.lhs = dot_path
            self.rhs = value
        else:
            raise exception.BananaGrammarBug(
                'Impossible assignment found with'
                ' left hand side: {} and'
                ' right hand side: {}'
                .format(type(dot_path), type(value))
            )

    def into_unmodified_str(self):
        return "{} = {}".format(self.lhs.into_unmodified_str(),
                                self.rhs.into_unmodified_str())

    def __str__(self):
        return "{} = {}".format(str(self.lhs), str(self.rhs))
class ComponentCtorArg(ASTNode):
    """AST node for one argument of a component constructor."""

    def __init__(self, span, value, arg_name=None):
        """
        Construct an argument for a component ctor.

        :type span: Span
        :param span: Span of the argument.
        :type value: Number | StringLit | Ident | JsonObj | DotPath | Expr
        :param value: Value for the argument.
        :type arg_name: Ident | None
        :param arg_name: Name of the argument (None for a positional
            argument).
        :raises exception.BananaGrammarBug: if either part has an
            unexpected type (unreachable if the grammar is correct).
        """
        super(ComponentCtorArg, self).__init__(span)
        if (isinstance(value, (JsonObj, Number, StringLit, Ident,
                               DotPath, Expr)) and
                (isinstance(arg_name, Ident) or arg_name is None)):
            self.arg_name = arg_name
            self.value = value
        else:
            # This code should be unreachable.
            # The grammar as defined should prevent us from
            # seeing an arg value or a value of the incorrect type
            raise exception.BananaGrammarBug(
                'Impossible constructor argument found with'
                ' left hand side: {} and'
                ' right hand side: {}'
                .format(type(arg_name), type(value))
            )

    def into_unmodified_str(self):
        # Guard against positional arguments: arg_name is None for
        # them, and the previous version crashed on that case while
        # __str__ below handled it.
        if self.arg_name is not None:
            return "{} = {}".format(self.arg_name.into_unmodified_str(),
                                    self.value.into_unmodified_str())
        return self.value.into_unmodified_str()

    def __str__(self):
        if self.arg_name is not None:
            return "{} = {}".format(self.arg_name, self.value)
        else:
            return "{}".format(self.value)
class Component(ASTNode):
    """AST node for a component constructor such as `Foo(a=1, "b")`."""

    def __init__(self, span, type_name=None, args=None):
        """
        Construct a component.

        :type span: Span
        :param span: Span of this component.
        :type type_name: Ident
        :param type_name: Name of this component.
        :type args: list[ComponentCtorArg]
        :param args: List of arguments.
        """
        super(Component, self).__init__(span)
        self.type_name = type_name
        # Never share a default list between instances.
        self.args = [] if args is None else args

    def set_ctor(self, type_name):
        """
        Set the constructor name of that component.

        :type type_name: Ident
        :param type_name: Name of that constructor.
        """
        self.type_name = type_name

    def add_arg(self, arg):
        """
        Append an argument to that component constructor.

        :type arg: ComponentCtorArg
        :param arg: Argument to add to that component.
        """
        self.args.append(arg)

    def into_unmodified_str(self):
        rendered_args = ', '.join(
            a.into_unmodified_str() for a in self.args)
        return "{}({})".format(self.type_name.into_unmodified_str(),
                               rendered_args)

    def __str__(self):
        rendered_args = ', '.join(str(a) for a in self.args)
        return ("Component {" +
                " type_name: {},".format(self.type_name) +
                " args: [{}]".format(rendered_args) +
                "}")

View File

@ -0,0 +1,145 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class ASTNode(object):
    """
    Base class of all ast nodes.

    Every node carries a Span locating it in the original source text,
    which is used for error reporting.
    """

    def __init__(self, span):
        """
        Construct an ASTNode
        :type span: Span
        :param span: span for this AST node.
        """
        self.span = span

    @abc.abstractmethod
    def into_unmodified_str(self):
        """
        Returns a simple name for this ASTNode. It should be minimalist
        and user oriented. No span info, no debug info.
        :rtype: str
        :returns: A simple name for that ast node.
        """
        pass

    def __ne__(self, other):
        # Python 2 does not derive '__ne__' from '__eq__'; delegate
        # explicitly. Subclasses are expected to implement '__eq__'.
        return not self.__eq__(other)
class Span(object):
    """
    Represent a region of code, used for error reporting.
    Positions are absolute within the file.
    """

    def __init__(self, text, lo, hi):
        """
        :type text: str | None
        :param text: Full text of the file
        :type lo: int
        :param lo: position of the beginning of the region
        :type hi: int
        :param hi: position of the end of the region
        """
        self._text = text
        self.lo = lo
        self.hi = hi

    def __str__(self):
        if self._text is not None:
            return self._text[self.lo:self.hi]
        else:
            return '?SPAN?'

    def new_with_offset(self, offset):
        """
        Construct a new Span with an offset applied to lo.

        :type offset: int
        :param offset: Offset to apply to lo.
        :rtype: Span
        :return: Returns a new span
        """
        return Span(self._text, self.lo + offset, self.hi)

    def str_from_to(self, to_span):
        """
        Returns a string that starts at self and stops at to_span.

        :type to_span: Span
        :param to_span: Span to stop at.
        :rtype: str
        :return: Returns the string encapsulating both spans.
        """
        return self._text[self.lo:to_span.hi]

    def get_line(self):
        """
        Returns the (stripped) line of text containing the start of
        this span, or '?LINE?' when no text is attached or the span
        lies outside the text.

        :rtype: str
        """
        if self._text is None:
            return '?LINE?'
        current_pos = 0
        for line in self._text.splitlines():
            # '+ 1' accounts for the newline removed by splitlines();
            # the previous version omitted it, so every position after
            # the first line was shifted, and 'current_pos < self.lo'
            # missed spans starting exactly at a line start.
            # NOTE(review): assumes 1-char line separators ('\n');
            # '\r\n' input would shift positions — confirm inputs.
            if current_pos <= self.lo < current_pos + len(line) + 1:
                return line.strip()
            current_pos += len(line) + 1
        return '?LINE?'

    def get_lineno(self):
        """
        Returns the 1-based line number containing the start of this
        span, the total number of lines when the span is past the end,
        or -1 when no text is attached.

        :rtype: int
        """
        if self._text is None:
            return -1
        lineno = 0
        current_pos = 0
        for lineno, line in enumerate(self._text.splitlines(), 1):
            # Same '+ 1' newline accounting as get_line() above.
            if current_pos <= self.lo < current_pos + len(line) + 1:
                return lineno
            current_pos += len(line) + 1
        return lineno
# Sentinel span used when no source location is available.
DUMMY_SPAN = Span(None, 0, 0)


def from_parse_fatal(parse_fatal_exception):
    """
    Convert the provided ParseFatalException into a Span.
    :type parse_fatal_exception: pyparsing.ParseFatalException
    :param parse_fatal_exception: Exception to convert.
    :rtype: Span
    :return: Returns the span mapping to that fatal exception.
    """
    # 'pstr' is the full input string and 'loc' the failure offset;
    # the resulting span covers the single offending character.
    return Span(
        parse_fatal_exception.pstr,
        parse_fatal_exception.loc,
        parse_fatal_exception.loc + 1
    )

View File

@ -0,0 +1,285 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pyparsing as p
import monasca_analytics.banana.emitter as emit
import monasca_analytics.banana.grammar.ast as ast
import monasca_analytics.banana.grammar.const as const
import monasca_analytics.exception.banana as exception
# This file describe the grammar for the banana config file.
# It make use of one sub grammar for certain configuration
# that requires expressions (see expression.py file)
def banana_grammar(emitter=emit.PrintEmitter()):
    """
    Generate a banana parser that can be then used to
    parse a banana content. It build an AST on which
    operation can then be applied.

    :type emitter: emit.Emitter
    :param emitter: Emitter used while building the AST to report
        warnings such as duplicated connections.
        NOTE(review): the default emitter instance is created once at
        import time and shared across calls — confirm this is intended.
    :return: Return a banana parser
    :rtype: BananaScopeParser
    """
    # Should debug
    debug_grammar = False
    # Actions
    # Each 'action_*' callback converts matched pyparsing tokens into
    # the corresponding AST node. Per pyparsing convention, 's' is the
    # whole input string, 'l' the match location and 't' the tokens.
    def action_str_lit(s, l, t):
        return ast.StringLit(ast.make_span(s, l, t), t[0])
    def action_num_lit(s, l, t):
        return ast.Number(ast.make_span(s, l, t), t[0])
    def action_ident(s, l, t):
        return ast.Ident(ast.make_span(s, l, t), t[0])
    def action_expr(s, l, t):
        if len(t) != 1:
            raise exception.BananaGrammarBug(
                'Bug found in the grammar for expression,'
                ' Please report this bug.'
            )
        if isinstance(t[0], ast.Expr):
            # Avoid wrapping an Expr inside another Expr.
            return t[0]
        return ast.Expr(ast.make_span(s, l, t), t[0])
    def action_dot_path(s, l, t):
        # First token is the name of the variable
        # The rest is the property path
        if isinstance(t[0], ast.StringLit) and len(t[1:]) == 0:
            return t[0]
        return ast.DotPath(ast.make_span(s, l, t), t[0], t[1:])
    def action_json_obj(s, l, t):
        return ast.JsonObj(ast.make_span(s, l, t), t)
    def action_parse_ctor_arg(s, l, t):
        # Two tokens means the argument was named: '<name> = <value>'.
        if len(t) > 1:
            return ast.ComponentCtorArg(ast.make_span(s, l, t), t[1], t[0])
        else:
            return ast.ComponentCtorArg(ast.make_span(s, l, t), t[0])
    def action_parse_comp_ctor(s, l, tokens):
        comp = ast.Component(ast.make_span(s, l, tokens))
        for tok in tokens:
            if isinstance(tok, ast.Ident):
                comp.set_ctor(tok)
            elif isinstance(tok, ast.ComponentCtorArg):
                comp.add_arg(tok)
            else:
                raise exception.BananaGrammarBug(
                    'Bug found in the grammar, Please report this bug'
                )
        return comp
    def action_assignment(s, l, t):
        return ast.Assignment(ast.make_span(s, l, t), t[0], t[1])
    def action_create_connections(s, l, t):
        # Fold 'a -> b -> c' left to right into a single Connection.
        ast_conn = ast.into_connection(t[0])
        ast_conn.span = ast.make_span(s, l, t)
        for i in xrange(1, len(t)):
            next_conn = ast.into_connection(t[i])
            ast_conn.connect_to(next_conn, emitter)
        return ast_conn
    def action_merge_connections(s, l, t):
        ast_conn = ast.Connection(ast.make_span(s, l, t))
        ast_conn.merge_all(t, emitter)
        return ast_conn
    def action_root_ast(s, l, tokens):
        # Aggregate every top-level statement into a BananaFile.
        root = ast.BananaFile(emitter)
        for tok in tokens:
            if isinstance(tok, ast.Assignment):
                if isinstance(tok.rhs, ast.Component):
                    root.add_component_ctor(tok.lhs, tok.rhs)
                else:
                    root.add_assignment(tok.lhs, tok.rhs)
            elif isinstance(tok, ast.Connection):
                root.add_connections(tok)
            else:
                raise exception.BananaGrammarBug(
                    'Bug found in the grammar, Please report this bug.'
                )
        return root
    # TODO(Joan): Remove once it is no longer needed
    # NOTE(review): this debug action is attached to every statement
    # below and prints to stdout on each parsed statement.
    def print_stmt(s, l, t):
        print("\nPRINT AST")
        print(l, map(lambda x: str(x), t))
        print("END PRINT AST\n")
    def action_unimplemented(s, l, t):
        # Placeholder for rules that are parsed but not yet supported
        # (definitions and import directives).
        raise exception.BananaGrammarBug("unimplemented code reached")
    # Tokens
    equals = p.Literal("=").suppress().setName('"="').setDebug(debug_grammar)
    arrow = p.Literal("->").suppress().setName('"->"').setDebug(debug_grammar)
    lbra = p.Literal("[").suppress().setName('"["').setDebug(debug_grammar)
    rbra = p.Literal("]").suppress().setName('"]"').setDebug(debug_grammar)
    colon = p.Literal(":").suppress().setName('":"')
    comma = p.Literal(",").suppress().setName(",")
    less = p.Literal("<").suppress().setName('"<"')
    greater = p.Literal(">").suppress().setName('">"')
    lbrace = p.Literal("{").suppress().setName('"{"').setDebug(debug_grammar)
    rbrace = p.Literal("}").suppress().setName('"}"').setDebug(debug_grammar)
    lpar = p.Literal("(").suppress().setName('"("')
    rpar = p.Literal(")").suppress().setName('")"')
    # Keywords
    ing = p.Literal("ing").suppress()
    imp = p.Literal("import").suppress()
    fro = p.Literal("from").suppress()
    # String Literal, Numbers, Identifiers
    string_lit = p.quotedString()\
        .setParseAction(action_str_lit)\
        .setName(const.STRING_LIT)
    number_lit = p.Regex(r'\d+(\.\d*)?([eE]\d+)?')\
        .setParseAction(action_num_lit)\
        .setName(const.NUMBER)
    ident = p.Word(p.alphas + "_", p.alphanums + "_")\
        .setParseAction(action_ident)\
        .setName(const.IDENT)
    # Path for properties
    dot_prop = ident | string_lit
    dot_path = p.delimitedList(dot_prop, ".")\
        .setParseAction(action_dot_path)\
        .setName(const.DOT_PATH)\
        .setDebug(debug_grammar)
    # Expressions
    # Here to simplify the logic, we can match directly
    # against ident and string_lit to avoid having to deal
    # only with dot_path. It also allow to remove the confusion
    # where '"a"' could be interpreted as a dot_path and would thus
    # be the same as 'a'. With the following, the first we
    # always be type-checked as a String whereas the latter will
    # be as the type of the variable.
    expr = p.infixNotation(number_lit | dot_path, [
        (p.oneOf('* /'), 2, p.opAssoc.LEFT),
        (p.oneOf('+ -'), 2, p.opAssoc.LEFT),
    ], lpar=lpar, rpar=rpar)
    expr.setParseAction(action_expr)\
        .setName(const.EXPR)\
        .setDebug(debug_grammar)
    # Json-like object (value are much more)
    json_obj = p.Forward()
    json_value = p.Forward()
    json_array = p.Group(
        lbra + p.Optional(p.delimitedList(json_value)) + rbra
    )
    json_array.setDebug(debug_grammar)
    json_array.setName(const.JSON_ARRAY)
    json_value <<= expr | json_obj | json_array
    json_value.setDebug(debug_grammar)\
        .setName(const.JSON_VALUE)
    json_members = p.delimitedList(p.Group(dot_path + colon - json_value)) +\
        p.Optional(comma)
    json_members.setDebug(debug_grammar)\
        .setName(const.JSON_MEMBERS)
    json_obj <<= p.Dict(lbrace + p.Optional(json_members) - rbrace)
    json_obj.setParseAction(action_json_obj)\
        .setName(const.JSON_OBJ)\
        .setDebug(debug_grammar)
    # Component constructor
    arg = (ident + equals - (expr | json_obj)) | expr | json_obj
    arg.setParseAction(action_parse_ctor_arg)
    params = p.delimitedList(arg)
    comp_ctor = ident + lpar - p.Optional(params) + rpar
    comp_ctor.setParseAction(action_parse_comp_ctor)\
        .setName(const.COMP_CTOR)\
        .setDebug(debug_grammar)
    # Assignments
    assignment = dot_path + equals - (comp_ctor | expr | json_obj)
    assignment.setParseAction(action_assignment)
    # Connections
    connection = p.Forward()
    array_of_connection = p.Group(
        lbra + p.Optional(p.delimitedList(connection)) + rbra
    )
    array_of_connection.setParseAction(action_merge_connections)
    last_expr = ident | array_of_connection
    this_expr = p.Forward()
    match_expr = p.FollowedBy(last_expr + arrow - last_expr) + \
        (last_expr + p.OneOrMore(arrow - last_expr))
    this_expr <<= match_expr | last_expr
    connection <<= this_expr
    match_expr.setDebug(debug_grammar)\
        .setName(const.CONNECTION) \
        .setParseAction(action_create_connections)
    # Definitions
    definition = ing - less - string_lit - greater - ident - lbrace - rbrace
    definition.setDebug(debug_grammar)\
        .setName(const.DEFINITION)\
        .setParseAction(action_unimplemented)
    # Import directive
    module_def = (imp - ident) | fro - ident - imp - ident
    module_def.setDebug(debug_grammar)\
        .setName(const.MOD_IMPORT)\
        .setParseAction(action_unimplemented)
    # Comments
    comments = "#" + p.restOfLine
    statement = assignment | \
        match_expr | \
        definition | \
        module_def
    statement.setName(const.STATEMENT)
    statement.setDebug(debug_grammar)
    statement.setParseAction(print_stmt)
    # Grammar
    grammar = p.OneOrMore(statement).ignore(comments)
    grammar.setParseAction(action_root_ast)
    return BananaScopeParser(grammar)
class BananaScopeParser(object):
    """
    Thin wrapper around the generated pyparsing grammar.

    Aggregates and resolves conflicts as if everything was defined
    within the same scope. Useful to get cpp "include"-like behaviour
    when importing another file.
    """

    def __init__(self, grammar):
        self._grammar = grammar

    def parse(self, string):
        """
        Parse the given input string.

        :type string: str
        :param string: Input string.
        :rtype: ast.BananaFile
        :return: Returns the ast root.
        """
        # parseAll=True forces the whole input to match the grammar.
        return self._grammar.parseString(string, parseAll=True)[0]

View File

@ -0,0 +1,31 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Names given to each parser element of the banana grammar. They show
# up in pyparsing debug output and in parse error messages.
ASSIGNMENT = "assignment"
CONNECTION = "connection"
DEFINITION = "definition"
MOD_IMPORT = "mod_import"
DOT_PATH = "path"
NUMBER = "number"
STRING_LIT = "string_lit"
IDENT = "ident"
EXPR = "expression"
JSON_OBJ = "jsonlike_obj"
STATEMENT = "statement"
JSON_ARRAY = "jsonlike_array"
JSON_VALUE = "jsonlike_value"
JSON_MEMBERS = "jsonlike_members"
COMP_CTOR = "component_ctor"

View File

@ -0,0 +1,91 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pyparsing as p
import monasca_analytics.banana.deadpathck.config as deadpathck
import monasca_analytics.banana.emitter as emit
import monasca_analytics.banana.eval.config as ev
import monasca_analytics.banana.eval.ctx as ctx
import monasca_analytics.banana.grammar.base_ast as span_util
import monasca_analytics.banana.grammar.config as grammar
import monasca_analytics.banana.typeck.config as typeck
import monasca_analytics.exception.banana as exception
def execute_banana_string(banana_str, driver, emitter=emit.PrintEmitter()):
    """
    Execute the provided banana string.
    It will run the parse phase, and the typechecker.

    :type banana_str: str
    :param banana_str: The string to parse and type check.
    :type driver: monasca_analytics.spark.driver.DriverExecutor
    :param driver: Driver that will manage the created
                   components and connect them together.
    :type emitter: emit.Emitter
    :param emitter: Emitter for reporting errors/warning.
    """
    try:
        # Text -> AST
        parser = grammar.banana_grammar(emitter)
        ast = parser.parse(banana_str)
        # AST -> TypeTable
        type_table = typeck.typeck(ast)
        # Prune paths that are "dead"
        deadpathck.deadpathck(ast, type_table, emitter)
        # At least one path must reach a sink
        deadpathck.contains_at_least_one_path_to_a_sink(ast, type_table)
        # Run the script
        ev.eval_ast(ast, type_table, driver)
    except exception.BananaException as err:
        emitter.emit_error(err.get_span(), str(err))
    except (p.ParseSyntaxException, p.ParseFatalException) as err:
        # Both pyparsing failures are reported the same way.
        emitter.emit_error(span_util.from_parse_fatal(err), err.msg)
def compute_type_table(banana_str):
    """
    Compute the type table for the provided banana string
    if possible.

    :type banana_str: str
    :param banana_str: The string to parse and type check.
    :return: The type table computed by the type checker.
    """
    # Parse, then hand the resulting AST straight to the type checker.
    syntax_tree = grammar.banana_grammar().parse(banana_str)
    return typeck.typeck(syntax_tree)
def compute_evaluation_context(banana_str, cb=lambda *a, **k: None):
    """
    Compute the evaluation context for the provided
    banana string.

    :type banana_str: str
    :param banana_str: The string to parse and type check.
    :param cb: Callback called after each statement.
    """
    syntax_tree = grammar.banana_grammar().parse(banana_str)
    type_table = typeck.typeck(syntax_tree)
    context = ctx.EvaluationContext()

    def custom_cb(_type, lhs, value):
        # Forward the shared evaluation context to the user callback.
        cb(context, _type, lhs, value)

    ev.eval_statements_generic(syntax_tree.statements, type_table,
                               context, custom_cb)

View File

@ -0,0 +1,85 @@
## Type-checker
This folder is all about the type checking of `banana` files.
The type checker's purpose is to verify that components exist,
that the types of local variables match the requirements of component
parameters, and that assignments between them are correct. It also
checks that connections between components are valid.
The biggest difference from the old `validation` of the JSON
format is that we have more information available. We can warn
users when they make mistakes and point at the exact locations
using `Span`. Also, the type table generated is used by other passes
to perform other static analyses.
This is the second step of the pipeline:
```
+-------+ +---------------------+
| | | |
---> | AST | ---- typeck ---> | AST & TypeTable | --->
| | | |
+-------+ +---------------------+
```
The module `type_util.py` contains all the possible types that are
known by the type checker. The `TypeTable` built lives in the
`type_table.py` module.
### Current status
* [x] Type check numbers
* [x] Type check string literals
* [x] Type check variable assignments
* [x] Type check component assignments
* [x] Type check component parameters
* [x] Type check connections
* [x] Resolve variable names
* [ ] Resolve imports
* [ ] Type check disconnections
### Tests
All tests for the type checker (i.e. making sure that
inferred types are correct and that errors are raised in
appropriate situations) live in `test/banana/typeck`.
This folder looks like this:
```
test/banana/typeck
├── should_fail
│   ├── ...
│   └── file.banana
├── should_pass
│   ├── ...
│   └── file.banana
└── test_typeck_config.py
```
The `test_typeck_config`, generates one test for each file
in the `should_pass` and `should_fail` directories.
For each generated test, we basically run the following passes:
* `grammar`: convert the input text into an AST.
* `typeck`: run the type checker.
Tests can assert various things in `banana` comments:
- In the `should_fail` directory, a test is expected to use
the `RAISE` instruction to specify a type of exceptions
that should be raised by the test.
- In the `should_pass` directory, a test is expected not to
  raise any exception **and** to specify the state of the
`TypeTable` when the type checker has type checked everything
in the file. This is done with the `TYPE_TABLE_EQ` instruction.
#### Available instructions
* `# RAISE <exception-name>`: Check that `exception-name` is raised.
* `# TYPE_TABLE_EQ <string-version-of-the-type-table>`
* `# NEW_TEST`: This instruction splits the file into two tests. However,
the expected exception or type table should still be the same. It
allows us to verify what should be semantically equivalent in the
grammar.

View File

@ -0,0 +1,281 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import monasca_analytics.ingestor.base as ingestor
import monasca_analytics.ldp.base as ldp
import monasca_analytics.sink.base as sink
import monasca_analytics.sml.base as sml
import monasca_analytics.source.base as source
import monasca_analytics.voter.base as voter
import monasca_analytics.banana.grammar.ast as ast
import monasca_analytics.banana.typeck.connections as conn
import monasca_analytics.banana.typeck.type_table as typetbl
import monasca_analytics.banana.typeck.type_util as u
import monasca_analytics.exception.banana as exception
import monasca_analytics.exception.monanas as exception_monanas
import monasca_analytics.util.common_util as introspect
def typeck(banana_file):
    """
    Type-check the provided BananaFile instance.
    If it type checks, the associated TypeTable is returned.

    :type banana_file: ast.BananaFile
    :param banana_file: The file to typecheck.
    :rtype: typetbl.TypeTable
    :return: Returns the TypeTable for this BananaFile
    """
    type_table = typetbl.TypeTable()
    # Statements are type-checked in order; the index records where in
    # the file each binding was introduced.
    for statement_index, (lhs, rhs) in enumerate(banana_file.statements):
        rhs_type = typeck_rhs(rhs, type_table)
        type_table.set_type(lhs, rhs_type, statement_index)
    conn.typeck_connections(banana_file.connections, type_table)
    return type_table
def typeck_rhs(ast_value, type_table):
    """
    Type-check the provided ast value and return its type.
    This function does not support assignments.

    :type ast_value: ast.ASTNode
    :param ast_value: The ast_value to type check.
    :type type_table: typetbl.TypeTable
    :param type_table: The type table. Used for type lookup.
    :rtype: u.Component | u.Object | u.Number | u.String
    :return: Returns the computed type.
    """
    if isinstance(ast_value, ast.Number):
        return u.Number()
    if isinstance(ast_value, ast.StringLit):
        return u.String()
    if isinstance(ast_value, (ast.Ident, ast.DotPath)):
        # Variables and property paths both resolve via the type table.
        return type_table.get_type(ast_value)
    if isinstance(ast_value, ast.Expr):
        return typeck_expr(ast_value, type_table)
    if isinstance(ast_value, ast.JsonObj):
        return typeck_jsonobj(ast_value, type_table)
    if isinstance(ast_value, ast.Component):
        return typeck_component(ast_value, type_table)
    raise Exception("Unhandled ast value type {}!!".format(ast_value))
def typeck_jsonobj(json_obj, type_table):
    """
    Type-check a json-like object. If it succeeds,
    return the appropriate type describing this
    json-like object. Raise an exception otherwise.

    :type json_obj: ast.JsonObj
    :param json_obj: The JsonObj ast node.
    :type type_table: typetbl.TypeTable
    :param type_table: The type table.
    :rtype: u.Object
    :return: Returns an instance of util.Object describing
             the full type of this json object.
    """
    root_type = u.Object(strict_checking=False)
    # Each property contributes a small type tree that gets grafted
    # onto the root object type.
    for prop_name, prop_value in json_obj.props.items():
        prop_type = typeck_rhs(prop_value, type_table)
        u.attach_to_root(root_type,
                         u.create_object_tree(prop_name, prop_type),
                         json_obj.span)
    return root_type
def typeck_expr(expr, type_table):
    """
    Type-check the given expression. If the typecheck
    pass, the resulting type will be used for the strategy
    to use when evaluating this expression.
    :type expr: ast.Expr
    :param expr: The expression to typecheck.
    :type type_table: typetbl.TypeTable
    :param type_table: Type table used to resolve identifiers.
    :rtype: u.Number | u.String
    :return: Returns the type of the expression if possible
    :raise: Raise a BananaTypeError or BananaUnknownOperator on
        ill-typed expressions.
    """
    # In the case where we are just wrapping around
    # only one expression, the logic below
    # needs to be skipped.
    if len(expr.expr_tree) == 1:
        return typeck_rhs(expr.expr_tree[0], type_table)
    # Running type of the expression so far (None until the first
    # operand has been type-checked).
    _type = None
    # Becomes True once a '-', '*' or '/' has been seen: those
    # operators are only defined on numbers. check_type reads this
    # flag through its closure.
    must_be_number = False

    def check_type(old_type, new_type):
        # Combine the running type with the next operand's type,
        # raising BananaTypeError on an impossible combination.
        if new_type == old_type:
            return old_type
        elif new_type == u.String():
            if must_be_number:
                raise exception.BananaTypeError(
                    expected_type=u.Number,
                    found_type=new_type
                )
            if old_type is None:
                return new_type
            elif u.can_to_str(old_type):
                # Concatenation: anything convertible to a string
                # promotes the whole expression to String.
                return new_type
            else:
                raise exception.BananaTypeError(
                    expected_type=old_type,
                    found_type=new_type
                )
        elif new_type == u.Number():
            # NOTE(review): when none of these branches match this
            # falls through and returns None — confirm unreachable
            # given the equality check at the top.
            if old_type is None:
                return new_type
            elif old_type == u.String():
                # Appending a number to a string keeps it a String.
                return old_type
            elif not old_type == u.Number():
                raise exception.BananaTypeError(
                    expected_type=old_type,
                    found_type=new_type
                )
        else:
            raise exception.BananaTypeError(
                expected_type=old_type,
                found_type=new_type
            )

    def allowed_symbol(current_type):
        # Strings only support concatenation.
        if current_type == u.String():
            return ['+']
        else:
            return ['+', '-', '*', '/']
    for el in expr.expr_tree:
        if isinstance(el, ast.StringLit):
            _type = check_type(_type, u.String())
        elif isinstance(el, ast.Number):
            _type = check_type(_type, u.Number())
        elif isinstance(el, ast.Ident):
            ident_type = type_table.get_type(el)
            _type = check_type(_type, ident_type)
        elif isinstance(el, ast.DotPath):
            dotpath_type = type_table.get_type(el)
            _type = check_type(_type, dotpath_type)
        elif isinstance(el, ast.Expr):
            _type = check_type(_type, typeck_expr(el, type_table))
        elif isinstance(el, basestring):
            # Operators appear as plain strings in the expression tree.
            if el not in allowed_symbol(_type):
                raise exception.BananaUnknownOperator(expr.span, el, _type)
            if el in ['-', '*', '/']:
                must_be_number = True
        else:
            raise exception.BananaTypeError(
                expected_type=[u.Number.__name__, u.String.__name__,
                               u.Object.__name__],
            )
    # The final type if we made until here!
    return _type
def typeck_component(component, type_table):
    """
    Type-check the provided component. Returns
    the appropriate subclass of util.Component if
    successful, or raise an exception if there's
    an error.
    :type component: ast.Component
    :param component: The component ast node.
    :type type_table: typetbl.TypeTable
    :param type_table: the type table.
    :rtype: u.Source | u.Sink | u.Voter | u.Ldp | u.Sml | u.Ingestor
    :return: Returns the appropriate type for the component.
    """
    # TODO(Joan): This wont't work for type that are defined
    # TODO(Joan): at the language level. We need a registration service
    # TODO(Joan): to manage the Types of component that we can create
    # TODO(Joan): instead of this hacky function call.
    try:
        # Resolve the component class by its source-level name.
        component_type = introspect.get_class_by_name(component.type_name.val)
        comp_params = component_type.get_params()
    except exception_monanas.MonanasNoSuchClassError:
        raise exception.BananaUnknown(
            component
        )
    # Compute the type of the component from its base class.
    if issubclass(component_type, source.BaseSource):
        comp_type = u.Source(component_type.__name__, comp_params)
    elif issubclass(component_type, sink.BaseSink):
        comp_type = u.Sink(component_type.__name__, comp_params)
    elif issubclass(component_type, sml.BaseSML):
        comp_type = u.Sml(component_type.__name__, comp_params)
    elif issubclass(component_type, voter.BaseVoter):
        comp_type = u.Voter(component_type.__name__, comp_params)
    elif issubclass(component_type, ldp.BaseLDP):
        comp_type = u.Ldp(component_type.__name__, comp_params)
    elif issubclass(component_type, ingestor.BaseIngestor):
        comp_type = u.Ingestor(component_type.__name__, comp_params)
    else:
        raise exception.BananaTypeCheckerBug("Couldn't find a type for '{}'"
                                             .format(component.type_name.val))
    # Type check the parameters
    if len(component.args) > len(comp_params):
        raise exception.BananaComponentTooManyParams(component.span)
    # Does saying that parameter should either all have a name
    # or non at all satisfying? -> Yes
    # Are parameter all named?
    # Tri-state flag: -1 = not decided yet, 1 = all named so far,
    # 0 = all positional so far. Mixing styles raises an error.
    all_named = -1
    for arg in component.args:
        if arg.arg_name is not None:
            if all_named == 0:
                raise exception.BananaComponentMixingParams(arg.span, False)
            all_named = 1
        else:
            if all_named == 1:
                raise exception.BananaComponentMixingParams(arg.span, True)
            all_named = 0
    if all_named == 1:
        # Named arguments: look each parameter up by name.
        for arg in component.args:
            param = filter(lambda x: x.param_name == arg.arg_name.inner_val(),
                           comp_params)
            if len(param) != 1:
                raise exception.BananaComponentIncorrectParamName(
                    component=component.type_name,
                    found=arg.arg_name
                )
            param = param[0]
            expr_type = typeck_rhs(arg.value, type_table)
            if not u.can_be_cast_to(expr_type, param.param_type):
                raise exception.BananaArgumentTypeError(
                    where=arg,
                    expected_type=param.param_type,
                    received_type=expr_type
                )
    else:
        # Positional arguments: pair them with parameters in
        # declaration order and backfill the argument names.
        for arg, param in zip(component.args, comp_params):
            arg.arg_name = ast.Ident(arg.span, param.param_name)
            expr_type = typeck_rhs(arg.value, type_table)
            if not u.can_be_cast_to(expr_type, param.param_type):
                raise exception.BananaArgumentTypeError(
                    where=arg,
                    expected_type=param.param_type,
                    received_type=expr_type
                )
    return comp_type

View File

@ -0,0 +1,59 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import monasca_analytics.banana.typeck.type_util as util
import monasca_analytics.exception.banana as exception
# Allowed component wirings: maps each component type to the list of
# component types it may connect *to*. Sinks are terminal.
valid_connections_types = {
    util.Source: [util.Ingestor, util.Ldp],
    util.Ingestor: [util.Sml, util.Sink],
    util.Sml: [util.Voter, util.Sink],
    util.Voter: [util.Ldp, util.Sink],
    util.Ldp: [util.Sink],
    util.Sink: []
}
def typeck_connections(connection, type_table):
"""
Once all variable have been type-checked, we can
try to type-check connections.
:type connection: monasca_analytics.banana.grammar.ast.Connection
:param connection: The connection to type-check
:type type_table: monasca_analytics.banana.typeck.type_table.TypeTable
:param type_table: The table with all variable already type-checked.
:raise Raise an exception if there's a type error in connections.
"""
if connection is not None:
for ident_from, ident_to in connection.connections:
type_from = type_table.get_type(ident_from)
type_to = type_table.get_type(ident_to)
if not util.is_comp(type_from):
raise exception.BananaTypeError(
expected_type=util.Component(),
found_type=type_from
)
if not util.is_comp(type_to):
raise exception.BananaTypeError(
expected_type=util.Component(),
found_type=type_to
)
if type(type_to) not in valid_connections_types[type(type_from)]:
possible_types = map(lambda x: x.__name__,
valid_connections_types[type(type_from)])
raise exception.BananaConnectionError(
ident_from, ident_to, type_from, possible_types
)

View File

@ -0,0 +1,199 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import monasca_analytics.banana.grammar.ast as ast
import monasca_analytics.banana.typeck.type_util as util
import monasca_analytics.exception.banana as exception
import monasca_analytics.util.string_util as strut
class TypeTable(object):
    """
    Type table. Support lookup for JsonLike object.
    Json-like object have properties that needs to be
    type-checked. The TypeTable allows to store
    that information as well. All type values are
    rooted by their variable name.
    Every-time a variable type is erased, we create a new
    snapshot of the variables types. This allow to have
    variable where the type change as the statement are
    being executed.
    """

    def __init__(self):
        # List of (statement_index, variables-dict) pairs, ordered by
        # creation index. Index 0 holds the initial, empty snapshot.
        self._variables_snapshots = [(0, {})]
        # Always aliases the dict of the most recent snapshot.
        self._variables = self._variables_snapshots[0][1]

    def get_type(self, var, statement_index=None):
        """
        Return the type associated with the given variable or dot path.

        :type var: ast.Ident | ast.DotPath
        :param var: Variable or dot path to look up.
        :type statement_index: int
        :param statement_index: Optional; when given, resolve against the
            snapshot active at that statement instead of the latest one.
        :raise: BananaUnknown when the variable is not declared.
        """
        variables = self.get_variables(statement_index)
        if isinstance(var, ast.Ident):
            if var in variables:
                return variables[var]
            else:
                raise exception.BananaUnknown(var)
        # If we encounter a dot path:
        if isinstance(var, ast.DotPath):
            if var.varname in variables:
                if len(var.properties) > 0:
                    # Delegate resolution of the remaining path segments
                    # to the stored type's __getitem__.
                    return variables[var.varname][var.next_dot_path()]
                else:
                    return variables[var.varname]
            else:
                raise exception.BananaUnknown(var.varname)
        raise exception.BananaTypeCheckerBug("Unkown type for {}".format(var))

    def set_type(self, var, _type, statement_index):
        """
        Set the type for the given var to _type.
        :type var: ast.Ident | ast.DotPath
        :param var: The var to set a type.
        :type _type: util.Object | util.Component | util.String | util.Number
        :param _type: The type for the var.
        :type statement_index: int
        :param statement_index: The statement at which this assignment was
            made.
        """
        if _type is None:
            raise exception.BananaTypeCheckerBug(
                "'None' is not a valid banana type"
            )
        if isinstance(var, ast.Ident):
            self._check_needs_for_snapshot(var, _type, statement_index)
            self._variables[var] = _type
            return
        if isinstance(var, ast.DotPath):
            # Components can only be assigned to plain names, never to
            # properties of other objects.
            if util.is_comp(_type) and len(var.properties) > 0:
                raise exception.BananaAssignCompError(var.span)
            if len(var.properties) == 0:
                self._check_needs_for_snapshot(
                    var.varname,
                    _type,
                    statement_index
                )
                self._variables[var.varname] = _type
            else:
                if var.varname in self._variables:
                    var_type = self._variables[var.varname]
                    if isinstance(var_type, util.Object):
                        # Merge the new property path into the existing
                        # object type, overwriting conflicting leaves.
                        new_type = util.create_object_tree(
                            var.next_dot_path(), _type)
                        util.attach_to_root(var_type, new_type, var.span,
                                            erase_existing=True)
                    elif isinstance(var_type, util.Component):
                        # Delegate to Component.__setitem__, which checks
                        # the ctor parameter types.
                        var_type[var.next_dot_path()] = _type
                    else:
                        raise exception.BananaTypeError(
                            expected_type=util.Object,
                            found_type=type(var)
                        )
                # Var undeclared, declare its own type
                else:
                    new_type = util.create_object_tree(var.next_dot_path(),
                                                       _type)
                    self._variables[var.varname] = new_type
            return
        raise exception.BananaTypeCheckerBug("Unreachable code reached.")

    def get_variables(self, statement_index=None):
        """
        Returns the list of variables with their associated type.
        :type statement_index: int
        :param: Statement index.
        :rtype: dict[str, util.Object|util.Component|util.String|util.Number]
        """
        if statement_index is None:
            return self._variables
        variables = {}
        # Snapshots are ordered by creation index: pick the last snapshot
        # created strictly before the requested statement.
        for created_at, snap in self._variables_snapshots:
            if created_at < statement_index:
                variables = snap
            else:
                break
        return variables

    def get_variables_snapshots(self):
        # Full history: list of (statement_index, variables-dict) pairs.
        return self._variables_snapshots

    def _check_needs_for_snapshot(self, var, _type, statement_index):
        # Decide whether this assignment shadows a component (error) or
        # changes an existing type (requires a new snapshot).
        if var in self._variables:
            # If we shadow a component, we need to raise an error
            if util.is_comp(self._variables[var]):
                raise exception.BananaShadowingComponentError(
                    where=var.span,
                    comp=self._variables[var].class_name
                )
            # If we change the type of the variable, we create a new snapshot:
            # This is very strict but will allow to know exactly how
            # the type of a variable (or a property) changed.
            if self._variables[var] != _type:
                self._create_snapshot(statement_index)

    def _create_snapshot(self, statement_index):
        """
        Create a new snapshot of the variables.
        :type statement_index: int
        :param statement_index: index of the statement
            (should be strictly positive)
        """
        new_snapshot = copy.deepcopy(self._variables)
        self._variables_snapshots.append((
            statement_index, new_snapshot
        ))
        # Future writes go to the new snapshot.
        self._variables = new_snapshot

    def __contains__(self, key):
        """
        Test if the type table contains or not the provided
        path. This function is more permissive than the other two.
        It will never raise any exception (or should aim not to).
        :type key: basestring | ast.Ident | ast.DothPath
        :param key: The key to test.
        :return: Returns True if the TypeTable contains a type for the
            given path or identifier.
        """
        if isinstance(key, basestring):
            return key in self._variables
        if isinstance(key, ast.Ident):
            return key.val in self._variables
        if isinstance(key, ast.DotPath):
            res = key.varname in self._variables
            if not res:
                return False
            val = self._variables[key.varname]
            # NOTE(review): intermediate values that are not Objects are
            # skipped rather than rejected here — confirm this is intended.
            for prop in key.properties:
                if isinstance(val, util.Object):
                    if prop in val.props:
                        val = val[prop]
                    else:
                        return False
            return True
        return False

    def __str__(self):
        return strut.dict_to_str(self._variables)

View File

@ -0,0 +1,559 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Util files to manipulates banana types.
The list of possible types is as follow:
* `Number`
* `Boolean`
* `String`
* `Object` (Json-like object)
* `Component.Source.<class-name>`
* `Component.Ingestor.<class-name>`
* `Component.Sink.<class-name>`
* `Component.Voter.<class-name>`
* `Component.Ldp.<class-name>`
* `Component.Sml.<class-name>`
where <class-name> will be the component class name defined
in the code base.
For type defined in banana such as Json parsers, <class-name>
refers the name they are defined with.
"""
import abc
import six
import monasca_analytics.banana.grammar.ast as ast
import monasca_analytics.exception.banana as exception
import monasca_analytics.util.string_util as strut
@six.add_metaclass(abc.ABCMeta)
class IsType(object):
    """
    Base class of every banana type. Concrete types subclass it
    and must at least provide `default_value`.
    """

    def __ne__(self, other):
        # Expressed via equality so subclasses only implement __eq__.
        return not (self == other)

    @abc.abstractmethod
    def default_value(self):
        """Return the default runtime value for this type."""
        pass


class Any(IsType):
    """
    Wildcard type for components that perform their own parameter
    validation. Discouraged, since it shifts error handling from the
    type checker to the component writer.
    """

    def __str__(self):
        return "TypeAny"

    def __eq__(self, _):
        # Any compares equal to nothing, itself included.
        return False

    def __getitem__(self, _):
        # Any property of Any is Any again.
        return Any()

    def __hash__(self):
        raise Exception("Any type should not be used in dictionaries.")

    def default_value(self):
        return {}


class String(IsType):
    """
    String Type.
    """

    def __str__(self):
        return "TypeString"

    def __eq__(self, other):
        return isinstance(other, String)

    def __hash__(self):
        return hash(str(self))

    def default_value(self):
        return ""


class Number(String):
    """
    Number type. Banana has only floating point value.
    """

    def __str__(self):
        return "TypeNumber"

    def __eq__(self, other):
        return isinstance(other, Number)

    def __hash__(self):
        return hash(str(self))

    def default_value(self):
        return 0
class Enum(String):
    """
    Enum type. This type is a way to constraint a string or number,
    to a specific set of values.
    """

    def __init__(self, variants):
        # :type variants: list[str] -- the allowed values.
        self.variants = variants

    def __eq__(self, other):
        return isinstance(other, Enum) and self.variants == other.variants

    def __hash__(self):
        # Bug fix: `variants` is a list, and `hash(self.variants)` raised
        # TypeError (lists are unhashable). Hash an immutable copy instead.
        return hash(tuple(self.variants))

    def __str__(self):
        return "TypeEnum < {} >".format(','.join(self.variants))

    def default_value(self):
        return ""
def attach_to_root(root_obj, obj1, span, erase_existing=False):
    """
    Attach the object obj1 to the root_obj object type.
    :type root_obj: Object
    :param root_obj: The root object
    :type obj1: Object
    :param obj1: The object to attach.
    :type span: Span
    :param span: The span for this change.
    :type erase_existing: bool
    :param erase_existing: Set to true if the root type should
        always be erased.
    """
    for prop_name, prop_type in obj1.props.iteritems():
        if prop_name not in root_obj.props:
            # New property: attach it as-is.
            root_obj.props[prop_name] = prop_type
            continue
        existing_type = root_obj.props[prop_name]
        if isinstance(existing_type, Object) and \
                isinstance(prop_type, Object):
            # Both sides are objects: merge them recursively.
            attach_to_root(existing_type, prop_type, span, erase_existing)
        elif erase_existing:
            root_obj.props[prop_name] = prop_type
        else:
            raise exception.BananaTypeError(
                expected_type=existing_type,
                found_type=prop_type,
                span=span
            )


def create_object_tree(dot_path, value):
    """
    Create a linear tree of object type from the dot_path.
    Also work when dot_path is an Ident or StringLit.
    :type dot_path: ast.DotPath | ast.Ident | ast.StringLit
    :param dot_path: The ast node that forms a linear tree of type.
    :type value: Object | String | Number
    :param value: the value to set at the end of the linear tree.
    :rtype: Object
    :return: Returns the created object
    """
    if is_comp(value):
        raise exception.BananaAssignCompError(dot_path.span)
    root_object = Object(strict_checking=False)
    if not isinstance(dot_path, ast.DotPath):
        # Ident and StringLit: a single-level tree.
        root_object.props[dot_path.inner_val()] = value
        return root_object
    if len(dot_path.properties) == 0:
        # {a: value}
        root_object.props[dot_path.varname.inner_val()] = value
        return root_object
    # {a.b.c: value} -> one nested Object per intermediate segment.
    cursor = Object(strict_checking=False)
    root_object.props[dot_path.varname.inner_val()] = cursor
    for sub_prop in dot_path.properties[:-1]:
        nested = Object(strict_checking=False)
        cursor.props[sub_prop.inner_val()] = nested
        cursor = nested
    cursor.props[dot_path.properties[-1].inner_val()] = value
    return root_object
class Object(String):
    """
    Object Type. The value that are dictionary-like have this type.
    """

    def __init__(self, props=None, strict_checking=True):
        """
        :type props: dict | None
        :param props: Property name -> type mapping.
        :type strict_checking: bool
        :param strict_checking: Whether unknown property access is an error.
        """
        if props is None:
            props = {}
        self.props = props
        # Strict checking is off for all objects defined within the banana
        # language. It is on by default for components so that they can
        # force the type checker to throw errors when we try to access
        # or to modify unknown properties
        self.strict_checking = strict_checking

    def __getitem__(self, key):
        # a.b or a."b"
        if isinstance(key, ast.Ident) or isinstance(key, ast.StringLit):
            if key.inner_val() not in self.props:
                raise exception.BananaPropertyDoesNotExists(key,
                                                            on_type=self)
            return self.props[key.inner_val()]
        # a.b.c
        if isinstance(key, ast.DotPath):
            if key.varname.inner_val() not in self.props:
                raise exception.BananaPropertyDoesNotExists(key.varname,
                                                            on_type=self)
            sub_object = self.props[key.varname.inner_val()]
            if len(key.properties) == 0:
                return sub_object
            # Recurse
            if isinstance(sub_object, Object):
                return sub_object[key.next_dot_path()]
            if isinstance(sub_object, Any):
                return sub_object
            raise exception.BananaPropertyDoesNotExists(key.next_dot_path(),
                                                        on_type=sub_object)
        raise exception.BananaTypeCheckerBug(
            "Unreachable code in Object.__getitem__ reached."
        )

    def __str__(self):
        if self.strict_checking:
            return "TypeStruct < {} >".format(strut.dict_to_str(self.props))
        else:
            return "TypeObject < {} >".format(strut.dict_to_str(self.props))

    def __eq__(self, other):
        # Compares the property dict against `other`. When `other` is an
        # Object, Python's reflected dispatch ends up comparing the two
        # `props` dictionaries.
        return self.props == other

    def __hash__(self):
        # Bug fix: `hash(self.props)` raised TypeError because dicts are
        # unhashable. Hash the string representation instead, consistent
        # with the other type classes in this module.
        return hash(str(self))

    def default_value(self):
        default_value = {}
        for key, val in self.props.iteritems():
            default_value[key] = val.default_value()
        return default_value
class Component(IsType):
    """
    Type of all components. While not strictly used directly, it
    is very useful to performs checks on variable that are supposed
    to be any of the available components.
    """

    def __init__(self, ctor_properties=None, class_name=None):
        """
        Component type
        :type ctor_properties:
            list[monasca_analytics.component.params.ParamDescriptor]
        :param ctor_properties:
        :type class_name: str
        :param class_name: Name of the class if there's any.
        """
        self.ctor_properties = ctor_properties
        self.class_name = class_name

    def __str__(self):
        if self.class_name is None:
            return "TypeComponent"
        else:
            # e.g. "MySource(param1=TypeString,param2=TypeNumber)"
            return self.class_name + "(" +\
                ",".join(map(lambda x: x.param_name + "=" + str(x.param_type),
                             self.ctor_properties))\
                + ")"

    def __setitem__(self, dot_path, value):
        """
        Attempt to set the value at 'dot_path' to 'value'.
        :type dot_path: ast.DotPath
        :param dot_path: The path of the property
        :type value: String | Enum | Object | Number
        :param value: The new type to set.
        """
        if self.ctor_properties is None:
            raise exception.BananaTypeCheckerBug(
                "Component type can't have properties"
            )
        if len(dot_path.properties) == 0:
            # Single segment: must name a constructor parameter directly.
            for arg in self.ctor_properties:
                if arg.param_name == dot_path.varname.inner_val():
                    if not can_be_cast_to(value, arg.param_type):
                        raise exception.BananaArgumentTypeError(
                            expected_type=arg.param_type,
                            received_type=value,
                            where=dot_path.span
                        )
                    else:
                        return
        else:
            # Multi-segment path: descend into the parameter's type.
            for arg in self.ctor_properties:
                if arg.param_name == dot_path.varname.inner_val():
                    if isinstance(arg.param_type, Any):
                        # Any accepts everything; nothing to check.
                        return
                    elif isinstance(arg.param_type, Object):
                        next_dot_path = dot_path.next_dot_path()
                        sub_arg_type = arg.param_type[next_dot_path]
                        if not can_be_cast_to(value, sub_arg_type):
                            raise exception.BananaArgumentTypeError(
                                expected_type=sub_arg_type,
                                received_type=value,
                                where=next_dot_path.span
                            )
                        else:
                            return
                    else:
                        # Only Object/Any parameters have sub-properties.
                        raise exception.BananaPropertyDoesNotExists(
                            dot_path.next_dot_path(),
                            arg.param_type
                        )
        # No constructor parameter matched the path's first segment.
        raise exception.BananaPropertyDoesNotExists(dot_path, on_type=self)

    def __getitem__(self, dot_path):
        """
        Return the type of the given item.
        :type dot_path: ast.DotPath
        :param dot_path: The path to follow
        :return:
        """
        if self.ctor_properties is None:
            raise exception.BananaTypeCheckerBug(
                "Component type can't have properties"
            )
        if len(dot_path.properties) == 0:
            for arg in self.ctor_properties:
                if arg.param_name == dot_path.varname.inner_val():
                    return arg.param_type
        else:
            for arg in self.ctor_properties:
                if arg.param_name == dot_path.varname.inner_val():
                    if isinstance(arg.param_type, Object):
                        return arg.param_type[dot_path.next_dot_path()]
                    else:
                        raise exception.BananaPropertyDoesNotExists(
                            dot_path.next_dot_path(),
                            arg.param_type
                        )
        raise exception.BananaPropertyDoesNotExists(dot_path, on_type=self)

    def __eq__(self, other):
        return isinstance(other, Component)

    def __hash__(self):
        return hash(str(self))

    def default_value(self):
        return {}
class Source(Component):
    """
    Source type. All component that inherits from BaseSource have
    this type in Banana.
    """

    def __init__(self, class_name, ctor_properties):
        super(Source, self).__init__(ctor_properties, class_name)

    def __eq__(self, other):
        # Bug fix: guard with isinstance. Comparing against a
        # non-component type (e.g. String()) used to raise
        # AttributeError on `other.class_name`.
        return isinstance(other, Component) and \
            self.class_name == other.class_name

    def __hash__(self):
        return hash(self.class_name)


class Ingestor(Component):
    """
    Ingestor type. All component that inherits from BaseIngestor have
    this type in Banana.
    """

    def __init__(self, class_name, ctor_properties):
        super(Ingestor, self).__init__(ctor_properties, class_name)

    def __eq__(self, other):
        # See Source.__eq__: isinstance guard avoids AttributeError.
        return isinstance(other, Component) and \
            self.class_name == other.class_name

    def __hash__(self):
        return hash(self.class_name)


class Sink(Component):
    """
    Sink type. All component that inherits from BaseSink have
    this type in Banana.
    """

    def __init__(self, class_name, ctor_properties):
        super(Sink, self).__init__(ctor_properties, class_name)

    def __eq__(self, other):
        # See Source.__eq__: isinstance guard avoids AttributeError.
        return isinstance(other, Component) and \
            self.class_name == other.class_name

    def __hash__(self):
        return hash(self.class_name)


class Voter(Component):
    """
    Voter type. All component that inherits from BaseVoter have
    this type in Banana.
    """

    def __init__(self, class_name, ctor_properties):
        super(Voter, self).__init__(ctor_properties, class_name)

    def __eq__(self, other):
        # See Source.__eq__: isinstance guard avoids AttributeError.
        return isinstance(other, Component) and \
            self.class_name == other.class_name

    def __hash__(self):
        return hash(self.class_name)


class Ldp(Component):
    """
    Ldp type. All component that inherits from BaseLdp have
    this type in Banana.
    """

    def __init__(self, class_name, ctor_properties):
        super(Ldp, self).__init__(ctor_properties, class_name)

    def __eq__(self, other):
        # See Source.__eq__: isinstance guard avoids AttributeError.
        return isinstance(other, Component) and \
            self.class_name == other.class_name

    def __hash__(self):
        return hash(self.class_name)


class Sml(Component):
    """
    Sml type. All component that inherits from BaseSml have
    this type in Banana.
    """

    def __init__(self, class_name, ctor_properties):
        super(Sml, self).__init__(ctor_properties, class_name)

    def __eq__(self, other):
        # See Source.__eq__: isinstance guard avoids AttributeError.
        return isinstance(other, Component) and \
            self.class_name == other.class_name

    def __hash__(self):
        return hash(self.class_name)
def get_type(ast_node):
    """
    Returns the type for the given ast node.
    This function only works for literal node such as
    Number, StringLit and JsonObj.
    :type ast_node: ast.Number | ast.StringLit | ast.JsonObj | ast.Component
    :param ast_node: the node.
    :return: Returns the appropriate type, or None for other nodes.
    """
    # Ordered dispatch table: first matching AST class wins.
    literal_types = (
        (ast.Number, lambda: Number()),
        (ast.StringLit, lambda: String()),
        (ast.JsonObj, lambda: Object(strict_checking=False)),
        (ast.Component, lambda: Component()),
    )
    for node_class, make_type in literal_types:
        if isinstance(ast_node, node_class):
            return make_type()
    return None


def can_to_str(_type):
    """
    Check whether the given type can be cast to a string.
    :param _type: Type to check
    :return: Returns True if it can be casted
    """
    return isinstance(_type, String)


def is_comp(_type):
    """
    Check whether the given type is a component type.
    :type _type: String | Number | Object | Component
    :param _type: Type to check.
    :rtype: bool
    :return: Returns True if the provided _type is a component
    """
    return isinstance(_type, Component)
def can_be_cast_to(_type1, _type2):
    """
    Check if the given type `_type1` can be cast into `_type2`.
    :type _type1: String | Number | Enum | Object
    :param _type1: Type to try to change into _type2
    :type _type2: String | Number | Enum | Object
    :param _type2: Type reference.
    :return: Returns true if the conversion can be done.
    """
    if isinstance(_type2, Any):
        # Any accepts every type.
        return True
    elif _type1 == _type2:
        return True
    elif _type2 == String():
        return can_to_str(_type1)
    elif isinstance(_type2, Enum):
        # Bug fix: the condition used to be
        # `isinstance(_type1, String) or isinstance(_type2, Enum)`,
        # whose second operand is always True inside this branch, so
        # *every* type could be cast to an Enum. Only string-like types
        # (String and its subtypes) can be constrained by an enum.
        return isinstance(_type1, String)
    elif isinstance(_type1, Object) and isinstance(_type2, Object):
        if not _type2.strict_checking:
            return True
        else:
            # Structural check: every property required by _type2 must
            # exist on _type1 with a castable type.
            for prop_name, prop_type in _type2.props.iteritems():
                if prop_name not in _type1.props:
                    return False
                if not can_be_cast_to(_type1.props[prop_name], prop_type):
                    return False
            return True
    return False

View File

@ -78,6 +78,38 @@ class BaseComponent(object):
"""
pass
@abstractstatic
def get_params(): # @NoSelf
"""Abstract static method that returns the description of the params.
To be implemented by BaseComponent children. It has to return
a list of the params description such as:
return [
ParamDescriptor('param1', type_util.String(), 'default value'),
ParamDescriptor('param2', type_util.Object({
'a': type_util.Number()
), {'a': 123}),
...
]
This function must be kept in sync with `get_default_config` and
`validate_config`, otherwise banana scripts using this component
will get runtime errors when being evaluated.
The order in the list maps to the order the parameter must be
passed when the component would be created, in banana:
a = MyComponent(param1, param2)
`param1` and `param2` would be type-checked respectively against
the first and the second element of the returned list.
:rtype: list[monasca_analytics.component.params.ParamDescriptor]
:return: Returns the list of parameters accepted by this component.
"""
pass
def id(self):
return self._id

View File

@ -0,0 +1,47 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import monasca_analytics.banana.typeck.type_util as u
class ParamDescriptor(object):
    """
    Description of a component parameter. This object contains
    information such as the name of the parameter, the type,
    the default value and a validator that will be evaluated
    when the component is instantiated.
    """

    def __init__(self, name, _type, default=None, validator=None):
        """
        Construct a parameter descriptor.
        :type name: str
        :param name: The name of the parameter
        :type _type: u.String | u.Number | u.Object | u.Enum | u.Any
        :param _type: The type of the parameter
        :type default: str | float | int | dict
        :param default: The default value for the parameter.
        :param validator: Additional validator for the parameter.
        """
        # Reject anything that is not one of the banana parameter types.
        if not isinstance(_type, (u.String, u.Number, u.Object,
                                  u.Enum, u.Any)):
            raise Exception("ParamDescriptor incorrectly defined")
        self.param_name = name
        self.default_value = default
        self.param_type = _type
        self.validator = validator

View File

@ -17,7 +17,6 @@
import logging
import monasca_analytics.config.const as const
import monasca_analytics.config.validation as validation
import monasca_analytics.exception.monanas as err
@ -54,7 +53,7 @@ def _perform_all_connections(connection_kind, _config, components):
"""
links = {}
for origin_id in _config[connection_kind].keys():
for comp_type in validation.valid_connection_types.keys():
for comp_type in const.components_types:
if origin_id in components[comp_type]:
component = components[comp_type][origin_id]
connections_list = _config[connection_kind][origin_id]

View File

@ -16,7 +16,7 @@
import logging
from monasca_analytics.config import validation
from monasca_analytics.config import const
from monasca_analytics.util import common_util
logger = logging.getLogger(__name__)
@ -31,7 +31,7 @@ def create_components(_config):
:returns: Created components indexed by type and ID
"""
components = {}
for component_type in validation.valid_connection_types.keys():
for component_type in const.components_types:
components[component_type] = \
_create_comps_by_module(component_type, _config)
return components

View File

@ -17,8 +17,11 @@
"""Banana Error classes."""
import abc
import pyparsing as p
import six
import monasca_analytics.banana.grammar.base_ast as ast
@six.add_metaclass(abc.ABCMeta)
class BananaException(Exception):
@ -27,6 +30,14 @@ class BananaException(Exception):
def __str__(self):
pass
@abc.abstractmethod
def get_span(self):
"""
:rtype: ast.Span
:return: Returns the span where the error occured if appropriate
"""
pass
class BananaInvalidExpression(BananaException):
def __init__(self, value):
@ -35,6 +46,9 @@ class BananaInvalidExpression(BananaException):
def __str__(self):
return repr(self._value)
def get_span(self):
return ast.DUMMY_SPAN
class BananaEnvironmentError(BananaException):
def __init__(self, value):
@ -43,11 +57,299 @@ class BananaEnvironmentError(BananaException):
def __str__(self):
return repr(self._value)
def get_span(self):
return ast.DUMMY_SPAN
class BananaArgumentTypeError(BananaException):
def __init__(self, expected_type, received_type):
self._value = "Wrong type of argument: expected '{}' got '{}'".\
format(expected_type.__name__, received_type.__name__)
class BananaNoFullPath(BananaException):
    """Raised when no pipeline path contains every required component."""

    def __init__(self, missing):
        # `missing` names the component kind absent from every path.
        self._value = "None of the paths can be executed. Missing at least" \
                      " one {}.".format(missing)

    def __str__(self):
        # Bug fix: there were two consecutive return statements
        # (`return repr(self._value)` then `return self._value`), the
        # second being unreachable. Keep the plain value, which is what
        # every other exception in this module returns.
        return self._value

    def get_span(self):
        return ast.DUMMY_SPAN
class BananaArgumentTypeError(BananaException):
    """An argument of the wrong type was passed to a component."""

    def __init__(self, where, expected_type, received_type):
        # `where` may be an AST node or a span; normalize it to a span.
        span = where.span if isinstance(where, ast.ASTNode) else where
        self._span = span
        self._value = "'{}': Wrong type of argument. Expected '{}' got '{}'"\
            .format(span.get_line(), expected_type, received_type)

    def __str__(self):
        return self._value

    def get_span(self):
        return self._span
class BananaComponentTooManyParams(BananaException):
    """More arguments were supplied than the component accepts."""

    def __init__(self, span):
        self._span = span
        self._value = "Too many params provided to '{}' (line {})".format(
            span, span.get_lineno()
        )

    def __str__(self):
        return self._value

    def get_span(self):
        return self._span


class BananaComponentMixingParams(BananaException):
    """Named and positional arguments were mixed in a component call."""

    def __init__(self, span, named_is_wrong):
        self._span = span
        # Pick the message template matching which style is at fault.
        if named_is_wrong:
            template = "'{}' should be named as " \
                       "previous parameters are."
        else:
            template = "'{}' should not be named as " \
                       "previous parameters are."
        self._value = template.format(span)

    def __str__(self):
        return self._value

    def get_span(self):
        return self._span


class BananaComponentIncorrectParamName(BananaException):
    """A named argument matches no parameter of the component."""

    def __init__(self, found, component):
        # Normalize both arguments to spans when AST nodes are passed.
        if isinstance(component, ast.ASTNode):
            component = component.span
        self._span = found.span if isinstance(found, ast.ASTNode) else found
        self._value = "Incorrect parameter name. Parameter '{}' " \
                      "does not exists on component {}."\
            .format(self._span, component)

    def __str__(self):
        return self._value

    def get_span(self):
        return self._span
class BananaComponentAlreadyDefined(BananaException):
    """The same component was defined twice."""

    def __init__(self, first_def, second_def):
        self._value = "Component already defined!\n" \
                      " First definition: '{}'\n" \
                      " Second definition: '{}'"\
            .format(first_def, second_def)

    def __str__(self):
        return self._value

    def get_span(self):
        # TODO(Joan): This could be a real span instead of this one.
        return ast.DUMMY_SPAN


class BananaShadowingComponentError(BananaException):
    """An assignment would shadow a previously defined component."""

    def __init__(self, where, comp):
        self._span = where
        self._value = "Shadowing component '{}'. " \
                      "Please use another variable name.".format(comp)

    def __str__(self):
        return self._value

    def get_span(self):
        return self._span


class BananaAssignmentError(BananaException):
    """Invalid assignment between two expressions."""

    def __init__(self, lhs, rhs):
        self._value = "You can't assign '{}' to '{}'".format(lhs, rhs)

    def __str__(self):
        return self._value

    def get_span(self):
        # No location information is tracked for this error.
        return ast.DUMMY_SPAN
class BananaGrammarBug(BananaException, p.ParseFatalException):
    """Internal error: the grammar reached an unexpected state."""

    def __init__(self, error):
        super(BananaGrammarBug, self).__init__(pstr=error)
        self._value = "Bug found in the grammar!" \
                      " Please report this error: {}".format(error)

    def __str__(self):
        return self._value

    def get_span(self):
        return ast.DUMMY_SPAN


class BananaJsonObjShadowingError(BananaException, p.ParseFatalException):
    """A JSON object property shadows one already defined."""

    def __init__(self, span, error):
        self._span = span
        message = "Can't shadow property already defined in {}".format(error)
        super(BananaJsonObjShadowingError, self).__init__(pstr=message)

    def __str__(self):
        # ParseFatalException exposes the `pstr` argument as `self.msg`.
        return self.msg

    def get_span(self):
        return self._span


class BananaTypeCheckerBug(BananaException):
    """Internal error in the type checker."""

    def __init__(self, error):
        self._value = "Bug found in the TypeChecker!" \
                      " Please report this error: {}".format(error)

    def __str__(self):
        return self._value

    def get_span(self):
        return ast.DUMMY_SPAN


class BananaEvalBug(BananaException):
    """Internal error in the evaluator."""

    def __init__(self, error):
        self._value = "Bug found in the evaluator!" \
                      " Please report this error: {}".format(error)

    def __str__(self):
        return self._value

    def get_span(self):
        return ast.DUMMY_SPAN
class BananaUnknown(BananaException):
    """Reference to an unknown identifier."""

    def __init__(self, ident):
        self._span = ident.span
        self._value = "Unknown '{}'".format(ident.into_unmodified_str())

    def __str__(self):
        return self._value

    def get_span(self):
        return self._span


class BananaUnknownOperator(BananaException):
    """An operator is not supported for the operand's type."""

    def __init__(self, span, operator, for_type):
        self._span = span
        self._value = "Unknown operator '{}' for type '{}'".format(
            operator, for_type)

    def __str__(self):
        return self._value

    def get_span(self):
        return self._span


class BananaPropertyDoesNotExists(BananaException):
    """Access to a property that is not defined on a value."""

    def __init__(self, dotpath, on_type=None):
        self._span = dotpath.span
        line = dotpath.span.get_line()
        prop = dotpath.into_unmodified_str()
        if on_type is None:
            self._value = "Error at '{}': Property '{}' " \
                          "does not exists".format(line, prop)
        else:
            self._value = "Error at '{}': Property '{}' " \
                          "does not exists on type '{}'".format(
                              line, prop, str(on_type))

    def __str__(self):
        return self._value

    def get_span(self):
        return self._span
class BananaTypeError(BananaException):
    """General type mismatch detected by the type checker."""

    def __init__(self, expected_type, found_type=None, span=None):
        """
        :param expected_type: Expected type, or a list of acceptable
            types; may be None when unknown.
        :param found_type: The type actually found, if any.
        :param span: Location of the error, when available.
        """
        self._span = span
        if expected_type is None:
            # Placeholder rendered as '_' in the message.
            class DummyType(object):
                def __str__(self):
                    return "_"
            # Bug fix: the class itself used to be assigned, so
            # str(expected_type) produced "<class ...DummyType>" instead
            # of "_". Assign an instance so __str__ is actually used.
            expected_type = DummyType()
        if found_type is None:
            if isinstance(expected_type, list):
                self._value = "Type error found. Expected" \
                              " one among '{}'"\
                    .format(', '.join(map(lambda x: str(x), expected_type)))
            else:
                self._value = "Type error found. Expected '{}'".format(
                    str(expected_type)
                )
        else:
            if isinstance(expected_type, list):
                self._value = "Type error found. Expected" \
                              " one among '{}', found '{}'"\
                    .format(', '.join(map(lambda x: str(x), expected_type)),
                            str(found_type))
            else:
                self._value = "Type error found. Expected" \
                              " '{}', found '{}'"\
                    .format(str(expected_type), str(found_type))

    def __str__(self):
        return self._value

    def get_span(self):
        if self._span is None:
            return ast.DUMMY_SPAN
        return self._span


class BananaAssignCompError(BananaException):
    """Attempt to assign a component to a property of an object."""

    def __init__(self, span):
        self._span = span
        self._value = "Component objects " \
                      "can't be assigned to " \
                      "properties of other objects"

    def __str__(self):
        return self._value

    def get_span(self):
        return self._span


class BananaConnectionError(BananaException):
    """Connection between two components that may not be linked."""

    def __init__(self, ident_from, ident_to, type_from, possible_connections):
        self._span = ident_to.span
        self._value = "Can't connect '{}' (line:{})" \
                      " to '{}' (line:{})," \
                      " '{}' can only be connected to {}"\
            .format(
                ident_from.val, ident_from.span.get_lineno(),
                ident_to.val, ident_to.span.get_lineno(),
                type_from, possible_connections)

    def __str__(self):
        return self._value

    def get_span(self):
        return self._span

View File

@ -40,6 +40,10 @@ class CloudIngestor(base.BaseIngestor):
}, required=True)
return cloud_schema(_config)
@staticmethod
def get_params():
return []
def map_dstream(self, dstream):
features_list = list(self._features)
return dstream.map(fn.from_json)\

View File

@ -53,6 +53,10 @@ class IptablesIngestor(base.BaseIngestor):
def get_default_config():
return {"module": IptablesIngestor.__name__}
@staticmethod
def get_params():
return []
def map_dstream(self, dstream):
features_list = list(self._features)
return dstream.map(fn.from_json)\

View File

@ -40,6 +40,10 @@ class CloudCausalityLDP(bt.BaseLDP):
def get_default_config():
return {"module": CloudCausalityLDP.__name__}
@staticmethod
def get_params():
return []
def map_dstream(self, dstream):
"""Executes _aggregate for each RDD in the dstream

View File

@ -44,6 +44,10 @@ class IptablesLDP(bt.BaseLDP):
def get_default_config():
return {"module": IptablesLDP.__name__}
@staticmethod
def get_params():
return []
def map_dstream(self, dstream):
"""Detect anomalies in a dstream using the learned classifier

View File

@ -17,6 +17,8 @@
import logging
import voluptuous
import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.component.params as params
import monasca_analytics.ldp.base as bt
import monasca_analytics.ldp.monasca.helpers as helpers
@ -31,24 +33,22 @@ class MonascaAggregateLDP(bt.BaseLDP):
def __init__(self, _id, _config):
super(MonascaAggregateLDP, self).__init__(_id, _config)
self._aggregation_period = _config["params"]["aggregation_period"]
self._aggregation_period = _config["period"]
self._reducer_func = MonascaAggregateLDP.select_reducer(_config)
self._suffix = "_" + _config["params"]["aggregation_function"]
self._suffix = "_" + _config["func"]
@staticmethod
def validate_config(_config):
monasca_ag_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"params": {
"aggregation_period": int,
"aggregation_function": voluptuous.Or(
"avg",
"max",
"sum",
"min",
"cnt"
)
}
"period": voluptuous.Or(float, int),
"func": voluptuous.Or(
"avg",
"max",
"sum",
"min",
"cnt"
)
}, required=True)
return monasca_ag_schema(_config)
@ -56,13 +56,21 @@ class MonascaAggregateLDP(bt.BaseLDP):
def get_default_config():
return {
"module": MonascaAggregateLDP.__name__,
"params": {
# One hour
"aggregation_period": 60 * 60,
"aggregation_function": "avg"
}
"period": 60.0 * 60.0,
"func": "avg"
}
@staticmethod
def get_params():
return [
params.ParamDescriptor('period', type_util.Number(), 60 * 60),
params.ParamDescriptor(
'func',
type_util.Enum(['avg', 'max', 'sum', 'min', 'cnt']),
'avg'
)
]
def map_dstream(self, dstream):
"""
Map the given DStream into a new DStream where metrics
@ -185,4 +193,4 @@ class MonascaAggregateLDP(bt.BaseLDP):
lambda m, cnt: m,
lambda acc, m, cnt: cnt,
),
}[_config["params"]["aggregation_function"]]
}[_config["func"]]

View File

@ -15,11 +15,15 @@
# under the License.
import logging
import math
import voluptuous
import monasca_analytics.banana.api as banana
import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.component.params as params
import monasca_analytics.ldp.base as bt
import monasca_analytics.ldp.monasca.helpers as helpers
import monasca_analytics.parsing.api as parsing
import monasca_analytics.util.spark_func as fn
from monasca_analytics.util import validation_utils as vu
@ -31,16 +35,15 @@ class MonascaCombineLDP(bt.BaseLDP):
def __init__(self, _id, _config):
super(MonascaCombineLDP, self).__init__(_id, _config)
logger.debug(_config["params"]["metric_names_binding"])
logger.debug(_config["params"]["lambda"])
self._combine_function = banana.create_fn_with_config(
env=_config["params"]["metric_names_binding"],
expr_string=_config["params"]["lambda"]
logger.debug(_config["bindings"])
logger.debug(_config["lambda"])
self._combine_function = parsing.create_fn_with_config(
env=_config["bindings"],
expr_string=_config["lambda"]
)
self._combine_period = _config["params"]["combine_period"]
self._combine_metric_name = _config["params"]["metric_name"]
self._metrics_of_interest = _config["params"][
"metric_names_binding"].values()
self._combine_period = _config["period"]
self._combine_metric_name = _config["metric"]
self._metrics_of_interest = _config["bindings"].values()
def map_dstream(self, dstream):
"""
@ -143,92 +146,115 @@ class MonascaCombineLDP(bt.BaseLDP):
def validate_config(_config):
monasca_comb_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"params": {
"metric_name": basestring,
"combine_period": int,
"lambda": basestring,
"metric_names_binding": {
basestring: voluptuous.Or(
"apache.net.kbytes_sec",
"apache.net.requests_sec",
"apache.performance.cpu_load_perc",
"cpu.idle_perc",
"cpu.stolen_perc",
"cpu.system_perc",
"cpu.total_logical_cores",
"cpu.user_perc",
"cpu.wait_perc",
"disk.allocation",
"disk.inode_used_perc",
"disk.space_used_perc",
"disk.total_space_mb",
"disk.total_used_space_mb",
"host_alive_status",
"io.read_kbytes_sec",
"io.read_req_sec",
"io.write_time_sec",
"kafka.consumer_lag",
"load.avg_1_min",
"load.avg_5_min",
"mem.free_mb",
"mem.swap_free_mb",
"mem.swap_total_mb",
"mem.total_mb",
"mem.usable_mb",
"mem.used_cache",
"metrics-added-to-batch-counter[0]",
"mysql.innodb.buffer_pool_free",
"mysql.innodb.buffer_pool_used",
"mysql.innodb.data_reads",
"mysql.innodb.mutex_spin_rounds",
"mysql.performance.com_delete_multi",
"mysql.performance.com_insert",
"mysql.performance.com_insert_select",
"mysql.performance.com_select",
"mysql.performance.com_update",
"mysql.performance.created_tmp_disk_tables",
"mysql.performance.created_tmp_files",
"mysql.performance.open_files",
"mysql.performance.questions",
"mysql.performance.user_time",
"net.in_bytes_sec",
"net.in_errors_sec",
"net.in_packets_dropped_sec",
"net.in_packets_sec",
"net.out_bytes_sec",
"net.out_errors_sec",
"net.out_packets_dropped_sec",
"net.out_packets_sec",
"nova.vm.disk.total_allocated_gb",
"process.pid_count",
"raw-sql.time.max",
"vcpus",
"vm.cpu.utilization_perc",
"vm.host_alive_status",
"vm.mem.total_mb",
"zookeeper.out_bytes",
"zookeeper.outstanding_bytes"
)
}
"metric": basestring,
"period": voluptuous.And(
voluptuous.Or(float, int),
lambda i: i >= 0 and math.floor(i) == math.ceil(i)),
"lambda": basestring,
"bindings": {
basestring: voluptuous.Or(
"apache.net.kbytes_sec",
"apache.net.requests_sec",
"apache.performance.cpu_load_perc",
"cpu.idle_perc",
"cpu.stolen_perc",
"cpu.system_perc",
"cpu.total_logical_cores",
"cpu.user_perc",
"cpu.wait_perc",
"disk.allocation",
"disk.inode_used_perc",
"disk.space_used_perc",
"disk.total_space_mb",
"disk.total_used_space_mb",
"host_alive_status",
"io.read_kbytes_sec",
"io.read_req_sec",
"io.write_time_sec",
"kafka.consumer_lag",
"load.avg_1_min",
"load.avg_5_min",
"mem.free_mb",
"mem.swap_free_mb",
"mem.swap_total_mb",
"mem.total_mb",
"mem.usable_mb",
"mem.used_cache",
"metrics-added-to-batch-counter[0]",
"mysql.innodb.buffer_pool_free",
"mysql.innodb.buffer_pool_used",
"mysql.innodb.data_reads",
"mysql.innodb.mutex_spin_rounds",
"mysql.performance.com_delete_multi",
"mysql.performance.com_insert",
"mysql.performance.com_insert_select",
"mysql.performance.com_select",
"mysql.performance.com_update",
"mysql.performance.created_tmp_disk_tables",
"mysql.performance.created_tmp_files",
"mysql.performance.open_files",
"mysql.performance.questions",
"mysql.performance.user_time",
"net.in_bytes_sec",
"net.in_errors_sec",
"net.in_packets_dropped_sec",
"net.in_packets_sec",
"net.out_bytes_sec",
"net.out_errors_sec",
"net.out_packets_dropped_sec",
"net.out_packets_sec",
"nova.vm.disk.total_allocated_gb",
"process.pid_count",
"raw-sql.time.max",
"vcpus",
"vm.cpu.utilization_perc",
"vm.host_alive_status",
"vm.mem.total_mb",
"zookeeper.out_bytes",
"zookeeper.outstanding_bytes"
)
}
}, required=True)
monasca_comb_schema(_config)
# Checks the expression and the environment
handle = banana.validate_expression(_config["params"]["lambda"])
banana.validate_name_binding(handle,
_config["params"]["metric_names_binding"])
handle = parsing.validate_expression(_config["lambda"])
parsing.validate_name_binding(handle,
_config["bindings"])
@staticmethod
def get_default_config():
return {
"module": MonascaCombineLDP.__name__,
"params": {
"metric_name": "cpu.logical_cores_actives",
"combine_period": 1,
"lambda": "a * b",
"metric_names_binding": {
"a": "cpu.idle_perc",
"b": "cpu.total_logical_cores"
}
"metric": "cpu.logical_cores_actives",
"period": 1,
"lambda": "a * b",
"bindings": {
"a": "cpu.idle_perc",
"b": "cpu.total_logical_cores"
}
}
@staticmethod
def get_params():
return [
params.ParamDescriptor(
'metric',
type_util.String(),
'cpu.logcal_cores_actives'
),
params.ParamDescriptor(
'period',
type_util.Number(),
1
),
params.ParamDescriptor(
'lambda',
type_util.String(),
'a * b'
),
params.ParamDescriptor(
'bindings',
type_util.Any(),
{'a': 'cpu.ilde_perc', 'b': 'cpu.total_logical_cores'}
)
]

View File

@ -15,8 +15,12 @@
# under the License.
import logging
import math
import voluptuous
import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.component.params as params
import monasca_analytics.ldp.base as bt
import monasca_analytics.ldp.monasca.helpers as helpers
import monasca_analytics.util.spark_func as fn
@ -38,16 +42,16 @@ class MonascaDerivativeLDP(bt.BaseLDP):
def __init__(self, _id, _config):
super(MonascaDerivativeLDP, self).__init__(_id, _config)
self._period = _config["params"]["derivative_period"]
self._period = _config["period"]
@staticmethod
def validate_config(_config):
monasca_der_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"params": {
# Derivative period in multiple of batch interval
"derivative_period": int
}
# Derivative period in multiple of batch interval
"period": voluptuous.And(
voluptuous.Or(float, int),
lambda i: i >= 0 and math.floor(i) == math.ceil(i))
}, required=True)
return monasca_der_schema(_config)
@ -55,11 +59,15 @@ class MonascaDerivativeLDP(bt.BaseLDP):
def get_default_config():
return {
"module": MonascaDerivativeLDP.__name__,
"params": {
"derivative_period": 1
}
"period": 1
}
@staticmethod
def get_params():
return [
params.ParamDescriptor('period', type_util.Number(), 1)
]
def map_dstream(self, dstream):
"""
Map the given DStream into a new DStream where metrics

View File

@ -21,10 +21,12 @@ import sys
from tornado import ioloop
import voluptuous
from monasca_analytics.exception import monanas as err
from monasca_analytics.spark import driver
from monasca_analytics.util import common_util as cu
from monasca_analytics.web_service import web_service as ws
import monasca_analytics.banana.emitter as emit
import monasca_analytics.banana.pass_manager as executor
import monasca_analytics.exception.monanas as err
import monasca_analytics.spark.driver as driver
import monasca_analytics.util.common_util as cu
import monasca_analytics.web_service.web_service as ws
logger = logging.getLogger(__name__)
@ -61,6 +63,19 @@ class Monanas(object):
"""
return self._is_streaming
def try_change_configuration(self, banana_str, emitter):
"""Try to change the configuration to the provided one.
:type banana_str: str
:param banana_str: New configuration.
:type emitter: emit.JsonEmitter
:param emitter: a Json emitter instance
"""
if not isinstance(emitter, emit.JsonEmitter):
raise err.MonanasException()
# Try to change the configuration.
executor.execute_banana_string(banana_str, self._driver, emitter)
def start_streaming(self):
"""Starts streaming data.

View File

View File

@ -14,10 +14,10 @@
# License for the specific language governing permissions and limitations
# under the License.
from expression import create_fn_with_config
from expression import validate_environment
from expression import validate_expression
from expression import validate_name_binding
from monasca_analytics.parsing.expression import create_fn_with_config
from monasca_analytics.parsing.expression import validate_environment
from monasca_analytics.parsing.expression import validate_expression
from monasca_analytics.parsing.expression import validate_name_binding
create_fn_with_config = create_fn_with_config
validate_expression = validate_expression

View File

@ -15,13 +15,12 @@
# under the License.
import logging
import pyparsing as p
import types
import monasca_analytics.banana.bytecode.assembler as asbl
import monasca_analytics.banana.private as priv
import monasca_analytics.exception.banana as exception
import pyparsing as p
import monasca_analytics.parsing.private as priv
logger = logging.getLogger(__name__)

View File

@ -14,12 +14,12 @@
# License for the specific language governing permissions and limitations
# under the License.
BANANA_OPERATOR_LIST = ['+', '-', '*', '/']
EXPRESSION_OPERATOR_LIST = ['+', '-', '*', '/']
def is_op(x):
return x in BANANA_OPERATOR_LIST
return x in EXPRESSION_OPERATOR_LIST
def is_not_op(x):
return x not in BANANA_OPERATOR_LIST
return x not in EXPRESSION_OPERATOR_LIST

View File

@ -20,6 +20,8 @@ import tempfile
import time
import voluptuous
import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.component.params as params
import monasca_analytics.sink.base as base
from monasca_analytics.util import validation_utils as vu
@ -32,8 +34,8 @@ class FileSink(base.BaseSink):
def __init__(self, _id, _config):
super(FileSink, self).__init__(_id, _config)
if "params" in _config:
_path = path.expanduser(_config["params"]["path"])
if _config["path"] is not None:
_path = path.expanduser(_config["path"])
if path.isdir(_path):
_path = path.join(_path, time.time() + '.log')
self._file_path = _path
@ -63,17 +65,19 @@ class FileSink(base.BaseSink):
def get_default_config():
return {
"module": FileSink.__name__,
"params": {
"path": None
}
"path": None
}
@staticmethod
def get_params():
return [params.ParamDescriptor('path', type_util.String())]
@staticmethod
def validate_config(_config):
file_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter()),
voluptuous.Optional("params"): {
"path": voluptuous.And(basestring, vu.ExistingPath())
}
"path": voluptuous.Or(
voluptuous.And(basestring, vu.ExistingPath()),
None)
}, required=True)
return file_schema(_config)

View File

@ -16,6 +16,8 @@
import voluptuous
import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.component.params as params
import monasca_analytics.sink.base_sqlite as base
from monasca_analytics.util import validation_utils as vu
@ -36,11 +38,17 @@ class IptablesSQLiteSink(base.BaseSQLiteSink):
def get_default_config():
return {
"module": IptablesSQLiteSink.__name__,
"params": {
"db_name": "sqlite_sink.db"
}
"db_name": "sqlite_sink.db"
}
@staticmethod
def get_params():
return [
params.ParamDescriptor('db_name',
type_util.String(),
'sqlite_sink.db')
]
@staticmethod
def validate_config(_config):
iptables_sql_schema = voluptuous.Schema({

View File

@ -19,6 +19,8 @@ import time
import kafka
import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.component.params as params
from monasca_analytics.sink import base
import monasca_analytics.sink.sink_config_validator as validator
@ -30,13 +32,14 @@ class KafkaSink(base.BaseSink):
self._topic = None
self._producer = None
super(KafkaSink, self).__init__(_id, _config)
host = _config["params"]["host"]
port = _config["params"]["port"]
self._topic = _config["params"]["topic"]
self._producer = kafka.KafkaProducer(bootstrap_servers="{0}:{1}"
.format(host, port))
self._host = _config["host"]
self._port = int(_config["port"])
self._topic = _config["topic"]
def sink_dstream(self, dstream):
if self._producer is None:
self._producer = kafka.KafkaProducer(
bootstrap_servers="{0}:{1}".format(self._host, self._port))
dstream.foreachRDD(self._persist)
def _persist(self, _, rdd):
@ -64,9 +67,16 @@ class KafkaSink(base.BaseSink):
def get_default_config():
return {
"module": KafkaSink.__name__,
"params": {
"host": "localhost",
"port": 9092,
"topic": "transformed_alerts"
}
"host": "localhost",
"port": 9092,
"topic": "transformed_alerts"
}
@staticmethod
def get_params():
return [
params.ParamDescriptor('host', type_util.String(), 'localhost'),
params.ParamDescriptor('port', type_util.Number(), 9092),
params.ParamDescriptor('topic', type_util.String(),
'transformed_alerts')
]

View File

@ -16,6 +16,7 @@
"""A list of functions for validating sink configs."""
import math
import voluptuous
from monasca_analytics.util import validation_utils as vu
@ -26,12 +27,12 @@ def validate_kafka_sink_config(config):
config_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.AvailableSink()),
"params": {
"host": voluptuous.And(
basestring, vu.NoSpaceCharacter()),
"port": int,
"topic": voluptuous.And(
basestring, vu.NoSpaceCharacter())
}
"host": voluptuous.And(
basestring, vu.NoSpaceCharacter()),
"port": voluptuous.And(
voluptuous.Or(float, int),
lambda i: i >= 0 and math.floor(i) == math.ceil(i)),
"topic": voluptuous.And(
basestring, vu.NoSpaceCharacter())
}, required=True)
return config_schema(config)

View File

@ -39,3 +39,7 @@ class StdoutSink(base.BaseSink):
"module": voluptuous.And(basestring, vu.NoSpaceCharacter())
}, required=True)
stdout_schema(_config)
@staticmethod
def get_params():
return []

View File

@ -21,6 +21,9 @@ import numpy as np
from sklearn import decomposition
import voluptuous
import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.component.params as params
from monasca_analytics.sml import base
from monasca_analytics.util import validation_utils as vu
@ -32,16 +35,14 @@ class LiNGAM(base.BaseSML):
def __init__(self, _id, _config):
super(LiNGAM, self).__init__(_id, _config)
self._threshold = _config["params"]["threshold"]
self._threshold = _config["threshold"]
self._threshold = 0.1
@staticmethod
def validate_config(_config):
lingam_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"params": {
"threshold": float
}
"threshold": float
}, required=True)
return lingam_schema(_config)
@ -49,11 +50,15 @@ class LiNGAM(base.BaseSML):
def get_default_config():
return {
"module": LiNGAM.__name__,
"params": {
"threshold": 0.1
}
"threshold": 0.1
}
@staticmethod
def get_params():
return [
params.ParamDescriptor('threshold', type_util.Number(), 0.1)
]
def number_of_samples_required(self):
return 5000

View File

@ -48,6 +48,10 @@ class SvmOneClass(base.BaseSML):
def get_default_config():
return {"module": SvmOneClass.__name__}
@staticmethod
def get_params():
return []
def number_of_samples_required(self):
return N_SAMPLES

View File

@ -18,6 +18,9 @@ import logging
import voluptuous
import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.component.params as params
from monasca_analytics.source.markov_chain import base
from monasca_analytics.source.markov_chain import events as ev
import monasca_analytics.source.markov_chain.prob_checks as pck
@ -35,11 +38,9 @@ class CloudMarkovChainSource(base.MarkovChainSource):
source_schema = voluptuous.Schema({
"module": voluptuous.And(
basestring, vu.NoSpaceCharacter()),
"params": {
"server_sleep_in_seconds": voluptuous.And(
float, voluptuous.Range(
min=0, max=1, min_included=False, max_included=False))
},
"sleep": voluptuous.And(
float, voluptuous.Range(
min=0, max=1, min_included=False, max_included=False)),
"transitions": {
"web_service": {
"run=>slow": {
@ -88,9 +89,7 @@ class CloudMarkovChainSource(base.MarkovChainSource):
def get_default_config():
return {
"module": CloudMarkovChainSource.__name__,
"params": {
"server_sleep_in_seconds": 0.01
},
"sleep": 0.01,
"transitions": {
"web_service": {
"run=>slow": {
@ -141,6 +140,33 @@ class CloudMarkovChainSource(base.MarkovChainSource):
}
}
@staticmethod
def get_params():
return [
params.ParamDescriptor('sleep', type_util.Number(), 0.01),
params.ParamDescriptor('transitions', type_util.Object({
'web_service': type_util.Object({
'run=>slow': type_util.Any(),
'slow=>run': type_util.Any(),
'stop=>run': type_util.Any(),
}),
'switch': type_util.Object({
'on=>off': type_util.Number(),
'off=>on': type_util.Number(),
}),
'host': type_util.Object({
'on=>off': type_util.Number(),
'off=>on': type_util.Number(),
})
})),
params.ParamDescriptor('triggers', type_util.Object({
'support': type_util.Object({
'get_called': type_util.Any()
})
})),
params.ParamDescriptor('graph', type_util.Any())
]
def get_feature_list(self):
node_names = [k.split(":")[0]
for k in dict(self._config["graph"]).keys()]

View File

@ -18,6 +18,9 @@ import logging
import voluptuous
import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.component.params as params
from monasca_analytics.source.markov_chain import base
from monasca_analytics.source.markov_chain import events
from monasca_analytics.source.markov_chain import prob_checks as pck
@ -65,11 +68,10 @@ class IPTablesSource(base.MarkovChainSource):
def validate_config(_config):
source_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"params": {
"server_sleep_in_seconds": voluptuous.And(
float, voluptuous.Range(
min=0, max=1, min_included=False, max_included=False))
}
"sleep": voluptuous.And(
float,
voluptuous.Range(
min=0, max=1, min_included=False, max_included=False)),
}, required=True)
return source_schema(_config)
@ -77,11 +79,15 @@ class IPTablesSource(base.MarkovChainSource):
def get_default_config():
return {
"module": IPTablesSource.__name__,
"params": {
"server_sleep_in_seconds": 0.01
}
"sleep": 0.01,
}
@staticmethod
def get_params():
return [
params.ParamDescriptor('sleep', type_util.Number(), 0.01)
]
def get_feature_list(self):
return iptable_types

View File

@ -19,6 +19,9 @@ import logging
from pyspark.streaming import kafka
import voluptuous
import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.component.params as params
from monasca_analytics.source import base
from monasca_analytics.util import validation_utils as vu
@ -58,6 +61,19 @@ class KafkaSource(base.BaseSource):
}
}
@staticmethod
def get_params():
return [
params.ParamDescriptor('zk_host', type_util.String(),
'localhost'),
params.ParamDescriptor('zk_port', type_util.Number(),
2181),
params.ParamDescriptor('group_id', type_util.String(),
'my_group_id'),
params.ParamDescriptor('topics',
type_util.Object(strict_checking=False))
]
def create_dstream(self, ssc):
"""Dstream creation

View File

@ -83,8 +83,7 @@ class MarkovChainSource(base.BaseSource):
self._server.server_activate()
self._server.terminate = False
self._server.system = system
self._server.sleep_in_seconds = self._config[
"params"]["server_sleep_in_seconds"]
self._server.sleep_in_seconds = self._config["sleep"]
self._server_thread = threading.Thread(target=self._serve_forever)
self._server_thread.start()

View File

@ -18,6 +18,8 @@ import logging
import random
import voluptuous
import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.component.params as params
import monasca_analytics.source.markov_chain.base as base
import monasca_analytics.source.markov_chain.events as ev
@ -37,11 +39,9 @@ class MonascaMarkovChainSource(base.MarkovChainSource):
def validate_config(_config):
markov_schema = voluptuous.Schema({
"module": voluptuous.And(basestring, vu.NoSpaceCharacter()),
"params": {
"server_sleep_in_seconds": voluptuous.And(
float, voluptuous.Range(
min=0, max=1, min_included=False, max_included=False))
},
"sleep": voluptuous.And(
float, voluptuous.Range(
min=0, max=1, min_included=False, max_included=False)),
}, required=True)
return markov_schema(_config)
@ -49,11 +49,15 @@ class MonascaMarkovChainSource(base.MarkovChainSource):
def get_default_config():
return {
"module": MonascaMarkovChainSource.__name__,
"params": {
"server_sleep_in_seconds": 0.01
}
"sleep": 0.01,
}
@staticmethod
def get_params():
return [
params.ParamDescriptor('sleep', type_util.Number(), 0.01)
]
def get_feature_list(self):
return ["vm1", "vm2", "host1", "host2"]

View File

@ -26,6 +26,9 @@ import time
import uuid
import voluptuous
import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.component.params as params
import monasca_analytics.exception.monanas as err
from monasca_analytics.source import base
from monasca_analytics.util import validation_utils as vu
@ -126,6 +129,22 @@ class RandomSource(base.BaseSource):
}
}
@staticmethod
def get_params():
return [
params.ParamDescriptor('host', type_util.String(), 'localhost'),
params.ParamDescriptor('port', type_util.Number(), 1010),
params.ParamDescriptor('model', type_util.Object({
'name': type_util.String(),
'params': type_util.Object({
'origin_types': type_util.Object(strict_checking=False)
})
})),
params.ParamDescriptor('alert_per_burst', type_util.Number(), 1),
params.ParamDescriptor('idle_time_between_bursts',
type_util.Number(), 1.0),
]
def _start_server(self):
if not self._is_server_running:
self._server_thread.start()

View File

@ -39,9 +39,10 @@ class DriverExecutor(object):
"""
def __init__(self, _config):
self._links = config.instantiate_components(_config)
self._sources = config.collect_sources(self._links)
self._orchestrator = agg.Aggregator(self)
self._links = None
self._sources = None
self._orchestrator = None
self.set_links(config.instantiate_components(_config))
def restart_spark():
self._ssc = streamingctx.create_streaming_context(
@ -52,6 +53,18 @@ class DriverExecutor(object):
self._sc = pyspark.SparkContext(
appName=_config["spark_config"]["appName"])
self._ssc = streamingctx.create_streaming_context(self._sc, _config)
def set_links(self, links):
"""Set new set of links
This function has no effect on the current pipeline.
In order to use them, you need to restart the pipeline.
"""
self._links = links
logger.debug("Collect sources...")
self._sources = config.collect_sources(self._links)
logger.debug("New list of sources: {}".format(self._sources))
self._orchestrator = agg.Aggregator(self)
logger.debug("Propagating feature list...")
self._propagate_feature_list()
@ -81,10 +94,12 @@ class DriverExecutor(object):
self._ssc.start()
def stop_pipeline(self):
logger.debug("Stop spark context.")
self._ssc.stop(False, False)
logger.debug("Terminate sources.")
self._terminate_sources()
self._ssc.awaitTermination()
self._ssc = None
self._sc = None
logger.debug("Restart spark context.")
self._restart_spark()
def move_to_phase2(self):
if self._ssc is not None:

View File

@ -36,9 +36,6 @@ from monasca_analytics import voter
logger = logging.getLogger(__name__)
available_classes = None
def parse_json_file(filename):
"""Parses json and return a dict.

View File

@ -0,0 +1,123 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def array_to_str(array, multiline=False, indent=None):
    """
    Convert the provided list into a readable str, calling str on each
    element and recursing into nested lists and dicts.

    :type array: list
    :param array: the list to convert.
    :type multiline: bool
    :param multiline: If each element should be on its own line.
    :type indent: int
    :param indent: Indentation if multiline is True.
    :rtype: str
    :return: Returns the converted list.
    """
    if not array:
        return "[]"
    # Supplying an indent implies multiline rendering.
    multiline = multiline or indent is not None

    def render(element):
        # Recurse for containers; plain str() for everything else.
        if isinstance(element, list):
            return array_to_str(element, multiline, indent)
        if isinstance(element, dict):
            return dict_to_str(element, multiline, indent)
        return str(element)

    separator = ",\n" if multiline else ", "
    if indent is not None:
        # NOTE: the indent is placed before the comma (matching the
        # historical output format of this helper).
        separator = " " * indent + separator
    body = separator.join(render(item) for item in array)
    if multiline:
        return "[\n" + body + "\n]"
    return "[" + body + "]"
def dict_to_str(dictionary, multiline=False, indent=None):
    """
    Convert the provided dictionary into a readable str, by calling
    str on both the keys and the values. Entries are sorted by the
    stringified key so the output is deterministic.

    :type dictionary: dict
    :param dictionary: the dictionary to convert.
    :type multiline: bool
    :param multiline: If each key value pair should be on its own line.
    :type indent: int
    :param indent: Indentation if multiline is True.
    :rtype: str
    :return: Returns the converted dict.
    """
    if len(dictionary) == 0:
        return "{}"
    res = "{"
    if multiline:
        res += "\n"
    # NOTE(review): multiline is promoted *after* the opening brace is
    # written, so passing only `indent` indents the entries but keeps
    # the brace on the same line -- kept as-is to preserve output.
    multiline = multiline or indent is not None
    # items() (rather than the Python2-only iteritems()) keeps this
    # working on both Python 2 and Python 3.
    for k, v in sorted(dictionary.items(), key=lambda ke: str(ke[0])):
        if indent is not None:
            res += " " * indent
        if isinstance(v, dict):
            res += "{}: {}, ".format(str(k),
                                     dict_to_str(v, multiline, indent))
        elif isinstance(v, list):
            res += "{}: {}, ".format(str(k),
                                     array_to_str(v, multiline, indent))
        else:
            res += "{}: {}, ".format(str(k), str(v))
        if multiline:
            res += '\n'
    # Drop the trailing ", " left by the last entry.
    res = res[0:-2]
    res += "}"
    return res
def stable_repr(obj):
    """
    Convert the provided value into a 'repr' str, by calling
    repr on both the keys and the values. Dict entries are sorted by
    the stringified key so the output is stable across runs.

    :type obj: dict | list | str | float
    :param obj: the value to convert.
    :rtype: str
    :return: Returns the converted value.
    """
    if isinstance(obj, list):
        return "[" + ", ".join(map(stable_repr, obj)) + "]"
    elif not isinstance(obj, dict):
        return repr(obj)
    if len(obj) == 0:
        return "{}"
    res = "{"
    # items() (rather than the Python2-only iteritems()) keeps this
    # working on both Python 2 and Python 3.
    for k, v in sorted(obj.items(), key=lambda ke: str(ke[0])):
        res += "{}: {}, ".format(repr(k), stable_repr(v))
    # Drop the trailing ", " left by the last entry.
    res = res[0:-2]
    res += "}"
    return res

View File

@ -15,9 +15,11 @@
# under the License.
import logging
import math
import voluptuous
import monasca_analytics.banana.typeck.type_util as type_util
import monasca_analytics.component.params as params
from monasca_analytics.voter import base
logger = logging.getLogger(__name__)
@ -27,7 +29,7 @@ class PickIndexVoter(base.BaseVoter):
def __init__(self, _id, _config):
super(PickIndexVoter, self).__init__(_id, _config)
self._index = _config["params"]["index"]
self._index = _config["index"]
self._index = 0
@staticmethod
@ -35,9 +37,10 @@ class PickIndexVoter(base.BaseVoter):
pick_schema = voluptuous.Schema({
"module": voluptuous.And(
basestring, lambda i: not any(c.isspace() for c in i)),
"params": {
"index": voluptuous.And(int, lambda i: i >= 0)
}
"index": voluptuous.And(
voluptuous.Or(float, int),
lambda i: i >= 0 and math.ceil(i) == math.floor(i)
)
}, required=True)
return pick_schema(_config)
@ -45,11 +48,15 @@ class PickIndexVoter(base.BaseVoter):
def get_default_config():
return {
"module": PickIndexVoter.__name__,
"params": {
"index": 0
}
"index": 0
}
@staticmethod
def get_params():
return [
params.ParamDescriptor('index', type_util.Number(), 0),
]
def elect_structure(self, structures):
return structures[
min(len(structures) - 1,

View File

@ -17,10 +17,12 @@
import json
import logging
import sys
import traceback
from tornado import web
import voluptuous
import monasca_analytics.banana.emitter as emit
import monasca_analytics.exception.monanas as err
from monasca_analytics.web_service import web_service_model
@ -45,7 +47,7 @@ class MonanasHandler(web.RequestHandler):
try:
body = json.loads(self.request.body)
getattr(web_service_model, "action_model")(body)
web_service_model.action_model(body)
getattr(self._monanas, body["action"])()
except (AttributeError, voluptuous.Invalid, ValueError):
self.set_status(400, "The request body was malformed.")
@ -67,3 +69,52 @@ class MonanasHandler(web.RequestHandler):
if terminate[0]:
logger.error(terminate[1])
self._monanas.stop_streaming_and_terminate()
class BananaHandler(web.RequestHandler):
    """
    Request handler to manage the active config using
    the banana configuration language.
    """

    def initialize(self, monanas):
        """Initialize the handler.

        :param monanas: A Monana's instance.
        """
        self._monanas = monanas

    @web.asynchronous
    def post(self):
        """Performs a Monanas's action."""
        terminate = (False, "")
        try:
            body = json.loads(self.request.body)
            web_service_model.banana_model(body)
            emitter = emit.JsonEmitter()
            # TODO(Joan): Change that
            self._monanas.try_change_configuration(body["content"], emitter)
            self.write(emitter.result)
        except (AttributeError, voluptuous.Invalid, ValueError):
            self.set_status(400, "The request body was malformed.")
        except (err.MonanasBindSourcesError,
                err.MonanasAlreadyStartedStreaming,
                err.MonanasAlreadyStoppedStreaming) as e:
            self.set_status(400, e.__str__())
        except err.MonanasStreamingError as e:
            self.set_status(500, e.__str__())
            terminate = (True, e.__str__())
        except Exception as e:
            logger.error("Unexpected error: {0}. {1}".
                         format(sys.exc_info()[0], e))
            # Route the traceback through the logger instead of
            # printing it to stdout (the original print(tb) was a
            # debug leftover).
            logger.error(traceback.format_exc())
            self.set_status(500, "Internal server error.")
        self.flush()
        self.finish()

        if terminate[0]:
            logger.error(terminate[1])
            self._monanas.stop_streaming_and_terminate()

View File

@ -26,8 +26,10 @@ class WebService(web.Application):
"""WebService constructor."""
self._monanas = monanas
self._config = config
params = {"monanas": self._monanas}
handlers = [
(r"/", request_handler.MonanasHandler, {"monanas": self._monanas})
(r"/", request_handler.MonanasHandler, params),
(r"/banana", request_handler.BananaHandler, params),
]
settings = {}

View File

@ -26,3 +26,12 @@ def action_model(value):
}, required=True)
return action_model_schema(value)
def banana_model(value):
    """Validates the data against the banana_model schema.

    :param value: dict decoded from the request body.
    :returns: the validated value.
    :raises voluptuous.Invalid: if the data does not match the schema.
    """
    schema = voluptuous.Schema(
        {"content": basestring},
        required=True)
    return schema(value)

View File

View File

@ -0,0 +1,95 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import monasca_analytics.banana.deadpathck.config as deadpathck
import monasca_analytics.banana.grammar.config as grammar
import monasca_analytics.banana.typeck.config as typeck
import monasca_analytics.exception.banana as exception
from test.util_for_testing import MonanasTestCase
logger = logging.getLogger(__name__)
class PassOneSinkSourceTestCase(MonanasTestCase):
    """Checks that a pipeline must contain a full source-to-sink path."""

    def setUp(self):
        super(PassOneSinkSourceTestCase, self).setUp()

    def tearDown(self):
        super(PassOneSinkSourceTestCase, self).tearDown()

    def _prepare(self, banana_str):
        """Parse, type-check and dead-path-prune the given snippet.

        :param banana_str: banana source code to process.
        :returns: (ast, type_table) after dead paths have been removed.
        """
        # Convert the grammar into an AST
        ast = grammar.banana_grammar().parse(banana_str)
        # Compute the type table for the given AST
        type_table = typeck.typeck(ast)
        # Remove from the tree path that are "dead"
        deadpathck.deadpathck(ast, type_table)
        return ast, type_table

    def _assert_no_full_path(self, banana_str):
        """Expect contains_at_least_one_path_to_a_sink to raise."""
        ast, type_table = self._prepare(banana_str)
        self.assertRaises(
            exception.BananaNoFullPath,
            deadpathck.contains_at_least_one_path_to_a_sink,
            ast,
            type_table
        )

    def test_banana_should_fail_when_no_source(self):
        self._assert_no_full_path(
            "a = CloudMarkovChainSource()\n"
            "b = StdoutSink()\n"
            "c = CloudIngestor()\n"
            "d = LiNGAM()\n"
            "c -> d -> b")

    def test_banana_should_fail_when_no_sink(self):
        self._assert_no_full_path(
            "a = CloudMarkovChainSource()\n"
            "b = StdoutSink()\n"
            "c = CloudIngestor()\n"
            "d = LiNGAM()\n"
            "a -> c -> d")

    def test_banana_should_pass_when_more_source_sink(self):
        ast, type_table = self._prepare(
            "a = CloudMarkovChainSource()\n"
            "b = StdoutSink()\n"
            "c = CloudIngestor()\n"
            "d = LiNGAM()\n"
            "a -> c -> d -> b")
        deadpathck.contains_at_least_one_path_to_a_sink(ast, type_table)
        # We should reach this line.
        self.assertTrue(True)

View File

@ -0,0 +1,111 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import monasca_analytics.banana.deadpathck.config as deadpathck
import monasca_analytics.banana.emitter as emit
import monasca_analytics.banana.grammar.config as grammar
import monasca_analytics.banana.typeck.config as typeck
from test.util_for_testing import MonanasTestCase
logger = logging.getLogger(__name__)
class DeadPathTestCase(MonanasTestCase):
    """Checks how many components/connections the dead-path pass removes."""

    def setUp(self):
        super(DeadPathTestCase, self).setUp()

    def tearDown(self):
        super(DeadPathTestCase, self).tearDown()

    def _prune(self, banana_str, emitter):
        """Parse, type-check and dead-path-prune the given snippet.

        :param banana_str: banana source code to process.
        :param emitter: emitter collecting warnings and errors.
        :returns: the AST after dead paths have been removed.
        """
        # Convert the grammar into an AST
        ast = grammar.banana_grammar(emitter).parse(banana_str)
        # Compute the type table for the given AST
        type_table = typeck.typeck(ast)
        # Remove from the tree path that are "dead"
        deadpathck.deadpathck(ast, type_table, emitter)
        return ast

    def _assert_state(self, ast, emitter, nb_warnings,
                      nb_components, nb_connections):
        """Check emitter counters and the size of the pruned AST."""
        self.assertEqual(emitter.nb_errors, 0)
        self.assertEqual(emitter.nb_warnings, nb_warnings)
        self.assertEqual(len(ast.components), nb_components)
        self.assertEqual(len(ast.connections.connections), nb_connections)

    def test_banana_should_remove_everything(self):
        # No component reaches a sink: everything is pruned.
        emitter = CustomEmitter()
        ast = self._prune(
            "a = CloudMarkovChainSource()\n"
            "b = StdoutSink()\n"
            "c = CloudIngestor()\n"
            "d = LiNGAM()\n"
            "a -> c -> d",
            emitter)
        self._assert_state(ast, emitter, 4, 0, 0)

    def test_banana_should_remove_one(self):
        # One component has no path to a sink: 4 -> 3 components.
        emitter = CustomEmitter()
        ast = self._prune(
            "a = CloudMarkovChainSource()\n"
            "b = StdoutSink()\n"
            "c = CloudIngestor()\n"
            "d = LiNGAM()\n"
            "a -> c -> [d, b]",
            emitter)
        self._assert_state(ast, emitter, 1, 3, 2)

    def test_banana_should_not_remove_anything(self):
        # Full source-to-sink path: nothing is dead.
        emitter = CustomEmitter()
        ast = self._prune(
            "a = CloudMarkovChainSource()\n"
            "b = StdoutSink()\n"
            "c = CloudIngestor()\n"
            "d = LiNGAM()\n"
            "a -> c -> d -> b",
            emitter)
        self._assert_state(ast, emitter, 0, 4, 3)
class CustomEmitter(emit.Emitter):
    """Emitter that counts warnings and errors for test assertions."""

    def __init__(self):
        super(CustomEmitter, self).__init__()
        # Counters inspected by DeadPathTestCase.
        self.nb_warnings = 0
        self.nb_errors = 0

    def _report(self, span, message):
        # Shared diagnostic output for both warnings and errors.
        print(span.get_line(), str(span), message)

    def emit_warning(self, span, message):
        self._report(span, message)
        self.nb_warnings += 1

    def emit_error(self, span, message):
        self._report(span, message)
        self.nb_errors += 1

View File

View File

@ -0,0 +1,3 @@
a = CloudMarkovChainSource() # LHS_EQ IGNORE
c = a.sleep # LHS_EQ 0.01
c = a.transitions.web_service."run=>slow"."0" # LHS_EQ 0.001

View File

@ -0,0 +1,8 @@
a = 23 # LHS_EQ 23.0
a = a - a # LHS_EQ 0.0
a = 12 - 12 - 12 # LHS_EQ -12.0
a = 3 * 4 - 2 # LHS_EQ 10.0
a = 2 - 3 * 4 # LHS_EQ -10.0
b = 21 # LHS_EQ 21.0
d = 12 # LHS_EQ 12.0
e = b * (d + a) # LHS_EQ 42.0

View File

@ -0,0 +1,4 @@
a = {a.b.c: 12} # LHS_EQ {'a': {'b': {'c': 12.0}}}
b = a.a.b.c # LHS_EQ 12.0
a = {a.b: 21, a.c: "test"} # LHS_EQ {'a': {'b': 21.0, 'c': 'test'}}
d = a.a.c + " " + a.a.b # LHS_EQ 'test 21.0'

View File

@ -0,0 +1,77 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
import re
from monasca_analytics.banana.pass_manager import compute_evaluation_context
from monasca_analytics.util.string_util import stable_repr
from test.util_for_testing import MonanasTestCase
logger = logging.getLogger(__name__)
# Flags proving the directory scan below actually found test files;
# checked by EvalTestCase.test_files_have_been_found.
_has_some_file_that_should = dict()
_has_some_file_that_should["pass_eval_stmt"] = False


class EvalTestCase(MonanasTestCase):
    """Test case populated dynamically by upgrade_test_case() below.

    One test method per banana file found under
    ./banana/eval/should_pass/ is attached at import time.
    """

    def setUp(self):
        super(EvalTestCase, self).setUp()

    def tearDown(self):
        super(EvalTestCase, self).tearDown()

    def test_files_have_been_found(self):
        # Guards against the scan silently matching no files at all.
        self.assertTrue(_has_some_file_that_should["pass_eval_stmt"])
def upgrade_test_case():
    """Attach one generated test per banana file to EvalTestCase.

    Each file under ./banana/eval/should_pass/ declares its expected
    values in '# LHS_EQ <value>' comments. The generated test evaluates
    the file and compares each statement's right-hand-side value (via
    stable_repr) against the matching expectation; 'IGNORE' skips a
    statement.
    """
    lhs_eq_regex = re.compile("#(?: )*LHS_EQ(?: )+([^\n]+)")
    for root, _, files in os.walk('./banana/eval/should_pass/'):
        for filename in files:
            name_no_ext, _ext = os.path.splitext(filename)
            _has_some_file_that_should["pass_eval_stmt"] = True
            with open(os.path.join(root, filename), 'r') as f:
                content = f.read()
            expected_values = lhs_eq_regex.findall(content)

            def create_test(test_str, expect_values):
                def should_pass(self):
                    # Custom checks run after each statement
                    box = {"counter": 0}

                    def custom_check(ctx, stmt_type, lhs_node, rhs_value):
                        if expect_values[box["counter"]] != "IGNORE":
                            self.assertEqual(
                                expect_values[box["counter"]],
                                stable_repr(rhs_value))
                        box["counter"] += 1

                    # Evaluate the file.
                    compute_evaluation_context(test_str, custom_check)

                should_pass.__name__ = "test_banana_eval_" + name_no_ext
                return should_pass

            setattr(EvalTestCase, "test_banana_eval_" + name_no_ext,
                    create_test(content, expected_values))


# Fill the test case with generated test case from banana files
upgrade_test_case()

View File

View File

@ -0,0 +1,2 @@
a = { b: MyComponent() }
# RAISE ParseSyntaxException

View File

@ -0,0 +1,4 @@
# This one shadows the a.b, so this is still an error.
a = { a.b: "test", a.b: 23 }
# RAISE ParseSyntaxException

View File

@ -0,0 +1,3 @@
# This should fail
a -> b * c
# RAISE ParseException

View File

@ -0,0 +1,2 @@
a = b -> c
# RAISE ParseException

View File

@ -0,0 +1,12 @@
# Creating some components
a = Test()
b = Test(param={})
c = Test(param="test")
d = Test(param=234)
# Assigning parameters
b.param = {}
c.param = "test"
d.param = 234
# STMT_EQ { DotPath< Ident< a > > = Component { type_name: Ident< Test >, args: []}, DotPath< Ident< b > > = Component { type_name: Ident< Test >, args: [Ident< param > = JsonObj< {} >]}, DotPath< Ident< c > > = Component { type_name: Ident< Test >, args: [Ident< param > = Expr< [StringLit< "test" >] >]}, DotPath< Ident< d > > = Component { type_name: Ident< Test >, args: [Ident< param > = Expr< [Number< 234.0 >] >]}, DotPath< Ident< b >.Ident< param > > = JsonObj< {} >, DotPath< Ident< c >.Ident< param > > = Expr< [StringLit< "test" >] >, DotPath< Ident< d >.Ident< param > > = Expr< [Number< 234.0 >] > }

View File

@ -0,0 +1,13 @@
# This example would not pass later passes
# but exercise the grammar
a = Test()
b = Test()
a -> [b, c]
a -> b -> c
a -> [a -> b, b] -> e
a -> b
[[a]] -> b
# CONN_EQ Connection< [('Ident< a >', 'Ident< b >'), ('Ident< a >', 'Ident< c >'), ('Ident< b >', 'Ident< c >'), ('Ident< a >', 'Ident< a >'), ('Ident< b >', 'Ident< e >')] >

View File

@ -0,0 +1,10 @@
a = {}
a.param1 = 23
a."param3" = 1
a."param4"."para mmm" = a."param3"
a.param2 = {
"a": 34 + 12,
a.b.c: a.param1
}
# STMT_EQ { DotPath< Ident< a > > = JsonObj< {} >, DotPath< Ident< a >.Ident< param1 > > = Expr< [Number< 23.0 >] >, DotPath< Ident< a >.StringLit< "param3" > > = Expr< [Number< 1.0 >] >, DotPath< Ident< a >.StringLit< "param4" >.StringLit< "para mmm" > > = Expr< [DotPath< Ident< a >.StringLit< "param3" > >] >, DotPath< Ident< a >.Ident< param2 > > = JsonObj< {DotPath< Ident< a >.Ident< b >.Ident< c > >: Expr< [DotPath< Ident< a >.Ident< param1 > >] >, StringLit< "a" >: Expr< [Number< 34.0 >, +, Number< 12.0 >] >} > }

View File

@ -0,0 +1,6 @@
a = { a.b: "test" }
c = { e: a }
# VAR a EQ_JSON { "a": {"b": "test"} }
# VAR c EQ_JSON { "e": {"a": {"b": "test"}} }
# STMT_EQ { DotPath< Ident< a > > = JsonObj< {DotPath< Ident< a >.Ident< b > >: Expr< [StringLit< "test" >] >} >, DotPath< Ident< c > > = JsonObj< {DotPath< Ident< e > >: Expr< [DotPath< Ident< a > >] >} > }

View File

@ -0,0 +1,8 @@
a = 23 + 2
b = "test " + a
c = a + 4.0 * 2
# VAR a EQ_NUM 25
# VAR b EQ_STR "test 25"
# VAR c EQ_NUM 33
# STMT_EQ { DotPath< Ident< a > > = Expr< [Number< 23.0 >, +, Number< 2.0 >] >, DotPath< Ident< b > > = Expr< [StringLit< "test " >, +, DotPath< Ident< a > >] >, DotPath< Ident< c > > = Expr< [DotPath< Ident< a > >, +, Expr< [Number< 4.0 >, *, Number< 2.0 >] >] > }

View File

@ -0,0 +1,14 @@
# Connections
a -> b
# Some vars
d = 23
e = d - 23
c = 23 + 1.5 * 3 - d
# Components
a = Test(a=23, b=c+10)
b = Test(23, 10)
# STMT_EQ { DotPath< Ident< d > > = Expr< [Number< 23.0 >] >, DotPath< Ident< e > > = Expr< [DotPath< Ident< d > >, -, Number< 23.0 >] >, DotPath< Ident< c > > = Expr< [Number< 23.0 >, +, Expr< [Number< 1.5 >, *, Number< 3.0 >] >, -, DotPath< Ident< d > >] >, DotPath< Ident< a > > = Component { type_name: Ident< Test >, args: [Ident< a > = Expr< [Number< 23.0 >] >, Ident< b > = Expr< [DotPath< Ident< c > >, +, Number< 10.0 >] >]}, DotPath< Ident< b > > = Component { type_name: Ident< Test >, args: [Expr< [Number< 23.0 >] >, Expr< [Number< 10.0 >] >]} }
# CONN_EQ Connection< [('Ident< a >', 'Ident< b >')] >

View File

@ -0,0 +1,8 @@
# Various check for edge cases
a = {}
b = { a: {} }
c = { "a": {} }
d = { "": {} }
e = { "": "" }
# STMT_EQ { DotPath< Ident< a > > = JsonObj< {} >, DotPath< Ident< b > > = JsonObj< {DotPath< Ident< a > >: JsonObj< {} >} >, DotPath< Ident< c > > = JsonObj< {StringLit< "a" >: JsonObj< {} >} >, DotPath< Ident< d > > = JsonObj< {StringLit< "" >: JsonObj< {} >} >, DotPath< Ident< e > > = JsonObj< {StringLit< "" >: Expr< [StringLit< "" >] >} > }

View File

@ -0,0 +1,6 @@
a = {}
b = { "a": a }
c = { a.b: "b" }
d = { a.v."t t": 12 + 23 }
# STMT_EQ { DotPath< Ident< a > > = JsonObj< {} >, DotPath< Ident< b > > = JsonObj< {StringLit< "a" >: Expr< [DotPath< Ident< a > >] >} >, DotPath< Ident< c > > = JsonObj< {DotPath< Ident< a >.Ident< b > >: Expr< [StringLit< "b" >] >} >, DotPath< Ident< d > > = JsonObj< {DotPath< Ident< a >.Ident< v >.StringLit< "t t" > >: Expr< [Number< 12.0 >, +, Number< 23.0 >] >} > }

Some files were not shown because too many files have changed in this diff Show More