Add DecisionTreeClassifier as an SML

This patch adds an SML that uses scikit-learn's DecisionTreeClassifier
algorithm (supervised learning) for anomaly detection.

Change-Id: I7e4ebe07824418d733c77c6f4750893836ecdd70
Hisashi Osanai 2016-12-27 14:12:39 +00:00
parent 491e1b2b07
commit 7cc09363b1
3 changed files with 161 additions and 1 deletion

monasca_analytics/sml/decision_tree.py

@@ -0,0 +1,101 @@
#!/usr/bin/env python
# Copyright (c) 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging

import numpy as np
from sklearn.metrics import classification_report
from sklearn import tree
import voluptuous

# NOTE: get_params() below relies on params.ParamDescriptor and
# type_util.Number(); these import paths are assumed to match the ones
# used by the other monasca_analytics SML modules.
from monasca_analytics.banana.typeck import type_util
from monasca_analytics.component import params
from monasca_analytics.sml.base import BaseSML
from monasca_analytics.util.validation_utils import NoSpaceCharacter

logger = logging.getLogger(__name__)
ANOMALY = 1
NON_ANOMALY = 0
N_SAMPLES = 1000


class DecisionTreeClassifier(BaseSML):
    """Anomaly detection based on the DecisionTreeClassifier algorithm."""

    def __init__(self, _id, _config):
        super(DecisionTreeClassifier, self).__init__(_id, _config)
        self._nb_samples = int(_config['nb_samples'])

    @staticmethod
    def validate_config(_config):
        decisiontree_schema = voluptuous.Schema({
            'module': voluptuous.And(
                basestring, NoSpaceCharacter()),
            'nb_samples': voluptuous.Or(float, int)
        }, required=True)
        return decisiontree_schema(_config)

    @staticmethod
    def get_default_config():
        return {
            'module': DecisionTreeClassifier.__name__,
            'nb_samples': N_SAMPLES
        }

    @staticmethod
    def get_params():
        return [
            params.ParamDescriptor('nb_samples', type_util.Number(), N_SAMPLES)
        ]

    def number_of_samples_required(self):
        return self._nb_samples

    def _generate_train_test_sets(self, samples, ratio_train):
        num_samples_train = int(len(samples) * ratio_train)
        data, labels = np.hsplit(samples, [-1])
        X_train = np.array(data[:num_samples_train])
        _labels = np.array(labels[:num_samples_train])
        X_train_label = _labels.ravel()
        X_test = np.array(data[num_samples_train:])
        _labels = np.array(labels[num_samples_train:])
        X_test_label = _labels.ravel()
        return X_train, X_train_label, X_test, X_test_label

    def _get_best_detector(self, train, label):
        detector = tree.DecisionTreeClassifier()
        detector.fit(train, label)
        return detector

    def learn_structure(self, samples):
        X_train, X_train_label, X_test, X_test_label = \
            self._generate_train_test_sets(samples, 0.75)
        logger.info('Training with ' + str(len(X_train)) +
                    ' samples; testing with ' + str(len(X_test)) + ' samples.')
        dt_detector = self._get_best_detector(X_train, X_train_label)
        Y_test = dt_detector.predict(X_test)
        num_anomalies = Y_test[Y_test == ANOMALY].size
        logger.info('Found ' + str(num_anomalies) +
                    ' anomalies in testing set')
        # classification_report summarises per-class precision/recall/F1
        # on the held-out test set.
        logger.info('Classification report: \n{}'.
                    format(classification_report(
                        X_test_label,
                        Y_test,
                        target_names=['no', 'yes'])))
        return dt_detector
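For illustration, here is a minimal, hypothetical sketch of driving the new SML on its own, outside a full Monanas pipeline. It assumes the file above lands as monasca_analytics/sml/decision_tree.py (the path the unit test below imports it from), uses the same data layout as that test (four feature columns plus a trailing 0/1 anomaly label column), and runs under the Python 2 environment this code targets (validate_config uses basestring).

import numpy as np

from monasca_analytics.sml.decision_tree import DecisionTreeClassifier

# 1000 labelled samples: four random feature columns plus a trailing
# 0/1 label column (1 = anomaly), mirroring the unit test below.
features = np.random.uniform(size=(1000, 4))
labels = np.random.randint(2, size=(1000, 1))
samples = np.hstack([features, labels])

# learn_structure() trains on the first 75% of the samples and logs a
# classification report for the remaining 25%.
sml = DecisionTreeClassifier("dt1", {"module": "DecisionTreeClassifier",
                                     "nb_samples": 1000})
classifier = sml.learn_structure(samples)

# The returned object is a plain sklearn DecisionTreeClassifier.
print(classifier.predict(features[:5]))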


@@ -0,0 +1,58 @@
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging

import numpy as np
from sklearn import tree

from monasca_analytics.sml import decision_tree
from test.util_for_testing import MonanasTestCase

logger = logging.getLogger(__name__)


class TestDecisionTreeClassifier(MonanasTestCase):

    def setUp(self):
        super(TestDecisionTreeClassifier, self).setUp()
        self.dt_sml = decision_tree.DecisionTreeClassifier(
            "fakeid", {"module": "fake", "nb_samples": 1000})

    def tearDown(self):
        super(TestDecisionTreeClassifier, self).tearDown()

    def get_testing_data(self):
        a = np.random.uniform(size=1000)
        b = np.random.uniform(size=1000)
        c = np.random.uniform(size=1000)
        d = np.random.uniform(size=1000)
        labels = np.random.randint(2, size=1000)
        return np.array([a, b, c, d, labels]).T

    def test_generate_train_test_sets(self):
        data = self.get_testing_data()
        X_train, X_train_labeled, X_test, X_test_labeled = \
            self.dt_sml._generate_train_test_sets(data, 0.6)
        self.assertEqual(600, len(X_train))
        self.assertEqual(600, len(X_train_labeled))
        self.assertEqual(400, len(X_test))
        self.assertEqual(400, len(X_test_labeled))

    def test_learn_structure(self):
        data = self.get_testing_data()
        clf = self.dt_sml.learn_structure(data)
        self.assertIsInstance(clf, tree.DecisionTreeClassifier)


@@ -112,7 +112,8 @@ class CommonUtilTest(unittest.TestCase):
        ['LiNGAM',
         "SvmOneClass",
         "IsolationForest",
-        "EllipticEnvelope"],
+        "EllipticEnvelope",
+        "DecisionTreeClassifier"],
        names)

    def test_get_voter_class_by_name(self):
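The hunk above registers the new class among the SML names the configuration utilities report, so it can be selected by name in a pipeline configuration. Below is a hypothetical excerpt of such a configuration: only the module name and the nb_samples parameter are defined by this patch; the surrounding "smls" structure is an assumption based on the project's example configs and may differ.

# Hypothetical configuration excerpt (structure assumed, values from this patch).
config_excerpt = {
    "smls": {
        "sml1": {
            "module": "DecisionTreeClassifier",  # resolved by class name
            "nb_samples": 1000                   # samples gathered before training
        }
    }
}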