Implemented reliable CSV report generation

CSV reports should still be generated even when errors occur while
flattening the data. Rows with invalid data are skipped and logged.

Change-Id: I22ea7cb4f22024329f3bf55fd855e04b8f4f285d
Closes-Bug: #1440080
This commit is contained in:
Alexander Kislitsky 2015-04-03 17:47:11 +03:00
parent 0f91a26666
commit 4653ab749d
7 changed files with 167 additions and 66 deletions

View File

@ -168,7 +168,7 @@ def get_oswls_query(resource_type, from_date=None, to_date=None):
:return: SQLAlchemy query
"""
query = db.session.query(
OSWS.master_node_uid, OSWS.cluster_id,
OSWS.id, OSWS.master_node_uid, OSWS.cluster_id,
OSWS.created_date, # for checking if row is duplicated in CSV
OSWS.created_date.label('stats_on_date'), # for showing in CSV
OSWS.resource_type, OSWS.resource_data,

View File

@ -104,21 +104,29 @@ class OswlStatsToCsv(object):
:param oswls: list of OpenStack workloads
:return: generator on flatten resources info collection
"""
app.logger.debug("Getting flatten %s info started", resource_type)
app.logger.debug("Getting OSWL flatten %s info started", resource_type)
for oswl in oswls:
flatten_oswl = export_utils.get_flatten_data(oswl_keys_paths,
oswl)
resource_data = oswl.resource_data
current = resource_data.get('current', [])
removed = resource_data.get('removed', [])
# Filtering id, time only data
removed = filter(lambda x: len(x) > 2, removed)
for resource in itertools.chain(current, removed):
flatten_resource = export_utils.get_flatten_data(
resource_keys_paths, {resource_type: resource})
additional_info = self.get_additional_resource_info(
resource, oswl)
yield flatten_oswl + flatten_resource + additional_info
try:
flatten_oswl = export_utils.get_flatten_data(oswl_keys_paths,
oswl)
resource_data = oswl.resource_data
current = resource_data.get('current', [])
removed = resource_data.get('removed', [])
# Filtering id, time only data
removed = filter(lambda x: len(x) > 2, removed)
for resource in itertools.chain(current, removed):
flatten_resource = export_utils.get_flatten_data(
resource_keys_paths, {resource_type: resource})
additional_info = self.get_additional_resource_info(
resource, oswl)
yield flatten_oswl + flatten_resource + additional_info
except Exception as e:
# Generation of report should be reliable
app.logger.error("Getting OSWL flatten data failed. "
"Id: %s, master node uid: %s, "
"resource_data: %s, error: %s",
oswl.id, oswl.master_node_uid,
oswl.resource_data, six.text_type(e))
app.logger.debug("Getting flatten %s info finished", resource_type)
def get_last_sync_datetime(self, oswl):

View File

@ -14,6 +14,7 @@
import collections
import copy
import six
from six.moves import range
from fuel_analytics.api.app import app
@ -131,40 +132,50 @@ class StatsToCsv(object):
return extract_nodes_fields('platform_name', nodes)
for inst_structure in inst_structures:
structure = inst_structure.structure
clusters = structure.pop('clusters', [])
flatten_structure = export_utils.get_flatten_data(
structure_keys_paths, inst_structure)
try:
structure = inst_structure.structure
clusters = structure.pop('clusters', [])
flatten_structure = export_utils.get_flatten_data(
structure_keys_paths, inst_structure)
for cluster in clusters:
cluster.pop('installed_plugins', None)
flatten_cluster = export_utils.get_flatten_data(
cluster_keys_paths, cluster)
flatten_cluster.extend(flatten_structure)
nodes = cluster.get('nodes', [])
for cluster in clusters:
cluster.pop('installed_plugins', None)
flatten_cluster = export_utils.get_flatten_data(
cluster_keys_paths, cluster)
flatten_cluster.extend(flatten_structure)
nodes = cluster.get('nodes', [])
# Adding enumerated manufacturers
manufacturers = extract_nodes_manufacturers(nodes)
flatten_cluster += export_utils.align_enumerated_field_values(
manufacturers, self.MANUFACTURERS_NUM)
# Adding enumerated manufacturers
manufacturers = extract_nodes_manufacturers(nodes)
flatten_cluster += export_utils.\
align_enumerated_field_values(manufacturers,
self.MANUFACTURERS_NUM)
# Adding enumerated platforms
platform_names = extract_nodes_platform_name(nodes)
flatten_cluster += export_utils.align_enumerated_field_values(
platform_names, self.PLATFORM_NAMES_NUM)
# Adding network verification status
idx = export_utils.get_index(
{'master_node_uid': inst_structure.master_node_uid,
'cluster_id': cluster['id'],
'action_name': self.NETWORK_VERIFICATION_ACTION},
*self.ACTION_LOG_INDEX_FIELDS
)
al_info = action_logs_idx.get(idx)
nv_status = None if al_info is None else al_info.status
flatten_cluster.append(nv_status)
yield flatten_cluster
# Adding enumerated platforms
platform_names = extract_nodes_platform_name(nodes)
flatten_cluster += export_utils.\
align_enumerated_field_values(platform_names,
self.PLATFORM_NAMES_NUM)
# Adding network verification status
idx = export_utils.get_index(
{'master_node_uid': inst_structure.master_node_uid,
'cluster_id': cluster['id'],
'action_name': self.NETWORK_VERIFICATION_ACTION},
*self.ACTION_LOG_INDEX_FIELDS
)
al_info = action_logs_idx.get(idx)
nv_status = None if al_info is None else al_info.status
flatten_cluster.append(nv_status)
yield flatten_cluster
except Exception as e:
# Generation of report should be reliable
app.logger.error("Getting flatten cluster data failed. "
"Installation info id: %s, "
"master node uid: %s, error: %s",
inst_structure.id,
inst_structure.master_node_uid,
six.text_type(e))
app.logger.debug("Flatten clusters info is got")
def get_flatten_plugins(self, structure_keys_paths, cluster_keys_paths,
@ -181,22 +192,31 @@ class StatsToCsv(object):
app.logger.debug("Getting flatten plugins info started")
for inst_structure in inst_structures:
structure = inst_structure.structure
clusters = structure.pop('clusters', [])
flatten_structure = export_utils.get_flatten_data(
structure_keys_paths, inst_structure)
try:
structure = inst_structure.structure
clusters = structure.pop('clusters', [])
flatten_structure = export_utils.get_flatten_data(
structure_keys_paths, inst_structure)
for cluster in clusters:
cluster['cluster_id'] = cluster['id']
flatten_cluster = export_utils.get_flatten_data(
cluster_keys_paths, cluster)
plugins = cluster.pop('installed_plugins', [])
for plugin in plugins:
flatten_plugin = export_utils.get_flatten_data(
plugin_keys_paths, plugin)
flatten_plugin.extend(flatten_cluster)
flatten_plugin.extend(flatten_structure)
yield flatten_plugin
for cluster in clusters:
cluster['cluster_id'] = cluster['id']
flatten_cluster = export_utils.get_flatten_data(
cluster_keys_paths, cluster)
plugins = cluster.pop('installed_plugins', [])
for plugin in plugins:
flatten_plugin = export_utils.get_flatten_data(
plugin_keys_paths, plugin)
flatten_plugin.extend(flatten_cluster)
flatten_plugin.extend(flatten_structure)
yield flatten_plugin
except Exception as e:
# Generation of report should be reliable
app.logger.error("Getting flatten plugin data failed. "
"Installation info id: %s, "
"master node uid: %s, error: %s",
inst_structure.id,
inst_structure.master_node_uid,
six.text_type(e))
app.logger.debug("Getting flatten plugins info finished")
def export_clusters(self, inst_structures, action_logs):

View File

@ -63,7 +63,8 @@ class InstStructureTest(BaseTest):
release_versions=('6.0 TechPreview', '6.0 GA', '6.1'),
cluster_statuses=('new', 'deployment', 'stopped', 'operational',
'error', 'remove', 'update', 'update_error'),
libvirt_names=('qemu', 'kvm', 'vCenter')
libvirt_names=('qemu', 'kvm', 'vCenter'),
plugins_num_range=(0, 5)
):
nodes_num = random.randint(*nodes_range)
cluster = {
@ -86,7 +87,8 @@ class InstStructureTest(BaseTest):
}
network_configuration = self.generate_network_configuration()
cluster.update(network_configuration)
cluster['installed_plugins'] = self.generate_installed_plugins()
cluster['installed_plugins'] = self.generate_installed_plugins(
plugins_num_range=plugins_num_range)
for _ in six.moves.range(nodes_num):
cluster['nodes'].append(self.generate_node())
return cluster
@ -122,7 +124,8 @@ class InstStructureTest(BaseTest):
return plugins_info
def generate_structure(self, clusters_num_range=(0, 10),
unallocated_nodes_num_range=(0, 20)):
unallocated_nodes_num_range=(0, 20),
plugins_num_range=(0, 5)):
clusters_num = random.randint(*clusters_num_range)
fuel_release = {
'release': random.choice(("6.0-techpreview", "6.0-ga")),
@ -144,7 +147,8 @@ class InstStructureTest(BaseTest):
}
for _ in xrange(clusters_num):
cluster = self.generate_cluster()
cluster = self.generate_cluster(
plugins_num_range=plugins_num_range)
structure['clusters'].append(cluster)
structure['allocated_nodes_num'] += cluster['nodes_num']
return structure
@ -152,11 +156,13 @@ class InstStructureTest(BaseTest):
def generate_inst_structures(self, installations_num=100,
creation_date_range=(1, 10),
modification_date_range=(1, 10),
clusters_num_range=(0, 10)):
clusters_num_range=(0, 10),
plugins_num_range=(0, 5)):
for _ in xrange(installations_num):
mn_uid = '{}'.format(uuid.uuid4())
structure = self.generate_structure(
clusters_num_range=clusters_num_range)
clusters_num_range=clusters_num_range,
plugins_num_range=plugins_num_range)
creation_date = datetime.utcnow() - timedelta(
days=random.randint(*creation_date_range))
modification_date = datetime.utcnow() - timedelta(

View File

@ -489,3 +489,29 @@ class OswlStatsToCsvTest(OswlTest, DbTest):
volume = oswl.resource_data['current'][0]
self.assertEqual(fv[gt_field_pos],
len(volume['attachments']) > csv_att_num)
def test_oswl_invalid_data(self):
    """Checks that a failure on one OSWL record does not abort the
    export: only the invalid record is skipped, all others are yielded.
    """
    exporter = OswlStatsToCsv()
    num = 10
    for resource_type in self.RESOURCE_TYPES:
        oswls_saved = self.get_saved_oswls(
            num, resource_type, current_num_range=(1, 1),
            removed_num_range=(0, 0), added_num_range=(0, 0),
            modified_num_range=(0, 0))
        # Saving installation structures for proper oswls filtering
        self.get_saved_inst_structs(oswls_saved)
        with app.test_request_context():
            oswls = get_oswls(resource_type).all()
            oswl_keys_paths, vm_keys_paths, csv_keys_paths = \
                exporter.get_resource_keys_paths(resource_type)
            # Make exactly one record in the middle raise; the rest
            # return an empty additional-info list.
            side_effect = [[]] * num
            # NOTE: floor division -- list indices must be ints on py3
            side_effect[num // 2] = Exception
            with mock.patch.object(exporter,
                                   'get_additional_resource_info',
                                   side_effect=side_effect):
                flatten_resources = exporter.get_flatten_resources(
                    resource_type, oswl_keys_paths, vm_keys_paths, oswls)
                # Checking only invalid data is not exported
                self.assertEqual(num - 1, len(list(flatten_resources)))

View File

@ -72,3 +72,25 @@ class PluginsToCsvExportTest(InstStructureTest, DbTest):
reader = csv.reader(output)
for _ in reader:
pass
def test_plugin_invalid_data(self):
    """Checks that a failure while flattening one installation's plugin
    data does not abort the export: only that installation is skipped.
    """
    exporter = StatsToCsv()
    num = 10
    inst_structures = self.get_saved_inst_structures(
        installations_num=num, clusters_num_range=(1, 1),
        plugins_num_range=(1, 1))
    with app.test_request_context():
        # get_flatten_data is called 3 times per installation inside
        # get_flatten_plugins (structure, cluster, plugin)
        side_effect = [[]] * num * 3
        # NOTE: floor division -- list indices must be ints on py3
        side_effect[num // 2] = Exception
        with mock.patch('fuel_analytics.api.resources.utils.'
                        'export_utils.get_flatten_data',
                        side_effect=side_effect):
            structure_paths, cluster_paths, plugins_paths, csv_paths = \
                exporter.get_plugin_keys_paths()
            flatten_plugins = exporter.get_flatten_plugins(
                structure_paths, cluster_paths,
                plugins_paths, inst_structures)
            # Checking only invalid data is not exported
            self.assertEqual(num - 1, len(list(flatten_plugins)))

View File

@ -231,3 +231,22 @@ class StatsToCsvExportTest(InstStructureTest, DbTest):
result = exporter.export_clusters(inst_structures, [])
for _ in result:
pass
def test_cluster_invalid_data(self):
    """Checks that a failure while flattening one installation's cluster
    data does not abort the export: only that installation is skipped.
    """
    exporter = StatsToCsv()
    num = 10
    inst_structures = self.get_saved_inst_structures(
        installations_num=num, clusters_num_range=(1, 1))
    with app.test_request_context():
        # get_flatten_data is called 2 times per installation inside
        # get_flatten_clusters (structure, cluster)
        side_effect = [[]] * num * 2
        # NOTE: floor division -- list indices must be ints on py3
        side_effect[num // 2] = Exception
        with mock.patch('fuel_analytics.api.resources.utils.'
                        'export_utils.get_flatten_data',
                        side_effect=side_effect):
            structure_paths, cluster_paths, csv_paths = \
                exporter.get_cluster_keys_paths()
            flatten_clusters = exporter.get_flatten_clusters(
                structure_paths, cluster_paths, inst_structures, [])
            # Checking only invalid data is not exported
            self.assertEqual(num - 1, len(list(flatten_clusters)))