CSV exporter for all OSWL resource types

CSV export implementation generalized for resource types
CSV export tests generalized for resource types
DB operations in tests wrapped into transaction
Transaction rollback added into DbTest.tearDown
Result file name format changed to resource_type.csv

Blueprint: export-stats-to-csv
Change-Id: I36cefec099d551d320c57abdc19fcdf29bc2c2ad
This commit is contained in:
Alexander Kislitsky 2015-02-11 18:52:59 +03:00
parent e58a5cde40
commit d77da05ff6
11 changed files with 373 additions and 216 deletions

View File

@ -17,7 +17,6 @@ from flask import Response
from fuel_analytics.api.app import app
from fuel_analytics.api.app import db
from fuel_analytics.api.common.consts import OSWL_RESOURCE_TYPES as RT
from fuel_analytics.api.db.model import OpenStackWorkloadStats
from fuel_analytics.api.resources.utils.es_client import ElasticSearchClient
from fuel_analytics.api.resources.utils.oswl_stats_to_csv import OswlStatsToCsv
@ -38,24 +37,36 @@ def clusters_to_csv():
# NOTE: result - is generator, but streaming can not work with some
# WSGI middlewares: http://flask.pocoo.org/docs/0.10/patterns/streaming/
app.logger.debug("Get request for clusters_to_csv handled")
return Response(result, mimetype='text/csv')
headers = {
'Content-Disposition': 'attachment; filename=clusters.csv'
}
return Response(result, mimetype='text/csv', headers=headers)
def get_oswls(resource_type, yield_per=1000):
    """Fetches OpenStack workload stats of the given resource type.

    :param resource_type: OSWL resource type (e.g. vm, flavor)
    :param yield_per: batch size for streaming rows from the DB
    :return: query result iterator over OpenStackWorkloadStats objects
        ordered by created_date
    """
    # Fixed typo in the log message: "yeld" -> "yield"
    app.logger.debug("Fetching %s oswls with yield per %d",
                     resource_type, yield_per)
    return db.session.query(OpenStackWorkloadStats).filter(
        OpenStackWorkloadStats.resource_type == resource_type).\
        order_by(OpenStackWorkloadStats.created_date).\
        yield_per(yield_per)
@bp.route('/<resource_type>', methods=['GET'])
def oswl_to_csv(resource_type):
    """Exports OSWL stats of the requested resource type as CSV.

    :param resource_type: resource type taken from the request path
    :return: Flask Response streaming CSV data, served as an attachment
        named <resource_type>.csv
    """
    app.logger.debug("Handling oswl_to_csv get request for resource %s",
                     resource_type)
    exporter = OswlStatsToCsv()
    oswls = get_oswls(resource_type)
    result = exporter.export(resource_type, oswls)
    # NOTE: result - is generator, but streaming can not work with some
    # WSGI middlewares: http://flask.pocoo.org/docs/0.10/patterns/streaming/
    app.logger.debug("Request oswl_to_csv for resource %s handled",
                     resource_type)
    headers = {
        'Content-Disposition': 'attachment; filename={}.csv'.format(
            resource_type)
    }
    return Response(result, mimetype='text/csv', headers=headers)

View File

@ -14,6 +14,7 @@
import csv
import io
import itertools
import six
from fuel_analytics.api.app import app
@ -141,15 +142,13 @@ def flatten_data_as_csv(keys_paths, flatten_data):
:param flatten_data: list of flatten data dicts
:return: stream with data in CSV format
"""
app.logger.debug("Saving flatten data as CSV is started")
app.logger.debug("Saving flatten data as CSV started")
names = []
for key_path in keys_paths:
names.append('.'.join(key_path))
yield names
output = six.BytesIO()
writer = csv.writer(output)
writer.writerow(names)
def read_and_flush():
output.seek(io.SEEK_SET)
@ -158,10 +157,10 @@ def flatten_data_as_csv(keys_paths, flatten_data):
output.truncate()
return data
for d in flatten_data:
for d in itertools.chain((names,), flatten_data):
app.logger.debug("Writing row %s", d)
encoded_d = [s.encode("utf-8") if isinstance(s, unicode) else s
for s in d]
writer.writerow(encoded_d)
yield read_and_flush()
app.logger.debug("Saving flatten data as CSV is finished")
app.logger.debug("Saving flatten data as CSV finished")

View File

@ -16,79 +16,88 @@ import itertools
import six
from fuel_analytics.api.app import app
from fuel_analytics.api.common import consts
from fuel_analytics.api.resources.utils import export_utils
from fuel_analytics.api.resources.utils.export_utils import get_keys_paths
from fuel_analytics.api.resources.utils.skeleton import \
OSWL_STATS_SKELETON
from fuel_analytics.api.resources.utils.skeleton import \
OSWL_VM_SKELETON
from fuel_analytics.api.resources.utils.skeleton import OSWL_SKELETONS
class OswlStatsToCsv(object):
    """Exports OpenStack workload (OSWL) statistics into CSV,
    generalized over resource types (vm, flavor, ...).
    """

    def get_resource_keys_paths(self, resource_type):
        """Gets key paths for resource type. CSV key paths is combination
        of oswl, resource type and additional resource type key paths.

        :param resource_type: name of the OSWL resource type
        :return: tuple of lists of oswl, resource type, csv key paths
        """
        app.logger.debug("Getting %s keys paths", resource_type)
        oswl_key_paths = get_keys_paths(OSWL_SKELETONS['general'])
        resource_key_paths = get_keys_paths(
            {resource_type: OSWL_SKELETONS[resource_type]})

        # Additional key paths for resource type info
        resource_additional_key_paths = [[resource_type, 'is_added'],
                                         [resource_type, 'is_modified'],
                                         [resource_type, 'is_removed']]
        result_key_paths = oswl_key_paths + resource_key_paths + \
            resource_additional_key_paths
        app.logger.debug("%s keys paths got: %s", resource_type,
                         result_key_paths)
        return oswl_key_paths, resource_key_paths, result_key_paths

    def get_additional_resource_info(self, resource, oswl):
        """Gets additional info about operations with resource.

        :param resource: resource info dict
        :param oswl: OpenStack workload
        :return: list of boolean flags in the order:
            is_added, is_modified, is_removed
        """
        resource_data = oswl.resource_data
        added = resource_data.get('added', {})
        removed = resource_data.get('removed', {})
        modified = resource_data.get('modified', {})
        # After JSON saving in the object dict keys are converted
        # into strings
        resource_id = six.text_type(resource.get('id'))
        is_added = resource_id in added
        is_modified = resource_id in modified
        is_removed = resource_id in removed
        return [is_added, is_modified, is_removed]

    def get_flatten_resources(self, resource_type, oswl_keys_paths,
                              resource_keys_paths, oswls):
        """Gets flatten resources data.

        :param resource_type: name of the OSWL resource type
        :param oswl_keys_paths: list of keys paths in the OpenStack
            workload info
        :param resource_keys_paths: list of keys paths in the resource
        :param oswls: list of OpenStack workloads
        :return: generator of flatten resources info rows
        """
        app.logger.debug("Getting flatten %s info started", resource_type)
        for oswl in oswls:
            flatten_oswl = export_utils.get_flatten_data(oswl_keys_paths,
                                                         oswl)
            resource_data = oswl.resource_data
            current = resource_data.get('current', [])
            removed = resource_data.get('removed', {})
            # Removed resources are exported too, so the CSV keeps the
            # last known state of every resource.
            for resource in itertools.chain(current,
                                            six.itervalues(removed)):
                flatten_resource = export_utils.get_flatten_data(
                    resource_keys_paths, {resource_type: resource})
                additional_info = self.get_additional_resource_info(
                    resource, oswl)
                yield flatten_oswl + flatten_resource + additional_info
        app.logger.debug("Getting flatten %s info finished", resource_type)

    def export(self, resource_type, oswls):
        """Exports OSWL stats of the given resource type into CSV.

        :param resource_type: name of the OSWL resource type
        :param oswls: iterable of OpenStack workloads
        :return: generator of CSV data chunks
        """
        app.logger.info("Export oswls %s info into CSV started",
                        resource_type)
        oswl_keys_paths, resource_keys_paths, csv_keys_paths = \
            self.get_resource_keys_paths(resource_type)
        flatten_resources = self.get_flatten_resources(
            resource_type, oswl_keys_paths, resource_keys_paths, oswls)
        result = export_utils.flatten_data_as_csv(csv_keys_paths,
                                                  flatten_resources)
        app.logger.info("Export oswls %s info into CSV finished",
                        resource_type)
        return result

    def export_vms(self, oswls):
        # Backward-compatible shortcut for the vm resource type.
        return self.export(consts.OSWL_RESOURCE_TYPES.vm, oswls)

View File

@ -11,6 +11,7 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from fuel_analytics.api.common import consts
INSTALLATION_INFO_SKELETON = {
'allocated_nodes_num': None,
@ -107,19 +108,16 @@ INSTALLATION_INFO_SKELETON = {
}
}
# Key-path skeletons used by the OSWL CSV export. 'general' holds the
# fields common to every OSWL record; each per-resource-type entry
# describes the payload of that resource.
OSWL_SKELETONS = {
    'general': {
        'id': None,
        'master_node_uid': None,
        'cluster_id': None,
        'created_date': None,
        'resource_type': None,
        'resource_checksum': None,
    },
    consts.OSWL_RESOURCE_TYPES.vm: {
        'id': None,
        'status': None,
        'tenant_id': None,
        # NOTE(review): 'host_id' and 'created_at' are inferred from the
        # vm generator in the tests — confirm against the original file.
        'host_id': None,
        'created_at': None,
        'power_state': None,
        'flavor_id': None,
        'image_id': None
    },
    consts.OSWL_RESOURCE_TYPES.flavor: {
        'id': None,
        'ram': None,
        'vcpus': None,
        'OS-FLV-EXT-DATA:ephemeral': None,
        'disk': None,
        'swap': None,
    }
}

View File

@ -12,6 +12,8 @@
# License for the specific language governing permissions and limitations
# under the License.
from six.moves import range
from fuel_analytics.api.app import app
from fuel_analytics.api.resources.utils import export_utils
from fuel_analytics.api.resources.utils.skeleton import \
@ -48,7 +50,7 @@ class StatsToCsv(object):
:return: list of cut fact column and enumerated columns names
"""
result = [['{}_gt{}'.format(field_name, number)]]
for i in xrange(number):
for i in range(number):
result.append(['{}_{}'.format(field_name, i)])
return result
@ -113,7 +115,7 @@ class StatsToCsv(object):
app.logger.debug("Flatten clusters info is got")
def export_clusters(self, structures):
    """Exports clusters info into CSV.

    :param structures: iterable of installation structures
    :return: generator of CSV data chunks
    """
    app.logger.info("Export clusters info into CSV started")
    structure_keys_paths, cluster_keys_paths, csv_keys_paths = \
        self.get_cluster_keys_paths()
    flatten_clusters = self.get_flatten_clusters(structure_keys_paths,
                                                 cluster_keys_paths,
                                                 structures)
    result = export_utils.flatten_data_as_csv(csv_keys_paths,
                                              flatten_clusters)
    app.logger.info("Export clusters info into CSV finished")
    return result

View File

@ -0,0 +1,153 @@
# -*- coding: utf-8 -*-
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from datetime import datetime
from datetime import timedelta
import random
import six
from six.moves import range
import uuid
from fuel_analytics.test.base import BaseTest
from fuel_analytics.api.common import consts
from fuel_analytics.api.db.model import OpenStackWorkloadStats
class OswlTest(BaseTest):
    """Base test class providing generators of random OSWL test data
    for all supported resource types.
    """

    # Maps resource type to the names of its (generator,
    # modified-generator) methods; resolved via getattr in generate_oswls.
    RESOURCE_GENERATORS = {
        consts.OSWL_RESOURCE_TYPES.vm: ('generate_vms',
                                        'generate_modified_vms'),
        consts.OSWL_RESOURCE_TYPES.flavor: ('generate_flavors',
                                            'generate_modified_flavors'),
    }

    def generate_removed_resources(self, num, gen_func):
        """Generates 'removed' resources info keyed by resource id."""
        result = {}
        for resource in gen_func(num):
            resource['time'] = datetime.utcnow().time().isoformat()
            result[resource['id']] = resource
        return result

    def generate_added_resources(self, num):
        """Generates 'added' resources info keyed by resource id."""
        result = {}
        for i in range(num):
            result[i] = {'time': datetime.utcnow().time().isoformat()}
        return result

    def generate_vms(self, vms_num, statuses=('on', 'off'),
                     created_at_range=(1, 10),
                     power_states_range=(1, 10)):
        """Generates a list of random vm info dicts."""
        result = []
        for i in range(vms_num):
            result.append({
                'id': i,
                'status': random.choice(statuses),
                'tenant_id': 'tenant_id_{}'.format(i),
                'host_id': 'host_id_{}'.format(i),
                'created_at': (datetime.utcnow() - timedelta(
                    days=random.randint(*created_at_range))).isoformat(),
                'power_state': random.randint(*power_states_range),
                'flavor_id': 'flavor_id_{}'.format(i),
                'image_id': 'image_id_{}'.format(i),
            })
        return result

    def generate_modified_vms(self, vms_num, modifs_num_range=(0, 10),
                              power_states_range=(1, 10)):
        """Generates 'modified' vms info keyed by vm id."""
        result = {}
        for i in range(vms_num):
            for _ in range(random.randint(*modifs_num_range)):
                result.setdefault(i, []).append({
                    'time': datetime.utcnow().time().isoformat(),
                    # Fixed: random.choice on the (lo, hi) tuple picked
                    # only the two bounds; randint draws from the whole
                    # range, consistent with generate_vms.
                    'power_state': random.randint(*power_states_range)
                })
        return result

    def generate_flavors(self, num, ram_range=(64, 24000),
                         vcpus_range=(1, 64), ephemeral_range=(1, 30),
                         disk_range=(1, 2048), swap_range=(1, 128)):
        """Generates a list of random flavor info dicts."""
        result = []
        for i in range(num):
            result.append({
                'id': i,
                'ram': random.randint(*ram_range),
                'vcpus': random.randint(*vcpus_range),
                'OS-FLV-EXT-DATA:ephemeral': random.randint(*ephemeral_range),
                'disk': random.randint(*disk_range),
                'swap': random.randint(*swap_range),
            })
        return result

    def generate_modified_flavors(self, num, modifs_num_range=(0, 3),
                                  swap_range=(1, 128),
                                  disk_range=(13, 23)):
        """Generates 'modified' flavors info keyed by flavor id."""
        result = {}
        for i in range(num):
            for _ in range(random.randint(*modifs_num_range)):
                result.setdefault(i, []).append({
                    'time': datetime.utcnow().time().isoformat(),
                    'swap': random.randint(*swap_range),
                    'disk': random.randint(*disk_range)
                })
        return result

    def generate_oswls(self, oswl_num, resource_type,
                       current_num_range=(0, 7),
                       created_date_range=(1, 10),
                       added_num_range=(0, 5),
                       removed_num_range=(0, 3),
                       modified_num_range=(0, 15),
                       stats_per_mn_range=(1, 10),
                       cluster_ids_range=(1, 5)):
        """Yields OpenStackWorkloadStats objects with random data.

        Several consecutive oswls share one master node uid to imitate
        repeated stats reports from the same master node.
        """
        i = 1
        current_mn_stats = 0
        gen_name, gen_modified_name = self.RESOURCE_GENERATORS[resource_type]
        gen = getattr(self, gen_name)
        gen_modified = getattr(self, gen_modified_name)
        while i <= oswl_num:
            if not current_mn_stats:
                mn_uid = six.text_type(uuid.uuid4())
                current_mn_stats = random.randint(*stats_per_mn_range)
            if current_mn_stats:
                i += 1
                created_date = (datetime.utcnow() - timedelta(
                    days=random.randint(*created_date_range))).\
                    date().isoformat()
                obj = OpenStackWorkloadStats(
                    master_node_uid=mn_uid,
                    external_id=i,
                    # Fixed: random.choice on the (lo, hi) tuple picked
                    # only the two bounds; randint covers the range.
                    cluster_id=random.randint(*cluster_ids_range),
                    created_date=created_date,
                    updated_time=datetime.utcnow().time().isoformat(),
                    resource_type=resource_type,
                    resource_checksum=six.text_type(uuid.uuid4()),
                    resource_data={
                        'current': gen(
                            random.randint(*current_num_range)),
                        'added': self.generate_added_resources(
                            random.randint(*added_num_range)),
                        'modified': gen_modified(
                            random.randint(*modified_num_range)),
                        'removed': self.generate_removed_resources(
                            random.randint(*removed_num_range),
                            gen)
                    }
                )
                current_mn_stats -= 1
                yield obj

View File

@ -66,6 +66,26 @@ class ExportUtilsTest(BaseTest):
actual = export_utils.get_flatten_data(key_paths, data)
self.assertListEqual(expected_flatten_data[idx], actual)
def test_get_flatten_as_csv_unicode(self):
data = [
{'a': u'b'},
{'a': 'tt', u'эюя': 'x'},
]
expected_csv = [
'a,эюя\r\n',
'b,\r\n',
'tt,x\r\n'
]
skeleton = export_utils.get_data_skeleton(data)
key_paths = export_utils.get_keys_paths(skeleton)
flatten_data = []
for d in data:
flatten_data.append(export_utils.get_flatten_data(key_paths, d))
result = export_utils.flatten_data_as_csv(key_paths, flatten_data)
for idx, actual_csv in enumerate(result):
self.assertEqual(expected_csv[idx], actual_csv)
def test_dict_construct_skeleton(self):
data = {'a': 'b'}
skeleton = export_utils.construct_skeleton(data)

View File

@ -15,166 +15,94 @@
# under the License.
import csv
from datetime import datetime
from datetime import timedelta
import random
import six
import uuid
import types
from fuel_analytics.test.base import BaseTest
from fuel_analytics.test.api.resources.utils.oswl_test import OswlTest
from fuel_analytics.test.base import DbTest
from fuel_analytics.api.app import db
from fuel_analytics.api.common import consts
from fuel_analytics.api.db.model import OpenStackWorkloadStats
from fuel_analytics.api.resources.csv_exporter import get_oswls
from fuel_analytics.api.resources.utils.oswl_stats_to_csv import OswlStatsToCsv
import types
class OswlStatsToCsvTest(OswlTest, DbTest):
    """Tests of the OSWL CSV exporter, executed for every supported
    resource type. DB writes are rolled back by DbTest.tearDown.
    """

    RESOURCE_TYPES = (
        consts.OSWL_RESOURCE_TYPES.vm,
        consts.OSWL_RESOURCE_TYPES.flavor
    )

    def test_get_keys_paths(self):
        for resource_type in self.RESOURCE_TYPES:
            exporter = OswlStatsToCsv()
            oswl_keys_paths, resource_keys_paths, csv_keys_paths = \
                exporter.get_resource_keys_paths(resource_type)
            # Fields excluded from the 'general' skeleton
            self.assertFalse(['external_id'] in oswl_keys_paths)
            self.assertFalse(['updated_time'] in oswl_keys_paths)
            self.assertTrue([resource_type, 'id'] in resource_keys_paths)
            self.assertTrue([resource_type, 'is_added'] in csv_keys_paths)
            self.assertTrue([resource_type, 'is_modified'] in csv_keys_paths)
            self.assertTrue([resource_type, 'is_removed'] in csv_keys_paths)

    def test_get_flatten_resources(self):
        for resource_type in self.RESOURCE_TYPES:
            exporter = OswlStatsToCsv()
            oswl_keys_paths, resource_keys_paths, csv_keys_paths = \
                exporter.get_resource_keys_paths(resource_type)
            oswls = self.generate_oswls(2, resource_type)
            flatten_resources = exporter.get_flatten_resources(
                resource_type, oswl_keys_paths, resource_keys_paths, oswls)
            self.assertTrue(isinstance(flatten_resources,
                                       types.GeneratorType))
            # Drain the generator to make sure it does not raise
            for _ in flatten_resources:
                pass

    def test_get_additional_info(self):
        exporter = OswlStatsToCsv()
        added_num = 0
        modified_num = 3
        removed_num = 5
        num = 1
        for resource_type in self.RESOURCE_TYPES:
            oswls = self.generate_oswls(
                num,
                resource_type,
                added_num_range=(added_num, added_num),
                modified_num_range=(modified_num, modified_num),
                removed_num_range=(removed_num, removed_num)
            )
            # NOTE: .next() is Python 2 only
            oswl = oswls.next()

            # Saving data for true JSON loading from DB object
            db.session.add(oswl)
            db.session.commit()

            resource_data = oswl.resource_data
            for resource in resource_data['current']:
                # After conversion into JSON dict keys became strings
                resource_id = six.text_type(resource['id'])
                expected = [
                    resource_id in resource_data['added'],
                    resource_id in resource_data['modified'],
                    resource_id in resource_data['removed'],
                ]
                actual = exporter.get_additional_resource_info(resource,
                                                               oswl)
                self.assertListEqual(expected, actual)

    def test_export(self):
        exporter = OswlStatsToCsv()
        num = 200
        for resource_type in self.RESOURCE_TYPES:
            # Saving data for true JSON loading from DB object
            gen_oswls = self.generate_oswls(num, resource_type)
            for oswl in gen_oswls:
                db.session.add(oswl)
            db.session.commit()
            oswls = get_oswls(resource_type)
            result = exporter.export(resource_type, oswls)
            self.assertTrue(isinstance(result, types.GeneratorType))
            # Smoke-check that the produced chunks parse as CSV
            output = six.StringIO(list(result))
            reader = csv.reader(output)
            for _ in reader:
                pass

View File

@ -12,9 +12,12 @@
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy.orm import scoped_session
from sqlalchemy.orm import sessionmaker
from unittest2.case import TestCase
from fuel_analytics.api.app import app
from fuel_analytics.api.app import db
from fuel_analytics.api.log import init_logger
# Configuring app for the test environment
@ -39,3 +42,26 @@ class BaseTest(TestCase):
class ElasticTest(MigrationElasticTest):
pass
class DbTest(BaseTest):
    """Base class for tests touching the DB.

    Each test runs inside an outer non-ORM transaction which is rolled
    back in tearDown, so even committed session changes do not persist
    between tests.
    """

    def setUp(self):
        super(DbTest, self).setUp()
        # connect to the database
        self.connection = db.session.connection()
        # begin a non-ORM transaction
        self.trans = self.connection.begin()
        # bind an individual Session to the connection
        db.session = scoped_session(sessionmaker(bind=self.connection))

    def tearDown(self):
        # rollback - everything that happened with the
        # Session above (including calls to commit())
        # is rolled back.
        self.trans.rollback()
        db.session.close()
        super(DbTest, self).tearDown()

View File

@ -0,0 +1,4 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

View File

@ -5,4 +5,3 @@ nose==1.3.4
nose2==0.4.7
tox==1.8.0
unittest2==0.5.1