Merge "Single report for all CSV reports"

Jenkins 2015-03-05 08:57:43 +00:00 committed by Gerrit Code Review
commit 47ff418ef0
5 changed files with 144 additions and 10 deletions

View File

@@ -17,7 +17,13 @@ from datetime import timedelta
from flask import Blueprint
from flask import request
from flask import Response
from flask import send_file
import os
import shutil
from sqlalchemy import distinct
from sqlalchemy import or_
import tempfile
import zipfile
from fuel_analytics.api.app import app
from fuel_analytics.api.app import db
@@ -149,3 +155,80 @@ def oswl_to_csv(resource_type):
            resource_type)
    }
    return Response(result, mimetype='text/csv', headers=headers)
def get_resources_types():
    """Gets all available resource types
    :return: generator of resource type names
    """
    result = db.session.query(distinct(OSWS.resource_type))
    return (row[0] for row in result)

def save_all_reports(tmp_dir):
    """Saves all available CSV reports into a single directory
    :param tmp_dir: path to target directory
    """
    app.logger.debug("Saving all reports to %s", tmp_dir)
    stats_exporter = StatsToCsv()
    oswl_exporter = OswlStatsToCsv()
    resources_types = get_resources_types()
    with open(os.path.join(tmp_dir, 'clusters.csv'), mode='w') as f:
        app.logger.debug("Getting installation structures started")
        inst_structures = get_inst_structures()
        clusters = stats_exporter.export_clusters(inst_structures)
        f.writelines(clusters)
        app.logger.debug("Getting installation structures finished")
    for resource_type in resources_types:
        app.logger.debug("Getting resource '%s' started", resource_type)
        file_name = os.path.join(tmp_dir, '{}.csv'.format(resource_type))
        oswls = get_oswls(resource_type)
        with open(file_name, mode='w') as f:
            resources = oswl_exporter.export(
                resource_type, oswls, get_to_date())
            f.writelines(resources)
        app.logger.debug("Getting resource '%s' finished", resource_type)
    app.logger.debug("All reports saved into %s", tmp_dir)
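For orientation, a minimal usage sketch (hypothetical, not part of the change): save_all_reports writes clusters.csv plus one <resource_type>.csv per type reported by get_resources_types into the target directory.

# Hypothetical sketch; the actual file names depend on the OSWL resource
# types present in the database.
import os
import tempfile

tmp_dir = tempfile.mkdtemp()
with app.test_request_context():       # get_to_date() reads the Flask request
    save_all_reports(tmp_dir)
print(sorted(os.listdir(tmp_dir)))     # e.g. ['clusters.csv', 'vm.csv', ...]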
def archive_dir(dir_path):
    """Archives directory to zip file
    :param dir_path: path to target directory
    :return: ZipFile object
    """
    app.logger.debug("Dir '%s' archiving started", dir_path)
    tmp_file = tempfile.NamedTemporaryFile(delete=False)
    with zipfile.ZipFile(tmp_file, 'w', zipfile.ZIP_DEFLATED) as archive:
        for root, dirs, files in os.walk(dir_path):
            for f in files:
                archive.write(os.path.join(root, f), arcname=f)
    app.logger.debug("Dir '%s' archiving to '%s' finished",
                     dir_path, archive.filename)
    return archive
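One design note, with a hypothetical caller sketch: the backing NamedTemporaryFile is created with delete=False, so the zip outlives archive_dir and the caller is responsible for removing it, as all_reports does below in its finally block.

# Hypothetical caller; archive_dir returns a ZipFile whose .filename points
# at the temporary .zip left on disk.
archive = archive_dir(tmp_dir)
try:
    assert zipfile.is_zipfile(archive.filename)
finally:
    os.unlink(archive.filename)    # the caller owns the temporary file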
@bp.route('/all', methods=['GET'])
def all_reports():
    """Single report for all resource types and clusters info
    :return: zip archive of CSV reports
    """
    app.logger.debug("Handling all_reports get request")
    tmp_dir = tempfile.mkdtemp()
    try:
        save_all_reports(tmp_dir)
        try:
            archive = archive_dir(tmp_dir)
            name = 'reports_from{}_to{}.zip'.format(
                get_from_date(), get_to_date())
            return send_file(archive.filename, mimetype='application/zip',
                             as_attachment=True, attachment_filename=name)
        finally:
            app.logger.debug("Removing temporary archive")
            os.unlink(archive.filename)
    finally:
        app.logger.debug("Removing temporary directory %s", tmp_dir)
        shutil.rmtree(tmp_dir, ignore_errors=True)
    app.logger.debug("Request all_reports handled")
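A hedged client-side sketch: the URL prefix under which bp is registered is not shown in this diff, so the path below assumes the blueprint is mounted at the application root.

# Hypothetical request against the new endpoint via Flask's test client;
# date filtering works through whatever query args get_from_date/get_to_date expect.
with app.test_client() as client:
    resp = client.get('/all')
    assert resp.status_code == 200
    assert resp.mimetype == 'application/zip'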

View File

@@ -287,7 +287,7 @@ class OswlTest(BaseTest):
master_node_uid=oswl.master_node_uid,
creation_date=creation_date,
modification_date=modification_date,
structure='',
structure={},
)
mn_uids.add(oswl.master_node_uid)
yield obj

View File

@@ -17,19 +17,27 @@
from datetime import datetime
from datetime import timedelta
from flask import request
from fuel_analytics.api.common import consts
from fuel_analytics.api.errors import DateExtractionError
import itertools
import mock
import os
import shutil
import tempfile
import zipfile
from fuel_analytics.test.api.resources.utils.oswl_test import OswlTest
from fuel_analytics.test.base import DbTest
from fuel_analytics.api.app import app
from fuel_analytics.api.common import consts
from fuel_analytics.api.errors import DateExtractionError
from fuel_analytics.api.resources.csv_exporter import archive_dir
from fuel_analytics.api.resources.csv_exporter import extract_date
from fuel_analytics.api.resources.csv_exporter import get_from_date
from fuel_analytics.api.resources.csv_exporter import get_inst_structures_query
from fuel_analytics.api.resources.csv_exporter import get_oswls_query
from fuel_analytics.api.resources.csv_exporter import get_resources_types
from fuel_analytics.api.resources.csv_exporter import get_to_date
from fuel_analytics.api.resources.csv_exporter import save_all_reports
class CsvExporterTest(OswlTest, DbTest):
@@ -141,3 +149,46 @@ class CsvExporterTest(OswlTest, DbTest):
            datetime.utcnow().date(),
            datetime.utcnow().date() - timedelta(days=100)).count()
        self.assertEqual(0, count_after)

    def test_get_resources_types(self):
        for resource_type in self.RESOURCE_TYPES:
            self.get_saved_oswls(1, resource_type)
        resources_names = get_resources_types()
        self.assertItemsEqual(self.RESOURCE_TYPES, resources_names)

    def test_save_all_reports(self):
        oswls = []
        for resource_type in self.RESOURCE_TYPES:
            oswls.extend(self.get_saved_oswls(10, resource_type))
        self.get_saved_inst_structs(oswls)
        tmp_dir = tempfile.mkdtemp()
        try:
            with app.test_request_context():
                save_all_reports(tmp_dir)
            files = itertools.chain(('clusters', ), self.RESOURCE_TYPES)
            for f in files:
                path = os.path.join(tmp_dir, '{}.csv'.format(f))
                self.assertTrue(os.path.isfile(path), path)
        finally:
            shutil.rmtree(tmp_dir)

    def test_archive_dir(self):
        oswls = []
        for resource_type in self.RESOURCE_TYPES:
            oswls.extend(self.get_saved_oswls(10, resource_type))
        self.get_saved_inst_structs(oswls)
        tmp_dir = tempfile.mkdtemp()
        try:
            with app.test_request_context():
                save_all_reports(tmp_dir)
            files = itertools.chain(('clusters', ), self.RESOURCE_TYPES)
            for f in files:
                path = os.path.join(tmp_dir, '{}.csv'.format(f))
                self.assertTrue(os.path.isfile(path), path)
            archive = archive_dir(tmp_dir)
            try:
                self.assertTrue(zipfile.is_zipfile(archive.filename))
            finally:
                os.unlink(archive.filename)
        finally:
            shutil.rmtree(tmp_dir)

View File

@@ -29,7 +29,6 @@ from fuel_analytics.test.base import DbTest
from fuel_analytics.api.app import app
from fuel_analytics.api.app import db
from fuel_analytics.api.common import consts
from fuel_analytics.api.db.model import InstallationStructure
from fuel_analytics.api.db.model import OpenStackWorkloadStats
from fuel_analytics.api.resources.csv_exporter import get_oswls
from fuel_analytics.api.resources.csv_exporter import get_oswls_query
@@ -39,12 +38,6 @@ from fuel_analytics.api.resources.utils.oswl_stats_to_csv import OswlStatsToCsv
class OswlStatsToCsvTest(OswlTest, DbTest):

    def setUp(self):
        super(OswlTest, self).setUp()
        OpenStackWorkloadStats.query.delete()
        InstallationStructure.query.delete()
        db.session.commit()

    def test_get_keys_paths(self):
        for resource_type in self.RESOURCE_TYPES:
            exporter = OswlStatsToCsv()

View File

@@ -18,6 +18,8 @@ from unittest2.case import TestCase
from fuel_analytics.api.app import app
from fuel_analytics.api.app import db
from fuel_analytics.api.db.model import InstallationStructure
from fuel_analytics.api.db.model import OpenStackWorkloadStats
from fuel_analytics.api.log import init_logger
# Configuring app for the test environment
@@ -51,6 +53,11 @@ class DbTest(BaseTest):
        # bind an individual Session to the connection
        db.session = scoped_session(sessionmaker(bind=self.connection))

        # Cleaning DB
        OpenStackWorkloadStats.query.delete()
        InstallationStructure.query.delete()
        db.session.commit()

    def tearDown(self):
        # rollback - everything that happened with the
        # Session above (including calls to commit())