JSON exporters for stats added

For system test purposes, HTTP handlers for fetching stats
have been added to analytics.
Implemented OSWL exporters:
- /api/v1/json/oswls/<master_node_uid>
- /api/v1/json/oswls/<master_node_uid>/<resource_type>
Implemented installation info exporter:
- /api/v1/json/installation_info/<master_node_uid>
Implemented action logs exporter:
- /api/v1/json/action_logs/<master_node_uid>
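
A minimal usage sketch of the new endpoints (assumptions for illustration only:
the analytics service listens on http://localhost:5000, the requests library is
available, and 'vm' is used as an example resource type):

    import requests

    BASE = 'http://localhost:5000/api/v1/json'  # hypothetical base URL
    uid = 'some-master-node-uid'                # hypothetical master node uid

    installation_info = requests.get(BASE + '/installation_info/' + uid).json()
    oswls = requests.get(BASE + '/oswls/' + uid).json()
    vm_oswls = requests.get(BASE + '/oswls/' + uid + '/vm').json()
    action_logs = requests.get(BASE + '/action_logs/' + uid).json()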

Change-Id: I746b43c387c90b20c70d5160d92859b4b44998c1
Closes-Bug: #1438228
Alexander Kislitsky 2015-04-02 17:01:34 +03:00
parent 0f91a26666
commit 74a4fec297
5 changed files with 333 additions and 5 deletions


@@ -18,16 +18,24 @@ import flask_sqlalchemy
import six
from fuel_analytics.api.errors import DateExtractionError
from sqlalchemy.orm.exc import NoResultFound
app = Flask(__name__)
db = flask_sqlalchemy.SQLAlchemy(app)
# Registering blueprints
from fuel_analytics.api.resources.csv_exporter import bp as csv_exporter_bp
from fuel_analytics.api.resources.json_exporter import bp as json_exporter_bp
app.register_blueprint(csv_exporter_bp, url_prefix='/api/v1/csv')
app.register_blueprint(json_exporter_bp, url_prefix='/api/v1/json')
@app.errorhandler(DateExtractionError)
def date_parsing_error(error):
return make_response(six.text_type(error), 400)
@app.errorhandler(NoResultFound)
def db_object_not_found(error):
return make_response(six.text_type(error), 404)
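
The new error handlers map lookup and parsing failures onto HTTP status codes:
DateExtractionError becomes 400 and NoResultFound becomes 404. A quick sketch
with the Flask test client (assuming a configured database with no record for
the requested uid, as exercised by the tests added below):

    from fuel_analytics.api.app import app

    client = app.test_client()
    resp = client.get('/api/v1/json/installation_info/unknown-uid')
    # the NoResultFound raised by query(...).one() is converted into a 404
    assert resp.status_code == 404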


@@ -31,6 +31,7 @@ class Production(object):
'postgresql://collector:*****@localhost/collector'
CSV_DEFAULT_FROM_DATE_DAYS = 90
CSV_DB_YIELD_PER = 1000
JSON_DB_DEFAULT_LIMIT = 1000
# Number of attachments included into volumes CSV report
CSV_VOLUME_ATTACHMENTS_NUM = 1


@@ -0,0 +1,136 @@
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from flask import Blueprint
from flask import request
from flask import Response
import json
import six
from sqlalchemy import and_
from fuel_analytics.api.app import app
from fuel_analytics.api.app import db
from fuel_analytics.api.db.model import ActionLog as AL
from fuel_analytics.api.db.model import InstallationStructure as IS
from fuel_analytics.api.db.model import OpenStackWorkloadStats as OSWL
bp = Blueprint('dto', __name__)
def row_as_dict(row):
return {c.name: six.text_type(getattr(row, c.name))
for c in row.__table__.columns}
def get_dict_param(name):
# Only dict values are honoured (request.args is substituted with a dict
# in the tests); any other value falls back to an empty dict.
params = request.args.get(name)
if not isinstance(params, dict):
params = {}
return params
def get_paging_params():
params = get_dict_param('paging_params')
return {
'limit': params.get('limit', app.config['JSON_DB_DEFAULT_LIMIT']),
'offset': params.get('offset', 0)
}
@bp.route('/installation_info/<master_node_uid>', methods=['GET'])
def get_installation_info(master_node_uid):
app.logger.debug("Fetching installation info for: %s", master_node_uid)
result = db.session.query(IS).filter(
IS.master_node_uid == master_node_uid).one()
dict_result = row_as_dict(result)
app.logger.debug("Installation info for: %s fetched", master_node_uid)
return Response(json.dumps(dict_result), mimetype='application/json')
def _get_db_objs_data(model, sql_clauses, order_by, paging_params):
"""Gets DB objects by sql_clauses
:param model: DB model
:param sql_clauses: collection of clauses for selecting DB objects
:param order_by: tuple of orderings for DB objects
:param paging_params: dictionary with limit, offset values
:return: generator on dicts of DB objects data
"""
query = db.session.query(model).filter(and_(*sql_clauses))
for order in order_by:
query = query.order_by(order)
result = query.limit(paging_params['limit']).\
offset(paging_params['offset']).all()
return (row_as_dict(obj) for obj in result)
def _jsonify_collection(collection_iter):
"""Jsonifyes collection. Used for streaming
list of jsons into Flask application response
:param collection_iter: iterator on input collection
:return: generator on chunks of jsonifyed result
"""
yield '['
try:
yield json.dumps(next(collection_iter))
while True:
d = next(collection_iter)
yield ', {}'.format(json.dumps(d))
except StopIteration:
pass
finally:
yield ']'
@bp.route('/oswls/<master_node_uid>', methods=['GET'])
def get_oswls(master_node_uid):
paging_params = get_paging_params()
app.logger.debug("Fetching oswl info for: %s, paging prams: %s",
master_node_uid, paging_params)
sql_clauses = (OSWL.master_node_uid == master_node_uid,)
oswls_data = _get_db_objs_data(OSWL, sql_clauses,
(OSWL.id.asc(),), paging_params)
jsons_data = _jsonify_collection(oswls_data)
app.logger.debug("Oswl info for: %s, paging params: %s fetched",
master_node_uid, paging_params)
return Response(jsons_data, mimetype='application/json')
@bp.route('/oswls/<master_node_uid>/<resource_type>', methods=['GET'])
def get_oswls_by_resource_type(master_node_uid, resource_type):
paging_params = get_paging_params()
app.logger.debug("Fetching oswl info for: %s, %s, paging params: %s",
master_node_uid, resource_type, paging_params)
sql_clauses = (OSWL.master_node_uid == master_node_uid,
OSWL.resource_type == resource_type)
oswls_data = _get_db_objs_data(
OSWL, sql_clauses, (OSWL.id.asc(), OSWL.resource_type.asc()),
paging_params)
jsons_data = _jsonify_collection(oswls_data)
app.logger.debug("Oswl info for: %s, %s, paging prams: %s fetched",
master_node_uid, resource_type, paging_params)
return Response(jsons_data, mimetype='application/json')
@bp.route('/action_logs/<master_node_uid>', methods=['GET'])
def get_action_logs(master_node_uid):
paging_params = get_paging_params()
app.logger.debug("Fetching action_logs for: %s, paging params: %s",
master_node_uid, paging_params)
sql_clauses = (AL.master_node_uid == master_node_uid,)
action_logs_data = _get_db_objs_data(AL, sql_clauses,
(AL.id.asc(),), paging_params)
jsons_data = _jsonify_collection(action_logs_data)
app.logger.debug("Action_logs for: %s, paging params: %s fetched",
master_node_uid, paging_params)
return Response(jsons_data, mimetype='application/json')
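
For reference, the streaming behaviour of _jsonify_collection can be checked
directly (an illustrative snippet, not part of the committed code):

    from fuel_analytics.api.resources import json_exporter

    chunks = json_exporter._jsonify_collection(iter([{'a': 1}, {'b': 2}]))
    print(''.join(chunks))  # prints: [{"a": 1}, {"b": 2}]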


@@ -23,6 +23,7 @@ import uuid
from fuel_analytics.test.base import BaseTest
from fuel_analytics.api.app import db
from fuel_analytics.api.db.model import ActionLog
from fuel_analytics.api.db.model import InstallationStructure
@@ -169,11 +170,56 @@ class InstStructureTest(BaseTest):
)
yield obj
- def get_saved_inst_structures(self, *args, **kwargs):
- inst_structs = self.generate_inst_structures(*args, **kwargs)
+ def _get_saved_objs(self, generator_func, *args, **kwargs):
+ objs = generator_func(*args, **kwargs)
result = []
- for inst_struct in inst_structs:
- db.session.add(inst_struct)
- result.append(inst_struct)
+ for obj in objs:
+ db.session.add(obj)
+ result.append(obj)
db.session.commit()
return result
+ def get_saved_inst_structures(self, *args, **kwargs):
+ return self._get_saved_objs(self.generate_inst_structures,
+ *args, **kwargs)
def generate_action_logs(
self, inst_structures, num_per_struct_range=(1, 100),
action_types=('nailgun_task',),
action_groups=('cluster_changes', 'cluster_checking',
'operations'),
action_names=('deploy', 'deployment', 'provision',
'stop_deployment', 'reset_environment',
'update', 'node_deletion', 'cluster_deletion',
'check_before_deployment', 'check_networks',
'verify_networks')):
for struct in inst_structures:
for idx in six.moves.range(random.randint(*num_per_struct_range)):
action_type = random.choice(action_types)
action_name = random.choice(action_names)
body = {
"id": idx,
"actor_id": six.text_type(uuid.uuid4()),
"action_group": random.choice(action_groups),
"action_name": random.choice(action_names),
"action_type": action_type,
"start_timestamp": datetime.utcnow().isoformat(),
"end_timestamp": datetime.utcnow().isoformat(),
"additional_info": {
"parent_task_id": None,
"subtasks_ids": [],
"operation": action_name
},
"is_sent": False,
"cluster_id": idx
}
obj = ActionLog(
master_node_uid=struct.master_node_uid,
external_id=idx,
body=body
)
yield obj
def get_saved_action_logs(self, *args, **kwargs):
return self._get_saved_objs(self.generate_action_logs,
*args, **kwargs)


@@ -0,0 +1,137 @@
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from flask import request
import json
import mock
import six
from fuel_analytics.test.api.resources.utils.inst_structure_test import \
InstStructureTest
from fuel_analytics.test.api.resources.utils.oswl_test import \
OswlTest
from fuel_analytics.test.base import DbTest
from fuel_analytics.api.app import app
from fuel_analytics.api.resources import json_exporter
class JsonExporterTest(InstStructureTest, OswlTest, DbTest):
def test_jsonify_collection(self):
variants = [[], [{}], [{'a': 'b'}, {'c': 'd'}]]
for variant in variants:
it = iter(variant)
jsonified = six.text_type(''.join(
json_exporter._jsonify_collection(it)))
restored = json.loads(jsonified)
self.assertItemsEqual(variant, restored)
def test_get_installation_info_not_found(self):
with app.test_request_context():
resp = self.client.get('/api/v1/json/installation_info/xxxx')
self.check_response_error(resp, 404)
def test_get_installation_info(self):
structs = self.get_saved_inst_structures(installations_num=10)
with app.test_request_context():
for struct in structs:
url = '/api/v1/json/installation_info/{}'.format(
struct.master_node_uid)
resp = self.client.get(url)
self.check_response_ok(resp)
# Checking response is json
json.loads(resp.data)
def test_get_oswls(self):
num = 10
for resource_type in self.RESOURCE_TYPES:
oswls = self.get_saved_oswls(num, resource_type)
structs = self.get_saved_inst_structs(oswls)
with app.test_request_context():
for struct in structs:
url = '/api/v1/json/oswls/{}'.format(
struct.master_node_uid)
resp = self.client.get(url)
self.check_response_ok(resp)
# Checking response is json
json.loads(resp.data)
def test_get_oswls_by_resource_type(self):
num = 10
for resource_type in self.RESOURCE_TYPES:
oswls = self.get_saved_oswls(num, resource_type)
structs = self.get_saved_inst_structs(oswls)
with app.test_request_context():
for struct in structs:
url = '/api/v1/json/oswls/{}/{}'.format(
struct.master_node_uid, resource_type)
resp = self.client.get(url)
self.check_response_ok(resp)
# Checking response is json
json.loads(resp.data)
def test_get_action_logs(self):
structs = self.get_saved_inst_structures(installations_num=10)
self.get_saved_action_logs(structs)
with app.test_request_context():
for struct in structs:
url = '/api/v1/json/action_logs/{}'.format(
struct.master_node_uid)
resp = self.client.get(url)
self.check_response_ok(resp)
# Checking response is json
json.loads(resp.data)
def test_get_dict_param(self):
# Pairs of param_name, param_value, expected
name = 'param_name'
variants = (
('wrong_name', {}, {}),
(name, {}, {}), (name, None, {}), (name, 'a', {}),
(name, 1, {}), (name, [], {}), (name, (), {}),
(name, {'a': 'b'}, {'a': 'b'})
)
with app.test_request_context():
for param_name, param_value, expected in variants:
with mock.patch.object(request, 'args',
{param_name: param_value}):
self.assertDictEqual(
json_exporter.get_dict_param(name),
expected
)
def test_get_paging_params(self):
name = 'paging_params'
limit_default = app.config.get('JSON_DB_DEFAULT_LIMIT')
variants = (
(name, {}, {'limit': limit_default, 'offset': 0}),
(name, [], {'limit': limit_default, 'offset': 0}),
(name, 4, {'limit': limit_default, 'offset': 0}),
('wrong_name', 4, {'limit': limit_default, 'offset': 0}),
(name, {'trash': 'x'}, {'limit': limit_default, 'offset': 0}),
(name, {'limit': limit_default + 1}, {'limit': limit_default + 1,
'offset': 0}),
(name, {'limit': limit_default + 1, 'offset': 50},
{'limit': limit_default + 1, 'offset': 50}),
)
with app.test_request_context():
for param_name, param_value, expected in variants:
with mock.patch.object(request, 'args',
{param_name: param_value}):
self.assertDictEqual(
json_exporter.get_paging_params(),
expected
)