Merge "Unify and fix `list_traces` function"

Zuul 2018-01-24 06:16:42 +00:00 committed by Gerrit Code Review
commit ab01236a69
6 changed files with 65 additions and 24 deletions

View File

@@ -168,7 +168,7 @@ class TraceCommands(BaseCommand):
fields = ("base_id", "timestamp")
pretty_table = prettytable.PrettyTable(fields)
pretty_table.align = "l"
traces = engine.list_traces({}, fields)
traces = engine.list_traces(fields)
for trace in traces:
row = [trace[field] for field in fields]
pretty_table.add_row(row)

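The call site above now passes just the field list to the unified API. As a standalone illustration (not part of the commit, with invented trace data standing in for a real engine.list_traces(fields) result), the same rendering loop looks like this:

import prettytable

# Invented sample data; the real command gets this list from
# engine.list_traces(fields).
fields = ("base_id", "timestamp")
traces = [{"base_id": "8d28af1e-acc0-498c-9890-fe1fe8f63d21",
           "timestamp": "2018-01-24T06:16:42"}]

pretty_table = prettytable.PrettyTable(fields)
pretty_table.align = "l"
for trace in traces:
    pretty_table.add_row([trace[field] for field in fields])
print(pretty_table)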
View File

@@ -53,6 +53,8 @@ class Driver(object):
and implemented by any class derived from this class.
"""
default_trace_fields = {"base_id", "timestamp"}
def __init__(self, connection_str, project=None, service=None, host=None):
self.connection_str = connection_str
self.project = project
@@ -101,11 +103,13 @@ class Driver(object):
"""Returns backend specific name for the driver."""
return cls.__name__
def list_traces(self, query, fields):
"""Returns array of all base_id fields that match the given criteria
def list_traces(self, fields=None):
"""Query all traces from the storage.
:param query: dict that specifies the query criteria
:param fields: iterable of strings that specifies the output fields
:param fields: Set of trace fields to return. Defaults to 'base_id'
and 'timestamp'
:return List of traces, where each trace is a dictionary containing
at least `base_id` and `timestamp`.
"""
raise NotImplementedError("{0}: This method is either not supported "
"or has to be overridden".format(

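The base class now carries both the shared default_trace_fields set and the signature every backend has to honour. A minimal sketch of a conforming backend (the InMemoryDriver class and its event list are hypothetical, shown only to illustrate the contract):

class InMemoryDriver(object):
    """Hypothetical backend illustrating the unified list_traces contract."""

    default_trace_fields = {"base_id", "timestamp"}

    def __init__(self):
        self.events = []  # one dict per notification, like the real drivers

    def notify(self, info):
        self.events.append(info)

    def list_traces(self, fields=None):
        fields = set(fields or self.default_trace_fields)
        traces = {}
        for event in sorted(self.events, key=lambda e: e["timestamp"]):
            # Keep only the earliest event per base_id, trimmed to `fields`.
            traces.setdefault(event["base_id"],
                              {k: v for k, v in event.items() if k in fields})
        return list(traces.values())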
View File

@@ -90,15 +90,14 @@ class ElasticsearchDriver(base.Driver):
return result
def list_traces(self, query={"match_all": {}}, fields=[]):
def list_traces(self, fields=None):
"""Returns array of all base_id fields that match the given criteria
:param query: dict that specifies the query criteria
:param fields: iterable of strings that specifies the output fields
"""
for base_field in ["base_id", "timestamp"]:
if base_field not in fields:
fields.append(base_field)
query = {"match_all": {}}
fields = set(fields or self.default_trace_fields)
response = self.client.search(index=self.index_name,
doc_type=self.conf.profiler.es_doc_type,

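The interesting line in this hunk is fields = set(fields or self.default_trace_fields): None and an empty iterable both fall back to the defaults, while any explicit selection is deduplicated into a set. A tiny standalone check of that idiom (the effective_fields helper is made up for illustration):

default_trace_fields = {"base_id", "timestamp"}

def effective_fields(fields=None):
    # Falsy input (None, [], ()) selects the defaults; anything else is
    # turned into a set so duplicate field names collapse.
    return set(fields or default_trace_fields)

assert effective_fields() == {"base_id", "timestamp"}
assert effective_fields([]) == {"base_id", "timestamp"}
assert effective_fields(["base_id", "base_id", "project"]) == {"base_id", "project"}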
View File

@@ -60,13 +60,16 @@ class MongoDB(base.Driver):
data["service"] = self.service
self.db.profiler.insert_one(data)
def list_traces(self, query, fields=[]):
"""Returns array of all base_id fields that match the given criteria
def list_traces(self, fields=None):
"""Query all traces from the storage.
:param query: dict that specifies the query criteria
:param fields: iterable of strings that specifies the output fields
:param fields: Set of trace fields to return. Defaults to 'base_id'
and 'timestamp'
:return List of traces, where each trace is a dictionary containing
at least `base_id` and `timestamp`.
"""
ids = self.db.profiler.find(query).distinct("base_id")
fields = set(fields or self.default_trace_fields)
ids = self.db.profiler.find({}).distinct("base_id")
out_format = {"base_id": 1, "timestamp": 1, "_id": 0}
out_format.update({i: 1 for i in fields})
return [self.db.profiler.find(

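The MongoDB approach is two-step: collect the distinct base_id values, then fetch the earliest event for each one, projected down to the requested fields. A hedged sketch of the same queries with pymongo (the connection URL, database and collection names are assumptions; the real driver derives them from its connection string):

from pymongo import MongoClient

# Assumed connection details for illustration only.
profiler = MongoClient("mongodb://localhost:27017")["osprofiler"]["profiler"]

fields = {"base_id", "timestamp"}
projection = {"base_id": 1, "timestamp": 1, "_id": 0}
projection.update({f: 1 for f in fields})

# One summary document per trace: the earliest event for each base_id.
base_ids = profiler.find({}).distinct("base_id")
traces = [profiler.find({"base_id": i}, projection).sort("timestamp")[0]
          for i in base_ids]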
View File

@@ -70,21 +70,29 @@ class Redis(base.Driver):
data["timestamp"]
self.db.set(key, jsonutils.dumps(data))
def list_traces(self, query="*", fields=[]):
"""Returns array of all base_id fields that match the given criteria
def list_traces(self, fields=None):
"""Query all traces from the storage.
:param query: string that specifies the query criteria
:param fields: iterable of strings that specifies the output fields
:param fields: Set of trace fields to return. Defaults to 'base_id'
and 'timestamp'
:return List of traces, where each trace is a dictionary containing
at least `base_id` and `timestamp`.
"""
for base_field in ["base_id", "timestamp"]:
if base_field not in fields:
fields.append(base_field)
ids = self.db.scan_iter(match=self.namespace + query)
fields = set(fields or self.default_trace_fields)
# With current schema every event is stored under its own unique key
# To query all traces we first need to get all keys, then
# get all events, sort them and pick up only the first one
ids = self.db.scan_iter(match=self.namespace + "*")
traces = [jsonutils.loads(self.db.get(i)) for i in ids]
traces.sort(key=lambda x: x["timestamp"])
seen_ids = set()
result = []
for trace in traces:
result.append({key: value for key, value in trace.iteritems()
if key in fields})
if trace["base_id"] not in seen_ids:
seen_ids.add(trace["base_id"])
result.append({key: value for key, value in trace.items()
if key in fields})
return result
def get_report(self, base_id):

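Because every event lives under its own Redis key, the driver rebuilds the per-trace view itself: sort all events by timestamp, then keep the first event seen for each base_id. The same selection reduced to plain Python, over made-up events (not part of the commit):

# Made-up events standing in for jsonutils.loads(self.db.get(i)) results.
events = [
    {"base_id": "b1", "timestamp": "2018-01-24T06:16:44", "name": "db-stop"},
    {"base_id": "b1", "timestamp": "2018-01-24T06:16:42", "name": "db-start"},
    {"base_id": "b2", "timestamp": "2018-01-24T06:16:43", "name": "wsgi-start"},
]
fields = {"base_id", "timestamp"}

events.sort(key=lambda e: e["timestamp"])
seen_ids, result = set(), []
for event in events:
    if event["base_id"] not in seen_ids:   # earliest event per trace wins
        seen_ids.add(event["base_id"])
        result.append({k: v for k, v in event.items() if k in fields})
# result holds one dict per base_id, trimmed to the requested fields.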
View File

@@ -126,3 +126,30 @@ class RedisDriverTestCase(DriverTestCase):
enabled=True,
trace_sqlalchemy=False,
hmac_keys="SECRET_KEY")
def test_list_traces(self):
# initialize profiler notifier (the same way as in services)
initializer.init_from_conf(
CONF, {}, self.PROJECT, self.SERVICE, "host")
profiler.init("SECRET_KEY")
# grab base_id
base_id = profiler.get().get_base_id()
# execute profiled code
foo = Foo()
foo.bar(1)
# instantiate report engine (the same way as in osprofiler CLI)
engine = base.get_driver(CONF.profiler.connection_string,
project=self.PROJECT,
service=self.SERVICE,
host="host",
conf=CONF)
# generate the report
traces = engine.list_traces()
LOG.debug("Collected traces: %s", traces)
# ensure trace with base_id is in the list of traces
self.assertIn(base_id, [t["base_id"] for t in traces])
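Outside the test harness, the same flow is only a few lines. A hedged sketch (the connection string, project and service names are placeholders, and the Redis driver is assumed to supply sensible defaults for any options not passed here):

from osprofiler.drivers import base

engine = base.get_driver("redis://localhost:6379",
                         project="demo", service="api", host="host")
for trace in engine.list_traces():
    # Each trace is a dict with at least base_id and timestamp.
    print(trace["base_id"], trace["timestamp"])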