Index each document with a run UUID.

Creates a UUID at the start of a Browbeat run, logs it at the start and
end of the run, and adds it to each document indexed into Elasticsearch.
This should make it easier to remove large runs if they are invalid, or
to display only a single run, in the ELK/EFK stack.

Change-Id: I8bbc1cda522d609cf27bbe88dce1d74a96afaa93
Author: Alex Krzos
Date:   2016-06-17 19:01:14 -04:00
Parent: 7c0d21c32f
Commit: 735138f6af
2 changed files with 7 additions and 1 deletion
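
Because every document from one Browbeat invocation carries the same browbeat_uuid,
a single run can be pulled out of Elasticsearch with one filter. A minimal sketch of
such a query, assuming the elasticsearch-py client, a hypothetical index pattern
"browbeat-*", and a browbeat_uuid field that is searchable as a plain string (none of
these details come from this commit):

    from elasticsearch import Elasticsearch

    # Assumed connection details; substitute the real Elasticsearch host.
    es = Elasticsearch(hosts=["localhost:9200"])

    # Placeholder value; use the "Browbeat UUID" printed in the run log.
    run_uuid = "00000000-0000-0000-0000-000000000000"

    # Fetch documents tagged with that run's UUID.
    resp = es.search(
        index="browbeat-*",  # assumed index pattern
        body={"query": {"match": {"browbeat_uuid": run_uuid}}},
        size=100,
    )
    for hit in resp["hits"]["hits"]:
        print(hit["_id"], hit["_source"].get("cloud_name"))

The same match filter can be used in Kibana to display only that run's documents.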


@@ -11,6 +11,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from lib.Elastic import browbeat_uuid
from lib.PerfKit import PerfKit
from lib.Rally import Rally
from lib.Shaker import Shaker
@@ -28,7 +29,6 @@ _workload_opts = ['perfkit', 'rally', 'shaker']
_config_file = 'browbeat-config.yaml'
debug_log_file = 'log/debug.log'
def _load_config(path, _logger):
try:
stream = open(path, 'r')
@@ -116,6 +116,7 @@ def main():
else:
time_stamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
_logger.info("Browbeat test suite kicked off")
_logger.info("Browbeat UUID: {}".format(browbeat_uuid))
_logger.info("Running workload(s): {}".format(','.join(_cli_args.workloads)))
for wkld_provider in _cli_args.workloads:
if wkld_provider in _config:
@@ -131,6 +132,7 @@
_logger.info("Saved browbeat result summary to {}".format(
os.path.join(result_dir,time_stamp + '.' + 'report')))
WorkloadBase.print_summary()
_logger.info("Browbeat Finished, UUID: {}".format(browbeat_uuid))
if __name__ == '__main__':
sys.exit(main())
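
Tagging every indexed document with this UUID (the change to the Elastic class below)
also makes it possible to discard an invalid run in one request. A minimal sketch,
assuming the elasticsearch-py client against a server/client combination that supports
the delete-by-query API and a hypothetical index name "browbeat" (neither is specified
by this commit; Elasticsearch 2.x clusters need the delete-by-query plugin instead):

    from elasticsearch import Elasticsearch

    es = Elasticsearch(hosts=["localhost:9200"])  # assumed host

    # Placeholder value; use the UUID of the run to discard.
    bad_uuid = "00000000-0000-0000-0000-000000000000"

    # Delete every document tagged with that run's browbeat_uuid.
    es.delete_by_query(
        index="browbeat",  # assumed index name
        body={"query": {"match": {"browbeat_uuid": bad_uuid}}},
    )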


@@ -14,6 +14,9 @@ from elasticsearch import Elasticsearch
import logging
import json
import datetime
import uuid
browbeat_uuid = uuid.uuid4()
class Elastic:
@@ -75,6 +78,7 @@ class Elastic:
"""
def index_result(self, result, _type='result', _id=None):
result['browbeat_uuid'] = browbeat_uuid
result['cloud_name'] = self.config['browbeat']['cloud_name']
return self.es.index(index=self.index,
id=_id,