Merge "Remove 'fill(0)' and fix influx time offset"

This commit is contained in:
Jenkins 2016-10-22 01:59:45 +00:00 committed by Gerrit Code Review
commit 62f2380731
4 changed files with 23 additions and 16 deletions

View File

@@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2014, 2016 Hewlett-Packard Development Company, L.P. * (C) Copyright 2014, 2016 Hewlett Packard Enterprise Development LP
* *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at * in compliance with the License. You may obtain a copy of the License at

View File

@@ -1,5 +1,5 @@
/* /*
* (C) Copyright 2014, 2016 Hewlett-Packard Development LP * (C) Copyright 2014, 2016 Hewlett Packard Enterprise Development LP
* *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at * in compliance with the License. You may obtain a copy of the License at
@@ -79,8 +79,9 @@ public class InfluxV9StatisticRepo implements StatisticRepo {
int indexOfUnderscore = offset.indexOf('_'); int indexOfUnderscore = offset.indexOf('_');
if (indexOfUnderscore > -1) { if (indexOfUnderscore > -1) {
offsetTimePart = offset.substring(indexOfUnderscore + 1); offsetTimePart = offset.substring(indexOfUnderscore + 1);
// Add the period to the offset to ensure only the next group of points are returned // Add the period minus one millisecond to the offset
DateTime offsetDateTime = DateTime.parse(offsetTimePart).plusSeconds(period); // to ensure only the next group of points are returned
DateTime offsetDateTime = DateTime.parse(offsetTimePart).plusSeconds(period).minusMillis(1);
// leave out any ID, as influx doesn't understand it // leave out any ID, as influx doesn't understand it
offset = offsetDateTime.toString(); offset = offsetDateTime.toString();
} }
@@ -196,6 +197,8 @@ public class InfluxV9StatisticRepo implements StatisticRepo {
} }
List<Object> values = buildValsList(valueObjects); List<Object> values = buildValsList(valueObjects);
if (values == null)
continue;
if (((String) values.get(0)).compareTo(offsetTimestamp) >= 0 || index > offsetId) { if (((String) values.get(0)).compareTo(offsetTimestamp) >= 0 || index > offsetId) {
statistics.addMeasurement(values); statistics.addMeasurement(values);
@@ -229,9 +232,13 @@ public class InfluxV9StatisticRepo implements StatisticRepo {
else else
valObjArryList.add(timestamp); valObjArryList.add(timestamp);
// All other values are doubles. // All other values are doubles or nulls.
for (int i = 1; i < values.length; ++i) { for (int i = 1; i < values.length; ++i) {
if (values[i] != null) {
valObjArryList.add(Double.parseDouble((String) values[i])); valObjArryList.add(Double.parseDouble((String) values[i]));
} else {
return null;
}
} }
return valObjArryList; return valObjArryList;

View File

@@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2015,2016 Hewlett Packard Enterprise Development Company, L.P. * (C) Copyright 2015,2016 Hewlett Packard Enterprise Development LP
* *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at * in compliance with the License. You may obtain a copy of the License at
@@ -255,14 +255,14 @@ public class InfluxV9Utils {
public String periodPartWithGroupBy(int period) { public String periodPartWithGroupBy(int period) {
return period > 0 ? String.format(" group by time(%1$ds), * fill(0)", period) return period > 0 ? String.format(" group by time(%1$ds), *", period)
: " group by time(300s), * fill(0)"; : " group by time(300s), *";
} }
public String periodPart(int period) { public String periodPart(int period) {
return period > 0 ? String.format(" group by time(%1$ds) fill(0)", period) return period > 0 ? String.format(" group by time(%1$ds)", period)
: " group by time(300s) fill(0)"; : " group by time(300s)";
} }
Map<String, String> filterPrivateTags(Map<String, String> tagMap) { Map<String, String> filterPrivateTags(Map<String, String> tagMap) {

View File

@@ -181,19 +181,19 @@ class TestStatistics(base.BaseMonascaTest):
start_timestamp = int(time.time() * 1000) start_timestamp = int(time.time() * 1000)
name = data_utils.rand_name() name = data_utils.rand_name()
metric = [ metric = [
helpers.create_metric(name=name, timestamp=start_timestamp + 0, helpers.create_metric(name=name, timestamp=start_timestamp + 1,
dimensions={'key1': 'value-1', dimensions={'key1': 'value-1',
'key2': 'value-1'}, 'key2': 'value-1'},
value=1), value=1),
helpers.create_metric(name=name, timestamp=start_timestamp + 1000, helpers.create_metric(name=name, timestamp=start_timestamp + 1001,
dimensions={'key1': 'value-2', dimensions={'key1': 'value-2',
'key2': 'value-2'}, 'key2': 'value-2'},
value=2), value=2),
helpers.create_metric(name=name, timestamp=start_timestamp + 2000, helpers.create_metric(name=name, timestamp=start_timestamp + 2001,
dimensions={'key1': 'value-3', dimensions={'key1': 'value-3',
'key2': 'value-3'}, 'key2': 'value-3'},
value=3), value=3),
helpers.create_metric(name=name, timestamp=start_timestamp + 3000, helpers.create_metric(name=name, timestamp=start_timestamp + 3001,
dimensions={'key1': 'value-4', dimensions={'key1': 'value-4',
'key2': 'value-4'}, 'key2': 'value-4'},
value=4) value=4)
@@ -213,7 +213,7 @@ class TestStatistics(base.BaseMonascaTest):
self._check_timeout(i, constants.MAX_RETRIES, elements, num_metrics) self._check_timeout(i, constants.MAX_RETRIES, elements, num_metrics)
start_time = helpers.timestamp_to_iso(start_timestamp) start_time = helpers.timestamp_to_iso(start_timestamp)
end_timestamp = start_timestamp + 4000 end_timestamp = start_timestamp + 4001
end_time = helpers.timestamp_to_iso(end_timestamp) end_time = helpers.timestamp_to_iso(end_timestamp)
query_parms = '?name=' + name + '&merge_metrics=true&statistics=avg' \ query_parms = '?name=' + name + '&merge_metrics=true&statistics=avg' \
+ '&start_time=' + str(start_time) + '&end_time=' + \ + '&start_time=' + str(start_time) + '&end_time=' + \