fix migration from v1.3 to v2.0

The previous tests never really tested migration: they just took a v2.0
TimeSerieArchive and converted it to a v2.0 AggregatedTimeSerie. This
change makes the tests take a v1.3 TimeSerieArchive and convert it to
a v2.0 AggregatedTimeSerie.

Related-Bug: #1548367

Change-Id: I8eea10912517b1b0a77ad7c1a3c9037d00bf1960
gordon chung 2016-02-24 22:56:52 -05:00
parent cff62330c7
commit 6ca313dec3
2 changed files with 38 additions and 26 deletions
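
For context, the two serialization formats that AggregatedTimeSerie.from_dict() now has to tell apart look roughly like this. The key names come from the patch below; the concrete values, units and surrounding schema are illustrative assumptions, not gnocchi's authoritative on-disk format.

# Sketch only: approximate shapes of the two serialized dicts.

# v1.3 (TimeSerieArchive era): a plain {nanosecond timestamp: value} mapping,
# as reproduced by the _to_dict_v1_3 test helper further down.
v1_3_dict = {
    'values': {1388577600000000000: 4.0,
               1388577604000000000: 5.0},
    'aggregation_method': 'mean',
    'max_size': 10,
    'sampling': '300000000000N',
}

# v2.0 (AggregatedTimeSerie): a first timestamp plus sampling-relative deltas.
v2_0_dict = {
    'first_timestamp': 1388577600.0,  # seconds; from_dict() scales it by 10e8
    'timestamps': [0, 1, 2],          # gaps between points, in units of 'sampling'
    'values': [4.0, 5.0, 6.0],
    'sampling': 300,                  # seconds
    'aggregation_method': 'mean',
    'max_size': 10,
}

# The patched from_dict() keys off the presence of 'first_timestamp' to pick
# the decoding path; only the v1.3 shape needs migrating.
assert 'first_timestamp' in v2_0_dict and 'first_timestamp' not in v1_3_dict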

gnocchi/carbonara.py

@@ -361,12 +361,17 @@ class AggregatedTimeSerie(TimeSerie):
         :returns: A TimeSerie object
         """
         sampling = d.get('sampling')
-        prev_timestamp = pandas.Timestamp(d.get('first_timestamp') * 10e8)
-        timestamps = []
-        for delta in d.get('timestamps'):
-            prev_timestamp = datetime.timedelta(
-                seconds=delta * sampling) + prev_timestamp
-            timestamps.append(prev_timestamp)
+        if 'first_timestamp' in d:
+            prev_timestamp = pandas.Timestamp(d.get('first_timestamp') * 10e8)
+            timestamps = []
+            for delta in d.get('timestamps'):
+                prev_timestamp = datetime.timedelta(
+                    seconds=delta * sampling) + prev_timestamp
+                timestamps.append(prev_timestamp)
+        else:
+            # migrate from v1.3, remove with TimeSerieArchive
+            timestamps, d['values'] = (
+                cls._timestamps_and_values_from_dict(d['values']))
         return cls.from_data(
             timestamps=timestamps,
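
The migration branch above hands the old payload to cls._timestamps_and_values_from_dict(), which sits outside this hunk. A minimal sketch of what that conversion has to do, assuming the v1.3 'values' payload is the {nanosecond timestamp: value} mapping built by the test helper below (the real method's name is taken from the hunk, but its exact signature and return types are not shown here and may differ):

import pandas


def timestamps_and_values_from_v1_3(values):
    # Sketch: split a v1.3 {ns_timestamp: value} dict into two time-ordered
    # columns suitable for from_data(timestamps=..., values=...).
    if not values:
        return (), ()
    items = sorted(values.items())  # order by timestamp
    stamps, vals = zip(*items)
    # pandas treats the integer keys as nanoseconds since the epoch.
    return pandas.to_datetime(list(stamps), unit='ns'), list(vals)


# timestamps_and_values_from_v1_3({1388577600000000000: 4.0})
# -> (DatetimeIndex for 2014-01-01 12:00:00 UTC, [4.0])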

gnocchi/tests/test_storage.py

@@ -17,6 +17,8 @@ import datetime
 import uuid
 
 import mock
+import pandas
+import six
 
 from gnocchi import carbonara
 from gnocchi import storage
@@ -25,6 +27,18 @@ from gnocchi.tests import base as tests_base
 from gnocchi import utils
 
 
+def _to_dict_v1_3(self):
+    d = {'values': dict((timestamp.value, float(v))
+                        for timestamp, v
+                        in six.iteritems(self.ts.dropna()))}
+    sampling = pandas.tseries.offsets.Nano(self.sampling * 10e8)
+    d.update({
+        'aggregation_method': self.aggregation_method,
+        'max_size': self.max_size,
+        'sampling': six.text_type(sampling.n) + sampling.rule_code})
+    return d
+
+
 class TestCarbonaraMigration(tests_base.TestCase):
     def setUp(self):
         super(TestCarbonaraMigration, self).setUp()
@@ -54,15 +68,20 @@ class TestCarbonaraMigration(tests_base.TestCase):
         self.storage._create_metric(self.metric)
-        self.storage._store_metric_archive(
-            self.metric,
-            archive.agg_timeseries[0].aggregation_method,
-            archive.serialize())
+        # serialise in old format
+        with mock.patch('gnocchi.carbonara.AggregatedTimeSerie.to_dict',
+                        autospec=True) as f:
+            f.side_effect = _to_dict_v1_3
-        self.storage._store_metric_archive(
-            self.metric,
-            archive_max.agg_timeseries[0].aggregation_method,
-            archive_max.serialize())
+            self.storage._store_metric_archive(
+                self.metric,
+                archive.agg_timeseries[0].aggregation_method,
+                archive.serialize())
+            self.storage._store_metric_archive(
+                self.metric,
+                archive_max.agg_timeseries[0].aggregation_method,
+                archive_max.serialize())
 
     def upgrade(self):
         with mock.patch.object(self.index, 'list_metrics') as f:
@@ -70,18 +89,6 @@ class TestCarbonaraMigration(tests_base.TestCase):
            self.storage.upgrade(self.index)

    def test_get_measures(self):
        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400, 5),
            (utils.datetime_utc(2014, 1, 1, 12), 3600, 5),
            (utils.datetime_utc(2014, 1, 1, 12), 300, 5)
        ], self.storage.get_measures(self.metric))
        self.assertEqual([
            (utils.datetime_utc(2014, 1, 1), 86400, 6),
            (utils.datetime_utc(2014, 1, 1, 12), 3600, 6),
            (utils.datetime_utc(2014, 1, 1, 12), 300, 6)
        ], self.storage.get_measures(self.metric, aggregation='max'))

        # This is to make gordc safer
        self.assertIsNotNone(self.storage._get_metric_archive(
            self.metric, "mean"))
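
The test forces the old on-disk format by patching AggregatedTimeSerie.to_dict with autospec=True and pointing its side_effect at the module-level _to_dict_v1_3 helper; because the patch is autospecced, mock passes the instance as the first argument, which is why the helper takes self. A stripped-down illustration of that pattern (the classes below are stand-ins, not gnocchi code):

import mock  # the third-party mock library; unittest.mock behaves the same


class Serie(object):
    def __init__(self, points):
        self.points = points

    def to_dict(self):
        # "new" format: marked by the presence of 'first_timestamp'
        return {'first_timestamp': 0, 'timestamps': [], 'values': self.points}


def _to_dict_old(self):
    # "old" format stand-in: just a position -> value mapping
    return {'values': dict(enumerate(self.points))}


with mock.patch.object(Serie, 'to_dict', autospec=True) as f:
    f.side_effect = _to_dict_old
    d = Serie([4, 5, 6]).to_dict()

assert 'first_timestamp' not in d  # serialized in the old shape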