Increased list of fields that are verified

Moved _monitor_message and _compute_update_message code and
tests to MonitorNotification and ComputeUpdateNotification
Anuj Mathur 2013-06-10 12:06:24 +05:30
parent df186e7d62
commit 9421e9be95
15 changed files with 974 additions and 1152 deletions
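In effect, the routing-key → handler-function table in stacktach/views.py (HANDLERS) is replaced by a routing-key → class table (NOTIFICATIONS), and the per-message field extraction moves onto Notification subclasses in the new stacktach/notification.py. The sketch below is only a toy mirror of that shape, not the real StackTach classes, and the sample body is invented:

# Toy mirror of the dispatch change in this commit (not the real classes):
# a dict of handler *functions* keyed by routing key becomes a dict of
# notification *classes* that build the RawData kwargs themselves.

def _monitor_message(routing_key, body):
    # old style: a free function per routing key
    return {'event': body['event_type'], 'routing_key': routing_key}

class MonitorNotification(object):
    # new style: one class per routing-key family
    def __init__(self, body):
        self.body = body

    def rawdata_kwargs(self, deployment, routing_key, json):
        return {'event': self.body['event_type'],
                'routing_key': routing_key,
                'deployment': deployment,
                'json': json}

HANDLERS = {'monitor.info': _monitor_message}           # before
NOTIFICATIONS = {'monitor.info': MonitorNotification}   # after

body = {'event_type': 'compute.instance.create.end'}    # invented sample
print(HANDLERS['monitor.info']('monitor.info', body))
print(NOTIFICATIONS['monitor.info'](body).rawdata_kwargs(None, 'monitor.info', '{}'))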

manage.py Normal file → Executable file

@@ -0,0 +1,95 @@
import os
import sys
try:
import ujson as json
except ImportError:
try:
import simplejson as json
except ImportError:
import json
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir, os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
sys.path.insert(0, POSSIBLE_TOPDIR)
if __name__ != '__main__':
sys.exit(1)
from stacktach import models
from stacktach.views import NOTIFICATIONS
usage_events = ['compute.instance.create.start',
'compute.instance.create.end',
'compute.instance.rebuild.start',
'compute.instance.rebuild.end',
'compute.instance.resize.prep.start',
'compute.instance.resize.prep.end',
'compute.instance.resize.revert.start',
'compute.instance.resize.revert.end',
'compute.instance.finish_resize.end',
'compute.instance.delete.end']
def _find_latest_usage_related_raw_id_for_request_id(rawdata_all_queryset, request_id):
rawdata = rawdata_all_queryset.filter(
request_id=request_id,
event__in=usage_events).order_by('id')[:1].values('id')
if rawdata.count() > 0:
return rawdata[0]['id']
return None
def _notification(json_message):
json_dict = json.loads(json_message)
routing_key = json_dict[0]
body = json_dict[1]
notification = NOTIFICATIONS[routing_key](body)
return notification
def populate_fields():
rawdata_all_queryset = models.RawData.objects.filter(event__in=usage_events)
rawdata_all = rawdata_all_queryset.values('json', 'id')
for rawdata in rawdata_all:
notification = _notification(rawdata['json'])
models.RawDataImageMeta.objects.create(
raw_id=rawdata['id'],
os_architecture=notification.os_architecture,
os_distro=notification.os_distro,
os_version=notification.os_version,
rax_options=notification.rax_options)
print "Populated %s records in RawDataImageMeta" % rawdata_all.count()
rawdata_exists = models.RawData.objects.filter(
event__in=['compute.instance.exists']).values('id')
for rawdata_exist in rawdata_exists:
image_metadata = models.RawDataImageMeta.objects.filter(raw_id=rawdata_exist['id'])[0]
models.InstanceExists.objects.filter(
raw_id=rawdata_exist['id']).update(
os_architecture=image_metadata.os_architecture,
os_distro=image_metadata.os_distro,
os_version=image_metadata.os_version,
rax_options=image_metadata.rax_options)
print "Populated %s records in InstanceExists" % rawdata_exists.count()
usages = models.InstanceUsage.objects.all().values('request_id')
update_count = 0
for usage in usages:
raw_id = _find_latest_usage_related_raw_id_for_request_id(rawdata_all_queryset, usage['request_id'])
if not raw_id:
print "No Rawdata entry found for a usage related event with request_id %s" % usage['request_id']
continue
image_metadata = models.RawDataImageMeta.objects.filter(raw_id=raw_id)[0]
models.InstanceUsage.objects.filter(
request_id=usage['request_id']).update(
os_architecture=image_metadata.os_architecture,
os_distro=image_metadata.os_distro,
os_version=image_metadata.os_version,
rax_options=image_metadata.rax_options)
update_count += 1
print "Populated %s records in InstanceUsages" % update_count
populate_fields()
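For context on the _notification helper above: the script assumes each RawData.json value is a two-element JSON list, [routing_key, body], which is why it indexes positions 0 and 1 after json.loads. A minimal, self-contained illustration (the sample values are invented):

import json

# Invented example of the [routing_key, body] envelope that the script
# expects to find in RawData.json.
json_message = json.dumps([
    'monitor.info',
    {'event_type': 'compute.instance.create.end',
     '_context_request_id': 'req-1234',
     'payload': {'instance_id': 'abcd-5678'}},
])

json_dict = json.loads(json_message)
routing_key = json_dict[0]      # 'monitor.info'
body = json_dict[1]             # the notification body dict
print(routing_key)
print(body['event_type'])       # compute.instance.create.end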

@@ -21,8 +21,19 @@ def get_or_create_deployment(name):
def create_rawdata(**kwargs):
return models.RawData(**kwargs)
imagemeta_fields = ['os_architecture', 'os_version',
'os_distro', 'rax_options']
imagemeta_kwargs = \
dict((k, v) for k, v in kwargs.iteritems() if k in imagemeta_fields)
rawdata_kwargs = \
dict((k, v) for k, v in kwargs.iteritems() if k not in imagemeta_fields)
rawdata = models.RawData(**rawdata_kwargs)
rawdata.save()
imagemeta_kwargs.update({'raw_id': rawdata.id})
save(models.RawDataImageMeta(**imagemeta_kwargs))
return rawdata
def create_lifecycle(**kwargs):
return models.Lifecycle(**kwargs)
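The create_rawdata change above is a plain dict partition: image-meta keys are peeled off into a RawDataImageMeta row and everything else stays on RawData. A standalone illustration of the idiom (the sample kwargs are invented):

imagemeta_fields = ['os_architecture', 'os_version', 'os_distro', 'rax_options']

kwargs = {'event': 'compute.instance.create.end',   # invented sample kwargs
          'os_architecture': 'x64', 'rax_options': '0'}

# Same partition as create_rawdata(): image-meta keys go to RawDataImageMeta,
# the rest stays on RawData.
imagemeta_kwargs = dict((k, v) for k, v in kwargs.items() if k in imagemeta_fields)
rawdata_kwargs = dict((k, v) for k, v in kwargs.items() if k not in imagemeta_fields)

print(imagemeta_kwargs)   # {'os_architecture': 'x64', 'rax_options': '0'}
print(rawdata_kwargs)     # {'event': 'compute.instance.create.end'}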

@@ -0,0 +1,211 @@
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'RawDataImageMeta'
db.create_table(u'stacktach_rawdataimagemeta', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('raw', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['stacktach.RawData'])),
('os_architecture', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('os_distro', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('os_version', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('rax_options', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
))
db.send_create_signal(u'stacktach', ['RawDataImageMeta'])
# Adding field 'InstanceExists.os_architecture'
db.add_column(u'stacktach_instanceexists', 'os_architecture',
self.gf('django.db.models.fields.TextField')(null=True, blank=True),
keep_default=False)
# Adding field 'InstanceExists.os_distro'
db.add_column(u'stacktach_instanceexists', 'os_distro',
self.gf('django.db.models.fields.TextField')(null=True, blank=True),
keep_default=False)
# Adding field 'InstanceExists.os_version'
db.add_column(u'stacktach_instanceexists', 'os_version',
self.gf('django.db.models.fields.TextField')(null=True, blank=True),
keep_default=False)
# Adding field 'InstanceExists.rax_options'
db.add_column(u'stacktach_instanceexists', 'rax_options',
self.gf('django.db.models.fields.TextField')(null=True, blank=True),
keep_default=False)
# Adding field 'InstanceUsage.os_architecture'
db.add_column(u'stacktach_instanceusage', 'os_architecture',
self.gf('django.db.models.fields.TextField')(null=True, blank=True),
keep_default=False)
# Adding field 'InstanceUsage.os_distro'
db.add_column(u'stacktach_instanceusage', 'os_distro',
self.gf('django.db.models.fields.TextField')(null=True, blank=True),
keep_default=False)
# Adding field 'InstanceUsage.os_version'
db.add_column(u'stacktach_instanceusage', 'os_version',
self.gf('django.db.models.fields.TextField')(null=True, blank=True),
keep_default=False)
# Adding field 'InstanceUsage.rax_options'
db.add_column(u'stacktach_instanceusage', 'rax_options',
self.gf('django.db.models.fields.TextField')(null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting model 'RawDataImageMeta'
db.delete_table(u'stacktach_rawdataimagemeta')
# Deleting field 'InstanceExists.os_architecture'
db.delete_column(u'stacktach_instanceexists', 'os_architecture')
# Deleting field 'InstanceExists.os_distro'
db.delete_column(u'stacktach_instanceexists', 'os_distro')
# Deleting field 'InstanceExists.os_version'
db.delete_column(u'stacktach_instanceexists', 'os_version')
# Deleting field 'InstanceExists.rax_options'
db.delete_column(u'stacktach_instanceexists', 'rax_options')
# Deleting field 'InstanceUsage.os_architecture'
db.delete_column(u'stacktach_instanceusage', 'os_architecture')
# Deleting field 'InstanceUsage.os_distro'
db.delete_column(u'stacktach_instanceusage', 'os_distro')
# Deleting field 'InstanceUsage.os_version'
db.delete_column(u'stacktach_instanceusage', 'os_version')
# Deleting field 'InstanceUsage.rax_options'
db.delete_column(u'stacktach_instanceusage', 'rax_options')
models = {
u'stacktach.deployment': {
'Meta': {'object_name': 'Deployment'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'stacktach.instancedeletes': {
'Meta': {'object_name': 'InstanceDeletes'},
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'})
},
u'stacktach.instanceexists': {
'Meta': {'object_name': 'InstanceExists'},
'audit_period_beginning': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'audit_period_ending': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'delete': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceDeletes']"}),
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'fail_reason': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '300', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'message_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'send_status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '50', 'db_index': 'True'}),
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'usage': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceUsage']"})
},
u'stacktach.instanceusage': {
'Meta': {'object_name': 'InstanceUsage'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'stacktach.jsonreport': {
'Meta': {'object_name': 'JsonReport'},
'created': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'json': ('django.db.models.fields.TextField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'period_end': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'period_start': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'version': ('django.db.models.fields.IntegerField', [], {'default': '1'})
},
u'stacktach.lifecycle': {
'Meta': {'object_name': 'Lifecycle'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'last_raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'}),
'last_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'last_task_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'stacktach.rawdata': {
'Meta': {'object_name': 'RawData'},
'deployment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Deployment']"}),
'event': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'host': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'json': ('django.db.models.fields.TextField', [], {}),
'old_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
'old_task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
'publisher': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'routing_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'service': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'when': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
},
u'stacktach.rawdataimagemeta': {
'Meta': {'object_name': 'RawDataImageMeta'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']"}),
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
u'stacktach.requesttracker': {
'Meta': {'object_name': 'RequestTracker'},
'completed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'duration': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_timing': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Timing']", 'null': 'True'}),
'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
'request_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'start': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
},
u'stacktach.timing': {
'Meta': {'object_name': 'Timing'},
'diff': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'end_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
'end_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'start_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
'start_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'})
}
}
complete_apps = ['stacktach']

@@ -59,6 +59,14 @@ class RawData(models.Model):
return "%s %s %s" % (self.event, self.instance, self.state)
class RawDataImageMeta(models.Model):
raw = models.ForeignKey(RawData, null=False)
os_architecture = models.TextField(null=True, blank=True)
os_distro = models.TextField(null=True, blank=True)
os_version = models.TextField(null=True, blank=True)
rax_options = models.TextField(null=True, blank=True)
class Lifecycle(models.Model):
"""The Lifecycle table is the Master for a group of
Timing detail records. There is one Lifecycle row for
@@ -88,6 +96,10 @@ class InstanceUsage(models.Model):
db_index=True)
tenant = models.CharField(max_length=50, null=True, blank=True,
db_index=True)
os_architecture = models.TextField(null=True, blank=True)
os_distro = models.TextField(null=True, blank=True)
os_version = models.TextField(null=True, blank=True)
rax_options = models.TextField(null=True, blank=True)
class InstanceDeletes(models.Model):
instance = models.CharField(max_length=50, null=True,
@@ -138,6 +150,10 @@ class InstanceExists(models.Model):
send_status = models.IntegerField(null=True, default=0, db_index=True)
tenant = models.CharField(max_length=50, null=True, blank=True,
db_index=True)
os_architecture = models.TextField(null=True, blank=True)
os_distro = models.TextField(null=True, blank=True)
os_version = models.TextField(null=True, blank=True)
rax_options = models.TextField(null=True, blank=True)
class Timing(models.Model):
@@ -181,3 +197,7 @@ class JsonReport(models.Model):
name = models.CharField(max_length=50, db_index=True)
version = models.IntegerField(default=1)
json = models.TextField()
def get_model_fields(model):
return model._meta.fields

125
stacktach/notification.py Normal file

@@ -0,0 +1,125 @@
from stacktach import utils
from stacktach import image_type
class Notification(object):
def __init__(self, body):
self.body = body
self.request_id = body['_context_request_id']
self.payload = body.get('payload', {})
self.state = self.payload.get('state', "")
self.old_state = self.payload.get('old_state', "")
self.old_task = self.payload.get('old_task_state', "")
self.task = self.payload.get('new_task_state', "")
self.image_type = image_type.get_numeric_code(self.payload)
self.os_architecture = self.payload['image_meta']['org.openstack__1__architecture']
self.os_distro = self.payload['image_meta']['org.openstack__1__os_distro']
self.os_version = self.payload['image_meta']['org.openstack__1__os_version']
self.rax_options = self.payload['image_meta']['com.rackspace__1__options']
@property
def when(self):
when = self.body.get('timestamp', None)
if not when:
when = self.body['_context_timestamp'] # Old way of doing it
when = utils.str_time_to_unix(when)
return when
def rawdata_kwargs(self, deployment, routing_key, json):
return {
'deployment': deployment,
'routing_key': routing_key,
'event': self.event,
'publisher': self.publisher,
'json': json,
'state': self.state,
'old_state': self.old_state,
'task': self.task,
'old_task': self.old_task,
'image_type': self.image_type,
'when': self.when,
'service': self.service,
'host': self.host,
'instance': self.instance,
'request_id': self.request_id,
'tenant': self.tenant,
'os_architecture': self.os_architecture,
'os_distro': self.os_distro,
'os_version': self.os_version,
'rax_options': self.rax_options
}
class ComputeUpdateNotification(Notification):
def __init__(self, body):
super(ComputeUpdateNotification, self).__init__(body)
@property
def instance(self):
return None
@property
def host(self):
return self.body['args']['host']
@property
def publisher(self):
return None
@property
def service(self):
return self.body['args']['service_name']
@property
def event(self):
return self.body['method']
@property
def tenant(self):
return self.body['args'].get('_context_project_id', None)
class MonitorNotification(Notification):
def __init__(self, body):
super(MonitorNotification, self).__init__(body)
@property
def instance(self):
# instance UUID's seem to hide in a lot of odd places.
instance = self.payload.get('instance_id', None)
instance = self.payload.get('instance_uuid', instance)
if not instance:
instance = self.payload.get('exception', {}).get('kwargs', {}).get('uuid')
if not instance:
instance = self.payload.get('instance', {}).get('uuid')
return instance
@property
def host(self):
host = None
parts = self.publisher.split('.')
if len(parts) > 1:
host = ".".join(parts[1:])
return host
@property
def publisher(self):
return self.body['publisher_id']
@property
def service(self):
parts = self.publisher.split('.')
return parts[0]
@property
def event(self):
return self.body['event_type']
@property
def tenant(self):
tenant = self.body.get('_context_project_id', None)
tenant = self.payload.get('tenant_id', tenant)
return tenant
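A rough usage sketch of the classes above. It assumes the stacktach package from this commit is importable; the message body is invented, and the image_meta keys are included because Notification.__init__ indexes payload['image_meta'] directly (a body without them would raise KeyError):

from stacktach.notification import MonitorNotification

# Invented sample body in the shape MonitorNotification expects.
body = {
    '_context_request_id': 'req-1234',
    'event_type': 'compute.instance.create.end',
    'publisher_id': 'compute.c-10-13-137-10',
    'timestamp': '2013-06-12 06:30:52.790476',
    '_context_project_id': '5845730',
    'payload': {
        'instance_id': 'abcd-5678',
        'state': 'active',
        'image_meta': {
            'org.openstack__1__architecture': 'x64',
            'org.openstack__1__os_distro': 'com.ubuntu',
            'org.openstack__1__os_version': '12.04',
            'com.rackspace__1__options': '0',
        },
    },
}

notification = MonitorNotification(body)
print(notification.service)    # 'compute' (first part of publisher_id)
print(notification.host)       # 'c-10-13-137-10'
print(notification.instance)   # 'abcd-5678'
kwargs = notification.rawdata_kwargs(None, 'monitor.info', '{}')
print(sorted(kwargs.keys()))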

@@ -1,4 +1,4 @@
# Copyright (c) 2012 - Rackspace Inc.
# Copyright (c) 2013 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
@@ -18,900 +18,40 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import datetime
import decimal
from django.utils import unittest
import datetime_to_decimal
from models import *
import test_utils
from test_utils import INSTANCE_ID_1
from test_utils import INSTANCE_ID_2
from test_utils import MESSAGE_ID_1
from test_utils import MESSAGE_ID_2
from test_utils import REQUEST_ID_1
from test_utils import REQUEST_ID_2
from test_utils import REQUEST_ID_3
from test_utils import create_raw
import utils
import views
class ViewsUtilsTestCase(unittest.TestCase):
def test_srt_time_to_unix(self):
unix = utils.str_time_to_unix('2012-12-21 12:34:56.123')
self.assertEqual(unix, decimal.Decimal('1356093296.123'))
class ViewsLifecycleWorkflowTestCase(unittest.TestCase):
def setUp(self):
self.deployment = Deployment(name='TestDeployment')
self.deployment.save()
when1 = utils.str_time_to_unix('2012-12-21 12:34:50.123')
when2 = utils.str_time_to_unix('2012-12-21 12:34:56.123')
when3 = utils.str_time_to_unix('2012-12-21 12:36:56.124')
self.update_raw = create_raw(self.deployment, when1,
'compute.instance.update',
host='api', service='api')
self.start_raw = create_raw(self.deployment, when2,
'compute.instance.reboot.start')
self.end_raw = create_raw(self.deployment, when3,
'compute.instance.reboot.end',
old_task='reboot')
def tearDown(self):
Deployment.objects.all().delete()
RawData.objects.all().delete()
Lifecycle.objects.all().delete()
Timing.objects.all().delete()
RequestTracker.objects.all().delete()
def assertOnLifecycle(self, lifecycle, instance, last_raw):
self.assertEqual(lifecycle.instance, instance)
self.assertEqual(lifecycle.last_raw.id, last_raw.id)
self.assertEqual(lifecycle.last_state, last_raw.state)
self.assertEqual(lifecycle.last_task_state, last_raw.old_task)
def assertOnTiming(self, timing, lifecycle, start_raw, end_raw, diff):
self.assertEqual(timing.lifecycle.id, lifecycle.id)
self.assertEqual(timing.start_raw.id, start_raw.id)
self.assertEqual(timing.end_raw.id, end_raw.id)
self.assertEqual(timing.start_when, start_raw.when)
self.assertEqual(timing.end_when, end_raw.when)
self.assertEqual(timing.diff, decimal.Decimal(diff))
def assertOnTracker(self, tracker, request_id, lifecycle, start, diff=None):
self.assertEqual(tracker.request_id, request_id)
self.assertEqual(tracker.lifecycle.id, lifecycle.id)
self.assertEqual(tracker.start, start)
if diff:
self.assertEqual(tracker.duration, diff)
def test_aggregate_lifecycle_and_timing(self):
views.aggregate_lifecycle(self.update_raw)
views.aggregate_lifecycle(self.start_raw)
lifecycles = Lifecycle.objects.select_related()\
.filter(instance=INSTANCE_ID_1)
self.assertEqual(len(lifecycles), 1)
lifecycle = lifecycles[0]
self.assertOnLifecycle(lifecycle, INSTANCE_ID_1, self.start_raw)
views.aggregate_lifecycle(self.end_raw)
lifecycles = Lifecycle.objects.select_related()\
.filter(instance=INSTANCE_ID_1)
self.assertEqual(len(lifecycles), 1)
lifecycle = lifecycles[0]
self.assertOnLifecycle(lifecycle, INSTANCE_ID_1, self.end_raw)
timings = Timing.objects.select_related()\
.filter(lifecycle=lifecycle)
self.assertEqual(len(lifecycles), 1)
timing = timings[0]
expected_diff = self.end_raw.when - self.start_raw.when
self.assertOnTiming(timing, lifecycle, self.start_raw, self.end_raw,
expected_diff)
def test_multiple_instance_lifecycles(self):
when1 = utils.str_time_to_unix('2012-12-21 13:32:50.123')
when2 = utils.str_time_to_unix('2012-12-21 13:34:50.123')
when3 = utils.str_time_to_unix('2012-12-21 13:37:50.124')
update_raw2 = create_raw(self.deployment, when1,
'compute.instance.update',
instance=INSTANCE_ID_2,
request_id=REQUEST_ID_2,
host='api', service='api')
start_raw2 = create_raw(self.deployment, when2,
'compute.instance.resize.start',
instance=INSTANCE_ID_2,
request_id=REQUEST_ID_2)
end_raw2 = create_raw(self.deployment, when3,
'compute.instance.resize.end',
old_task='resize',
instance=INSTANCE_ID_2,
request_id=REQUEST_ID_2)
views.aggregate_lifecycle(self.update_raw)
views.aggregate_lifecycle(self.start_raw)
views.aggregate_lifecycle(update_raw2)
views.aggregate_lifecycle(start_raw2)
lifecycles = Lifecycle.objects.all().order_by('id')
self.assertEqual(len(lifecycles), 2)
lifecycle1 = lifecycles[0]
self.assertOnLifecycle(lifecycle1, INSTANCE_ID_1, self.start_raw)
lifecycle2 = lifecycles[1]
self.assertOnLifecycle(lifecycle2, INSTANCE_ID_2, start_raw2)
views.aggregate_lifecycle(end_raw2)
views.aggregate_lifecycle(self.end_raw)
lifecycles = Lifecycle.objects.all().order_by('id')
self.assertEqual(len(lifecycles), 2)
lifecycle1 = lifecycles[0]
self.assertOnLifecycle(lifecycle1, INSTANCE_ID_1, self.end_raw)
lifecycle2 = lifecycles[1]
self.assertOnLifecycle(lifecycle2, INSTANCE_ID_2, end_raw2)
timings = Timing.objects.all().order_by('id')
self.assertEqual(len(timings), 2)
timing1 = timings[0]
expected_diff1 = self.end_raw.when - self.start_raw.when
self.assertOnTiming(timing1, lifecycle1, self.start_raw, self.end_raw,
expected_diff1)
expected_diff2 = end_raw2.when - start_raw2.when
timing2 = timings[1]
self.assertOnTiming(timing2, lifecycle2, start_raw2, end_raw2,
expected_diff2)
def test_same_instance_multiple_timings(self):
when1 = utils.str_time_to_unix('2012-12-21 13:32:50.123')
when2 = utils.str_time_to_unix('2012-12-21 13:34:50.123')
when3 = utils.str_time_to_unix('2012-12-21 13:37:50.124')
update_raw2 = create_raw(self.deployment, when1,
'compute.instance.update',
request_id=REQUEST_ID_2,
host='api', service='api')
start_raw2 = create_raw(self.deployment, when2,
'compute.instance.resize.start',
request_id=REQUEST_ID_2)
end_raw2 = create_raw(self.deployment, when3,
'compute.instance.resize.end',
old_task='resize',
request_id=REQUEST_ID_2)
# First action started
views.aggregate_lifecycle(self.update_raw)
views.aggregate_lifecycle(self.start_raw)
# Second action started, first end is late
views.aggregate_lifecycle(update_raw2)
views.aggregate_lifecycle(start_raw2)
# Finally get first end
views.aggregate_lifecycle(self.end_raw)
# Second end
views.aggregate_lifecycle(end_raw2)
lifecycles = Lifecycle.objects.select_related()\
.filter(instance=INSTANCE_ID_1)
self.assertEqual(len(lifecycles), 1)
lifecycle1 = lifecycles[0]
self.assertOnLifecycle(lifecycle1, INSTANCE_ID_1, end_raw2)
timings = Timing.objects.all().order_by('id')
self.assertEqual(len(timings), 2)
timing1 = timings[0]
expected_diff1 = self.end_raw.when - self.start_raw.when
self.assertOnTiming(timing1, lifecycle1, self.start_raw, self.end_raw,
expected_diff1)
expected_diff2 = end_raw2.when - start_raw2.when
timing2 = timings[1]
self.assertOnTiming(timing2, lifecycle1, start_raw2, end_raw2,
expected_diff2)
def test_aggregate_lifecycle_and_kpi(self):
views.aggregate_lifecycle(self.update_raw)
lifecycles = Lifecycle.objects.select_related()\
.filter(instance=INSTANCE_ID_1)
self.assertEqual(len(lifecycles), 1)
lifecycle = lifecycles[0]
self.assertOnLifecycle(lifecycle, INSTANCE_ID_1, self.update_raw)
trackers = RequestTracker.objects.filter(request_id=REQUEST_ID_1)
self.assertEqual(len(trackers), 1)
tracker = trackers[0]
self.assertOnTracker(tracker, REQUEST_ID_1, lifecycle,
self.update_raw.when)
views.aggregate_lifecycle(self.start_raw)
views.aggregate_lifecycle(self.end_raw)
trackers = RequestTracker.objects.filter(request_id=REQUEST_ID_1)
self.assertEqual(len(trackers), 1)
tracker = trackers[0]
expected_diff = self.end_raw.when-self.update_raw.when
self.assertOnTracker(tracker, REQUEST_ID_1, lifecycle,
self.update_raw.when, expected_diff)
def test_multiple_instance_kpi(self):
when1 = utils.str_time_to_unix('2012-12-21 13:32:50.123')
when2 = utils.str_time_to_unix('2012-12-21 13:34:50.123')
when3 = utils.str_time_to_unix('2012-12-21 13:37:50.124')
update_raw2 = create_raw(self.deployment, when1,
'compute.instance.update',
instance=INSTANCE_ID_2,
request_id=REQUEST_ID_2,
host='api', service='api')
start_raw2 = create_raw(self.deployment, when2,
'compute.instance.resize.start',
instance=INSTANCE_ID_2,
request_id=REQUEST_ID_2)
end_raw2 = create_raw(self.deployment, when3,
'compute.instance.resize.end',
instance=INSTANCE_ID_2,
old_task='resize',
request_id=REQUEST_ID_2)
views.aggregate_lifecycle(self.update_raw)
views.aggregate_lifecycle(self.start_raw)
views.aggregate_lifecycle(self.end_raw)
views.aggregate_lifecycle(update_raw2)
views.aggregate_lifecycle(start_raw2)
views.aggregate_lifecycle(end_raw2)
lifecycles = Lifecycle.objects.all().order_by('id')
self.assertEqual(len(lifecycles), 2)
lifecycle1 = lifecycles[0]
self.assertOnLifecycle(lifecycle1, INSTANCE_ID_1, self.end_raw)
lifecycle2 = lifecycles[1]
self.assertOnLifecycle(lifecycle2, INSTANCE_ID_2, end_raw2)
trackers = RequestTracker.objects.all().order_by('id')
self.assertEqual(len(trackers), 2)
tracker1 = trackers[0]
expected_diff = self.end_raw.when-self.update_raw.when
self.assertOnTracker(tracker1, REQUEST_ID_1, lifecycle1,
self.update_raw.when, expected_diff)
tracker2 = trackers[1]
expected_diff2 = end_raw2.when-update_raw2.when
self.assertOnTracker(tracker2, REQUEST_ID_2, lifecycle2,
update_raw2.when, expected_diff2)
def test_single_instance_multiple_kpi(self):
when1 = utils.str_time_to_unix('2012-12-21 13:32:50.123')
when2 = utils.str_time_to_unix('2012-12-21 13:34:50.123')
when3 = utils.str_time_to_unix('2012-12-21 13:37:50.124')
update_raw2 = create_raw(self.deployment, when1,
'compute.instance.update',
request_id=REQUEST_ID_2,
host='api', service='api')
start_raw2 = create_raw(self.deployment, when2,
'compute.instance.resize.start',
request_id=REQUEST_ID_2)
end_raw2 = create_raw(self.deployment, when3,
'compute.instance.resize.end',
old_task='resize',
request_id=REQUEST_ID_2)
views.aggregate_lifecycle(self.update_raw)
views.aggregate_lifecycle(self.start_raw)
views.aggregate_lifecycle(self.end_raw)
views.aggregate_lifecycle(update_raw2)
views.aggregate_lifecycle(start_raw2)
views.aggregate_lifecycle(end_raw2)
lifecycles = Lifecycle.objects.all().order_by('id')
self.assertEqual(len(lifecycles), 1)
lifecycle1 = lifecycles[0]
self.assertOnLifecycle(lifecycle1, INSTANCE_ID_1, end_raw2)
trackers = RequestTracker.objects.all().order_by('id')
self.assertEqual(len(trackers), 2)
tracker1 = trackers[0]
expected_diff1 = self.end_raw.when-self.update_raw.when
self.assertOnTracker(tracker1, REQUEST_ID_1, lifecycle1,
self.update_raw.when, expected_diff1)
tracker2 = trackers[1]
expected_diff2 = end_raw2.when-update_raw2.when
self.assertOnTracker(tracker2, REQUEST_ID_2, lifecycle1,
update_raw2.when, expected_diff2)
def test_single_instance_multiple_kpi_out_of_order(self):
when1 = utils.str_time_to_unix('2012-12-21 13:32:50.123')
when2 = utils.str_time_to_unix('2012-12-21 13:34:50.123')
when3 = utils.str_time_to_unix('2012-12-21 13:37:50.124')
update_raw2 = create_raw(self.deployment, when1,
'compute.instance.update',
request_id=REQUEST_ID_2,
host='api', service='api')
start_raw2 = create_raw(self.deployment, when2,
'compute.instance.resize.start',
request_id=REQUEST_ID_2)
end_raw2 = create_raw(self.deployment, when3,
'compute.instance.resize.end',
old_task='resize',
request_id=REQUEST_ID_2)
# First action started
views.aggregate_lifecycle(self.update_raw)
views.aggregate_lifecycle(self.start_raw)
# Second action started, first end is late
views.aggregate_lifecycle(update_raw2)
views.aggregate_lifecycle(start_raw2)
# Finally get first end
views.aggregate_lifecycle(self.end_raw)
# Second end
views.aggregate_lifecycle(end_raw2)
lifecycles = Lifecycle.objects.all().order_by('id')
self.assertEqual(len(lifecycles), 1)
lifecycle1 = lifecycles[0]
self.assertOnLifecycle(lifecycle1, INSTANCE_ID_1, end_raw2)
trackers = RequestTracker.objects.all().order_by('id')
self.assertEqual(len(trackers), 2)
tracker1 = trackers[0]
expected_diff1 = self.end_raw.when-self.update_raw.when
self.assertOnTracker(tracker1, REQUEST_ID_1, lifecycle1,
self.update_raw.when, expected_diff1)
tracker2 = trackers[1]
expected_diff2 = end_raw2.when-update_raw2.when
self.assertOnTracker(tracker2, REQUEST_ID_2, lifecycle1,
update_raw2.when, expected_diff2)
class ViewsUsageTestCase(unittest.TestCase):
def setUp(self):
self.deployment = Deployment(name='TestDeployment')
self.deployment.save()
def tearDown(self):
RawData.objects.all().delete()
InstanceUsage.objects.all().delete()
InstanceExists.objects.all().delete()
def test_process_new_launch_create_start(self):
when = utils.str_time_to_unix('2012-12-21 12:34:50.123')
json = test_utils.make_create_start_json()
raw = create_raw(self.deployment, when,
views.INSTANCE_EVENT['create_start'], json=json)
views._process_usage_for_new_launch(raw)
usages = InstanceUsage.objects.all()
self.assertEqual(len(usages), 1)
usage = usages[0]
self.assertEqual(usage.instance, INSTANCE_ID_1)
self.assertEqual(usage.instance_type_id, '1')
self.assertEqual(usage.request_id, REQUEST_ID_1)
def test_process_new_launch_resize_prep_start(self):
when = utils.str_time_to_unix('2012-12-21 12:34:50.123')
json = test_utils.make_resize_prep_start_json()
raw = create_raw(self.deployment, when,
views.INSTANCE_EVENT['resize_prep_start'], json=json)
views._process_usage_for_new_launch(raw)
usages = InstanceUsage.objects.all()
self.assertEqual(len(usages), 1)
usage = usages[0]
self.assertEqual(usage.instance, INSTANCE_ID_1)
self.assertEqual(usage.request_id, REQUEST_ID_1)
# The instance_type_id from resize prep notifications is the old one,
# thus we ignore it.
self.assertIsNone(usage.instance_type_id)
def test_process_new_launch_resize_revert_start(self):
when = utils.str_time_to_unix('2012-12-21 12:34:50.123')
json = test_utils.make_resize_revert_start_json()
raw = create_raw(self.deployment, when,
views.INSTANCE_EVENT['resize_revert_start'],
json=json)
views._process_usage_for_new_launch(raw)
usages = InstanceUsage.objects.all()
self.assertEqual(len(usages), 1)
usage = usages[0]
self.assertEqual(usage.instance, INSTANCE_ID_1)
self.assertEqual(usage.request_id, REQUEST_ID_1)
# The instance_type_id from resize revert notifications is the old one,
# thus we ignore it.
self.assertIsNone(usage.instance_type_id)
def test_process_updates_create_end(self):
values = {
'instance': INSTANCE_ID_1,
'request_id': REQUEST_ID_1,
'instance_type_id': '1',
}
InstanceUsage(**values).save()
sent = '2012-12-21 12:34:50.123'
when = utils.str_time_to_unix(sent)
json = test_utils.make_create_end_json(sent)
raw = create_raw(self.deployment, when,
views.INSTANCE_EVENT['create_end'], json=json)
views._process_usage_for_updates(raw)
usages = InstanceUsage.objects.all()
self.assertEqual(len(usages), 1)
usage = usages[0]
self.assertEqual(usage.launched_at, when)
def test_process_updates_resize_finish_end(self):
values = {
'instance': INSTANCE_ID_1,
'request_id': REQUEST_ID_1,
'instance_type_id': '2',
}
InstanceUsage(**values).save()
sent = '2012-12-21 12:34:50.123'
when = utils.str_time_to_unix(sent)
json = test_utils.make_resize_finish_json(sent)
raw = create_raw(self.deployment, when,
views.INSTANCE_EVENT['resize_finish_end'], json=json)
views._process_usage_for_updates(raw)
usages = InstanceUsage.objects.all()
self.assertEqual(len(usages), 1)
usage = usages[0]
self.assertEqual(usage.launched_at, when)
def test_process_updates_revert_end(self):
values = {
'instance': INSTANCE_ID_1,
'request_id': REQUEST_ID_1,
}
InstanceUsage(**values).save()
sent = '2012-12-21 12:34:50.123'
when = utils.str_time_to_unix(sent)
json = test_utils.make_resize_revert_end_json(sent)
raw = create_raw(self.deployment, when,
views.INSTANCE_EVENT['resize_revert_end'], json=json)
views._process_usage_for_updates(raw)
usages = InstanceUsage.objects.all()
self.assertEqual(len(usages), 1)
usage = usages[0]
self.assertEqual(usage.launched_at, when)
self.assertEqual(usage.instance_type_id, '1')
def test_process_updates_resize_prep_end(self):
values = {
'instance': INSTANCE_ID_1,
'request_id': REQUEST_ID_1,
}
InstanceUsage(**values).save()
sent = '2012-12-21 12:34:50.123'
when = utils.str_time_to_unix(sent)
json = test_utils.make_resize_prep_end_json(sent)
raw = create_raw(self.deployment, when,
views.INSTANCE_EVENT['resize_prep_end'], json=json)
views._process_usage_for_updates(raw)
usages = InstanceUsage.objects.all()
self.assertEqual(len(usages), 1)
usage = usages[0]
self.assertEqual(usage.instance_type_id, '2')
def test_process_delete(self):
launched_str = '2012-12-21 06:34:50.123'
launched = utils.str_time_to_unix(launched_str)
deleted_str = '2012-12-21 12:34:50.123'
deleted = utils.str_time_to_unix(deleted_str)
json = test_utils.make_delete_end_json(launched_str, deleted_str)
raw = create_raw(self.deployment, deleted,
views.INSTANCE_EVENT['delete_end'], json=json)
views._process_delete(raw)
delete = InstanceDeletes.objects.all()
self.assertEqual(len(delete), 1)
delete = delete[0]
self.assertEqual(delete.instance, INSTANCE_ID_1)
self.assertEqual(delete.launched_at, launched)
self.assertEqual(delete.deleted_at, deleted)
self.assertEqual(delete.raw.id, raw.id)
def test_process_exists(self):
launched_str = '2012-12-21 06:34:50.123'
launched = utils.str_time_to_unix(launched_str)
values = {
'instance': INSTANCE_ID_1,
'request_id': REQUEST_ID_1,
'instance_type_id': '1',
'launched_at': launched,
}
InstanceUsage(**values).save()
exists_str = '2012-12-21 23:30:00.000'
exists_time = utils.str_time_to_unix(exists_str)
json = test_utils.make_exists_json(launched_str)
raw = create_raw(self.deployment, exists_time,
views.INSTANCE_EVENT['exists'], json=json)
views._process_exists(raw)
usage = InstanceExists.objects.filter(instance=INSTANCE_ID_1,
launched_at = launched)[0]
exists_rows = InstanceExists.objects.all()
self.assertEqual(len(exists_rows), 1)
exists = exists_rows[0]
self.assertEqual(exists.instance, INSTANCE_ID_1)
self.assertEqual(exists.launched_at, launched)
self.assertEqual(exists.status, InstanceExists.PENDING)
self.assertEqual(exists.usage.id, usage.id)
self.assertEqual(exists.raw.id, raw.id)
self.assertEqual(exists.message_id, MESSAGE_ID_1)
self.assertIsNone(exists.deleted_at)
self.assertEqual(exists.instance_type_id, '1')
def test_process_exists_with_deleted_at(self):
launched_str = '2012-12-21 06:34:50.123'
launched = utils.str_time_to_unix(launched_str)
deleted_str = '2012-12-21 06:36:50.123'
deleted = utils.str_time_to_unix(deleted_str)
values = {
'instance': INSTANCE_ID_1,
'request_id': REQUEST_ID_1,
'instance_type_id': '1',
'launched_at': launched,
}
InstanceUsage(**values).save()
exists_str = '2012-12-21 23:30:00.000'
exists_time = utils.str_time_to_unix(exists_str)
json = test_utils.make_exists_json(launched_str, deleted_at=deleted_str)
raw = create_raw(self.deployment, exists_time,
views.INSTANCE_EVENT['exists'], json=json)
views._process_exists(raw)
usage = InstanceExists.objects.filter(instance=INSTANCE_ID_1,
launched_at = launched)[0]
exists_rows = InstanceExists.objects.all()
self.assertEqual(len(exists_rows), 1)
exists = exists_rows[0]
self.assertEqual(exists.instance, INSTANCE_ID_1)
self.assertEqual(exists.launched_at, launched)
self.assertEqual(exists.status, InstanceExists.PENDING)
self.assertEqual(exists.usage.id, usage.id)
self.assertEqual(exists.raw.id, raw.id)
self.assertEqual(exists.message_id, MESSAGE_ID_1)
self.assertEqual(exists.deleted_at, deleted)
self.assertEqual(exists.instance_type_id, '1')
class ViewsUsageWorkflowTestCase(unittest.TestCase):
def setUp(self):
self.deployment = Deployment(name='TestDeployment')
self.deployment.save()
def tearDown(self):
RawData.objects.all().delete()
InstanceUsage.objects.all().delete()
InstanceExists.objects.all().delete()
def assertOnUsage(self, usage, instance, type_id, launched, request_id):
self.assertEqual(usage.instance, instance)
self.assertEqual(usage.instance_type_id, type_id)
self.assertEqual(usage.launched_at, launched)
self.assertEqual(usage.request_id, request_id)
def test_create_workflow(self):
created_str = '2012-12-21 06:30:50.123'
created = utils.str_time_to_unix(created_str)
launched_str = '2012-12-21 06:34:50.123'
launched = utils.str_time_to_unix(launched_str)
create_start_json = test_utils.make_create_start_json()
create_end_json = test_utils.make_create_end_json(launched_str)
create_start_raw = create_raw(self.deployment, created,
views.INSTANCE_EVENT['create_start'],
json=create_start_json)
create_end_raw = create_raw(self.deployment, launched,
views.INSTANCE_EVENT['create_end'],
json=create_end_json)
views.aggregate_usage(create_start_raw)
views.aggregate_usage(create_end_raw)
usages = InstanceUsage.objects.all()
self.assertEqual(len(usages), 1)
usage = usages[0]
self.assertOnUsage(usage, INSTANCE_ID_1, '1', launched, REQUEST_ID_1)
def test_create_workflow_start_late(self):
created_str = '2012-12-21 06:30:50.123'
created = utils.str_time_to_unix(created_str)
launched_str = '2012-12-21 06:34:50.123'
launched = utils.str_time_to_unix(launched_str)
create_start_json = test_utils.make_create_start_json()
create_end_json = test_utils.make_create_end_json(launched_str)
create_start_raw = create_raw(self.deployment, created,
views.INSTANCE_EVENT['create_start'],
json=create_start_json)
create_end_raw = create_raw(self.deployment, launched,
views.INSTANCE_EVENT['create_end'],
json=create_end_json)
views.aggregate_usage(create_end_raw)
views.aggregate_usage(create_start_raw)
usages = InstanceUsage.objects.all()
self.assertEqual(len(usages), 1)
usage = usages[0]
self.assertOnUsage(usage, INSTANCE_ID_1, '1', launched, REQUEST_ID_1)
def test_resize_workflow(self):
launched_str = '2012-12-21 06:34:50.123'
launched = utils.str_time_to_unix(launched_str)
values = {
'instance': INSTANCE_ID_1,
'request_id': REQUEST_ID_1,
'instance_type_id': '1',
'launched_at': launched,
}
InstanceUsage(**values).save()
started_str = '2012-12-22 06:34:50.123'
started_time = utils.str_time_to_unix(started_str)
pre_end_str = '2012-12-22 06:36:50.123'
prep_end_time = utils.str_time_to_unix(pre_end_str)
finish_str = '2012-12-22 06:38:50.123'
finish_time = utils.str_time_to_unix(finish_str)
prep_start_json = test_utils\
.make_resize_prep_start_json(request_id=REQUEST_ID_2)
prep_end_json = test_utils\
.make_resize_prep_end_json(new_instance_type_id='2',
request_id=REQUEST_ID_2)
finish_json = test_utils\
.make_resize_finish_json(launched_at=finish_str,
request_id=REQUEST_ID_2)
prep_start_raw = create_raw(self.deployment, started_time,
views.INSTANCE_EVENT['resize_prep_start'],
request_id=REQUEST_ID_2,
json=prep_start_json)
prep_end_raw = create_raw(self.deployment, prep_end_time,
views.INSTANCE_EVENT['resize_prep_end'],
request_id=REQUEST_ID_2,
json=prep_end_json)
finish_raw = create_raw(self.deployment, finish_time,
views.INSTANCE_EVENT['resize_finish_end'],
request_id=REQUEST_ID_2,
json=finish_json)
views.aggregate_usage(prep_start_raw)
views.aggregate_usage(prep_end_raw)
views.aggregate_usage(finish_raw)
usages = InstanceUsage.objects.all().order_by('id')
self.assertEqual(len(usages), 2)
usage_before = usages[0]
usage_after = usages[1]
self.assertOnUsage(usage_before, INSTANCE_ID_1, '1', launched,
REQUEST_ID_1)
self.assertOnUsage(usage_after, INSTANCE_ID_1, '2', finish_time,
REQUEST_ID_2)
def test_resize_workflow_out_of_order(self):
launched_str = '2012-12-21 06:34:50.123'
launched = utils.str_time_to_unix(launched_str)
values = {
'instance': INSTANCE_ID_1,
'request_id': REQUEST_ID_1,
'instance_type_id': '1',
'launched_at': launched,
}
InstanceUsage(**values).save()
started_str = '2012-12-22 06:34:50.123'
started_time = utils.str_time_to_unix(started_str)
pre_end_str = '2012-12-22 06:36:50.123'
prep_end_time = utils.str_time_to_unix(pre_end_str)
finish_str = '2012-12-22 06:38:50.123'
finish_time = utils.str_time_to_unix(finish_str)
prep_start_json = test_utils\
.make_resize_prep_start_json(request_id=REQUEST_ID_2)
prep_end_json = test_utils\
.make_resize_prep_end_json(new_instance_type_id='2',
request_id=REQUEST_ID_2)
finish_json = test_utils\
.make_resize_finish_json(launched_at=finish_str,
request_id=REQUEST_ID_2)
prep_start_raw = create_raw(self.deployment, started_time,
views.INSTANCE_EVENT['resize_prep_start'],
request_id=REQUEST_ID_2,
json=prep_start_json)
prep_end_raw = create_raw(self.deployment, prep_end_time,
views.INSTANCE_EVENT['resize_prep_end'],
request_id=REQUEST_ID_2,
json=prep_end_json)
finish_raw = create_raw(self.deployment, finish_time,
views.INSTANCE_EVENT['resize_finish_end'],
request_id=REQUEST_ID_2,
json=finish_json)
# Resize Started, notification on time
views.aggregate_usage(prep_start_raw)
# Received finish_end, prep_end late
views.aggregate_usage(finish_raw)
# Finally receive the late prep_end
views.aggregate_usage(prep_end_raw)
usages = InstanceUsage.objects.all().order_by('id')
self.assertEqual(len(usages), 2)
usage_before = usages[0]
usage_after = usages[1]
self.assertOnUsage(usage_before, INSTANCE_ID_1, '1', launched,
REQUEST_ID_1)
self.assertOnUsage(usage_after, INSTANCE_ID_1, '2', finish_time,
REQUEST_ID_2)
def test_resize_workflow_start_late(self):
launched_str = '2012-12-21 06:34:50.123'
launched = utils.str_time_to_unix(launched_str)
values = {
'instance': INSTANCE_ID_1,
'request_id': REQUEST_ID_1,
'instance_type_id': '1',
'launched_at': launched,
}
InstanceUsage(**values).save()
started_str = '2012-12-22 06:34:50.123'
started_time = utils.str_time_to_unix(started_str)
pre_end_str = '2012-12-22 06:36:50.123'
prep_end_time = utils.str_time_to_unix(pre_end_str)
finish_str = '2012-12-22 06:38:50.123'
finish_time = utils.str_time_to_unix(finish_str)
prep_start_json = test_utils\
.make_resize_prep_start_json(request_id=REQUEST_ID_2)
prep_end_json = test_utils\
.make_resize_prep_end_json(new_instance_type_id='2',
request_id=REQUEST_ID_2)
finish_json = test_utils\
.make_resize_finish_json(launched_at=finish_str,
request_id=REQUEST_ID_2)
prep_start_raw = create_raw(self.deployment, started_time,
views.INSTANCE_EVENT['resize_prep_start'],
request_id=REQUEST_ID_2,
json=prep_start_json)
prep_end_raw = create_raw(self.deployment, prep_end_time,
views.INSTANCE_EVENT['resize_prep_end'],
request_id=REQUEST_ID_2,
json=prep_end_json)
finish_raw = create_raw(self.deployment, finish_time,
views.INSTANCE_EVENT['resize_finish_end'],
request_id=REQUEST_ID_2,
json=finish_json)
views.aggregate_usage(prep_end_raw)
views.aggregate_usage(prep_start_raw)
views.aggregate_usage(finish_raw)
usages = InstanceUsage.objects.all().order_by('id')
self.assertEqual(len(usages), 2)
usage_before = usages[0]
usage_after = usages[1]
self.assertOnUsage(usage_before, INSTANCE_ID_1, '1', launched,
REQUEST_ID_1)
self.assertOnUsage(usage_after, INSTANCE_ID_1, '2', finish_time,
REQUEST_ID_2)
def test_resize_revert_workflow(self):
launched_str = '2012-12-21 06:34:50.123'
launched = utils.str_time_to_unix(launched_str)
values = {
'instance': INSTANCE_ID_1,
'request_id': REQUEST_ID_1,
'instance_type_id': '1',
'launched_at': launched,
}
InstanceUsage(**values).save()
resize_launched_str = '2012-12-22 06:34:50.123'
resize_launched = utils.str_time_to_unix(resize_launched_str)
values = {
'instance': INSTANCE_ID_1,
'request_id': REQUEST_ID_2,
'instance_type_id': '2',
'launched_at': resize_launched,
}
InstanceUsage(**values).save()
started_str = '2012-12-22 06:34:50.123'
started_time = utils.str_time_to_unix(started_str)
end_str = '2012-12-22 06:36:50.123'
end_time = utils.str_time_to_unix(end_str)
start_json = test_utils\
.make_resize_revert_start_json(request_id=REQUEST_ID_3)
end_json = test_utils\
.make_resize_revert_end_json(launched_at=end_str,
request_id=REQUEST_ID_3)
start_raw = create_raw(self.deployment, started_time,
views.INSTANCE_EVENT['resize_revert_start'],
request_id=REQUEST_ID_3, json=start_json)
end_raw = create_raw(self.deployment, started_time,
views.INSTANCE_EVENT['resize_revert_end'],
request_id=REQUEST_ID_3, json=end_json)
views.aggregate_usage(start_raw)
views.aggregate_usage(end_raw)
usages = InstanceUsage.objects.all().order_by('id')
self.assertEqual(len(usages), 3)
usage_before_resize = usages[0]
usage_after_resize = usages[1]
usage_after_revert = usages[2]
self.assertOnUsage(usage_before_resize, INSTANCE_ID_1, '1', launched,
REQUEST_ID_1)
self.assertOnUsage(usage_after_resize, INSTANCE_ID_1, '2',
resize_launched, REQUEST_ID_2)
self.assertOnUsage(usage_after_revert, INSTANCE_ID_1, '1', end_time,
REQUEST_ID_3)
def test_resize_revert_workflow_start_late(self):
launched_str = '2012-12-21 06:34:50.123'
launched = utils.str_time_to_unix(launched_str)
values = {
'instance': INSTANCE_ID_1,
'request_id': REQUEST_ID_1,
'instance_type_id': '1',
'launched_at': launched,
}
InstanceUsage(**values).save()
resize_launched_str = '2012-12-22 06:34:50.123'
resize_launched = utils.str_time_to_unix(resize_launched_str)
values = {
'instance': INSTANCE_ID_1,
'request_id': REQUEST_ID_2,
'instance_type_id': '2',
'launched_at': resize_launched,
}
InstanceUsage(**values).save()
started_str = '2012-12-22 06:34:50.123'
started_time = utils.str_time_to_unix(started_str)
end_str = '2012-12-22 06:36:50.123'
end_time = utils.str_time_to_unix(end_str)
start_json = test_utils\
.make_resize_revert_start_json(request_id=REQUEST_ID_3)
end_json = test_utils\
.make_resize_revert_end_json(launched_at=end_str,
request_id=REQUEST_ID_3)
start_raw = create_raw(self.deployment, started_time,
views.INSTANCE_EVENT['resize_revert_start'],
request_id=REQUEST_ID_3, json=start_json)
end_raw = create_raw(self.deployment, started_time,
views.INSTANCE_EVENT['resize_revert_end'],
request_id=REQUEST_ID_3, json=end_json)
views.aggregate_usage(end_raw)
views.aggregate_usage(start_raw)
usages = InstanceUsage.objects.all().order_by('id')
self.assertEqual(len(usages), 3)
usage_before_resize = usages[0]
usage_after_resize = usages[1]
usage_after_revert = usages[2]
self.assertOnUsage(usage_before_resize, INSTANCE_ID_1, '1', launched,
REQUEST_ID_1)
self.assertOnUsage(usage_after_resize, INSTANCE_ID_1, '2',
resize_launched, REQUEST_ID_2)
self.assertOnUsage(usage_after_revert, INSTANCE_ID_1, '1', end_time,
REQUEST_ID_3)
from datetime import datetime
import unittest
import db
from stacktach.datetime_to_decimal import dt_to_decimal
from stacktach.models import RawDataImageMeta
from stacktach.models import RawData
from stacktach.models import get_model_fields
class RawDataImageMetaDbTestCase(unittest.TestCase):
def test_create_raw_data_should_populate_rawdata_and_rawdata_imagemeta(self):
deployment = db.get_or_create_deployment('deployment1')[0]
kwargs = {
'deployment': deployment,
'when': dt_to_decimal(datetime.utcnow()),
'tenant': '1', 'json': '{}', 'routing_key': 'monitor.info',
'state': 'verifying', 'old_state': 'pending',
'old_task': '', 'task': '', 'image_type': 1,
'publisher': '', 'event': 'compute.instance.exists',
'service': '', 'host': '', 'instance': '1234-5678-9012-3456',
'request_id': '1234', 'os_architecture': 'x86', 'os_version': '1',
'os_distro': 'windows', 'rax_options': '2'}
rawdata = db.create_rawdata(**kwargs)
for field in get_model_fields(RawData):
if field.name != 'id':
self.assertEquals(getattr(rawdata, field.name),
kwargs[field.name])
raw_image_meta = RawDataImageMeta.objects.all()[0]
self.assertEquals(raw_image_meta.raw, rawdata)
self.assertEquals(raw_image_meta.os_architecture,
kwargs['os_architecture'])
self.assertEquals(raw_image_meta.os_version, kwargs['os_version'])
self.assertEquals(raw_image_meta.os_distro, kwargs['os_distro'])
self.assertEquals(raw_image_meta.rax_options, kwargs['rax_options'])

@@ -9,11 +9,11 @@ from django.shortcuts import render_to_response
from stacktach import datetime_to_decimal as dt
from stacktach import db as stackdb
from stacktach import image_type
from stacktach import models
from stacktach import stacklog
from stacktach import utils
from stacktach.notification import MonitorNotification
from stacktach.notification import ComputeUpdateNotification
STACKDB = stackdb
@@ -26,67 +26,12 @@ def log_warn(msg):
LOG.warn(msg)
def _extract_states(payload):
return {
'state' : payload.get('state', ""),
'old_state' : payload.get('old_state', ""),
'old_task' : payload.get('old_task_state', ""),
'task' : payload.get('new_task_state', ""),
'image_type' : image_type.get_numeric_code(payload)
}
def _monitor_message(routing_key, body):
event = body['event_type']
publisher = body['publisher_id']
request_id = body['_context_request_id']
parts = publisher.split('.')
service = parts[0]
if len(parts) > 1:
host = ".".join(parts[1:])
else:
host = None
payload = body['payload']
request_spec = payload.get('request_spec', None)
# instance UUID's seem to hide in a lot of odd places.
instance = payload.get('instance_id', None)
instance = payload.get('instance_uuid', instance)
if not instance:
instance = payload.get('exception', {}).get('kwargs', {}).get('uuid')
if not instance:
instance = payload.get('instance', {}).get('uuid')
tenant = body.get('_context_project_id', None)
tenant = payload.get('tenant_id', tenant)
resp = dict(host=host, instance=instance, publisher=publisher,
service=service, event=event, tenant=tenant,
request_id=request_id)
resp.update(_extract_states(payload))
return resp
def _compute_update_message(routing_key, body):
publisher = None
instance = None
args = body['args']
host = args['host']
request_id = body['_context_request_id']
service = args['service_name']
event = body['method']
tenant = args.get('_context_project_id', None)
resp = dict(host=host, instance=instance, publisher=publisher,
service=service, event=event, tenant=tenant,
request_id=request_id)
payload = body.get('payload', {})
resp.update(_extract_states(payload))
return resp
# routing_key : handler
HANDLERS = {'monitor.info':_monitor_message,
'monitor.error':_monitor_message,
'':_compute_update_message}
NOTIFICATIONS = {
'monitor.info': MonitorNotification,
'monitor.error': MonitorNotification,
'': ComputeUpdateNotification}
def start_kpi_tracking(lifecycle, raw):
@@ -250,6 +195,10 @@ def _process_usage_for_new_launch(raw, body):
usage.launched_at = utils.str_time_to_unix(payload['launched_at'])
usage.tenant = payload['tenant_id']
usage.rax_options = payload['image_meta']['com.rackspace__1__options']
usage.os_architecture = payload['image_meta']['org.openstack__1__architecture']
usage.os_version = payload['image_meta']['org.openstack__1__os_version']
usage.os_distro = payload['image_meta']['org.openstack__1__os_distro']
STACKDB.save(usage)
@ -277,6 +226,11 @@ def _process_usage_for_updates(raw, body):
usage.instance_type_id = payload['new_instance_type_id']
usage.tenant = payload['tenant_id']
usage.rax_options = payload['image_meta']['com.rackspace__1__options']
usage.os_architecture = payload['image_meta']['org.openstack__1__architecture']
usage.os_version = payload['image_meta']['org.openstack__1__os_version']
usage.os_distro = payload['image_meta']['org.openstack__1__os_distro']
STACKDB.save(usage)
@ -321,6 +275,10 @@ def _process_exists(raw, body):
values['usage'] = usage
values['raw'] = raw
values['tenant'] = payload['tenant_id']
values['rax_options'] = payload['image_meta']['com.rackspace__1__options']
values['os_architecture'] = payload['image_meta']['org.openstack__1__architecture']
values['os_version'] = payload['image_meta']['org.openstack__1__os_version']
values['os_distro'] = payload['image_meta']['org.openstack__1__os_distro']
deleted_at = payload.get('deleted_at')
if deleted_at and deleted_at != '':
@ -370,22 +328,12 @@ def process_raw_data(deployment, args, json_args):
routing_key, body = args
record = None
handler = HANDLERS.get(routing_key, None)
if handler:
values = handler(routing_key, body)
notification = NOTIFICATIONS[routing_key](body)
if notification:
values = notification.rawdata_kwargs(deployment, routing_key, json_args)
if not values:
return record
values['deployment'] = deployment
try:
when = body['timestamp']
except KeyError:
when = body['_context_timestamp'] # Old way of doing it
values['when'] = utils.str_time_to_unix(when)
values['routing_key'] = routing_key
values['json'] = json_args
record = STACKDB.create_rawdata(**values)
STACKDB.save(record)
return record
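With the handlers gone, the routing key now selects a notification class and all of the per-message parsing lives on that class. stacktach/notification.py itself is not shown in this commit view; the sketch below reconstructs roughly what MonitorNotification.rawdata_kwargs has to return, based on the removed _monitor_message handler and the unit tests that follow, so treat the details as assumptions rather than the actual implementation:

# Hypothetical reconstruction of part of stacktach/notification.py (not in
# this diff); behaviour inferred from the removed _monitor_message handler
# and from the rawdata_kwargs assertions in tests/unit/test_notification.py.
from stacktach import image_type
from stacktach import utils


class MonitorNotification(object):
    def __init__(self, body):
        self.body = body
        self.payload = body.get('payload', {})
        self.event = body['event_type']
        self.publisher = body['publisher_id']
        self.request_id = body['_context_request_id']
        parts = self.publisher.split('.')
        self.service = parts[0]
        self.host = '.'.join(parts[1:]) if len(parts) > 1 else None
        self.tenant = self.payload.get('tenant_id',
                                       body.get('_context_project_id'))
        # the removed handler checked several payload locations for the UUID;
        # only the two common ones are shown here
        self.instance = self.payload.get('instance_id',
                                         self.payload.get('instance_uuid'))

    def rawdata_kwargs(self, deployment, routing_key, json_body):
        when = self.body.get('timestamp',
                             self.body.get('_context_timestamp'))
        return {
            'deployment': deployment,
            'routing_key': routing_key,
            'json': json_body,
            'when': utils.str_time_to_unix(when),
            'host': self.host,
            'publisher': self.publisher,
            'service': self.service,
            'event': self.event,
            'instance': self.instance,
            'tenant': self.tenant,
            'request_id': self.request_id,
            'state': self.payload.get('state', ''),
            'old_state': self.payload.get('old_state', ''),
            'old_task': self.payload.get('old_task_state', ''),
            'task': self.payload.get('new_task_state', ''),
            'image_type': image_type.get_numeric_code(self.payload),
        }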

View File

@ -0,0 +1,192 @@
# Copyright (c) 2013 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from decimal import Decimal
import unittest
from stacktach.notification import MonitorNotification
from stacktach.notification import ComputeUpdateNotification
from tests.unit.utils import REQUEST_ID_1, TENANT_ID_1, INSTANCE_ID_1
class ComputeUpdateNotificationTestCase(unittest.TestCase):
def test_rawdata_kwargs(self):
message = {
'_context_request_id': REQUEST_ID_1,
'method': 'some_method',
'event_type': 'compute.instance.update',
'publisher_id': 'compute.c-10-13-137-10',
'_context_project_id': '5845730',
'timestamp': '2013-06-12 06:30:52.790476',
'args': {
'host': 'compute',
'service_name': 'compute',
'_context_project_id': TENANT_ID_1
},
'payload': {
'state': 'active',
'old_state': 'building',
'old_task_state': 'build',
'new_task_state': 'rebuild_spawning',
'image_meta': {
'image_type': 'base',
'org.openstack__1__architecture': 'x64',
'org.openstack__1__os_distro': 'com.microsoft.server',
'org.openstack__1__os_version': '2008.2',
'com.rackspace__1__options': '36'
}
}
}
kwargs = ComputeUpdateNotification(message).rawdata_kwargs('1', 'monitor.info', 'json')
self.assertEquals(kwargs['deployment'], '1')
self.assertEquals(kwargs['routing_key'], 'monitor.info')
self.assertEquals(kwargs['tenant'], TENANT_ID_1)
self.assertEquals(kwargs['json'], 'json')
self.assertEquals(kwargs['state'], 'active')
self.assertEquals(kwargs['old_state'], 'building')
self.assertEquals(kwargs['old_task'], 'build')
self.assertEquals(kwargs['task'], 'rebuild_spawning')
self.assertEquals(kwargs['image_type'], 1)
self.assertEquals(kwargs['when'], Decimal('1371018652.790476'))
self.assertEquals(kwargs['publisher'], None)
self.assertEquals(kwargs['event'], 'some_method')
self.assertEquals(kwargs['host'], 'compute')
self.assertEquals(kwargs['request_id'], REQUEST_ID_1)
class MonitorNotificationTestCase(unittest.TestCase):
def test_rawdata_kwargs(self):
message = {
'event_type': 'compute.instance.create.start',
'publisher_id': 'compute.cpu1-n01.example.com',
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
'timestamp': '2013-06-12 06:30:52.790476',
'payload': {
'instance_id': INSTANCE_ID_1,
'state': 'active',
'old_state': 'building',
'old_task_state': 'build',
"new_task_state": 'rebuild_spawning',
'image_meta': {
'image_type': 'base',
'org.openstack__1__architecture': 'x64',
'org.openstack__1__os_distro': 'com.microsoft.server',
'org.openstack__1__os_version': '2008.2',
'com.rackspace__1__options': '36'
}
}
}
kwargs = MonitorNotification(message).rawdata_kwargs('1', 'monitor.info', 'json')
self.assertEquals(kwargs['host'], 'cpu1-n01.example.com')
self.assertEquals(kwargs['deployment'], '1')
self.assertEquals(kwargs['routing_key'], 'monitor.info')
self.assertEquals(kwargs['tenant'], TENANT_ID_1)
self.assertEquals(kwargs['json'], 'json')
self.assertEquals(kwargs['state'], 'active')
self.assertEquals(kwargs['old_state'], 'building')
self.assertEquals(kwargs['old_task'], 'build')
self.assertEquals(kwargs['task'], 'rebuild_spawning')
self.assertEquals(kwargs['image_type'], 1)
self.assertEquals(kwargs['when'], Decimal('1371018652.790476'))
self.assertEquals(kwargs['publisher'], 'compute.cpu1-n01.example.com')
self.assertEquals(kwargs['event'], 'compute.instance.create.start')
self.assertEquals(kwargs['request_id'], REQUEST_ID_1)
def test_rawdata_kwargs_for_message_with_no_host(self):
message = {
'event_type': 'compute.instance.create.start',
'publisher_id': 'compute',
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
'timestamp': '2013-06-12 06:30:52.790476',
'payload': {
'instance_id': INSTANCE_ID_1,
'state': 'active',
'old_state': 'building',
'old_task_state': 'build',
"new_task_state": 'rebuild_spawning',
'image_meta': {
'image_type': 'base',
'org.openstack__1__architecture': 'x64',
'org.openstack__1__os_distro': 'com.microsoft.server',
'org.openstack__1__os_version': '2008.2',
'com.rackspace__1__options': '36'
}
}
}
kwargs = MonitorNotification(message).rawdata_kwargs('1', 'monitor.info', 'json')
self.assertEquals(kwargs['host'], None)
self.assertEquals(kwargs['deployment'], '1')
self.assertEquals(kwargs['routing_key'], 'monitor.info')
self.assertEquals(kwargs['tenant'], TENANT_ID_1)
self.assertEquals(kwargs['json'], 'json')
self.assertEquals(kwargs['state'], 'active')
self.assertEquals(kwargs['old_state'], 'building')
self.assertEquals(kwargs['old_task'], 'build')
self.assertEquals(kwargs['task'], 'rebuild_spawning')
self.assertEquals(kwargs['image_type'], 1)
self.assertEquals(kwargs['when'], Decimal('1371018652.790476'))
self.assertEquals(kwargs['publisher'], 'compute')
self.assertEquals(kwargs['event'], 'compute.instance.create.start')
self.assertEquals(kwargs['request_id'], REQUEST_ID_1)
def test_rawdata_kwargs_for_message_with_exception(self):
message = {
'event_type': 'compute.instance.create.start',
'publisher_id': 'compute.cpu1-n01.example.com',
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
'timestamp': '2013-06-12 06:30:52.790476',
'payload': {
'exception': {'kwargs':{'uuid': INSTANCE_ID_1}},
'instance_id': INSTANCE_ID_1,
'state': 'active',
'old_state': 'building',
'old_task_state': 'build',
"new_task_state": 'rebuild_spawning',
'image_meta': {
'image_type': 'base',
'org.openstack__1__architecture': 'x64',
'org.openstack__1__os_distro': 'com.microsoft.server',
'org.openstack__1__os_version': '2008.2',
'com.rackspace__1__options': '36'
}
}
}
kwargs = MonitorNotification(message).rawdata_kwargs('1', 'monitor.info', 'json')
self.assertEquals(kwargs['host'], 'cpu1-n01.example.com')
self.assertEquals(kwargs['deployment'], '1')
self.assertEquals(kwargs['routing_key'], 'monitor.info')
self.assertEquals(kwargs['tenant'], TENANT_ID_1)
self.assertEquals(kwargs['json'], 'json')
self.assertEquals(kwargs['state'], 'active')
self.assertEquals(kwargs['old_state'], 'building')
self.assertEquals(kwargs['old_task'], 'build')
self.assertEquals(kwargs['task'], 'rebuild_spawning')
self.assertEquals(kwargs['image_type'], 1)
self.assertEquals(kwargs['when'], Decimal('1371018652.790476'))
self.assertEquals(kwargs['publisher'], 'compute.cpu1-n01.example.com')
self.assertEquals(kwargs['event'], 'compute.instance.create.start')
self.assertEquals(kwargs['request_id'], REQUEST_ID_1)

View File

@ -26,6 +26,10 @@ import mox
import utils
from utils import INSTANCE_ID_1
from utils import OS_VERSION_1
from utils import OS_ARCH_1
from utils import OS_DISTRO_1
from utils import RAX_OPTIONS_1
from utils import MESSAGE_ID_1
from utils import REQUEST_ID_1
from utils import TENANT_ID_1
@ -49,124 +53,6 @@ class StacktachRawParsingTestCase(unittest.TestCase):
self.assertTrue(key in resp, msg='%s not in response' % key)
self.assertEqual(resp[key], kwargs[key])
def test_monitor_message(self):
body = {
'event_type': 'compute.instance.create.start',
'publisher_id': 'compute.cpu1-n01.example.com',
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
'payload': {
'instance_id': INSTANCE_ID_1,
'state': 'active',
'old_state': 'building',
'old_task_state': 'build',
},
}
resp = views._monitor_message(None, body)
self.assertOnHandlerResponse(resp, host='cpu1-n01.example.com',
instance=INSTANCE_ID_1,
publisher=body['publisher_id'],
service='compute',
event=body['event_type'],
tenant=TENANT_ID_1,
request_id=REQUEST_ID_1,
state='active',
old_state='building',
old_task='build')
def test_monitor_message_no_host(self):
body = {
'event_type': 'compute.instance.create.start',
'publisher_id': 'compute',
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
'payload': {
'instance_id': INSTANCE_ID_1,
'state': 'active',
'old_state': 'building',
'old_task_state': 'build',
},
}
resp = views._monitor_message(None, body)
self.assertOnHandlerResponse(resp, host=None, instance=INSTANCE_ID_1,
publisher=body['publisher_id'],
service='compute',
event=body['event_type'],
tenant=TENANT_ID_1,
request_id=REQUEST_ID_1, state='active',
old_state='building', old_task='build')
def test_monitor_message_exception(self):
body = {
'event_type': 'compute.instance.create.start',
'publisher_id': 'compute.cpu1-n01.example.com',
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
'payload': {
'exception': {'kwargs':{'uuid': INSTANCE_ID_1}},
'state': 'active',
'old_state': 'building',
'old_task_state': 'build',
},
}
resp = views._monitor_message(None, body)
self.assertOnHandlerResponse(resp, host='cpu1-n01.example.com',
instance=INSTANCE_ID_1,
publisher=body['publisher_id'],
service='compute',
event=body['event_type'],
tenant=TENANT_ID_1,
request_id=REQUEST_ID_1,
state='active', old_state='building',
old_task='build')
def test_monitor_message_exception(self):
body = {
'event_type': 'compute.instance.create.start',
'publisher_id': 'compute.cpu1-n01.example.com',
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
'payload': {
'instance': {'uuid': INSTANCE_ID_1},
'state': 'active',
'old_state': 'building',
'old_task_state': 'build',
},
}
resp = views._monitor_message(None, body)
self.assertOnHandlerResponse(resp, host='cpu1-n01.example.com',
instance=INSTANCE_ID_1,
publisher=body['publisher_id'],
service='compute',
event=body['event_type'],
tenant=TENANT_ID_1,
request_id=REQUEST_ID_1,
state='active', old_state='building',
old_task='build')
def test_compute_update_message(self):
body = {
'_context_request_id': REQUEST_ID_1,
'method': 'some_method',
'args': {
'host': 'compute',
'service_name': 'compute',
'_context_project_id': TENANT_ID_1
},
'payload': {
'state': 'active',
'old_state': 'building',
'old_task_state': 'build',
}
}
resp = views._compute_update_message(None, body)
print resp
self.assertOnHandlerResponse(resp, publisher=None, instance=None,
host='compute', tenant=TENANT_ID_1,
event='some_method',
request_id=REQUEST_ID_1, state='active',
old_state='building', old_task='build')
def test_process_raw_data(self):
deployment = self.mox.CreateMockAnything()
when = '2013-1-25 13:38:23.123'
@ -175,22 +61,25 @@ class StacktachRawParsingTestCase(unittest.TestCase):
}
args = ('monitor.info', dict)
json_args = json.dumps(args)
old_info_handler = views.HANDLERS['monitor.info']
views.HANDLERS['monitor.info'] = lambda key, mess: {'host': 'api'}
raw_values = {
'deployment': deployment,
'when': utils.decimal_utc(datetime.datetime.strptime(when, "%Y-%m-%d %H:%M:%S.%f")),
'when': utils.decimal_utc(datetime.datetime.strptime(when, '%Y-%m-%d %H:%M:%S.%f')),
'host': 'api',
'routing_key': 'monitor.info',
'json': json_args
}
raw = self.mox.CreateMockAnything()
views.STACKDB.create_rawdata(**raw_values).AndReturn(raw)
views.STACKDB.save(raw)
old_info_handler = views.NOTIFICATIONS['monitor.info']
mock_notification = self.mox.CreateMockAnything()
mock_notification.rawdata_kwargs(deployment, 'monitor.info', json_args).AndReturn(raw_values)
views.NOTIFICATIONS['monitor.info'] = lambda message_body: mock_notification
views.STACKDB.create_rawdata(**raw_values)
self.mox.ReplayAll()
views.process_raw_data(deployment, args, json_args)
self.mox.VerifyAll()
views.HANDLERS['monitor.info'] = old_info_handler
views.NOTIFICATIONS['monitor.info'] = old_info_handler
def test_process_raw_data_old_timestamp(self):
deployment = self.mox.CreateMockAnything()
@ -199,24 +88,25 @@ class StacktachRawParsingTestCase(unittest.TestCase):
'_context_timestamp': when,
}
args = ('monitor.info', dict)
json_args = json.dumps(args)
old_info_handler = views.HANDLERS['monitor.info']
views.HANDLERS['monitor.info'] = lambda key, mess: {'host': 'api'}
json_args = json.dumps(args[1])
raw_values = {
'deployment': deployment,
'when': utils.decimal_utc(datetime.datetime.strptime(when, "%Y-%m-%dT%H:%M:%S.%f")),
'when': utils.decimal_utc(datetime.datetime.strptime(when, '%Y-%m-%dT%H:%M:%S.%f')),
'host': 'api',
'routing_key': 'monitor.info',
'json': json_args
}
raw = self.mox.CreateMockAnything()
views.STACKDB.create_rawdata(**raw_values).AndReturn(raw)
views.STACKDB.save(raw)
old_info_handler = views.NOTIFICATIONS['monitor.info']
mock_notification = self.mox.CreateMockAnything()
mock_notification.rawdata_kwargs(deployment, 'monitor.info', json_args).AndReturn(raw_values)
views.NOTIFICATIONS['monitor.info'] = lambda message_body: mock_notification
views.STACKDB.create_rawdata(**raw_values)
self.mox.ReplayAll()
views.process_raw_data(deployment, args, json_args)
self.mox.VerifyAll()
views.HANDLERS['monitor.info'] = old_info_handler
views.NOTIFICATIONS['monitor.info'] = old_info_handler
class StacktachLifecycleTestCase(unittest.TestCase):
def setUp(self):
@ -421,7 +311,8 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
stacklog.get_logger(name=name).AndReturn(self.log)
def test_process_usage_for_new_launch_create_start(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1}
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1, 'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
event = 'compute.instance.create.start'
raw, usage = self._setup_process_usage_mocks(event, notification)
@ -430,11 +321,16 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEquals(usage.instance_type_id, '1')
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_rebuild_start(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1}
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1, 'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
event = 'compute.instance.rebuild.start'
raw, usage = self._setup_process_usage_mocks(event, notification)
@ -443,11 +339,15 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEquals(usage.instance_type_id, '1')
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_rebuild_start_when_no_launched_at_in_db(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1}
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1, 'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
event = 'compute.instance.rebuild.start'
raw, usage = self._setup_process_usage_mocks(event, notification)
@ -457,11 +357,16 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_resize_prep_start_when_no_launched_at_in_db(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1}
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1, 'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
event = 'compute.instance.resize.prep.start'
raw, usage = self._setup_process_usage_mocks(event, notification)
@ -471,11 +376,16 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_resize_revert_start_when_no_launched_at_in_db(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1}
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1, 'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
event = 'compute.instance.resize.revert.start'
raw, usage = self._setup_process_usage_mocks(event, notification)
@ -485,12 +395,19 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_resize_prep_start_when_launched_at_in_db(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1}
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1,
'rax_options': RAX_OPTIONS_1, 'os_architecture': OS_ARCH_1,
'os_version': OS_VERSION_1, 'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1,
**kwargs)
event = 'compute.instance.resize.prep.start'
raw, usage = self._setup_process_usage_mocks(event, notification)
orig_launched_at = utils.decimal_utc(DUMMY_TIME - datetime.timedelta(days=1))
@ -500,12 +417,20 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEqual(usage.launched_at, orig_launched_at)
self.assertEqual(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_updates_create_end(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1}
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
kwargs = {'launched': str(DUMMY_TIME),
'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1,
'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1,
**kwargs)
event = 'compute.instance.create.end'
raw, usage = self._setup_process_usage_mocks(event, notification)
@ -513,12 +438,20 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEqual(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_updates_create_end_success_message(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1}
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
kwargs = {'launched': str(DUMMY_TIME),
'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1,
'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1,
**kwargs)
notification[1]['payload']['message'] = "Success"
event = 'compute.instance.create.end'
raw, usage = self._setup_process_usage_mocks(event, notification)
@ -527,12 +460,20 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEqual(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_updates_create_end_error_message(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1}
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
kwargs = {'launched': str(DUMMY_TIME),
'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1,
'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1,
**kwargs)
notification[1]['payload']['message'] = "Error"
event = 'compute.instance.create.end'
when_time = DUMMY_TIME
@ -547,8 +488,13 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.mox.VerifyAll()
def test_process_usage_for_updates_revert_end(self):
kwargs = {'launched': str(DUMMY_TIME), 'type_id': INSTANCE_TYPE_ID_1, 'tenant_id': TENANT_ID_1}
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
kwargs = {'launched': str(DUMMY_TIME),
'type_id': INSTANCE_TYPE_ID_1,
'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1,
'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1,
**kwargs)
event = 'compute.instance.resize.revert.end'
raw, usage = self._setup_process_usage_mocks(event, notification)
@ -557,12 +503,21 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEqual(usage.instance_type_id, INSTANCE_TYPE_ID_1)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
def test_process_usage_for_updates_prep_end(self):
kwargs = {'launched': str(DUMMY_TIME), 'new_type_id': INSTANCE_TYPE_ID_2, 'tenant_id': TENANT_ID_1}
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
kwargs = {'launched': str(DUMMY_TIME),
'new_type_id': INSTANCE_TYPE_ID_2,
'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1,
'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1,
**kwargs)
event = 'compute.instance.resize.prep.end'
raw, usage = self._setup_process_usage_mocks(event, notification)
@ -570,6 +525,10 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.assertEqual(usage.instance_type_id, INSTANCE_TYPE_ID_2)
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.mox.VerifyAll()
@ -649,7 +608,11 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
notif = utils.create_nova_notif(launched=str(launch_time),
audit_period_beginning=str(audit_beginning),
audit_period_ending=str(current_time),
tenant_id=TENANT_ID_1)
tenant_id=TENANT_ID_1,
os_architecture=OS_ARCH_1,
os_version=OS_VERSION_1,
os_distro=OS_DISTRO_1,
rax_options=RAX_OPTIONS_1)
json_str = json.dumps(notif)
event = 'compute.instance.exists'
raw = utils.create_raw(self.mox, current_decimal, event=event,
@ -668,7 +631,11 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
'instance_type_id': '1',
'usage': usage,
'raw': raw,
'tenant': TENANT_ID_1
'tenant': TENANT_ID_1,
'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1,
'os_version': OS_VERSION_1,
'os_distro': OS_DISTRO_1
}
exists = self.mox.CreateMockAnything()
views.STACKDB.create_instance_exists(**exists_values).AndReturn(exists)
@ -709,7 +676,11 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
deleted=str(deleted_time),
audit_period_beginning=str(audit_beginning),
audit_period_ending=str(current_time),
tenant_id=TENANT_ID_1)
tenant_id=TENANT_ID_1,
os_architecture=OS_ARCH_1,
os_version=OS_VERSION_1,
os_distro=OS_DISTRO_1,
rax_options=RAX_OPTIONS_1)
json_str = json.dumps(notif)
event = 'compute.instance.exists'
raw = utils.create_raw(self.mox, current_decimal, event=event,
@ -734,7 +705,11 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
'usage': usage,
'delete': delete,
'raw': raw,
'tenant': TENANT_ID_1
'tenant': TENANT_ID_1,
'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1,
'os_version': OS_VERSION_1,
'os_distro': OS_DISTRO_1
}
exists = self.mox.CreateMockAnything()
views.STACKDB.create_instance_exists(**exists_values).AndReturn(exists)

View File

@ -129,9 +129,6 @@ class StacktachDBTestCase(unittest.TestCase):
self.assertEqual(returned, object)
self.mox.VerifyAll()
def test_create_rawdata(self):
self._test_db_create_func(models.RawData, db.create_rawdata)
def test_create_lifecycle(self):
self._test_db_create_func(models.Lifecycle, db.create_lifecycle)

View File

@ -1,4 +1,4 @@
# Copyright (c) 2012 - Rackspace Inc.
# Copyright (c) 2013 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
@ -23,20 +23,27 @@ import decimal
import json
import unittest
import uuid
import multiprocessing
import kombu.common
import kombu.entity
import kombu.pools
import mox
import multiprocessing
from stacktach import datetime_to_decimal as dt
from stacktach import models
from utils import INSTANCE_ID_1
from utils import RAX_OPTIONS_1
from utils import RAX_OPTIONS_2
from utils import OS_DISTRO_1
from utils import OS_DISTRO_2
from utils import OS_ARCH_1
from utils import OS_ARCH_2
from utils import OS_VERSION_1
from utils import OS_VERSION_2
from utils import TENANT_ID_1
from utils import TENANT_ID_2
from utils import INSTANCE_TYPE_ID_1
from verifier import dbverifier
from verifier import AmbiguousResults
from verifier import FieldMismatch
@ -159,6 +166,78 @@ class VerifierTestCase(unittest.TestCase):
self.mox.VerifyAll()
def test_verify_for_launch_rax_options_mismatch(self):
exist = self.mox.CreateMockAnything()
exist.rax_options = RAX_OPTIONS_1
exist.usage = self.mox.CreateMockAnything()
exist.usage.rax_options = RAX_OPTIONS_2
self.mox.ReplayAll()
with self.assertRaises(FieldMismatch) as cm:
dbverifier._verify_for_launch(exist)
exception = cm.exception
self.assertEqual(exception.field_name, 'rax_options')
self.assertEqual(exception.expected, RAX_OPTIONS_1)
self.assertEqual(exception.actual, RAX_OPTIONS_2)
self.mox.VerifyAll()
def test_verify_for_launch_os_distro_mismatch(self):
exist = self.mox.CreateMockAnything()
exist.os_distro = OS_DISTRO_1
exist.usage = self.mox.CreateMockAnything()
exist.usage.os_distro = OS_DISTRO_2
self.mox.ReplayAll()
with self.assertRaises(FieldMismatch) as cm:
dbverifier._verify_for_launch(exist)
exception = cm.exception
self.assertEqual(exception.field_name, 'os_distro')
self.assertEqual(exception.expected, OS_DISTRO_1)
self.assertEqual(exception.actual, OS_DISTRO_2)
self.mox.VerifyAll()
def test_verify_for_launch_os_architecture_mismatch(self):
exist = self.mox.CreateMockAnything()
exist.os_architecture = OS_ARCH_1
exist.usage = self.mox.CreateMockAnything()
exist.usage.os_architecture = OS_ARCH_2
self.mox.ReplayAll()
with self.assertRaises(FieldMismatch) as cm:
dbverifier._verify_for_launch(exist)
exception = cm.exception
self.assertEqual(exception.field_name, 'os_architecture')
self.assertEqual(exception.expected, OS_ARCH_1)
self.assertEqual(exception.actual, OS_ARCH_2)
self.mox.VerifyAll()
def test_verify_for_launch_os_version_mismatch(self):
exist = self.mox.CreateMockAnything()
exist.os_version = OS_VERSION_1
exist.usage = self.mox.CreateMockAnything()
exist.usage.os_version = OS_VERSION_2
self.mox.ReplayAll()
with self.assertRaises(FieldMismatch) as cm:
dbverifier._verify_for_launch(exist)
exception = cm.exception
self.assertEqual(exception.field_name, 'os_version')
self.assertEqual(exception.expected, OS_VERSION_1)
self.assertEqual(exception.actual, OS_VERSION_2)
self.mox.VerifyAll()
def test_verify_for_launch_late_usage(self):
exist = self.mox.CreateMockAnything()
exist.usage = None

View File

@ -40,6 +40,18 @@ REQUEST_ID_1 = 'req-611a4d70-9e47-4b27-a95e-27996cc40c06'
REQUEST_ID_2 = 'req-a951dec0-52ee-425d-9f56-d68bd1ad00ac'
REQUEST_ID_3 = 'req-039a33f7-5849-4406-8166-4db8cd085f52'
RAX_OPTIONS_1 = '1'
RAX_OPTIONS_2 = '2'
OS_DISTRO_1 = "linux"
OS_DISTRO_2 = "selinux"
OS_ARCH_1 = "x86"
OS_ARCH_2 = "x64"
OS_VERSION_1 = "1"
OS_VERSION_2 = "2"
def decimal_utc(t = datetime.datetime.utcnow()):
return dt.dt_to_decimal(t)
@ -48,29 +60,29 @@ def decimal_utc(t = datetime.datetime.utcnow()):
def create_nova_notif(request_id=None, instance=INSTANCE_ID_1, type_id='1',
launched=None, deleted=None, new_type_id=None,
message_id=MESSAGE_ID_1, audit_period_beginning=None,
audit_period_ending=None, tenant_id = None):
audit_period_ending=None, tenant_id=None,
rax_options=None, os_architecture=None,
os_version=None, os_distro=None):
notif = ['', {
'message_id': message_id,
'payload': {
'image_meta': {},
'instance_id': instance,
'instance_type_id': type_id,
}
}
}]
if request_id:
notif[1]['_context_request_id'] = request_id
if launched:
notif[1]['payload']['launched_at'] = launched
if deleted:
notif[1]['payload']['deleted_at'] = deleted
if new_type_id:
notif[1]['payload']['new_instance_type_id'] = new_type_id
if audit_period_beginning:
notif[1]['payload']['audit_period_beginning'] = audit_period_beginning
if audit_period_ending:
notif[1]['payload']['audit_period_ending'] = audit_period_ending
if tenant_id:
notif[1]['payload']['tenant_id'] = tenant_id
notif[1]['_context_request_id'] = request_id
notif[1]['payload']['launched_at'] = launched
notif[1]['payload']['deleted_at'] = deleted
notif[1]['payload']['new_instance_type_id'] = new_type_id
notif[1]['payload']['audit_period_beginning'] = audit_period_beginning
notif[1]['payload']['audit_period_ending'] = audit_period_ending
notif[1]['payload']['tenant_id'] = tenant_id
notif[1]['payload']['image_meta']['com.rackspace__1__options'] = rax_options
notif[1]['payload']['image_meta']['org.openstack__1__architecture'] = os_architecture
notif[1]['payload']['image_meta']['org.openstack__1__os_distro'] = os_distro
notif[1]['payload']['image_meta']['org.openstack__1__os_version'] = os_version
return notif
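Since every keyword now maps straight onto the payload, the usage tests above can drive all of the image-meta fields through a single helper call. For example, using the constants defined at the top of this file:

# Builds a ['', {...}] style notification whose payload carries the
# vendor-prefixed image_meta keys checked by the usage tests.
notif = create_nova_notif(request_id=REQUEST_ID_1,
                          launched='2013-06-12 06:30:52.790476',
                          tenant_id=TENANT_ID_1,
                          rax_options=RAX_OPTIONS_1,
                          os_architecture=OS_ARCH_1,
                          os_version=OS_VERSION_1,
                          os_distro=OS_DISTRO_1)
# notif[1]['payload']['image_meta'] now contains:
# {'com.rackspace__1__options': '1',
#  'org.openstack__1__architecture': 'x86',
#  'org.openstack__1__os_distro': 'linux',
#  'org.openstack__1__os_version': '1'}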

View File

@ -135,6 +135,22 @@ def _verify_field_mismatch(exists, launch):
raise FieldMismatch('tenant', exists.tenant,
launch.tenant)
if launch.rax_options != exists.rax_options:
raise FieldMismatch('rax_options', exists.rax_options,
launch.rax_options)
if launch.os_architecture != exists.os_architecture:
raise FieldMismatch('os_architecture', exists.os_architecture,
launch.os_architecture)
if launch.os_version != exists.os_version:
raise FieldMismatch('os_version', exists.os_version,
launch.os_version)
if launch.os_distro != exists.os_distro:
raise FieldMismatch('os_distro', exists.os_distro,
launch.os_distro)
def _verify_for_launch(exist):
if exist.usage:
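Each new check raises FieldMismatch with the exists-side value as expected and the launch-side value as actual, which is what the new verifier tests assert. FieldMismatch itself lives in the verifier package and is not part of this diff; a minimal sketch consistent with those assertions could look like this (the message format is an assumption):

# Hypothetical sketch of verifier.FieldMismatch; attribute names come from
# the assertions in tests/unit/test_verifier.py, the message text is assumed.
class FieldMismatch(Exception):
    def __init__(self, field_name, expected, actual):
        self.field_name = field_name
        self.expected = expected
        self.actual = actual

    def __str__(self):
        return "Expected %s to be '%s' got '%s'" % (self.field_name,
                                                    self.expected,
                                                    self.actual)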

View File

@ -55,7 +55,8 @@ class NovaConsumer(kombu.mixins.ConsumerMixin):
def _create_exchange(self, name, type, exclusive=False, auto_delete=False):
return kombu.entity.Exchange(name, type=type, exclusive=exclusive,
durable=self.durable, auto_delete=auto_delete)
durable=self.durable,
auto_delete=auto_delete)
def _create_queue(self, name, nova_exchange, routing_key, exclusive=False,
auto_delete=False):