Merge pull request #192 from rackerlabs/master_stable_test

Promoting Sept 11 Master to Stable
This commit is contained in:
Andrew Melton 2013-09-11 11:23:29 -07:00
commit 5226e403f3
51 changed files with 6021 additions and 2479 deletions

1
.gitignore vendored
View File

@ -1,4 +1,5 @@
.idea/
.venv/
*.pyc
local_settings.py

View File

@ -6,3 +6,5 @@ librabbitmq>=1.0.0
prettytable>=0.7.2
argparse
Pympler
requests
south

View File

@ -0,0 +1,27 @@
#!/bin/bash
# Example script to prune the StackTach RawData table.
# The following is one way you could keep your RawData table from growing
# very large -- keep only the last N days worth of data, N being a number
# convenient to your installation.

# Full path to where you have deployed the StackTach app.
PATH_TO_ST='/path/to/stacktach'

# Let us say we want to keep only 90 days' worth of RawData.
# 90 days = 90 * (24*60*60) seconds = 7776000 seconds.
KEEP_RAWDATA_DAYS=90
KEEP_RAWDATA_SECS=$((KEEP_RAWDATA_DAYS*24*60*60))

# Source the stacktach_config.sh script to populate the
# STACKTACH_DB_* variables among other things, and do the deed.
# Expansions are quoted so the script still works if the deployment
# path contains spaces or shell metacharacters.
cd "${PATH_TO_ST}" && \
. "${PATH_TO_ST}/etc/stacktach_config.sh" && \
python manage.py dbshell <<EOF \
> /tmp/stacktach_prune.stdout \
2> /tmp/stacktach_prune.stderr
DELETE FROM stacktach_rawdata
WHERE stacktach_rawdata.when < (UNIX_TIMESTAMP(NOW())-${KEEP_RAWDATA_SECS});
EOF

View File

@ -11,7 +11,9 @@
"userid": "rabbit",
"password": "rabbit",
"virtual_host": "/",
"exchange_name": "stacktach",
"routing_keys": ["notifications.info"]
"topics": {
"nova": ["notifications.info"],
"glance": ["notifications.info"]
}
}
}
}

View File

@ -7,7 +7,30 @@
"rabbit_userid": "rabbit",
"rabbit_password": "rabbit",
"rabbit_virtual_host": "/",
"exit_on_exception": true
"exit_on_exception": true,
"queue_name": "stacktach",
"topics": {
"nova": [
{
"queue": "monitor.info",
"routing_key": "monitor.info"
},
{
"queue": "monitor.error",
"routing_key": "monitor.error"
}
],
"glance": [
{
"queue": "stacktach_monitor_glance.info",
"routing_key": "monitor_glance.info"
},
{
"queue": "stacktach_monitor_glance.error",
"routing_key": "monitor_glance.error"
}
]
}
},
{
"name": "east_coast.prod.cell1",
@ -17,6 +40,19 @@
"rabbit_userid": "rabbit",
"rabbit_password": "rabbit",
"rabbit_virtual_host": "/",
"exit_on_exception": false
"exit_on_exception": false,
"queue_name": "stacktach",
"topics": {
"nova": [
{
"queue": "monitor.info",
"routing_key": "monitor.info"
},
{
"queue": "monitor.error",
"routing_key": "monitor.error"
}
]
}
}]
}

4
etc/test-requires.txt Normal file
View File

@ -0,0 +1,4 @@
nose
coverage
mox
nose-exclude

View File

@ -49,6 +49,21 @@ select stacktach_instanceusage.id,
stacktach_instancereconcile.deleted_at > %s)
) and stacktach_instanceusage.launched_at < %s;"""
# Raw SQL: select reconcile rows launched before a cutoff that still look
# "live" -- either never deleted, or deleted after the cutoff (checking both
# the reconcile row itself and any matching instancedeletes row).
# Three positional %s placeholders; the caller supplies the same timestamp
# for each (Django's raw() does not support dict substitution).
OLD_RECONCILES_QUERY = """
select stacktach_instancereconcile.id,
stacktach_instancereconcile.instance,
stacktach_instancereconcile.launched_at from stacktach_instancereconcile
left outer join stacktach_instancedeletes on
stacktach_instancereconcile.instance = stacktach_instancedeletes.instance
where (
stacktach_instancereconcile.deleted_at is null and (
stacktach_instancedeletes.deleted_at is null or
stacktach_instancedeletes.deleted_at > %s
)
or (stacktach_instancereconcile.deleted_at is not null and
stacktach_instancereconcile.deleted_at > %s)
) and stacktach_instancereconcile.launched_at < %s;"""
reconciler = None
@ -217,6 +232,21 @@ def _launch_audit_for_period(beginning, ending):
launch.launched_at):
old_launches_dict[instance] = l
# NOTE (apmelton)
# Django's safe substitution doesn't allow dict substitution...
# Thus, we send it 'beginning' three times...
old_recs = models.InstanceReconcile.objects\
.raw(OLD_RECONCILES_QUERY,
[beginning, beginning, beginning])
for rec in old_recs:
instance = rec.instance
l = {'id': rec.id, 'launched_at': rec.launched_at}
if instance not in old_launches_dict or \
(old_launches_dict[instance]['launched_at'] <
rec.launched_at):
old_launches_dict[instance] = l
for instance, launch in old_launches_dict.items():
if instance in launches_dict:
launches_dict[instance].append(launch)

7
run_tests_venv.sh Executable file
View File

@ -0,0 +1,7 @@
#!/bin/sh
# Create an isolated virtualenv, install run-time and test dependencies,
# then run the test suite with coverage over the main packages.
virtualenv .venv
. .venv/bin/activate
pip install -r etc/pip-requires.txt
pip install -r etc/test-requires.txt
nosetests tests --exclude-dir=stacktach --with-coverage --cover-package=stacktach,worker,verifier --cover-erase

View File

@ -140,6 +140,8 @@ INSTALLED_APPS = (
'south'
)
SOUTH_TESTS_MIGRATE = False
ALLOWED_HOSTS = ['*']
# A sample logging configuration. The only tangible logging

View File

@ -20,7 +20,7 @@ def get_or_create_deployment(name):
return models.Deployment.objects.get_or_create(name=name)
def create_rawdata(**kwargs):
def create_nova_rawdata(**kwargs):
imagemeta_fields = ['os_architecture', 'os_version',
'os_distro', 'rax_options']
imagemeta_kwargs = \
@ -35,6 +35,7 @@ def create_rawdata(**kwargs):
return rawdata
def create_lifecycle(**kwargs):
return models.Lifecycle(**kwargs)
@ -88,4 +89,47 @@ def create_instance_exists(**kwargs):
def save(obj):
obj.save()
obj.save()
def create_glance_rawdata(**kwargs):
    """Create, save, and return a GlanceRawData row built from kwargs."""
    raw = models.GlanceRawData(**kwargs)
    raw.save()
    return raw
def create_generic_rawdata(**kwargs):
    """Create, save, and return a GenericRawData row built from kwargs."""
    raw = models.GenericRawData(**kwargs)
    raw.save()
    return raw
def create_image_usage(**kwargs):
    """Create, save, and return an ImageUsage row built from kwargs."""
    image_usage = models.ImageUsage(**kwargs)
    image_usage.save()
    return image_usage
def create_image_delete(**kwargs):
    """Create, save, and return an ImageDeletes row built from kwargs."""
    image_delete = models.ImageDeletes(**kwargs)
    image_delete.save()
    return image_delete
def create_image_exists(**kwargs):
    """Create, save, and return an ImageExists row built from kwargs."""
    image_exists = models.ImageExists(**kwargs)
    image_exists.save()
    return image_exists
def get_image_delete(**kwargs):
    """Fetch an ImageDeletes row matching kwargs via _safe_get."""
    return _safe_get(models.ImageDeletes, **kwargs)
def get_image_usage(**kwargs):
    """Fetch an ImageUsage row matching kwargs via _safe_get."""
    return _safe_get(models.ImageUsage, **kwargs)

View File

@ -290,6 +290,10 @@ def get_db_objects(klass, request, default_order_by, direction='desc',
offset = request.GET.get('offset')
limit = request.GET.get('limit', DEFAULT_LIMIT)
if limit:
limit = int(limit)
if limit > HARD_LIMIT:
limit = HARD_LIMIT
if offset:

View File

@ -0,0 +1,31 @@
import kombu
import kombu.entity
import kombu.pools
import kombu.connection
import kombu.common
def send_notification(message, routing_key, connection, exchange):
    """Publish *message* to *exchange* with *routing_key*.

    A producer is borrowed from the kombu producer pool for *connection*,
    and the exchange is declared on its channel if not already declared.
    """
    pool = kombu.pools.producers[connection]
    with pool.acquire(block=True) as producer:
        kombu.common.maybe_declare(exchange, producer.channel)
        producer.publish(message, routing_key)
def create_exchange(name, exchange_type, exclusive=False, auto_delete=False,
                    durable=True):
    """Build and return a kombu Exchange with the given name and flags."""
    return kombu.entity.Exchange(
        name,
        type=exchange_type,
        durable=durable,
        auto_delete=auto_delete,
        exclusive=exclusive,
    )
def create_connection(hostname, port, userid, password, transport,
                      virtual_host):
    """Build and return a kombu BrokerConnection for the given endpoint."""
    return kombu.connection.BrokerConnection(
        hostname=hostname,
        port=port,
        userid=userid,
        password=password,
        transport=transport,
        virtual_host=virtual_host,
    )
def create_queue(name, exchange, routing_key, exclusive=False,
                 auto_delete=False, queue_arguments=None, durable=True):
    """Build and return a kombu Queue bound to *exchange* by *routing_key*."""
    return kombu.Queue(
        name,
        exchange,
        routing_key=routing_key,
        durable=durable,
        auto_delete=auto_delete,
        exclusive=exclusive,
        queue_arguments=queue_arguments,
    )

View File

@ -2,8 +2,7 @@
import copy
import gc
from south.v2 import DataMigration
from stacktach.notification import Notification
from stacktach.views import NOTIFICATIONS
from stacktach.notification import notification_factory
try:
import ujson as json
@ -65,8 +64,8 @@ class Migration(DataMigration):
json_dict = json.loads(json_message)
routing_key = json_dict[0]
body = json_dict[1]
notification = NOTIFICATIONS[routing_key](body)
return notification
return notification_factory(body, None, routing_key, json_message,
'nova')
def forwards(self, orm):
# Note: Don't use "from appname.models import ModelName".
@ -101,7 +100,7 @@ class Migration(DataMigration):
exists_update_count += 1
print "Updated %s records in InstanceExists" % exists_update_count
print "\nStarted updating records in InstacnceUsages"
print "\nStarted updating records in InstanceUsages"
usages = orm.InstanceUsage.objects.all().values('request_id')
usages_update_count = 0
for usage in usages:

View File

@ -0,0 +1,308 @@
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'ImageDeletes'
db.create_table(u'stacktach_imagedeletes', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('uuid', self.gf('django.db.models.fields.CharField')(max_length=50, db_index=True)),
('deleted_at', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=6, db_index=True)),
('raw', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['stacktach.GlanceRawData'], null=True)),
))
db.send_create_signal(u'stacktach', ['ImageDeletes'])
# Adding model 'GlanceRawData'
db.create_table(u'stacktach_glancerawdata', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('deployment', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['stacktach.Deployment'])),
('owner', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=255, null=True, blank=True)),
('json', self.gf('django.db.models.fields.TextField')()),
('routing_key', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=50, null=True, blank=True)),
('when', self.gf('django.db.models.fields.DecimalField')(max_digits=20, decimal_places=6, db_index=True)),
('publisher', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=100, null=True, blank=True)),
('event', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=50, null=True, blank=True)),
('service', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=50, null=True, blank=True)),
('host', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=100, null=True, blank=True)),
('instance', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=50, null=True, blank=True)),
('request_id', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=50, null=True, blank=True)),
('uuid', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=36, null=True, blank=True)),
('status', self.gf('django.db.models.fields.CharField')(max_length=30, null=True, db_index=True)),
('image_type', self.gf('django.db.models.fields.IntegerField')(default=0, null=True, db_index=True)),
))
db.send_create_signal(u'stacktach', ['GlanceRawData'])
# Adding model 'ImageUsage'
db.create_table(u'stacktach_imageusage', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('uuid', self.gf('django.db.models.fields.CharField')(max_length=50, db_index=True)),
('created_at', self.gf('django.db.models.fields.DecimalField')(max_digits=20, decimal_places=6, db_index=True)),
('owner', self.gf('django.db.models.fields.CharField')(max_length=50, db_index=True)),
('size', self.gf('django.db.models.fields.BigIntegerField')(max_length=20)),
('last_raw', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['stacktach.GlanceRawData'], null=True)),
))
db.send_create_signal(u'stacktach', ['ImageUsage'])
# Adding model 'GenericRawData'
db.create_table(u'stacktach_genericrawdata', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('deployment', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['stacktach.Deployment'])),
('tenant', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=50, null=True, blank=True)),
('json', self.gf('django.db.models.fields.TextField')()),
('routing_key', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=50, null=True, blank=True)),
('when', self.gf('django.db.models.fields.DecimalField')(max_digits=20, decimal_places=6, db_index=True)),
('publisher', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=100, null=True, blank=True)),
('event', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=50, null=True, blank=True)),
('service', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=50, null=True, blank=True)),
('host', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=100, null=True, blank=True)),
('instance', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=50, null=True, blank=True)),
('request_id', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=50, null=True, blank=True)),
('message_id', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=50, null=True, blank=True)),
))
db.send_create_signal(u'stacktach', ['GenericRawData'])
# Adding model 'ImageExists'
db.create_table(u'stacktach_imageexists', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('uuid', self.gf('django.db.models.fields.CharField')(max_length=50, db_index=True)),
('created_at', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=6, db_index=True)),
('deleted_at', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=20, decimal_places=6, db_index=True)),
('audit_period_beginning', self.gf('django.db.models.fields.DecimalField')(max_digits=20, decimal_places=6, db_index=True)),
('audit_period_ending', self.gf('django.db.models.fields.DecimalField')(max_digits=20, decimal_places=6, db_index=True)),
('status', self.gf('django.db.models.fields.CharField')(default='pending', max_length=50, db_index=True)),
('fail_reason', self.gf('django.db.models.fields.CharField')(max_length=300, null=True)),
('raw', self.gf('django.db.models.fields.related.ForeignKey')(related_name='+', to=orm['stacktach.GlanceRawData'])),
('usage', self.gf('django.db.models.fields.related.ForeignKey')(related_name='+', null=True, to=orm['stacktach.ImageUsage'])),
('delete', self.gf('django.db.models.fields.related.ForeignKey')(related_name='+', null=True, to=orm['stacktach.ImageDeletes'])),
('send_status', self.gf('django.db.models.fields.IntegerField')(default=0, db_index=True)),
('owner', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
('size', self.gf('django.db.models.fields.BigIntegerField')(max_length=20)),
))
db.send_create_signal(u'stacktach', ['ImageExists'])
def backwards(self, orm):
# Deleting model 'ImageDeletes'
db.delete_table(u'stacktach_imagedeletes')
# Deleting model 'GlanceRawData'
db.delete_table(u'stacktach_glancerawdata')
# Deleting model 'ImageUsage'
db.delete_table(u'stacktach_imageusage')
# Deleting model 'GenericRawData'
db.delete_table(u'stacktach_genericrawdata')
# Deleting model 'ImageExists'
db.delete_table(u'stacktach_imageexists')
models = {
u'stacktach.deployment': {
'Meta': {'object_name': 'Deployment'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'stacktach.genericrawdata': {
'Meta': {'object_name': 'GenericRawData'},
'deployment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Deployment']"}),
'event': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'host': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'json': ('django.db.models.fields.TextField', [], {}),
'message_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'publisher': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'routing_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'service': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'when': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
},
u'stacktach.glancerawdata': {
'Meta': {'object_name': 'GlanceRawData'},
'deployment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Deployment']"}),
'event': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'host': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'json': ('django.db.models.fields.TextField', [], {}),
'owner': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'publisher': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'routing_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'service': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'db_index': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '36', 'null': 'True', 'blank': 'True'}),
'when': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
},
u'stacktach.imagedeletes': {
'Meta': {'object_name': 'ImageDeletes'},
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.GlanceRawData']", 'null': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
u'stacktach.imageexists': {
'Meta': {'object_name': 'ImageExists'},
'audit_period_beginning': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'audit_period_ending': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'created_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'delete': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.ImageDeletes']"}),
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'fail_reason': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'owner': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': u"orm['stacktach.GlanceRawData']"}),
'send_status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'size': ('django.db.models.fields.BigIntegerField', [], {'max_length': '20'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '50', 'db_index': 'True'}),
'usage': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.ImageUsage']"}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
u'stacktach.imageusage': {
'Meta': {'object_name': 'ImageUsage'},
'created_at': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.GlanceRawData']", 'null': 'True'}),
'owner': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'size': ('django.db.models.fields.BigIntegerField', [], {'max_length': '20'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
u'stacktach.instancedeletes': {
'Meta': {'object_name': 'InstanceDeletes'},
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'})
},
u'stacktach.instanceexists': {
'Meta': {'object_name': 'InstanceExists'},
'audit_period_beginning': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'audit_period_ending': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'delete': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceDeletes']"}),
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'fail_reason': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '300', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'message_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'send_status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '50', 'db_index': 'True'}),
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'usage': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceUsage']"})
},
u'stacktach.instancereconcile': {
'Meta': {'object_name': 'InstanceReconcile'},
'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'row_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'row_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'source': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '150', 'null': 'True', 'blank': 'True'}),
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'stacktach.instanceusage': {
'Meta': {'object_name': 'InstanceUsage'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'stacktach.jsonreport': {
'Meta': {'object_name': 'JsonReport'},
'created': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'json': ('django.db.models.fields.TextField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'period_end': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'period_start': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'version': ('django.db.models.fields.IntegerField', [], {'default': '1'})
},
u'stacktach.lifecycle': {
'Meta': {'object_name': 'Lifecycle'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'last_raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'}),
'last_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'last_task_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'stacktach.rawdata': {
'Meta': {'object_name': 'RawData'},
'deployment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Deployment']"}),
'event': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'host': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'json': ('django.db.models.fields.TextField', [], {}),
'old_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
'old_task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
'publisher': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'routing_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'service': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'when': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
},
u'stacktach.rawdataimagemeta': {
'Meta': {'object_name': 'RawDataImageMeta'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']"}),
'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
u'stacktach.requesttracker': {
'Meta': {'object_name': 'RequestTracker'},
'completed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'duration': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_timing': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Timing']", 'null': 'True'}),
'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
'request_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'start': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
},
u'stacktach.timing': {
'Meta': {'object_name': 'Timing'},
'diff': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
'end_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
'end_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'start_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
'start_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'})
}
}
complete_apps = ['stacktach']

View File

@ -0,0 +1,228 @@
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: relax NOT NULL on the image 'owner' columns.

    Alters ImageUsage.owner (max_length=50) and ImageExists.owner
    (max_length=255) to allow NULL.  The migration is irreversible:
    backwards() always raises.
    """

    def forwards(self, orm):
        """Apply the migration: make both 'owner' columns nullable."""
        # Changing field 'ImageUsage.owner'
        db.alter_column(u'stacktach_imageusage', 'owner', self.gf('django.db.models.fields.CharField')(max_length=50, null=True))
        # Changing field 'ImageExists.owner'
        db.alter_column(u'stacktach_imageexists', 'owner', self.gf('django.db.models.fields.CharField')(max_length=255, null=True))

    def backwards(self, orm):
        """Refuse to reverse: rows with NULL owners cannot be restored."""
        # User chose to not deal with backwards NULL issues for 'ImageUsage.owner'
        raise RuntimeError("Cannot reverse this migration. 'ImageUsage.owner' and its values cannot be restored.")
        # NOTE(review): the raise below is unreachable (South emits one raise
        # per field, but the first one above always fires first).
        # User chose to not deal with backwards NULL issues for 'ImageExists.owner'
        raise RuntimeError("Cannot reverse this migration. 'ImageExists.owner' and its values cannot be restored.")

    # Frozen ORM state captured when this migration was generated.  South
    # uses this snapshot instead of the live models; do not edit by hand.
    models = {
        u'stacktach.deployment': {
            'Meta': {'object_name': 'Deployment'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'stacktach.genericrawdata': {
            'Meta': {'object_name': 'GenericRawData'},
            'deployment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Deployment']"}),
            'event': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'host': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'json': ('django.db.models.fields.TextField', [], {}),
            'message_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'publisher': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'routing_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'service': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'when': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
        },
        u'stacktach.glancerawdata': {
            'Meta': {'object_name': 'GlanceRawData'},
            'deployment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Deployment']"}),
            'event': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'host': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'json': ('django.db.models.fields.TextField', [], {}),
            'owner': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'publisher': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'routing_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'service': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'db_index': 'True'}),
            'uuid': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '36', 'null': 'True', 'blank': 'True'}),
            'when': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
        },
        u'stacktach.imagedeletes': {
            'Meta': {'object_name': 'ImageDeletes'},
            'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.GlanceRawData']", 'null': 'True'}),
            'uuid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
        },
        u'stacktach.imageexists': {
            'Meta': {'object_name': 'ImageExists'},
            'audit_period_beginning': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'audit_period_ending': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'created_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'delete': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.ImageDeletes']"}),
            'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'fail_reason': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'owner': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'db_index': 'True'}),
            'raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': u"orm['stacktach.GlanceRawData']"}),
            'send_status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
            'size': ('django.db.models.fields.BigIntegerField', [], {'max_length': '20'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '50', 'db_index': 'True'}),
            'usage': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.ImageUsage']"}),
            'uuid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
        },
        u'stacktach.imageusage': {
            'Meta': {'object_name': 'ImageUsage'},
            'created_at': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.GlanceRawData']", 'null': 'True'}),
            'owner': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'db_index': 'True'}),
            'size': ('django.db.models.fields.BigIntegerField', [], {'max_length': '20'}),
            'uuid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
        },
        u'stacktach.instancedeletes': {
            'Meta': {'object_name': 'InstanceDeletes'},
            'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'})
        },
        u'stacktach.instanceexists': {
            'Meta': {'object_name': 'InstanceExists'},
            'audit_period_beginning': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'audit_period_ending': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'delete': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceDeletes']"}),
            'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'fail_reason': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '300', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'message_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
            'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'send_status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '50', 'db_index': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'usage': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceUsage']"})
        },
        u'stacktach.instancereconcile': {
            'Meta': {'object_name': 'InstanceReconcile'},
            'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'row_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'row_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'source': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '150', 'null': 'True', 'blank': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'stacktach.instanceusage': {
            'Meta': {'object_name': 'InstanceUsage'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'stacktach.jsonreport': {
            'Meta': {'object_name': 'JsonReport'},
            'created': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'json': ('django.db.models.fields.TextField', [], {}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'period_end': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
            'period_start': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
            'version': ('django.db.models.fields.IntegerField', [], {'default': '1'})
        },
        u'stacktach.lifecycle': {
            'Meta': {'object_name': 'Lifecycle'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'last_raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'}),
            'last_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'last_task_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'stacktach.rawdata': {
            'Meta': {'object_name': 'RawData'},
            'deployment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Deployment']"}),
            'event': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'host': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'json': ('django.db.models.fields.TextField', [], {}),
            'old_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'old_task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
            'publisher': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'routing_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'service': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'when': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
        },
        u'stacktach.rawdataimagemeta': {
            'Meta': {'object_name': 'RawDataImageMeta'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']"}),
            'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
        },
        u'stacktach.requesttracker': {
            'Meta': {'object_name': 'RequestTracker'},
            'completed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'duration': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_timing': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Timing']", 'null': 'True'}),
            'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
            'request_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'start': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
        },
        u'stacktach.timing': {
            'Meta': {'object_name': 'Timing'},
            'diff': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'end_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
            'end_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'start_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
            'start_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'})
        }
    }

    complete_apps = ['stacktach']

View File

@ -12,10 +12,19 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import copy
from django import forms
from django.db import models
from stacktach import datetime_to_decimal as dt
def routing_key_type(key):
    """Return 'E' when *key* names an error queue, otherwise a space.

    Used as the one-character status marker in search result rows.
    """
    return 'E' if key.endswith('error') else ' '
class Deployment(models.Model):
name = models.CharField(max_length=50)
@ -24,7 +33,52 @@ class Deployment(models.Model):
return self.name
class GenericRawData(models.Model):
    """Raw notification row for services without a dedicated raw-data model.

    NOTE: Django assigns column/admin ordering from field declaration
    order, so do not reorder these declarations.
    """
    # Header row used to seed search_results() output.
    result_titles = [["#", "?", "When", "Deployment", "Event", "Host",
                     "Instance", "Request id"]]
    deployment = models.ForeignKey(Deployment)
    tenant = models.CharField(max_length=50, null=True, blank=True,
                              db_index=True)
    json = models.TextField()
    routing_key = models.CharField(max_length=50, null=True,
                                   blank=True, db_index=True)
    when = models.DecimalField(max_digits=20, decimal_places=6,
                               db_index=True)
    publisher = models.CharField(max_length=100, null=True,
                                 blank=True, db_index=True)
    event = models.CharField(max_length=50, null=True,
                             blank=True, db_index=True)
    service = models.CharField(max_length=50, null=True,
                               blank=True, db_index=True)
    host = models.CharField(max_length=100, null=True,
                            blank=True, db_index=True)
    instance = models.CharField(max_length=50, null=True,
                                blank=True, db_index=True)
    request_id = models.CharField(max_length=50, null=True,
                                  blank=True, db_index=True)
    message_id = models.CharField(max_length=50, null=True,
                                  blank=True, db_index=True)

    @staticmethod
    def get_name():
        """Return the class name used to label this raw-data type."""
        return GenericRawData.__name__

    @property
    def uuid(self):
        """Alias for the instance column (kept for API symmetry with
        other raw-data models)."""
        return self.instance

    def search_results(self, results, when, routing_key_status):
        """Append this row's summary to *results*, seeding the header row
        from result_titles when *results* is empty, and return *results*."""
        if not results:
            results = copy.deepcopy(self.result_titles)
        results.append([self.id, routing_key_status, str(when),
                        self.deployment.name, self.event, self.host,
                        self.instance, self.request_id])
        return results
class RawData(models.Model):
result_titles = [["#", "?", "When", "Deployment", "Event", "Host",
"State", "State'", "Task'"]]
deployment = models.ForeignKey(Deployment)
tenant = models.CharField(max_length=50, null=True, blank=True,
db_index=True)
@ -58,6 +112,22 @@ class RawData(models.Model):
    def __repr__(self):
        # Human-readable summary: "<event> <instance uuid> <vm state>".
        return "%s %s %s" % (self.event, self.instance, self.state)
    @property
    def uuid(self):
        """Alias for the instance column (kept for API symmetry with
        other raw-data models)."""
        return self.instance
    @staticmethod
    def get_name():
        """Return the class name used to label this raw-data type."""
        return RawData.__name__
def search_results(self, results, when, routing_key_status):
if not results:
results = copy.deepcopy(self.result_titles)
results.append([self.id, routing_key_status, str(when),
self.deployment.name, self.event, self.host, self.state,
self.old_state, self.old_task])
return results
class RawDataImageMeta(models.Model):
raw = models.ForeignKey(RawData, null=False)
@ -108,6 +178,16 @@ class InstanceUsage(models.Model):
raw = raws[0]
return raw.deployment
@staticmethod
def find(instance, launched_at):
start = launched_at - datetime.timedelta(
microseconds=launched_at.microsecond)
end = start + datetime.timedelta(microseconds=999999)
params = {'instance': instance,
'launched_at__gte': dt.dt_to_decimal(start),
'launched_at__lte': dt.dt_to_decimal(end)}
return InstanceUsage.objects.filter(**params)
class InstanceDeletes(models.Model):
instance = models.CharField(max_length=50, null=True,
@ -121,6 +201,17 @@ class InstanceDeletes(models.Model):
def deployment(self):
return self.raw.deployment
@staticmethod
def find(instance, launched, deleted_max=None):
start = launched - datetime.timedelta(microseconds=launched.microsecond)
end = start + datetime.timedelta(microseconds=999999)
params = {'instance': instance,
'launched_at__gte': dt.dt_to_decimal(start),
'launched_at__lte': dt.dt_to_decimal(end)}
if deleted_max:
params['deleted_at__lte'] = dt.dt_to_decimal(deleted_max)
return InstanceDeletes.objects.filter(**params)
class InstanceReconcile(models.Model):
row_created = models.DateTimeField(auto_now_add=True)
@ -144,6 +235,15 @@ class InstanceReconcile(models.Model):
source = models.CharField(max_length=150, null=True,
blank=True, db_index=True)
@staticmethod
def find(instance, launched):
start = launched - datetime.timedelta(microseconds=launched.microsecond)
end = start + datetime.timedelta(microseconds=999999)
params = {'instance': instance,
'launched_at__gte': dt.dt_to_decimal(start),
'launched_at__lte': dt.dt_to_decimal(end)}
return InstanceReconcile.objects.filter(**params)
class InstanceExists(models.Model):
PENDING = 'pending'
@ -158,6 +258,7 @@ class InstanceExists(models.Model):
(RECONCILED, 'Passed Verification After Reconciliation'),
(FAILED, 'Failed Verification'),
]
instance = models.CharField(max_length=50, null=True,
blank=True, db_index=True)
launched_at = models.DecimalField(null=True, max_digits=20,
@ -194,6 +295,32 @@ class InstanceExists(models.Model):
def deployment(self):
return self.raw.deployment
@staticmethod
def find(ending_max, status):
params = {'audit_period_ending__lte': dt.dt_to_decimal(ending_max),
'status': status}
return InstanceExists.objects.select_related()\
.filter(**params).order_by('id')
def mark_verified(self, reconciled=False, reason=None):
if not reconciled:
self.status = InstanceExists.VERIFIED
else:
self.status = InstanceExists.RECONCILED
if reason is not None:
self.fail_reason = reason
self.save()
    def mark_failed(self, reason=None):
        """Mark this exists row failed, record *reason* if given, persist."""
        self.status = InstanceExists.FAILED
        if reason:
            self.fail_reason = reason
        self.save()
    def update_status(self, new_status):
        """Set status in memory only; the caller is responsible for save()."""
        self.status = new_status
class Timing(models.Model):
"""Each Timing record corresponds to a .start/.end event pair
@ -238,5 +365,140 @@ class JsonReport(models.Model):
json = models.TextField()
class GlanceRawData(models.Model):
    """Raw notification row captured from the Glance (image) service.

    NOTE: Django assigns column/admin ordering from field declaration
    order, so do not reorder these declarations.
    """
    # Header row used to seed search_results() output.
    result_titles = [["#", "?", "When", "Deployment", "Event", "Host",
                      "Status"]]
    # Image status values (choices for the 'status' column).
    ACTIVE = 'active'
    DELETED = 'deleted'
    KILLED = 'killed'
    PENDING_DELETE = 'pending_delete'
    QUEUED = 'queued'
    SAVING = 'saving'
    STATUS_CHOICES = [
        (ACTIVE, 'Active'),
        (DELETED, 'Deleted'),
        (KILLED, 'Killed'),
        (PENDING_DELETE, 'Pending delete'),
        (QUEUED, 'Queued'),
        (SAVING, 'Saving'),
    ]

    deployment = models.ForeignKey(Deployment)
    owner = models.CharField(max_length=255, null=True, blank=True,
                             db_index=True)
    json = models.TextField()
    routing_key = models.CharField(max_length=50, null=True, blank=True,
                                   db_index=True)
    when = models.DecimalField(max_digits=20, decimal_places=6, db_index=True)
    publisher = models.CharField(max_length=100, null=True,
                                 blank=True, db_index=True)
    event = models.CharField(max_length=50, null=True, blank=True,
                             db_index=True)
    service = models.CharField(max_length=50, null=True, blank=True,
                               db_index=True)
    host = models.CharField(max_length=100, null=True, blank=True,
                            db_index=True)
    instance = models.CharField(max_length=50, null=True, blank=True,
                                db_index=True)
    request_id = models.CharField(max_length=50, null=True, blank=True,
                                  db_index=True)
    uuid = models.CharField(max_length=36, null=True, blank=True,
                            db_index=True)
    status = models.CharField(max_length=30, db_index=True,
                              choices=STATUS_CHOICES, null=True)
    image_type = models.IntegerField(null=True, default=0, db_index=True)

    @staticmethod
    def get_name():
        """Return the class name used to label this raw-data type."""
        return GlanceRawData.__name__

    def search_results(self, results, when, routing_key_status):
        """Append this row's summary to *results*, seeding the header row
        from result_titles when *results* is empty, and return *results*."""
        if not results:
            results = copy.deepcopy(self.result_titles)
        results.append([self.id, routing_key_status, str(when),
                        self.deployment.name, self.event, self.host,
                        self.status])
        return results
class ImageUsage(models.Model):
    """Creation/usage record for a Glance image, keyed by image uuid."""
    uuid = models.CharField(max_length=50, db_index=True)
    created_at = models.DecimalField(max_digits=20,
                                     decimal_places=6, db_index=True)
    owner = models.CharField(max_length=50, db_index=True, null=True)
    size = models.BigIntegerField(max_length=20)
    # Most recent raw notification that updated this usage row.
    last_raw = models.ForeignKey(GlanceRawData, null=True)
class ImageDeletes(models.Model):
    """Deletion record for a Glance image, keyed by image uuid."""
    uuid = models.CharField(max_length=50, db_index=True)
    deleted_at = models.DecimalField(max_digits=20,
                                     decimal_places=6, db_index=True,
                                     null=True)
    raw = models.ForeignKey(GlanceRawData, null=True)

    @staticmethod
    def find(uuid, deleted_max=None):
        """Return delete rows for *uuid*; when *deleted_max* is given, only
        rows deleted at or before it are matched."""
        params = {'uuid': uuid}
        if deleted_max:
            params['deleted_at__lte'] = dt.dt_to_decimal(deleted_max)
        return ImageDeletes.objects.filter(**params)
class ImageExists(models.Model):
    """Audit-period 'exists' record for a Glance image, tracking its
    verification status.

    NOTE: Django assigns column ordering from field declaration order,
    so do not reorder the field declarations.
    """
    # Verification lifecycle states.
    PENDING = 'pending'
    VERIFYING = 'verifying'
    VERIFIED = 'verified'
    FAILED = 'failed'
    STATUS_CHOICES = [
        (PENDING, 'Pending Verification'),
        (VERIFYING, 'Currently Being Verified'),
        (VERIFIED, 'Passed Verification'),
        (FAILED, 'Failed Verification'),
    ]

    uuid = models.CharField(max_length=50, db_index=True)
    created_at = models.DecimalField(max_digits=20,
                                     decimal_places=6, db_index=True,
                                     null=True)
    deleted_at = models.DecimalField(max_digits=20,
                                     decimal_places=6, db_index=True,
                                     null=True)
    audit_period_beginning = models.DecimalField(max_digits=20,
                                                 decimal_places=6,
                                                 db_index=True)
    audit_period_ending = models.DecimalField(max_digits=20,
                                              decimal_places=6, db_index=True)
    status = models.CharField(max_length=50, db_index=True,
                              choices=STATUS_CHOICES,
                              default=PENDING)
    fail_reason = models.CharField(max_length=300, null=True)
    raw = models.ForeignKey(GlanceRawData, related_name='+')
    usage = models.ForeignKey(ImageUsage, related_name='+', null=True)
    delete = models.ForeignKey(ImageDeletes, related_name='+', null=True)
    send_status = models.IntegerField(default=0, db_index=True)
    owner = models.CharField(max_length=255, db_index=True, null=True)
    size = models.BigIntegerField(max_length=20)

    def update_status(self, new_status):
        """Set status in memory only; the caller is responsible for save()."""
        self.status = new_status

    @staticmethod
    def find(ending_max, status):
        """Return exists rows with *status* whose audit period ends at or
        before *ending_max*, ordered by id (related rows prefetched)."""
        params = {'audit_period_ending__lte': dt.dt_to_decimal(ending_max),
                  'status': status}
        return ImageExists.objects.select_related().filter(**params).order_by('id')

    def mark_verified(self):
        """Mark this exists row verified and persist it."""
        # Fix: previously referenced InstanceExists.VERIFIED; the string
        # value is identical, but this class defines its own constants and
        # should not depend on InstanceExists.
        self.status = ImageExists.VERIFIED
        self.save()

    def mark_failed(self, reason=None):
        """Mark this exists row failed, record *reason* if given, persist."""
        # Fix: previously referenced InstanceExists.FAILED (same value).
        self.status = ImageExists.FAILED
        if reason:
            self.fail_reason = reason
        self.save()
def get_model_fields(model):
    """Return the Django field objects declared on *model* (via _meta)."""
    meta = model._meta
    return meta.fields

View File

@ -1,25 +1,38 @@
# Copyright (c) 2013 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from stacktach import utils
from stacktach import stacklog
from stacktach import image_type
from stacktach import db
class Notification(object):
def __init__(self, body):
    def __init__(self, body, deployment, routing_key, json):
        """Parse common fields out of a notification *body* dict.

        body: deserialized notification message (dict).
        deployment: Deployment row the message was received for.
        routing_key: AMQP routing key the message arrived on.
        json: original raw JSON string of the message.
        """
        self.body = body
        self.request_id = body.get('_context_request_id', "")
        self.deployment = deployment
        self.routing_key = routing_key
        self.json = json
        self.payload = body.get('payload', {})
        # VM state / task-state transition fields (empty string when absent).
        self.state = self.payload.get('state', "")
        self.old_state = self.payload.get('old_state', "")
        self.old_task = self.payload.get('old_task_state', "")
        self.task = self.payload.get('new_task_state', "")
        self.image_type = image_type.get_numeric_code(self.payload)
        # publisher_id and event_type are required keys; missing ones raise
        # KeyError here by design.
        self.publisher = self.body['publisher_id']
        self.event = self.body['event_type']
        # Optional image metadata (OpenStack / Rackspace namespaced keys).
        image_meta = self.payload.get('image_meta', {})
        self.os_architecture = image_meta.get('org.openstack__1__architecture',
                                              '')
        self.os_distro = image_meta.get('org.openstack__1__os_distro', '')
        self.os_version = image_meta.get('org.openstack__1__os_version', '')
        self.rax_options = image_meta.get('com.rackspace__1__options', '')
@property
def when(self):
@ -29,30 +42,24 @@ class Notification(object):
when = utils.str_time_to_unix(when)
return when
def rawdata_kwargs(self, deployment, routing_key, json):
return {
'deployment': deployment,
'routing_key': routing_key,
'event': self.event,
'publisher': self.publisher,
'json': json,
'state': self.state,
'old_state': self.old_state,
'task': self.task,
'old_task': self.old_task,
'image_type': self.image_type,
'when': self.when,
'publisher': self.publisher,
'service': self.service,
'host': self.host,
'instance': self.instance,
'request_id': self.request_id,
'tenant': self.tenant,
'os_architecture': self.os_architecture,
'os_distro': self.os_distro,
'os_version': self.os_version,
'rax_options': self.rax_options
}
@property
def service(self):
parts = self.publisher.split('.')
return parts[0]
@property
def host(self):
host = None
parts = self.publisher.split('.')
if len(parts) > 1:
host = ".".join(parts[1:])
return host
@property
def tenant(self):
tenant = self.body.get('_context_project_id', None)
tenant = self.payload.get('tenant_id', tenant)
return tenant
@property
def instance(self):
@ -65,6 +72,158 @@ class Notification(object):
instance = self.payload.get('instance', {}).get('uuid')
return instance
    @property
    def message_id(self):
        """Unique message id from the notification body, or None if absent."""
        return self.body.get('message_id', None)
    def save(self):
        """Persist this notification as a generic raw-data row.

        Delegates to db.create_generic_rawdata; returns whatever record
        object the db layer returns.
        """
        return db.create_generic_rawdata(deployment=self.deployment,
                                         routing_key=self.routing_key,
                                         tenant=self.tenant,
                                         json=self.json,
                                         when=self.when,
                                         publisher=self.publisher,
                                         event=self.event,
                                         service=self.service,
                                         host=self.host,
                                         instance=self.instance,
                                         request_id=self.request_id,
                                         message_id=self.message_id)
class GlanceNotification(Notification):
    """Notification parsed from a Glance (image service) message.

    Extracts image-specific fields from the payload and knows how to
    persist itself (save) plus the derived image usage, delete and
    exists records.
    """

    def __init__(self, body, deployment, routing_key, json):
        super(GlanceNotification, self).__init__(body, deployment,
                                                 routing_key, json)
        if isinstance(self.payload, dict):
            self.properties = self.payload.get('properties', {})
            self.image_type = image_type.get_numeric_code(self.payload)
            self.status = self.payload.get('status', None)
            self.uuid = self.payload.get('id', None)
            self.size = self.payload.get('size', None)
            created_at = self.payload.get('created_at', None)
            # 'x and f(x)' keeps None/'' untouched and converts only real
            # timestamp strings to unix decimals.
            self.created_at = created_at and utils.str_time_to_unix(created_at)
            audit_period_beginning = self.payload.get(
                'audit_period_beginning', None)
            self.audit_period_beginning = audit_period_beginning and \
                utils.str_time_to_unix(audit_period_beginning)
            audit_period_ending = self.payload.get(
                'audit_period_ending', None)
            self.audit_period_ending = audit_period_ending and \
                utils.str_time_to_unix(audit_period_ending)
        else:
            # Some glance messages carry a non-dict payload; null out every
            # image field rather than raising.
            self.properties = {}
            self.image_type = None
            self.status = None
            self.uuid = None
            self.size = None
            self.created_at = None
            self.audit_period_beginning = None
            self.audit_period_ending = None

    @property
    def owner(self):
        """Image owner from the payload, or None for non-dict payloads."""
        if isinstance(self.payload, dict):
            return self.payload.get('owner', None)
        else:
            return None

    @property
    def instance(self):
        """Instance uuid the image belongs to (from image properties)."""
        return self.properties.get('instance_uuid', None)

    @property
    def deleted_at(self):
        """Deletion time as a unix decimal; body value wins, payload is the
        fallback. None when neither is set."""
        deleted_at = self.body.get('deleted_at', None)
        if isinstance(self.payload, dict):
            deleted_at = deleted_at or self.payload.get('deleted_at', None)
        return deleted_at and utils.str_time_to_unix(deleted_at)

    def save(self):
        """Persist this notification as a GlanceRawData row."""
        return db.create_glance_rawdata(deployment=self.deployment,
                                        routing_key=self.routing_key,
                                        owner=self.owner,
                                        json=self.json,
                                        when=self.when,
                                        publisher=self.publisher,
                                        event=self.event,
                                        service=self.service,
                                        host=self.host,
                                        instance=self.instance,
                                        request_id=self.request_id,
                                        image_type=self.image_type,
                                        status=self.status,
                                        uuid=self.uuid)

    def save_exists(self, raw):
        """Create an ImageExists record linked to *raw*.

        Attaches the matching usage record, and the delete record when the
        image has a deleted_at. Exists events without created_at are
        ignored (logged) because no usage can be correlated.
        """
        if self.created_at:
            values = {
                'uuid': self.uuid,
                'audit_period_beginning': self.audit_period_beginning,
                'audit_period_ending': self.audit_period_ending,
                'owner': self.owner,
                'size': self.size,
                'raw': raw
            }
            usage = db.get_image_usage(uuid=self.uuid)
            values['usage'] = usage
            values['created_at'] = self.created_at
            if self.deleted_at:
                delete = db.get_image_delete(uuid=self.uuid)
                values['delete'] = delete
                values['deleted_at'] = self.deleted_at

            db.create_image_exists(**values)
        else:
            stacklog.warn("Ignoring exists without created_at. GlanceRawData(%s)"
                          % raw.id)

    def save_usage(self, raw):
        """Create an ImageUsage record pointing at *raw* as its last raw."""
        values = {
            'uuid': self.uuid,
            'created_at': self.created_at,
            'owner': self.owner,
            'size': self.size,
            'last_raw': raw
        }
        db.create_image_usage(**values)

    def save_delete(self, raw):
        """Create an ImageDeletes record linked to *raw*."""
        values = {
            'uuid': self.uuid,
            'raw': raw,
            'deleted_at': self.deleted_at
        }
        db.create_image_delete(**values)
class NovaNotification(Notification):
    """Notification parsed from a Nova (compute) message.

    Pulls instance state/task transitions, flavor ids, lifecycle
    timestamps and image metadata out of the payload.
    """

    def __init__(self, body, deployment, routing_key, json):
        super(NovaNotification, self).__init__(body, deployment, routing_key,
                                               json)
        self.state = self.payload.get('state', '')
        self.old_state = self.payload.get('old_state', '')
        self.old_task = self.payload.get('old_task_state', '')
        self.task = self.payload.get('new_task_state', '')
        self.image_type = image_type.get_numeric_code(self.payload)
        image_meta = self.payload.get('image_meta', {})
        self.os_architecture = \
            image_meta.get('org.openstack__1__architecture', '')
        self.os_distro = image_meta.get('org.openstack__1__os_distro', '')
        self.os_version = image_meta.get('org.openstack__1__os_version', '')
        self.rax_options = image_meta.get('com.rackspace__1__options', '')
        self.instance_type_id = self.payload.get('instance_type_id', None)
        self.new_instance_type_id = \
            self.payload.get('new_instance_type_id', None)
        # NOTE(review): launched_at/deleted_at/audit_period_* are kept as the
        # raw payload strings here (unlike GlanceNotification, which converts
        # to unix decimals) — callers convert via utils.str_time_to_unix.
        self.launched_at = self.payload.get('launched_at', None)
        self.deleted_at = self.payload.get('deleted_at', None)
        self.audit_period_beginning = self.payload.get(
            'audit_period_beginning', None)
        self.audit_period_ending = self.payload.get(
            'audit_period_ending', None)
        self.message = self.payload.get('message', None)
@property
def host(self):
host = None
@ -78,8 +237,32 @@ class Notification(object):
parts = self.publisher.split('.')
return parts[0]
@property
def tenant(self):
tenant = self.body.get('_context_project_id', None)
tenant = self.payload.get('tenant_id', tenant)
return tenant
    def save(self):
        """Persist this notification as a NovaRawData row.

        Delegates to db.create_nova_rawdata; returns whatever record
        object the db layer returns.
        """
        return db.create_nova_rawdata(deployment=self.deployment,
                                      routing_key=self.routing_key,
                                      tenant=self.tenant,
                                      json=self.json,
                                      when=self.when,
                                      publisher=self.publisher,
                                      event=self.event,
                                      service=self.service,
                                      host=self.host,
                                      instance=self.instance,
                                      request_id=self.request_id,
                                      image_type=self.image_type,
                                      state=self.state,
                                      old_state=self.old_state,
                                      task=self.task,
                                      old_task=self.old_task,
                                      os_architecture=self.os_architecture,
                                      os_distro=self.os_distro,
                                      os_version=self.os_version,
                                      rax_options=self.rax_options)
def notification_factory(body, deployment, routing_key, json, exchange):
    """Build the Notification subclass appropriate for *exchange*.

    'nova' -> NovaNotification, 'glance' -> GlanceNotification, anything
    else falls back to the base Notification.
    """
    exchange_classes = {
        'nova': NovaNotification,
        'glance': GlanceNotification,
    }
    notification_class = exchange_classes.get(exchange, Notification)
    return notification_class(body, deployment, routing_key, json)

View File

@ -36,31 +36,33 @@ def set_default_logger_name(name):
default_logger_name = name
def _make_logger(name):
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
handler = logging.handlers.TimedRotatingFileHandler(default_logger_location % name,
when='midnight', interval=1, backupCount=3)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
log.handlers[0].doRollover()
def _logger_factory(exchange, name):
    """Return an ExchangeLogger when *exchange* is truthy, otherwise a
    plain stdlib logger configured with the rotating file handler."""
    if not exchange:
        plain_logger = logging.getLogger(__name__)
        _configure(plain_logger, name)
        return plain_logger
    return ExchangeLogger(exchange, name)
def _make_logger(name, exchange=None):
    """Create a logger for *name*; messages are exchange-prefixed when
    *exchange* is given."""
    return _logger_factory(exchange, name)
def init_logger(name=None):
def init_logger(name=None, exchange=None):
global LOGGERS
if name is None:
name = default_logger_name
if name not in LOGGERS:
LOGGERS[name] = _make_logger(name)
LOGGERS[name] = _make_logger(name, exchange)
def get_logger(name=None):
def get_logger(name=None, exchange=None):
global LOGGERS
if name is None:
name = default_logger_name
init_logger(name=name)
init_logger(name=name, exchange=exchange)
return LOGGERS[name]
@ -80,3 +82,38 @@ def info(msg, name=None):
if name is None:
name = default_logger_name
get_logger(name=name).info(msg)
def _configure(logger, name):
    """Attach a midnight-rotating file handler (3 backups) to *logger*.

    The log path comes from the module-level default_logger_location
    template. NOTE(review): calling this twice on the same logger would
    add a duplicate handler; the immediate doRollover() forces a fresh
    log file at startup.
    """
    logger.setLevel(logging.DEBUG)
    handler = logging.handlers.TimedRotatingFileHandler(
        default_logger_location % name,
        when='midnight', interval=1, backupCount=3)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logger.handlers[0].doRollover()
class ExchangeLogger(object):
    """Logger wrapper that prefixes every message with its exchange name
    (e.g. 'nova: ...'), mirroring the stdlib logger's info/warn/error/
    exception interface."""

    def __init__(self, exchange, name='stacktach-default'):
        self.logger = logging.getLogger(__name__)
        _configure(self.logger, name)
        self.exchange = exchange

    def _prefixed(self, msg):
        # Tag each message with the originating exchange.
        return self.exchange + ': ' + msg

    def info(self, msg, *args, **kwargs):
        self.logger.info(self._prefixed(msg), *args, **kwargs)

    def warn(self, msg, *args, **kwargs):
        self.logger.warn(self._prefixed(msg), *args, **kwargs)

    def error(self, msg, *args, **kwargs):
        self.logger.error(self._prefixed(msg), *args, **kwargs)

    def exception(self, msg, *args, **kwargs):
        # Bug fix: was self.logger.error, which silently dropped the active
        # traceback; Logger.exception logs at ERROR *with* exc_info.
        self.logger.exception(self._prefixed(msg), *args, **kwargs)

View File

@ -9,13 +9,78 @@ from django.shortcuts import get_object_or_404
import datetime_to_decimal as dt
import models
import utils
from django.core.exceptions import ObjectDoesNotExist, FieldError
SECS_PER_HOUR = 60 * 60
SECS_PER_DAY = SECS_PER_HOUR * 24
DEFAULT_LIMIT = 50
HARD_LIMIT = 1000
def get_event_names():
return models.RawData.objects.values('event').distinct()
def _get_limit(request):
limit = request.GET.get('limit', DEFAULT_LIMIT)
if limit:
limit = int(limit)
if limit > HARD_LIMIT:
limit = HARD_LIMIT
return limit
def _get_query_range(request):
limit = _get_limit(request)
offset = request.GET.get('offset')
start = None
if offset:
start = int(offset)
else:
offset = 0
end = int(offset) + int(limit)
return start, end
def model_search(request, model, filters,
                 related=False, order_by=None, excludes=None):
    """Run a filtered, optionally ordered, paged query against *model*.

    *model* is a Django manager/queryset. *filters* is a kwargs dict (a
    falsy value means .all()). *excludes* may mix Q objects and kwargs
    dicts. Results are sliced by the request's limit/offset params.
    """
    query = model.select_related() if related else model
    query = query.filter(**filters) if filters else query.all()
    for exclusion in (excludes or ()):
        if isinstance(exclusion, dict):
            query = query.exclude(**exclusion)
        else:
            query = query.exclude(exclusion)
    if order_by:
        query = query.order_by(order_by)
    start, end = _get_query_range(request)
    return query[start:end]
def _add_when_filters(request, filters):
when_max = request.GET.get('when_max')
if when_max:
filters['when__lte'] = decimal.Decimal(when_max)
when_min = request.GET.get('when_min')
if when_min:
filters['when__gte'] = decimal.Decimal(when_min)
def get_event_names(service='nova'):
    """Distinct event names recorded in *service*'s raw-data model."""
    return _model_factory(service).values('event').distinct()
def get_host_names():
@ -36,8 +101,10 @@ def get_deployments():
return models.Deployment.objects.all().order_by('name')
def get_timings_for_uuid(uuid):
lifecycles = models.Lifecycle.objects.filter(instance=uuid)
def get_timings_for_uuid(request, uuid):
model = models.Lifecycle.objects
filters = {'instance': uuid}
lifecycles = model_search(request, model, filters)
results = [["?", "Event", "Time (secs)"]]
for lc in lifecycles:
@ -106,20 +173,28 @@ def do_hosts(request):
def do_uuid(request):
uuid = str(request.GET['uuid'])
service = str(request.GET.get('service', 'nova'))
if not utils.is_uuid_like(uuid):
msg = "%s is not uuid-like" % uuid
return error_response(400, 'Bad Request', msg)
model = _model_factory(service)
result = []
filters = {}
related = models.RawData.objects.select_related().filter(instance=uuid)\
.order_by('when')
results = [["#", "?", "When", "Deployment", "Event", "Host", "State",
"State'", "Task'"]]
for e in related:
when = dt.dt_from_decimal(e.when)
results.append([e.id, routing_key_type(e.routing_key), str(when),
e.deployment.name, e.event, e.host, e.state,
e.old_state, e.old_task])
return rsp(json.dumps(results))
if service == 'nova' or service == 'generic':
filters = {'instance': uuid}
if service == 'glance':
filters = {'uuid': uuid}
_add_when_filters(request, filters)
related = model_search(request, model, filters,
related=True, order_by='when')
for event in related:
when = dt.dt_from_decimal(event.when)
routing_key_status = routing_key_type(event.routing_key)
result = event.search_results(result, when, routing_key_status)
return rsp(json.dumps(result))
def do_timings_uuid(request):
@ -127,24 +202,32 @@ def do_timings_uuid(request):
if not utils.is_uuid_like(uuid):
msg = "%s is not uuid-like" % uuid
return error_response(400, 'Bad Request', msg)
results = get_timings_for_uuid(uuid)
results = get_timings_for_uuid(request, uuid)
return rsp(json.dumps(results))
def do_timings(request):
name = request.GET['name']
results = [[name, "Time"]]
timings_query = models.Timing.objects.select_related()\
.filter(name=name)\
.exclude(Q(start_raw=None) | Q(end_raw=None))
model = models.Timing.objects
filters = {
'name': name
}
if request.GET.get('end_when_min') is not None:
min_when = decimal.Decimal(request.GET['end_when_min'])
timings_query = timings_query.filter(end_when__gte=min_when)
filters['end_when__gte'] = min_when
if request.GET.get('end_when_max') is not None:
max_when = decimal.Decimal(request.GET['end_when_max'])
timings_query = timings_query.filter(end_when__lte=max_when)
timings = timings_query.order_by('diff')
filters['end_when__lte'] = max_when
excludes = [Q(start_raw=None) | Q(end_raw=None), ]
timings = model_search(request, model, filters,
excludes=excludes, related=True,
order_by='diff')
results = [[name, "Time"]]
for t in timings:
results.append([t.lifecycle.instance, sec_to_time(t.diff)])
return rsp(json.dumps(results))
@ -161,9 +244,14 @@ def do_summary(request):
results = [["Event", "N", "Min", "Max", "Avg"]]
for name in interesting:
timings = models.Timing.objects.filter(name=name) \
.exclude(Q(start_raw=None) | Q(end_raw=None)) \
.exclude(diff__lt=0)
model = models.Timing.objects
filters = {'name': name}
excludes = [
Q(start_raw=None) | Q(end_raw=None),
{'diff__lt': 0}
]
timings = model_search(request, model, filters,
excludes=excludes)
if not timings:
continue
@ -190,8 +278,10 @@ def do_request(request):
msg = "%s is not request-id-like" % request_id
return error_response(400, 'Bad Request', msg)
events = models.RawData.objects.filter(request_id=request_id) \
.order_by('when')
model = models.RawData.objects
filters = {'request_id': request_id}
_add_when_filters(request, filters)
events = model_search(request, model, filters, order_by='when')
results = [["#", "?", "When", "Deployment", "Event", "Host",
"State", "State'", "Task'"]]
for e in events:
@ -202,15 +292,7 @@ def do_request(request):
return rsp(json.dumps(results))
def do_show(request, event_id):
event_id = int(event_id)
results = []
event = None
try:
event = models.RawData.objects.get(id=event_id)
except models.RawData.ObjectDoesNotExist:
return results
def append_nova_raw_attributes(event, results):
results.append(["Key", "Value"])
results.append(["#", event.id])
when = dt.dt_from_decimal(event.when)
@ -224,16 +306,83 @@ def do_show(request, event_id):
results.append(["Host", event.host])
results.append(["UUID", event.instance])
results.append(["Req ID", event.request_id])
return results
final = [results, ]
j = json.loads(event.json)
final.append(json.dumps(j, indent=2))
final.append(event.instance)
return rsp(json.dumps(final))
def append_glance_raw_attributes(event, results):
    """Append the Key/Value display rows for a GlanceRawData event to
    *results* and return it."""
    results.extend([
        ["Key", "Value"],
        ["#", event.id],
        ["When", str(dt.dt_from_decimal(event.when))],
        ["Deployment", event.deployment.name],
        ["Category", event.routing_key],
        ["Publisher", event.publisher],
        ["Status", event.status],
        ["Event", event.event],
        ["Service", event.service],
        ["Host", event.host],
        ["UUID", event.uuid],
        ["Req ID", event.request_id],
    ])
    return results
def append_generic_raw_attributes(event, results):
    """Append the Key/Value display rows for a GenericRawData event to
    *results* and return it."""
    results.extend([
        ["Key", "Value"],
        ["#", event.id],
        ["When", str(dt.dt_from_decimal(event.when))],
        ["Deployment", event.deployment.name],
        ["Category", event.routing_key],
        ["Publisher", event.publisher],
        ["State", event.state],
        ["Event", event.event],
        ["Service", event.service],
        ["Host", event.host],
        ["UUID", event.instance],
        ["Req ID", event.request_id],
    ])
    return results
def _append_raw_attributes(event, results, service):
    """Dispatch to the per-service attribute formatter.

    Returns None for an unrecognized service, matching the original
    fall-through behavior.
    """
    appenders = {
        'nova': append_nova_raw_attributes,
        'glance': append_glance_raw_attributes,
        'generic': append_generic_raw_attributes,
    }
    appender = appenders.get(service)
    if appender is not None:
        return appender(event, results)
def do_show(request, event_id):
    """Detail view for one raw event: [attribute rows, pretty JSON, uuid].

    ?service= selects which raw-data model to look in (default 'nova').

    NOTE(review): on a missing id this returns rsp({}) — a dict, whereas
    every other path passes a JSON *string* to rsp(); confirm rsp()
    tolerates a dict. Also, event.uuid is read for all services — confirm
    the nova RawData model exposes 'uuid' (its id field is 'instance'
    elsewhere in this module).
    """
    service = str(request.GET.get('service', 'nova'))
    event_id = int(event_id)
    results = []
    model = _model_factory(service)
    try:
        event = model.get(id=event_id)
        results = _append_raw_attributes(event, results, service)
        final = [results, ]
        j = json.loads(event.json)
        final.append(json.dumps(j, indent=2))
        final.append(event.uuid)
        return rsp(json.dumps(final))
    except ObjectDoesNotExist:
        return rsp({})
def _model_factory(service):
    """Map a service name to its raw-data model manager.

    NOTE(review): implicitly returns None for any other service value;
    callers then fail with AttributeError. Confirm whether unknown
    services should produce a 400 instead.
    """
    if service == 'glance':
        return models.GlanceRawData.objects
    elif service == 'nova':
        return models.RawData.objects
    elif service == 'generic':
        return models.GenericRawData.objects
def do_watch(request, deployment_id):
service = str(request.GET.get('service', 'nova'))
model = _model_factory(service)
deployment_id = int(deployment_id)
since = request.GET.get('since')
event_name = request.GET.get('event_name')
@ -244,7 +393,7 @@ def do_watch(request, deployment_id):
events = get_event_names()
max_event_width = max([len(event['event']) for event in events])
base_events = models.RawData.objects.order_by('when')
base_events = model.order_by('when')
if deployment_id > 0:
base_events = base_events.filter(deployment=deployment_id)
@ -276,7 +425,7 @@ def do_watch(request, deployment_id):
results = []
for raw in events:
uuid = raw.instance
uuid = raw.uuid
if not uuid:
uuid = "-"
typ = routing_key_type(raw.routing_key)
@ -325,10 +474,11 @@ def do_list_usage_launches(request):
return error_response(400, 'Bad Request', msg)
filter_args['instance'] = uuid
model = models.InstanceUsage.objects
if len(filter_args) > 0:
launches = models.InstanceUsage.objects.filter(**filter_args)
launches = model_search(request, model, filter_args)
else:
launches = models.InstanceUsage.objects.all()
launches = model_search(request, model, None)
results = [["UUID", "Launched At", "Instance Type Id"]]
@ -351,10 +501,11 @@ def do_list_usage_deletes(request):
return error_response(400, 'Bad Request', msg)
filter_args['instance'] = uuid
model = models.InstanceDeletes.objects
if len(filter_args) > 0:
deletes = models.InstanceDeletes.objects.filter(**filter_args)
deletes = model_search(request, model, filter_args)
else:
deletes = models.InstanceDeletes.objects.all()
deletes = model_search(request, model, None)
results = [["UUID", "Launched At", "Deleted At"]]
@ -380,10 +531,11 @@ def do_list_usage_exists(request):
return error_response(400, 'Bad Request', msg)
filter_args['instance'] = uuid
model = models.InstanceExists.objects
if len(filter_args) > 0:
exists = models.InstanceExists.objects.filter(**filter_args)
exists = model_search(request, model, filter_args)
else:
exists = models.InstanceExists.objects.all()
exists = model_search(request, model, None)
results = [["UUID", "Launched At", "Deleted At", "Instance Type Id",
"Message ID", "Status"]]
@ -409,8 +561,12 @@ def do_jsonreports(request):
now = dt.dt_to_decimal(now)
_from = request.GET.get('created_from', yesterday)
_to = request.GET.get('created_to', now)
reports = models.JsonReport.objects.filter(created__gte=_from,
created__lte=_to)
model = models.JsonReport.objects
filters = {
'created__gte': _from,
'created__lte': _to
}
reports = model_search(request, model, filters)
results = [['Id', 'Start', 'End', 'Created', 'Name', 'Version']]
for report in reports:
results.append([report.id,
@ -426,3 +582,23 @@ def do_jsonreport(request, report_id):
report_id = int(report_id)
report = get_object_or_404(models.JsonReport, pk=report_id)
return rsp(report.json)
def search(request):
    """Generic field=value search over a service's raw-data rows.

    Query params: service (nova|glance|generic, default 'nova'), field,
    value, plus optional when_min/when_max bounds and limit/offset paging.
    Returns JSON rows built by each event's search_results().
    """
    service = str(request.GET.get('service', 'nova'))
    field = request.GET.get('field')
    value = request.GET.get('value')
    model = _model_factory(service)
    filters = {field: value}
    _add_when_filters(request, filters)
    results = []
    try:
        events = model_search(request, model, filters)
        for event in events:
            when = dt.dt_from_decimal(event.when)
            routing_key_status = routing_key_type(event.routing_key)
            results = event.search_results(results, when, routing_key_status)
        return rsp(json.dumps(results))
    # Bug fix: 'except ObjectDoesNotExist or FieldError' evaluated the
    # boolean expression first, so only ObjectDoesNotExist was ever
    # caught and a bad ?field= raised a 500. A tuple catches both.
    except (ObjectDoesNotExist, FieldError):
        # Return valid JSON for the empty result, consistent with the
        # success path (was: rsp([])).
        return rsp(json.dumps([]))

View File

@ -19,39 +19,224 @@
# IN THE SOFTWARE.
from datetime import datetime
import unittest
from django.test import TransactionTestCase
import db
from stacktach.datetime_to_decimal import dt_to_decimal
from stacktach.models import RawDataImageMeta
from stacktach.models import RawDataImageMeta, ImageUsage, ImageDeletes
from stacktach.models import GenericRawData
from stacktach.models import GlanceRawData
from stacktach.models import RawData
from stacktach.models import get_model_fields
from stacktach import datetime_to_decimal as dt
class RawDataImageMetaDbTestCase(unittest.TestCase):
class RawDataImageMetaDbTestCase(TransactionTestCase):
def test_create_raw_data_should_populate_rawdata_and_rawdata_imagemeta(self):
deployment = db.get_or_create_deployment('deployment1')[0]
kwargs = {
'deployment': deployment,
'when': dt_to_decimal(datetime.utcnow()),
'tenant': '1', 'json': '{}', 'routing_key': 'monitor.info',
'state': 'verifying', 'old_state': 'pending',
'old_task': '', 'task': '', 'image_type': 1,
'publisher': '', 'event': 'compute.instance.exists',
'service': '', 'host': '', 'instance': '1234-5678-9012-3456',
'request_id': '1234', 'os_architecture': 'x86', 'os_version': '1',
'os_distro': 'windows', 'rax_options': '2'}
'tenant': '1',
'json': '{}',
'routing_key': 'monitor.info',
'state': 'verifying',
'old_state': 'pending',
'old_task': 'building',
'task': 'saving',
'image_type': 1,
'publisher': 'publisher',
'event': 'compute.instance.exists',
'service': 'compute',
'host': 'host',
'instance': '1234-5678-9012-3456',
'request_id': '1234',
'os_architecture': 'x86',
'os_version': '1',
'os_distro': 'windows',
'rax_options': '2'}
rawdata = db.create_rawdata(**kwargs)
rawdata = db.create_nova_rawdata(**kwargs)
for field in get_model_fields(RawData):
if field.name != 'id':
self.assertEquals(getattr(rawdata, field.name),
kwargs[field.name])
raw_image_meta = RawDataImageMeta.objects.all()[0]
self.assertEquals(raw_image_meta.raw, rawdata)
raw_image_meta = RawDataImageMeta.objects.filter(raw_id=rawdata.id)[0]
self.assertEquals(raw_image_meta.os_architecture,
kwargs['os_architecture'])
self.assertEquals(raw_image_meta.os_version, kwargs['os_version'])
self.assertEquals(raw_image_meta.os_distro, kwargs['os_distro'])
self.assertEquals(raw_image_meta.rax_options, kwargs['rax_options'])
class GlanceTestCase(TransactionTestCase):
    """DB-layer tests for glance raw data, image usage and image deletes."""

    def _create_glance_rawdata(self):
        # Shared fixture: insert one GlanceRawData row and return both the
        # kwargs used and the stored row for field-by-field comparison.
        deployment = db.get_or_create_deployment('deployment1')[0]
        kwargs = {
            'deployment': deployment,
            'when': dt_to_decimal(datetime.utcnow()),
            'owner': '1234567',
            'json': '{}',
            'routing_key': 'glance_monitor.info',
            'image_type': 1,
            'publisher': 'publisher',
            'event': 'event',
            'service': 'service',
            'host': 'host',
            'instance': '1234-5678-9012-3456',
            'request_id': '1234',
            'uuid': '1234-5678-0912-3456',
            'status': 'active',
        }
        db.create_glance_rawdata(**kwargs)
        rawdata = GlanceRawData.objects.all()[0]
        return kwargs, rawdata

    def test_create_rawdata_should_populate_glance_rawdata(self):
        # Every model field except the autogenerated id should round-trip.
        kwargs, rawdata = self._create_glance_rawdata()

        for field in get_model_fields(GlanceRawData):
            if field.name != 'id':
                self.assertEquals(getattr(rawdata, field.name),
                                  kwargs[field.name])

    def test_create_glance_usage_should_populate_image_usage(self):
        # create_image_usage should persist all provided values verbatim.
        _, rawdata = self._create_glance_rawdata()
        kwargs = {
            'uuid': '1',
            'created_at': dt_to_decimal(datetime.utcnow()),
            'owner': '1234567',
            'size': 12345,
            'last_raw': rawdata
        }
        db.create_image_usage(**kwargs)
        usage = ImageUsage.objects.all()[0]

        for field in get_model_fields(ImageUsage):
            if field.name != 'id':
                self.assertEquals(getattr(usage, field.name),
                                  kwargs[field.name])

    def test_create_image_delete_should_populate_image_delete(self):
        # create_image_delete should persist all provided values verbatim.
        _, rawdata = self._create_glance_rawdata()
        kwargs = {
            'uuid': '1',
            'raw': rawdata,
            'deleted_at': dt_to_decimal(datetime.utcnow())
        }
        db.create_image_delete(**kwargs)
        image_delete = ImageDeletes.objects.all()[0]

        for field in get_model_fields(ImageDeletes):
            if field.name != 'id':
                self.assertEquals(getattr(image_delete, field.name),
                                  kwargs[field.name])
class GenericRawDataTestCase(TransactionTestCase):
    """DB-layer test for the catch-all GenericRawData model."""

    def test_create_generic_rawdata_should_populate_generic_rawdata(self):
        # Every model field except the autogenerated id should round-trip.
        deployment = db.get_or_create_deployment('deployment1')[0]
        kwargs = {
            'deployment': deployment,
            'when': dt_to_decimal(datetime.utcnow()),
            'tenant': '1234567',
            'json': '{}',
            'routing_key': 'monitor.info',
            'publisher': 'publisher',
            'event': 'event',
            'service': 'service',
            'host': 'host',
            'instance': '1234-5678-9012-3456',
            'request_id': '1234',
            'message_id': 'message_id'}
        db.create_generic_rawdata(**kwargs)
        rawdata = GenericRawData.objects.all()[0]

        for field in get_model_fields(GenericRawData):
            if field.name != 'id':
                self.assertEquals(getattr(rawdata, field.name),
                                  kwargs[field.name])
class NovaRawDataSearchTestCase(TransactionTestCase):
    """Tests for the search_results() row formatting on each raw-data model.

    NOTE(review): each test passes {} as the initial results container and
    expects a list back — presumably search_results builds a fresh list
    when handed an empty container; confirm against the model code. The
    '1L' literals pin these tests to Python 2.
    """

    def test_search_results_for_nova(self):
        # RawData.search_results should emit the nova header plus one row.
        expected_result = [['#', '?', 'When', 'Deployment', 'Event', 'Host',
                            'State', "State'", "Task'"], [1L, ' ',
                           '2013-07-17 10:16:10.717219', 'depl', 'event',
                           'host', 'state', 'old_state', 'old_task']]
        depl = db.get_or_create_deployment('depl')[0]
        when = dt.dt_to_decimal(datetime.utcnow())
        raw = db.create_nova_rawdata(deployment=depl,
                                     routing_key='routing_key',
                                     tenant='tenant',
                                     json='json',
                                     when=when,
                                     publisher='publisher',
                                     event='event',
                                     service='nova',
                                     host='host',
                                     instance='instance',
                                     request_id='req-1234',
                                     state='state',
                                     old_state='old_state',
                                     task='task',
                                     old_task='old_task',
                                     os_architecture='arch',
                                     os_distro='distro',
                                     os_version='version',
                                     rax_options=1)
        results = raw.search_results({}, "2013-07-17 10:16:10.717219", ' ')
        self.assertEqual(results,expected_result)

    def test_search_results_for_glance(self):
        # GlanceRawData.search_results should emit the glance header
        # (Status column) plus one row.
        expected_result = [['#', '?', 'When', 'Deployment', 'Event', 'Host',
                            'Status'], [1L, ' ',
                           '2013-07-17 10:16:10.717219', 'depl', 'event',
                           'host', 'status']]
        depl = db.get_or_create_deployment('depl')[0]
        when = dt.dt_to_decimal(datetime.utcnow())
        glance_raw = db.create_glance_rawdata(deployment=depl,
                                              routing_key='routing_key',
                                              json='json',
                                              when=when,
                                              publisher='publisher',
                                              event='event',
                                              service='glance',
                                              host='host',
                                              uuid='instance',
                                              request_id='req-1234',
                                              status='status',
                                              image_type=1)
        results = glance_raw.search_results({}, "2013-07-17 10:16:10.717219",
                                            ' ')
        self.assertEqual(results,expected_result)

    def test_search_results_for_generic(self):
        # GenericRawData.search_results should emit the generic header
        # (Instance and Request id columns) plus one row.
        expected_result = [['#', '?', 'When', 'Deployment', 'Event', 'Host',
                            'Instance', 'Request id'], [1L, ' ',
                           '2013-07-17 10:16:10.717219', 'depl', 'event',
                           'host', 'instance', 'req-1234']]
        depl = db.get_or_create_deployment('depl')[0]
        when = dt.dt_to_decimal(datetime.utcnow())
        generic_raw = db.create_generic_rawdata(deployment=depl,
                                                routing_key='routing_key',
                                                json='json',
                                                when=when,
                                                publisher='publisher',
                                                event='event',
                                                service='glance',
                                                host='host',
                                                instance='instance',
                                                request_id='req-1234',
                                                tenant='tenant')
        results = generic_raw.search_results({}, "2013-07-17 10:16:10.717219",
                                             ' ')
        self.assertEqual(results,expected_result)

View File

@ -1,9 +1,8 @@
from django.conf.urls.defaults import patterns, include, url
from django.conf.urls import patterns, url
urlpatterns = patterns('',
url(r'^$', 'stacktach.views.welcome', name='welcome'),
url(r'stacky/deployments/$', 'stacktach.stacky_server.do_deployments'),
url(r'stacky/events/$', 'stacktach.stacky_server.do_events'),
url(r'stacky/hosts/$', 'stacktach.stacky_server.do_hosts'),
@ -19,6 +18,7 @@ urlpatterns = patterns('',
'stacktach.stacky_server.do_show'),
url(r'stacky/watch/(?P<deployment_id>\d+)/$',
'stacktach.stacky_server.do_watch'),
url(r'stacky/search/$', 'stacktach.stacky_server.search'),
url(r'stacky/kpi/$', 'stacktach.stacky_server.do_kpi'),
url(r'stacky/kpi/(?P<tenant_id>\w+)/$', 'stacktach.stacky_server.do_kpi'),
url(r'stacky/usage/launches/$',

View File

@ -12,7 +12,7 @@ from stacktach import db as stackdb
from stacktach import models
from stacktach import stacklog
from stacktach import utils
from stacktach.notification import Notification
from stacktach import notification
STACKDB = stackdb
@ -25,13 +25,6 @@ def log_warn(msg):
LOG.warn(msg)
# routing_key : handler
NOTIFICATIONS = {
'monitor.info': Notification,
'monitor.error': Notification}
def start_kpi_tracking(lifecycle, raw):
"""Start the clock for kpi timings when we see an instance.update
coming in from an api node."""
@ -170,17 +163,16 @@ INSTANCE_EVENT = {
}
def _process_usage_for_new_launch(raw, body):
payload = body['payload']
def _process_usage_for_new_launch(raw, notification):
values = {}
values['instance'] = payload['instance_id']
values['request_id'] = body['_context_request_id']
values['instance'] = notification.instance
values['request_id'] = notification.request_id
(usage, new) = STACKDB.get_or_create_instance_usage(**values)
if raw.event in [INSTANCE_EVENT['create_start'],
INSTANCE_EVENT['rebuild_start']]:
usage.instance_type_id = payload['instance_type_id']
usage.instance_type_id = notification.instance_type_id
if raw.event in [INSTANCE_EVENT['rebuild_start'],
INSTANCE_EVENT['resize_prep_start'],
@ -190,27 +182,23 @@ def _process_usage_for_new_launch(raw, body):
# we will have a launch record corresponding to the exists.
# We don't want to override a launched_at if it is already set
# though, because we may have already received the end event
usage.launched_at = utils.str_time_to_unix(payload['launched_at'])
usage.launched_at = utils.str_time_to_unix(notification.launched_at)
usage.tenant = payload['tenant_id']
image_meta = payload.get('image_meta', {})
usage.rax_options = image_meta.get('com.rackspace__1__options', '')
usage.os_architecture = image_meta.get('org.openstack__1__architecture',
'')
usage.os_version = image_meta.get('org.openstack__1__os_version', '')
usage.os_distro = image_meta.get('org.openstack__1__os_distro', '')
usage.tenant = notification.tenant
usage.rax_options = notification.rax_options
usage.os_architecture = notification.os_architecture
usage.os_version = notification.os_version
usage.os_distro = notification.os_distro
STACKDB.save(usage)
def _process_usage_for_updates(raw, body):
payload = body['payload']
def _process_usage_for_updates(raw, notification):
if raw.event == INSTANCE_EVENT['create_end']:
if 'message' in payload and payload['message'] != 'Success':
if notification.message and notification.message != 'Success':
return
instance_id = payload['instance_id']
request_id = body['_context_request_id']
instance_id = notification.instance
request_id = notification.request_id
(usage, new) = STACKDB.get_or_create_instance_usage(instance=instance_id,
request_id=request_id)
@ -218,28 +206,25 @@ def _process_usage_for_updates(raw, body):
INSTANCE_EVENT['rebuild_end'],
INSTANCE_EVENT['resize_finish_end'],
INSTANCE_EVENT['resize_revert_end']]:
usage.launched_at = utils.str_time_to_unix(payload['launched_at'])
usage.launched_at = utils.str_time_to_unix(notification.launched_at)
if raw.event == INSTANCE_EVENT['resize_revert_end']:
usage.instance_type_id = payload['instance_type_id']
usage.instance_type_id = notification.instance_type_id
elif raw.event == INSTANCE_EVENT['resize_prep_end']:
usage.instance_type_id = payload['new_instance_type_id']
usage.instance_type_id = notification.new_instance_type_id
usage.tenant = payload['tenant_id']
image_meta = payload.get('image_meta', {})
usage.rax_options = image_meta.get('com.rackspace__1__options', '')
usage.os_architecture = image_meta.get('org.openstack__1__architecture',
'')
usage.os_version = image_meta.get('org.openstack__1__os_version', '')
usage.os_distro = image_meta.get('org.openstack__1__os_distro', '')
usage.tenant = notification.tenant
usage.rax_options = notification.rax_options
usage.os_architecture = notification.os_architecture
usage.os_version = notification.os_version
usage.os_distro = notification.os_distro
STACKDB.save(usage)
def _process_delete(raw, body):
payload = body['payload']
instance_id = payload['instance_id']
deleted_at = utils.str_time_to_unix(payload['deleted_at'])
def _process_delete(raw, notification):
instance_id = notification.instance
deleted_at = utils.str_time_to_unix(notification.deleted_at)
values = {
'instance': instance_id,
'deleted_at': deleted_at,
@ -247,7 +232,7 @@ def _process_delete(raw, body):
(delete, new) = STACKDB.get_or_create_instance_delete(**values)
delete.raw = raw
launched_at = payload.get('launched_at')
launched_at = notification.launched_at
if launched_at and launched_at != '':
launched_at = utils.str_time_to_unix(launched_at)
delete.launched_at = launched_at
@ -255,37 +240,33 @@ def _process_delete(raw, body):
STACKDB.save(delete)
def _process_exists(raw, body):
payload = body['payload']
instance_id = payload['instance_id']
launched_at_str = payload.get('launched_at')
def _process_exists(raw, notification):
instance_id = notification.instance
launched_at_str = notification.launched_at
if launched_at_str is not None and launched_at_str != '':
launched_at = utils.str_time_to_unix(payload['launched_at'])
launched_at = utils.str_time_to_unix(notification.launched_at)
launched_range = (launched_at, launched_at+1)
usage = STACKDB.get_instance_usage(instance=instance_id,
launched_at__range=launched_range)
values = {}
values['message_id'] = body['message_id']
values['message_id'] = notification.message_id
values['instance'] = instance_id
values['launched_at'] = launched_at
beginning = utils.str_time_to_unix(payload['audit_period_beginning'])
beginning = utils.str_time_to_unix(notification.audit_period_beginning)
values['audit_period_beginning'] = beginning
ending = utils.str_time_to_unix(payload['audit_period_ending'])
ending = utils.str_time_to_unix(notification.audit_period_ending)
values['audit_period_ending'] = ending
values['instance_type_id'] = payload['instance_type_id']
values['instance_type_id'] = notification.instance_type_id
if usage:
values['usage'] = usage
values['raw'] = raw
values['tenant'] = payload['tenant_id']
image_meta = payload.get('image_meta', {})
values['rax_options'] = image_meta.get('com.rackspace__1__options', '')
os_arch = image_meta.get('org.openstack__1__architecture', '')
values['os_architecture'] = os_arch
os_version = image_meta.get('org.openstack__1__os_version', '')
values['os_version'] = os_version
values['os_distro'] = image_meta.get('org.openstack__1__os_distro', '')
values['tenant'] = notification.tenant
values['rax_options'] = notification.rax_options
values['os_architecture'] = notification.os_architecture
values['os_version'] = notification.os_version
values['os_distro'] = notification.os_distro
deleted_at = payload.get('deleted_at')
deleted_at = notification.deleted_at
if deleted_at and deleted_at != '':
# We only want to pre-populate the 'delete' if we know this is in
# fact an exist event for a deleted instance. Otherwise, there
@ -304,6 +285,17 @@ def _process_exists(raw, body):
stacklog.warn("Ignoring exists without launched_at. RawData(%s)" % raw.id)
def _process_glance_usage(raw, notification):
notification.save_usage(raw)
def _process_glance_delete(raw, notification):
notification.save_delete(raw)
def _process_glance_exists(raw, notification):
notification.save_exists(raw)
USAGE_PROCESS_MAPPING = {
INSTANCE_EVENT['create_start']: _process_usage_for_new_launch,
INSTANCE_EVENT['rebuild_start']: _process_usage_for_new_launch,
@ -315,36 +307,51 @@ USAGE_PROCESS_MAPPING = {
INSTANCE_EVENT['resize_finish_end']: _process_usage_for_updates,
INSTANCE_EVENT['resize_revert_end']: _process_usage_for_updates,
INSTANCE_EVENT['delete_end']: _process_delete,
INSTANCE_EVENT['exists']: _process_exists,
}
INSTANCE_EVENT['exists']: _process_exists
}
# Dispatch table for glance events: maps the notification's event_type
# string to the handler that persists the matching usage/delete/exists
# record. Consulted by aggregate_glance_usage().
GLANCE_USAGE_PROCESS_MAPPING = {
    'image.activate': _process_glance_usage,
    'image.delete': _process_glance_delete,
    'image.exists': _process_glance_exists
}
def aggregate_usage(raw, body):
def aggregate_usage(raw, notification):
if not raw.instance:
return
if raw.event in USAGE_PROCESS_MAPPING:
USAGE_PROCESS_MAPPING[raw.event](raw, body)
USAGE_PROCESS_MAPPING[raw.event](raw, notification)
def process_raw_data(deployment, args, json_args):
def aggregate_glance_usage(raw, body):
    """Dispatch a stored glance event to its usage handler, if any.

    :param raw: the GlanceRawData row the event was recorded as; its
        ``event`` attribute selects the handler.
    :param body: the parsed notification object handed to the handler.

    Events with no entry in GLANCE_USAGE_PROCESS_MAPPING are ignored.
    """
    # Membership test directly against the dict; the previous
    # `in GLANCE_USAGE_PROCESS_MAPPING.keys()` built a list and scanned
    # it linearly under Python 2.
    if raw.event in GLANCE_USAGE_PROCESS_MAPPING:
        GLANCE_USAGE_PROCESS_MAPPING[raw.event](raw, body)
def process_raw_data(deployment, args, json_args, exchange):
"""This is called directly by the worker to add the event to the db."""
db.reset_queries()
routing_key, body = args
record = None
notification = NOTIFICATIONS[routing_key](body)
if notification:
values = notification.rawdata_kwargs(deployment, routing_key, json_args)
if not values:
return record
record = STACKDB.create_rawdata(**values)
return record
notif = notification.notification_factory(body, deployment, routing_key,
json_args, exchange)
raw = notif.save()
return raw, notif
def post_process(raw, body):
def post_process_rawdata(raw, notification):
aggregate_lifecycle(raw)
aggregate_usage(raw, body)
aggregate_usage(raw, notification)
def post_process_glancerawdata(raw, notification):
    """Post-save hook for glance raw records: run usage aggregation."""
    aggregate_glance_usage(raw, notification)
def post_process_genericrawdata(raw, notification):
    """Post-save hook for generic raw records; intentionally a no-op."""
def _post_process_raw_data(rows, highlight=None):

View File

@ -19,7 +19,9 @@
# IN THE SOFTWARE.
import os
import re
import sys
import unittest
def setup_sys_path():
@ -44,4 +46,58 @@ setup_environment()
from stacktach import stacklog
stacklog.set_default_logger_location("%s.log")
stacklog.set_default_logger_location("/tmp/%s.log")
class _AssertRaisesContext(object):
"""A context manager used to implement TestCase.assertRaises* methods."""
def __init__(self, expected, test_case, expected_regexp=None):
self.expected = expected
self.failureException = test_case.failureException
self.expected_regexp = expected_regexp
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
if exc_type is None:
try:
exc_name = self.expected.__name__
except AttributeError:
exc_name = str(self.expected)
raise self.failureException(
"{0} not raised".format(exc_name))
if not issubclass(exc_type, self.expected):
# let unexpected exceptions pass through
return False
self.exception = exc_value # store for later retrieval
if self.expected_regexp is None:
return True
expected_regexp = self.expected_regexp
if isinstance(expected_regexp, basestring):
expected_regexp = re.compile(expected_regexp)
if not expected_regexp.search(str(exc_value)):
raise self.failureException('"%s" does not match "%s"' %
(expected_regexp.pattern, str(exc_value)))
return True
class StacktachBaseTestCase(unittest.TestCase):
    """TestCase base with assertion helpers backported for unittest
    versions that predate Python 2.7 (assertIsNone and friends)."""

    def assertIsNotNone(self, obj, msg=None):
        # Route through assertTrue so failures use unittest's reporting.
        self.assertTrue(obj is not None, msg)

    def assertIsNone(self, obj, msg=None):
        self.assertTrue(obj is None, msg)

    def assertIsInstance(self, obj, cls, msg=None):
        self.assertTrue(isinstance(obj, cls), msg)

    def assertRaises(self, excClass, callableObj=None, *args, **kwargs):
        # With no callable, hand back a context manager for `with` use;
        # otherwise invoke the callable inside that same context.
        ctx = _AssertRaisesContext(excClass, self)
        if callableObj is None:
            return ctx
        with ctx:
            callableObj(*args, **kwargs)

View File

@ -0,0 +1,250 @@
import datetime
import time
from django.db import transaction
import mox
from stacktach import message_service
from tests.unit import StacktachBaseTestCase
from tests.unit.utils import HOST, PORT, VIRTUAL_HOST, USERID, PASSWORD, TICK_TIME, SETTLE_TIME, SETTLE_UNITS
from tests.unit.utils import make_verifier_config
from verifier import base_verifier
class BaseVerifierTestCase(StacktachBaseTestCase):
    """Unit tests for verifier.base_verifier.Verifier.

    Exercises clean_results() bookkeeping over pool async-results and the
    run() loop with notifications enabled/disabled. Every collaborator
    (pool results, django transaction, message_service, time.sleep) is a
    mox mock; the expectation ORDER below is the contract being tested.
    """
    def setUp(self):
        self.mox = mox.Mox()
        config = make_verifier_config(False)
        self.pool = self.mox.CreateMockAnything()
        self.reconciler = self.mox.CreateMockAnything()
        # Verifier wired with an explicit reconciler mock.
        self.verifier_with_reconciler = base_verifier.Verifier(config,
            pool=self.pool, reconciler=self.reconciler)
        self.verifier_without_notifications = self\
            ._verifier_with_notifications_disabled()
        self.verifier_with_notifications = self\
            ._verifier_with_notifications_enabled()

    def _verifier_with_notifications_disabled(self):
        # Verifier built from a config with notifications turned off.
        config = make_verifier_config(False)
        reconciler = self.mox.CreateMockAnything()
        return base_verifier.Verifier(config,
                                      pool=self.pool,
                                      reconciler=reconciler)

    def _verifier_with_notifications_enabled(self):
        # Verifier built from a config with notifications turned on.
        config = make_verifier_config(True)
        reconciler = self.mox.CreateMockAnything()
        return base_verifier.Verifier(config,
                                      pool=self.pool,
                                      reconciler=reconciler)

    def tearDown(self):
        self.mox.UnsetStubs()

    def test_should_create_verifier_with_reconciler(self):
        config = make_verifier_config(False)
        rec = self.mox.CreateMockAnything()
        verifier = base_verifier.Verifier(config, pool=None, reconciler=rec)
        self.assertEqual(verifier.reconciler, rec)

    def test_clean_results_full(self):
        # One result in each state: pending, errored, verified-ok, and
        # verified-but-failed; clean_results must tally each correctly.
        result_not_ready = self.mox.CreateMockAnything()
        result_not_ready.ready().AndReturn(False)
        result_unsuccessful = self.mox.CreateMockAnything()
        result_unsuccessful.ready().AndReturn(True)
        result_unsuccessful.successful().AndReturn(False)
        result_successful = self.mox.CreateMockAnything()
        result_successful.ready().AndReturn(True)
        result_successful.successful().AndReturn(True)
        result_successful.get().AndReturn((True, None))
        result_failed_verification = self.mox.CreateMockAnything()
        result_failed_verification.ready().AndReturn(True)
        result_failed_verification.successful().AndReturn(True)
        failed_exists = self.mox.CreateMockAnything()
        result_failed_verification.get().AndReturn((False, failed_exists))
        self.verifier_with_reconciler.results = [result_not_ready,
                                                 result_unsuccessful,
                                                 result_successful,
                                                 result_failed_verification]
        self.mox.ReplayAll()
        (result_count, success_count, errored) = self.verifier_with_reconciler.clean_results()
        self.assertEqual(result_count, 1)
        self.assertEqual(success_count, 2)
        self.assertEqual(errored, 1)
        self.assertEqual(len(self.verifier_with_reconciler.results), 1)
        self.assertEqual(self.verifier_with_reconciler.results[0], result_not_ready)
        self.assertEqual(len(self.verifier_with_reconciler.failed), 1)
        # NOTE(review): here failed[0] is expected to be the async result
        # object, while test_clean_results_fail_verification below expects
        # the exists row itself -- confirm which one clean_results stores.
        self.assertEqual(self.verifier_with_reconciler.failed[0], result_failed_verification)
        self.mox.VerifyAll()

    def test_clean_results_pending(self):
        # A not-ready result stays queued and counts toward result_count.
        result_not_ready = self.mox.CreateMockAnything()
        result_not_ready.ready().AndReturn(False)
        self.verifier_with_reconciler.results = [result_not_ready]
        self.mox.ReplayAll()
        (result_count, success_count, errored) = self.verifier_with_reconciler.clean_results()
        self.assertEqual(result_count, 1)
        self.assertEqual(success_count, 0)
        self.assertEqual(errored, 0)
        self.assertEqual(len(self.verifier_with_reconciler.results), 1)
        self.assertEqual(self.verifier_with_reconciler.results[0], result_not_ready)
        self.assertEqual(len(self.verifier_with_reconciler.failed), 0)
        self.mox.VerifyAll()

    def test_clean_results_successful(self):
        # A successfully-verified result is removed and counted.
        self.verifier_with_reconciler.reconcile = True
        result_successful = self.mox.CreateMockAnything()
        result_successful.ready().AndReturn(True)
        result_successful.successful().AndReturn(True)
        result_successful.get().AndReturn((True, None))
        self.verifier_with_reconciler.results = [result_successful]
        self.mox.ReplayAll()
        (result_count, success_count, errored) = self.verifier_with_reconciler.clean_results()
        self.assertEqual(result_count, 0)
        self.assertEqual(success_count, 1)
        self.assertEqual(errored, 0)
        self.assertEqual(len(self.verifier_with_reconciler.results), 0)
        self.assertEqual(len(self.verifier_with_reconciler.failed), 0)
        self.mox.VerifyAll()

    def test_clean_results_unsuccessful(self):
        # A result whose worker errored counts as errored, not failed.
        result_unsuccessful = self.mox.CreateMockAnything()
        result_unsuccessful.ready().AndReturn(True)
        result_unsuccessful.successful().AndReturn(False)
        self.verifier_with_reconciler.results = [result_unsuccessful]
        self.mox.ReplayAll()
        (result_count, success_count, errored) = \
            self.verifier_with_reconciler.clean_results()
        self.assertEqual(result_count, 0)
        self.assertEqual(success_count, 0)
        self.assertEqual(errored, 1)
        self.assertEqual(len(self.verifier_with_reconciler.results), 0)
        self.assertEqual(len(self.verifier_with_reconciler.failed), 0)
        self.mox.VerifyAll()

    def test_clean_results_fail_verification(self):
        # A verification failure lands the exists row in .failed.
        result_failed_verification = self.mox.CreateMockAnything()
        result_failed_verification.ready().AndReturn(True)
        result_failed_verification.successful().AndReturn(True)
        failed_exists = self.mox.CreateMockAnything()
        result_failed_verification.get().AndReturn((False, failed_exists))
        self.verifier_with_reconciler.results = [result_failed_verification]
        self.mox.ReplayAll()
        (result_count, success_count, errored) = \
            self.verifier_with_reconciler.clean_results()
        self.assertEqual(result_count, 0)
        self.assertEqual(success_count, 1)
        self.assertEqual(errored, 0)
        self.assertEqual(len(self.verifier_with_reconciler.results), 0)
        self.assertEqual(len(self.verifier_with_reconciler.failed), 1)
        self.assertEqual(self.verifier_with_reconciler.failed[0], failed_exists)
        self.mox.VerifyAll()

    def test_run_notifications(self):
        # With notifications on, run() builds the exchange/connection and
        # hands a non-None callback into _run().
        self._mock_exchange_create_and_connect(self.verifier_with_notifications)
        self.mox.StubOutWithMock(self.verifier_with_notifications, '_run')
        self.verifier_with_notifications._run(callback=mox.Not(mox.Is(None)))
        self.mox.ReplayAll()
        self.verifier_with_notifications.run()
        self.mox.VerifyAll()

    def test_run_notifications_with_routing_keys(self):
        # NOTE(review): body is identical to test_run_notifications; no
        # routing-key-specific expectation is actually set -- confirm
        # whether this test was meant to assert more.
        self._mock_exchange_create_and_connect(self.verifier_with_notifications)
        self.mox.StubOutWithMock(self.verifier_with_notifications, '_run')
        self.verifier_with_notifications._run(callback=mox.Not(mox.Is(None)))
        self.mox.ReplayAll()
        self.verifier_with_notifications.run()
        self.mox.VerifyAll()

    def test_run_no_notifications(self):
        # With notifications off, _run() is invoked without a callback.
        self.mox.StubOutWithMock(self.verifier_without_notifications, '_run')
        self.verifier_without_notifications._run()
        self.mox.ReplayAll()
        self.verifier_without_notifications.run()
        self.mox.VerifyAll()

    def test_run_full_no_notifications(self):
        # Full single pass of the loop: transaction context, settle-time
        # offset, verify_for_range, result cleanup, reconcile, sleep.
        self.mox.StubOutWithMock(transaction, 'commit_on_success')
        tran = self.mox.CreateMockAnything()
        tran.__enter__().AndReturn(tran)
        tran.__exit__(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
        transaction.commit_on_success().AndReturn(tran)
        self.mox.StubOutWithMock(self.verifier_without_notifications, '_keep_running')
        self.verifier_without_notifications._keep_running().AndReturn(True)
        start = datetime.datetime.utcnow()
        self.mox.StubOutWithMock(self.verifier_without_notifications, '_utcnow')
        self.verifier_without_notifications._utcnow().AndReturn(start)
        settle_offset = {SETTLE_UNITS: SETTLE_TIME}
        ending_max = start - datetime.timedelta(**settle_offset)
        self.mox.StubOutWithMock(self.verifier_without_notifications, 'verify_for_range')
        self.verifier_without_notifications.verify_for_range(ending_max, callback=None)
        self.mox.StubOutWithMock(self.verifier_without_notifications, 'reconcile_failed')
        result1 = self.mox.CreateMockAnything()
        result2 = self.mox.CreateMockAnything()
        self.verifier_without_notifications.results = [result1, result2]
        result1.ready().AndReturn(True)
        result1.successful().AndReturn(True)
        result1.get().AndReturn((True, None))
        result2.ready().AndReturn(True)
        result2.successful().AndReturn(True)
        result2.get().AndReturn((True, None))
        self.verifier_without_notifications.reconcile_failed()
        self.mox.StubOutWithMock(time, 'sleep', use_mock_anything=True)
        time.sleep(TICK_TIME)
        # Second _keep_running() returning False ends the loop.
        self.verifier_without_notifications._keep_running().AndReturn(False)
        self.mox.ReplayAll()
        self.verifier_without_notifications.run()
        self.mox.VerifyAll()

    def test_run_full(self):
        # Same full pass as above, but with notification machinery wired
        # in and a non-None callback expected on verify_for_range.
        self.mox.StubOutWithMock(transaction, 'commit_on_success')
        tran = self.mox.CreateMockAnything()
        tran.__enter__().AndReturn(tran)
        tran.__exit__(mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
        transaction.commit_on_success().AndReturn(tran)
        self._mock_exchange_create_and_connect(self.verifier_with_notifications)
        self.verifier_with_notifications.exchange().AndReturn('exchange')
        self.mox.StubOutWithMock(self.verifier_with_notifications, '_keep_running')
        self.verifier_with_notifications._keep_running().AndReturn(True)
        start = datetime.datetime.utcnow()
        self.mox.StubOutWithMock(self.verifier_with_notifications, '_utcnow')
        self.verifier_with_notifications._utcnow().AndReturn(start)
        settle_offset = {SETTLE_UNITS: SETTLE_TIME}
        ending_max = start - datetime.timedelta(**settle_offset)
        self.mox.StubOutWithMock(self.verifier_with_notifications, 'verify_for_range')
        self.verifier_with_notifications.verify_for_range(ending_max,
                                                          callback=mox.Not(mox.Is(None)))
        self.mox.StubOutWithMock(self.verifier_with_notifications, 'reconcile_failed')
        result1 = self.mox.CreateMockAnything()
        result2 = self.mox.CreateMockAnything()
        self.verifier_with_notifications.results = [result1, result2]
        result1.ready().AndReturn(True)
        result1.successful().AndReturn(True)
        result1.get().AndReturn((True, None))
        result2.ready().AndReturn(True)
        result2.successful().AndReturn(True)
        result2.get().AndReturn((True, None))
        self.verifier_with_notifications.reconcile_failed()
        self.mox.StubOutWithMock(time, 'sleep', use_mock_anything=True)
        time.sleep(TICK_TIME)
        self.verifier_with_notifications._keep_running().AndReturn(False)
        self.mox.ReplayAll()
        self.verifier_with_notifications.run()
        self.mox.VerifyAll()

    def _mock_exchange_create_and_connect(self, verifier):
        # Shared expectations for exchange + connection setup.
        # NOTE(review): stubs 'exchange' on the *passed-in* verifier but
        # records the expectation on self.verifier_with_notifications --
        # this only works because every caller passes that instance;
        # confirm before reusing with a different verifier.
        self.mox.StubOutWithMock(verifier, 'exchange')
        self.verifier_with_notifications.exchange().AndReturn('exchange')
        self.mox.StubOutWithMock(message_service, 'create_exchange')
        exchange = self.mox.CreateMockAnything()
        message_service.create_exchange('exchange', 'topic', durable=True) \
            .AndReturn(exchange)
        self.mox.StubOutWithMock(message_service, 'create_connection')
        conn = self.mox.CreateMockAnything()
        conn.__enter__().AndReturn(conn)
        conn.__exit__(None, None, None)
        message_service.create_connection(HOST, PORT, USERID,
                                          PASSWORD, "librabbitmq",
                                          VIRTUAL_HOST).AndReturn(conn)

View File

@ -20,11 +20,11 @@
import datetime
import decimal
import unittest
from stacktach import datetime_to_decimal
from tests.unit import StacktachBaseTestCase
class DatetimeToDecimalTestCase(unittest.TestCase):
class DatetimeToDecimalTestCase(StacktachBaseTestCase):
def test_datetime_to_decimal(self):
expected_decimal = decimal.Decimal('1356093296.123')

View File

@ -20,7 +20,6 @@
import datetime
import json
import unittest
from django.db.models import FieldDoesNotExist
from django.db import transaction
@ -29,13 +28,14 @@ import mox
from stacktach import dbapi
from stacktach import models
from stacktach import utils as stacktach_utils
from tests.unit import StacktachBaseTestCase
import utils
from utils import INSTANCE_ID_1
from utils import MESSAGE_ID_1
from utils import MESSAGE_ID_2
class DBAPITestCase(unittest.TestCase):
class DBAPITestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
dne_exception = models.InstanceExists.DoesNotExist
@ -195,7 +195,7 @@ class DBAPITestCase(unittest.TestCase):
def test_get_db_objects_limit(self):
fake_model = self.make_fake_model()
fake_request = self.mox.CreateMockAnything()
fake_request.GET = {'limit': 1}
fake_request.GET = {'limit': '1'}
self.mox.StubOutWithMock(dbapi, '_get_filter_args')
dbapi._get_filter_args(fake_model, fake_request,
custom_filters=None).AndReturn({})
@ -215,7 +215,7 @@ class DBAPITestCase(unittest.TestCase):
def test_get_db_objects_hard_limit(self):
fake_model = self.make_fake_model()
fake_request = self.mox.CreateMockAnything()
fake_request.GET = {'limit': dbapi.HARD_LIMIT + 1}
fake_request.GET = {'limit': str(dbapi.HARD_LIMIT + 1)}
self.mox.StubOutWithMock(dbapi, '_get_filter_args')
dbapi._get_filter_args(fake_model, fake_request,
custom_filters=None).AndReturn({})
@ -236,7 +236,7 @@ class DBAPITestCase(unittest.TestCase):
def test_get_db_objects_offset(self):
fake_model = self.make_fake_model()
fake_request = self.mox.CreateMockAnything()
fake_request.GET = {'offset': 1}
fake_request.GET = {'offset': '1'}
self.mox.StubOutWithMock(dbapi, '_get_filter_args')
dbapi._get_filter_args(fake_model, fake_request,
custom_filters=None).AndReturn({})
@ -256,7 +256,7 @@ class DBAPITestCase(unittest.TestCase):
def test_get_db_objects_offset_and_limit(self):
fake_model = self.make_fake_model()
fake_request = self.mox.CreateMockAnything()
fake_request.GET = {'offset': 2, 'limit': 2}
fake_request.GET = {'offset': '2', 'limit': '2'}
self.mox.StubOutWithMock(dbapi, '_get_filter_args')
dbapi._get_filter_args(fake_model, fake_request,
custom_filters=None).AndReturn({})

View File

@ -0,0 +1,416 @@
# Copyright (c) 2013 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from datetime import datetime
import decimal
import json
import uuid
import kombu
import mox
from stacktach import datetime_to_decimal as dt
from stacktach import models
from tests.unit import StacktachBaseTestCase
from utils import IMAGE_UUID_1
from utils import make_verifier_config
from verifier import glance_verifier
from verifier import FieldMismatch
from verifier import NotFound
from verifier import VerificationException
class GlanceVerifierTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
self.mox.StubOutWithMock(models, 'ImageUsage', use_mock_anything=True)
models.ImageUsage.objects = self.mox.CreateMockAnything()
self.pool = self.mox.CreateMockAnything()
config = make_verifier_config(False)
self.glance_verifier = glance_verifier.GlanceVerifier(config,
pool=self.pool)
self.mox.StubOutWithMock(models, 'ImageDeletes',
use_mock_anything=True)
models.ImageDeletes.objects = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(models, 'ImageExists',
use_mock_anything=True)
def tearDown(self):
self.mox.UnsetStubs()
self.verifier = None
def test_verify_usage_should_not_raise_exception_on_success(self):
exist = self.mox.CreateMockAnything()
exist.created_at = decimal.Decimal('1.1')
exist.owner = 'owner'
exist.size = 1234
exist.usage = self.mox.CreateMockAnything()
exist.usage.created_at = decimal.Decimal('1.1')
exist.usage.size = 1234
exist.usage.owner = 'owner'
self.mox.ReplayAll()
glance_verifier._verify_for_usage(exist)
self.mox.VerifyAll()
def test_verify_usage_created_at_mismatch(self):
exist = self.mox.CreateMockAnything()
exist.usage = self.mox.CreateMockAnything()
exist.created_at = decimal.Decimal('1.1')
exist.usage.created_at = decimal.Decimal('2.1')
self.mox.ReplayAll()
with self.assertRaises(FieldMismatch) as cm:
glance_verifier._verify_for_usage(exist)
exception = cm.exception
self.assertEqual(exception.field_name, 'created_at')
self.assertEqual(exception.expected, decimal.Decimal('1.1'))
self.assertEqual(exception.actual, decimal.Decimal('2.1'))
self.mox.VerifyAll()
def test_verify_usage_owner_mismatch(self):
exist = self.mox.CreateMockAnything()
exist.usage = self.mox.CreateMockAnything()
exist.owner = 'owner'
exist.usage.owner = 'not_owner'
self.mox.ReplayAll()
with self.assertRaises(FieldMismatch) as cm:
glance_verifier._verify_for_usage(exist)
exception = cm.exception
self.assertEqual(exception.field_name, 'owner')
self.assertEqual(exception.expected, 'owner')
self.assertEqual(exception.actual, 'not_owner')
self.mox.VerifyAll()
def test_verify_usage_size_mismatch(self):
exist = self.mox.CreateMockAnything()
exist.size = 1234
exist.usage = self.mox.CreateMockAnything()
exist.usage.size = 5678
self.mox.ReplayAll()
with self.assertRaises(FieldMismatch) as cm:
glance_verifier._verify_for_usage(exist)
exception = cm.exception
self.assertEqual(exception.field_name, 'size')
self.assertEqual(exception.expected, 1234)
self.assertEqual(exception.actual, 5678)
self.mox.VerifyAll()
def test_verify_usage_for_late_usage(self):
exist = self.mox.CreateMockAnything()
exist.usage = None
exist.uuid = IMAGE_UUID_1
exist.created_at = decimal.Decimal('1.1')
results = self.mox.CreateMockAnything()
models.ImageUsage.objects.filter(uuid=IMAGE_UUID_1)\
.AndReturn(results)
results.count().AndReturn(1)
usage = self.mox.CreateMockAnything()
results.__getitem__(0).AndReturn(usage)
usage.created_at = decimal.Decimal('1.1')
self.mox.ReplayAll()
glance_verifier._verify_for_usage(exist)
self.mox.VerifyAll()
def test_verify_usage_raises_not_found_for_no_usage(self):
exist = self.mox.CreateMockAnything()
exist.usage = None
exist.uuid = IMAGE_UUID_1
exist.created_at = decimal.Decimal('1.1')
results = self.mox.CreateMockAnything()
models.ImageUsage.objects.filter(uuid=IMAGE_UUID_1) \
.AndReturn(results)
results.count().AndReturn(0)
self.mox.ReplayAll()
with self.assertRaises(NotFound) as cm:
glance_verifier._verify_for_usage(exist)
exception = cm.exception
self.assertEqual(exception.object_type, 'ImageUsage')
self.assertEqual(exception.search_params, {'uuid': IMAGE_UUID_1})
self.mox.VerifyAll()
def test_verify_delete(self):
exist = self.mox.CreateMockAnything()
exist.delete = self.mox.CreateMockAnything()
exist.deleted_at = decimal.Decimal('5.1')
exist.delete.deleted_at = decimal.Decimal('5.1')
self.mox.ReplayAll()
glance_verifier._verify_for_delete(exist)
self.mox.VerifyAll()
def test_verify_delete_when_late_delete(self):
exist = self.mox.CreateMockAnything()
exist.uuid = IMAGE_UUID_1
exist.delete = None
exist.deleted_at = decimal.Decimal('5.1')
results = self.mox.CreateMockAnything()
models.ImageDeletes.find(uuid=IMAGE_UUID_1).AndReturn(results)
results.count().AndReturn(1)
delete = self.mox.CreateMockAnything()
delete.deleted_at = decimal.Decimal('5.1')
results.__getitem__(0).AndReturn(delete)
self.mox.ReplayAll()
glance_verifier._verify_for_delete(exist)
self.mox.VerifyAll()
def test_verify_delete_when_no_delete(self):
exist = self.mox.CreateMockAnything()
exist.delete = None
exist.uuid = IMAGE_UUID_1
exist.deleted_at = None
audit_period_ending = decimal.Decimal('1.2')
exist.audit_period_ending = audit_period_ending
results = self.mox.CreateMockAnything()
models.ImageDeletes.find(
IMAGE_UUID_1, dt.dt_from_decimal(audit_period_ending)).AndReturn(
results)
results.count().AndReturn(0)
self.mox.ReplayAll()
glance_verifier._verify_for_delete(exist)
self.mox.VerifyAll()
def test_verify_delete_found_delete_when_exist_deleted_at_is_none(self):
exist = self.mox.CreateMockAnything()
exist.delete = None
exist.uuid = IMAGE_UUID_1
audit_period_ending = decimal.Decimal('1.3')
exist.deleted_at = None
exist.audit_period_ending = audit_period_ending
results = self.mox.CreateMockAnything()
models.ImageDeletes.find(
IMAGE_UUID_1, dt.dt_from_decimal(audit_period_ending)).AndReturn(
results)
results.count().AndReturn(1)
self.mox.ReplayAll()
with self.assertRaises(VerificationException) as ve:
glance_verifier._verify_for_delete(exist)
exception = ve.exception
self.assertEqual(exception.reason,
'Found ImageDeletes for non-delete exist')
self.mox.VerifyAll()
def test_verify_delete_deleted_at_mismatch(self):
exist = self.mox.CreateMockAnything()
exist.delete = self.mox.CreateMockAnything()
exist.deleted_at = decimal.Decimal('5.1')
exist.delete.deleted_at = decimal.Decimal('4.1')
self.mox.ReplayAll()
with self.assertRaises(FieldMismatch) as fm:
glance_verifier._verify_for_delete(exist)
exception = fm.exception
self.assertEqual(exception.field_name, 'deleted_at')
self.assertEqual(exception.expected, decimal.Decimal('5.1'))
self.assertEqual(exception.actual, decimal.Decimal('4.1'))
self.mox.VerifyAll()
def test_verify_for_delete_size_mismatch(self):
exist = self.mox.CreateMockAnything()
exist.delete = self.mox.CreateMockAnything()
exist.launched_at = decimal.Decimal('1.1')
exist.deleted_at = decimal.Decimal('5.1')
exist.delete.launched_at = decimal.Decimal('1.1')
exist.delete.deleted_at = decimal.Decimal('6.1')
self.mox.ReplayAll()
try:
glance_verifier._verify_for_delete(exist)
self.fail()
except FieldMismatch, fm:
self.assertEqual(fm.field_name, 'deleted_at')
self.assertEqual(fm.expected, decimal.Decimal('5.1'))
self.assertEqual(fm.actual, decimal.Decimal('6.1'))
self.mox.VerifyAll()
def test_verify_should_verify_exists_for_usage_and_delete(self):
exist = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(glance_verifier, '_verify_for_usage')
glance_verifier._verify_for_usage(exist)
self.mox.StubOutWithMock(glance_verifier, '_verify_for_delete')
glance_verifier._verify_for_delete(exist)
exist.mark_verified()
self.mox.ReplayAll()
verified, exist = glance_verifier._verify(exist)
self.mox.VerifyAll()
self.assertTrue(verified)
def test_verify_exist_marks_exist_as_failed_if_field_mismatch_exception_is_raised(self):
exist = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(glance_verifier, '_verify_for_usage')
field_mismatch_exc = FieldMismatch('field', 'expected', 'actual')
glance_verifier._verify_for_usage(exist).AndRaise(exception=field_mismatch_exc)
exist.mark_failed(reason='FieldMismatch')
self.mox.ReplayAll()
verified, exist = glance_verifier._verify(exist)
self.mox.VerifyAll()
self.assertFalse(verified)
def test_verify_for_range_without_callback(self):
when_max = datetime.utcnow()
results = self.mox.CreateMockAnything()
models.ImageExists.PENDING = 'pending'
models.ImageExists.VERIFYING = 'verifying'
self.mox.StubOutWithMock(models.ImageExists, 'find')
models.ImageExists.find(
ending_max=when_max,
status=models.ImageExists.PENDING).AndReturn(results)
results.count().AndReturn(2)
exist1 = self.mox.CreateMockAnything()
exist2 = self.mox.CreateMockAnything()
results.__getslice__(0, 1000).AndReturn(results)
results.__iter__().AndReturn([exist1, exist2].__iter__())
exist1.save()
exist2.save()
self.pool.apply_async(glance_verifier._verify, args=(exist1,),
callback=None)
self.pool.apply_async(glance_verifier._verify, args=(exist2,),
callback=None)
self.mox.ReplayAll()
self.glance_verifier.verify_for_range(when_max)
self.assertEqual(exist1.status, 'verifying')
self.assertEqual(exist2.status, 'verifying')
self.mox.VerifyAll()
def test_verify_for_range_with_callback(self):
callback = self.mox.CreateMockAnything()
when_max = datetime.utcnow()
results = self.mox.CreateMockAnything()
models.ImageExists.PENDING = 'pending'
models.ImageExists.VERIFYING = 'verifying'
models.ImageExists.find(
ending_max=when_max,
status=models.ImageExists.PENDING).AndReturn(results)
results.count().AndReturn(2)
exist1 = self.mox.CreateMockAnything()
exist2 = self.mox.CreateMockAnything()
results.__getslice__(0, 1000).AndReturn(results)
results.__iter__().AndReturn([exist1, exist2].__iter__())
exist1.save()
exist2.save()
self.pool.apply_async(glance_verifier._verify, args=(exist1,),
callback=callback)
self.pool.apply_async(glance_verifier._verify, args=(exist2,),
callback=callback)
self.mox.ReplayAll()
self.glance_verifier.verify_for_range(
when_max, callback=callback)
self.assertEqual(exist1.status, 'verifying')
self.assertEqual(exist2.status, 'verifying')
self.mox.VerifyAll()
def test_send_verified_notification_routing_keys(self):
connection = self.mox.CreateMockAnything()
exchange = self.mox.CreateMockAnything()
exist = self.mox.CreateMockAnything()
exist.raw = self.mox.CreateMockAnything()
exist_dict = [
'monitor.info',
{
'event_type': 'test',
'message_id': 'some_uuid'
}
]
exist_str = json.dumps(exist_dict)
exist.raw.json = exist_str
self.mox.StubOutWithMock(uuid, 'uuid4')
uuid.uuid4().AndReturn('some_other_uuid')
self.mox.StubOutWithMock(kombu.pools, 'producers')
self.mox.StubOutWithMock(kombu.common, 'maybe_declare')
routing_keys = ['notifications.info', 'monitor.info']
for key in routing_keys:
producer = self.mox.CreateMockAnything()
producer.channel = self.mox.CreateMockAnything()
kombu.pools.producers[connection].AndReturn(producer)
producer.acquire(block=True).AndReturn(producer)
producer.__enter__().AndReturn(producer)
kombu.common.maybe_declare(exchange, producer.channel)
message = {'event_type': 'image.exists.verified.old',
'message_id': 'some_other_uuid',
'original_message_id': 'some_uuid'}
producer.publish(message, key)
producer.__exit__(None, None, None)
self.mox.ReplayAll()
self.glance_verifier.send_verified_notification(
exist, exchange, connection, routing_keys=routing_keys)
self.mox.VerifyAll()
    def test_send_verified_notification_default_routing_key(self):
        """Without routing_keys, publish falls back to the raw's own key.

        Only a single publish is expected, using the routing key stored
        in position 0 of the raw notification json.
        """
        connection = self.mox.CreateMockAnything()
        exchange = self.mox.CreateMockAnything()
        exist = self.mox.CreateMockAnything()
        exist.raw = self.mox.CreateMockAnything()
        # Raw notification as stored: [routing_key, message_body].
        exist_dict = [
            'monitor.info',
            {
                'event_type': 'test',
                'message_id': 'some_uuid'
            }
        ]
        exist_str = json.dumps(exist_dict)
        exist.raw.json = exist_str
        self.mox.StubOutWithMock(kombu.pools, 'producers')
        self.mox.StubOutWithMock(kombu.common, 'maybe_declare')
        producer = self.mox.CreateMockAnything()
        producer.channel = self.mox.CreateMockAnything()
        kombu.pools.producers[connection].AndReturn(producer)
        producer.acquire(block=True).AndReturn(producer)
        producer.__enter__().AndReturn(producer)
        kombu.common.maybe_declare(exchange, producer.channel)
        # The outgoing notification gets a fresh message id from uuid4().
        self.mox.StubOutWithMock(uuid, 'uuid4')
        uuid.uuid4().AndReturn('some_other_uuid')
        message = {'event_type': 'image.exists.verified.old',
                   'message_id': 'some_other_uuid',
                   'original_message_id': 'some_uuid'}
        # exist_dict[0] is the original routing key ('monitor.info').
        producer.publish(message, exist_dict[0])
        producer.__exit__(None, None, None)
        self.mox.ReplayAll()
        self.glance_verifier.send_verified_notification(exist, exchange,
                                                        connection)
        self.mox.VerifyAll()

View File

@ -18,12 +18,11 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import unittest
from stacktach import image_type
from tests.unit import StacktachBaseTestCase
class ImageTypeTestCase(unittest.TestCase):
class ImageTypeTestCase(StacktachBaseTestCase):
# Abstractions
def _test_get_numeric_code(self, image, os_type, os_distro, expected,

121
tests/unit/test_models.py Normal file
View File

@ -0,0 +1,121 @@
# Copyright (c) 2013 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from datetime import datetime
import unittest
import mox
from stacktach.models import RawData, GlanceRawData, GenericRawData, ImageDeletes, InstanceExists, ImageExists
from tests.unit.utils import IMAGE_UUID_1
from stacktach import datetime_to_decimal as dt, models
from stacktach.models import RawData, GlanceRawData, GenericRawData
from tests.unit import StacktachBaseTestCase
class ModelsTestCase(StacktachBaseTestCase):
    """Each raw-data model must report its own class name via get_name()."""

    def test_get_name_for_rawdata(self):
        """RawData.get_name() returns the literal class name."""
        name = RawData.get_name()
        self.assertEquals('RawData', name)

    def test_get_name_for_glancerawdata(self):
        """GlanceRawData.get_name() returns the literal class name."""
        name = GlanceRawData.get_name()
        self.assertEquals('GlanceRawData', name)

    def test_get_name_for_genericrawdata(self):
        """GenericRawData.get_name() returns the literal class name."""
        name = GenericRawData.get_name()
        self.assertEquals('GenericRawData', name)
class ImageDeletesTestCase(StacktachBaseTestCase):
    """Tests for ImageDeletes.find().

    Inherits StacktachBaseTestCase for consistency with the other test
    cases in this module (e.g. ModelsTestCase).  The queryset filter is
    stubbed with mox, so no database is touched.
    """
    def setUp(self):
        self.mox = mox.Mox()

    def tearDown(self):
        self.mox.UnsetStubs()

    def test_find_delete_should_return_delete_issued_before_given_time(self):
        """find() with a max time filters on uuid and deleted_at <= max."""
        delete = self.mox.CreateMockAnything()
        deleted_max = datetime.utcnow()
        self.mox.StubOutWithMock(ImageDeletes.objects, 'filter')
        ImageDeletes.objects.filter(
            uuid=IMAGE_UUID_1,
            deleted_at__lte=dt.dt_to_decimal(deleted_max)).AndReturn(delete)
        self.mox.ReplayAll()
        self.assertEquals(ImageDeletes.find(
            IMAGE_UUID_1, deleted_max), delete)
        self.mox.VerifyAll()

    def test_find_delete_should_return_delete_with_the_given_uuid(self):
        """find() with no max time filters on uuid only."""
        delete = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(ImageDeletes.objects, 'filter')
        ImageDeletes.objects.filter(uuid=IMAGE_UUID_1).AndReturn(delete)
        self.mox.ReplayAll()
        self.assertEquals(ImageDeletes.find(IMAGE_UUID_1, None), delete)
        self.mox.VerifyAll()
class ImageExistsTestCase(StacktachBaseTestCase):
    """Tests for ImageExists.find().

    Inherits StacktachBaseTestCase for consistency with the other test
    cases in this module; the queryset chain is stubbed with mox.
    """
    def setUp(self):
        self.mox = mox.Mox()

    def tearDown(self):
        self.mox.UnsetStubs()

    def test_find_should_return_records_with_date_and_status_in_audit_period(self):
        """find() filters by audit_period_ending/status and orders by id."""
        end_max = datetime.utcnow()
        status = 'pending'
        unordered_results = self.mox.CreateMockAnything()
        expected_results = [1, 2]
        related_results = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(ImageExists.objects, 'select_related')
        ImageExists.objects.select_related().AndReturn(related_results)
        related_results.filter(audit_period_ending__lte=dt.dt_to_decimal(
            end_max), status=status).AndReturn(unordered_results)
        unordered_results.order_by('id').AndReturn(expected_results)
        self.mox.ReplayAll()
        results = ImageExists.find(end_max, status)
        self.mox.VerifyAll()
        self.assertEqual(results, [1, 2])
class InstanceExistsTestCase(StacktachBaseTestCase):
    """Tests for InstanceExists.find().

    Inherits StacktachBaseTestCase for consistency with the other test
    cases in this module; the queryset chain is stubbed with mox.
    """
    def setUp(self):
        self.mox = mox.Mox()

    def tearDown(self):
        self.mox.UnsetStubs()

    def test_find_should_return_records_with_date_and_status_in_audit_period(self):
        """find() filters by audit_period_ending/status and orders by id."""
        end_max = datetime.utcnow()
        status = 'pending'
        unordered_results = self.mox.CreateMockAnything()
        expected_results = [1, 2]
        related_results = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(InstanceExists.objects, 'select_related')
        InstanceExists.objects.select_related().AndReturn(related_results)
        related_results.filter(audit_period_ending__lte=dt.dt_to_decimal(
            end_max), status=status).AndReturn(unordered_results)
        unordered_results.order_by('id').AndReturn(expected_results)
        self.mox.ReplayAll()
        results = InstanceExists.find(end_max, status)
        self.mox.VerifyAll()
        self.assertEqual(results, [1, 2])

View File

@ -18,163 +18,487 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from decimal import Decimal
import unittest
import json
import mox
from stacktach import notification
from stacktach import utils
from stacktach.notification import Notification
from tests.unit.utils import REQUEST_ID_1, TENANT_ID_1, INSTANCE_ID_1
from stacktach.notification import NovaNotification
from stacktach.notification import GlanceNotification
from stacktach import db
from stacktach import image_type
from tests.unit import StacktachBaseTestCase
from tests.unit.utils import REQUEST_ID_1
from tests.unit.utils import DECIMAL_DUMMY_TIME
from tests.unit.utils import DUMMY_TIME
from tests.unit.utils import TIMESTAMP_1
from tests.unit.utils import TENANT_ID_1
from tests.unit.utils import INSTANCE_ID_1
from tests.unit.utils import MESSAGE_ID_1
class NotificationTestCase(unittest.TestCase):
class NovaNotificationTestCase(StacktachBaseTestCase):
def test_rawdata_kwargs(self):
message = {
'event_type': 'compute.instance.create.start',
'publisher_id': 'compute.cpu1-n01.example.com',
def setUp(self):
self.mox = mox.Mox()
def tearDown(self):
self.mox.UnsetStubs()
def test_factory_should_return_nova_notification_for_nova_exchange(
self):
body = {}
deployment = "1"
json = "{}"
routing_key = "monitor.info"
self.mox.StubOutWithMock(notification, 'NovaNotification')
notification.NovaNotification(body, deployment, routing_key, json)
self.mox.ReplayAll()
notification.notification_factory(body, deployment, routing_key, json,
'nova')
self.mox.VerifyAll()
def test_factory_should_return_glance_notification_for_glance_exchange(
self):
body = {}
deployment = "1"
json = "{}"
routing_key = "monitor_glance.info"
self.mox.StubOutWithMock(notification, 'GlanceNotification')
notification.GlanceNotification(body, deployment, routing_key, json)
self.mox.ReplayAll()
notification.notification_factory(body, deployment, routing_key, json,
'glance')
self.mox.VerifyAll()
def test_factory_should_return_notification_for_unknown_exchange(
self):
body = {}
deployment = "1"
json = "{}"
routing_key = "unknown.info"
self.mox.StubOutWithMock(notification, 'Notification')
notification.Notification(body, deployment, routing_key, json)
self.mox.ReplayAll()
notification.notification_factory(body, deployment, routing_key, json,
'unknown_exchange')
self.mox.VerifyAll()
def test_save_should_persist_nova_rawdata_to_database(self):
body = {
"event_type": "compute.instance.exists",
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
'timestamp': '2013-06-12 06:30:52.790476',
'payload': {
"timestamp": TIMESTAMP_1,
"publisher_id": "compute.global.preprod-ord.ohthree.com",
"payload": {
'instance_id': INSTANCE_ID_1,
'state': 'active',
'old_state': 'building',
'old_task_state': 'build',
"new_task_state": 'rebuild_spawning',
'image_meta': {
'image_type': 'base',
'org.openstack__1__architecture': 'x64',
'org.openstack__1__os_distro': 'com.microsoft.server',
'org.openstack__1__os_version': '2008.2',
'com.rackspace__1__options': '36'
}
"status": "saving",
"container_format": "ovf",
"properties": {
"image_type": "snapshot",
},
"tenant": "5877054",
"old_state": 'old_state',
"old_task_state": 'old_task',
"image_meta": {
"org.openstack__1__architecture": 'os_arch',
"org.openstack__1__os_distro": 'os_distro',
"org.openstack__1__os_version": 'os_version',
"com.rackspace__1__options": 'rax_opt',
},
"state": 'state',
"new_task_state": 'task'
}
}
kwargs = Notification(message).rawdata_kwargs('1', 'monitor.info', 'json')
deployment = "1"
routing_key = "monitor.info"
json_body = json.dumps([routing_key, body])
raw = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(db, 'create_nova_rawdata')
db.create_nova_rawdata(
deployment="1",
tenant=TENANT_ID_1,
json=json_body,
routing_key=routing_key,
when=utils.str_time_to_unix(TIMESTAMP_1),
publisher="compute.global.preprod-ord.ohthree.com",
event="compute.instance.exists",
service="compute",
host="global.preprod-ord.ohthree.com",
instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1,
image_type=image_type.get_numeric_code(body['payload']),
old_state='old_state',
old_task='old_task',
os_architecture='os_arch',
os_distro='os_distro',
os_version='os_version',
rax_options='rax_opt',
state='state',
task='task').AndReturn(raw)
self.assertEquals(kwargs['host'], 'cpu1-n01.example.com')
self.assertEquals(kwargs['deployment'], '1')
self.assertEquals(kwargs['routing_key'], 'monitor.info')
self.assertEquals(kwargs['tenant'], TENANT_ID_1)
self.assertEquals(kwargs['json'], 'json')
self.assertEquals(kwargs['state'], 'active')
self.assertEquals(kwargs['old_state'], 'building')
self.assertEquals(kwargs['old_task'], 'build')
self.assertEquals(kwargs['task'], 'rebuild_spawning')
self.assertEquals(kwargs['image_type'], 1)
self.assertEquals(kwargs['when'], Decimal('1371018652.790476'))
self.assertEquals(kwargs['publisher'], 'compute.cpu1-n01.example.com')
self.assertEquals(kwargs['event'], 'compute.instance.create.start')
self.assertEquals(kwargs['request_id'], REQUEST_ID_1)
self.mox.ReplayAll()
def test_rawdata_kwargs_missing_image_meta(self):
message = {
'event_type': 'compute.instance.create.start',
'publisher_id': 'compute.cpu1-n01.example.com',
notification = NovaNotification(body, deployment, routing_key, json_body)
self.assertEquals(notification.save(), raw)
self.mox.VerifyAll()
class GlanceNotificationTestCase(StacktachBaseTestCase):
    """Tests GlanceNotification's save/save_exists/save_usage/save_delete.

    All db.* persistence helpers are stubbed with mox, so each test
    asserts only that the notification maps its incoming body onto the
    expected keyword arguments.
    """
    def setUp(self):
        # Fresh mox instance per test.
        self.mox = mox.Mox()
    def tearDown(self):
        self.mox.UnsetStubs()
    def test_save_should_persist_glance_rawdata_to_database(self):
        """save() splits publisher_id into service/host and stores payload
        fields (owner, status, uuid, instance) as glance rawdata."""
        body = {
            "event_type": "image.upload",
            "timestamp": "2013-06-20 17:31:57.939614",
            "publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
            "payload": {
                "status": "saving",
                "properties": {
                    "image_type": "snapshot",
                    "instance_uuid": INSTANCE_ID_1,
                },
                "owner": TENANT_ID_1,
                "id": "2df2ccf6-bc1b-4853-aab0-25fda346b3bb",
            }
        }
        deployment = "1"
        routing_key = "glance_monitor.info"
        json_body = json.dumps([routing_key, body])
        raw = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(db, 'create_glance_rawdata')
        db.create_glance_rawdata(
            deployment="1",
            owner=TENANT_ID_1,
            json=json_body,
            routing_key=routing_key,
            when=utils.str_time_to_unix("2013-06-20 17:31:57.939614"),
            publisher="glance-api01-r2961.global.preprod-ord.ohthree.com",
            event="image.upload",
            service="glance-api01-r2961",
            host="global.preprod-ord.ohthree.com",
            instance=INSTANCE_ID_1,
            request_id='',
            image_type=0,
            status="saving",
            uuid="2df2ccf6-bc1b-4853-aab0-25fda346b3bb").AndReturn(raw)
        self.mox.ReplayAll()
        notification = GlanceNotification(body, deployment, routing_key,
                                          json_body)
        self.assertEquals(notification.save(), raw)
        self.mox.VerifyAll()
    def test_save_should_persist_glance_rawdata_erro_payload_to_database(self):
        """save() with a bare string (error) payload stores None for every
        payload-derived field."""
        body = {
            "event_type": "image.upload",
            "timestamp": "2013-06-20 17:31:57.939614",
            "publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
            "payload": "error_message"
        }
        deployment = "1"
        routing_key = "glance_monitor.error"
        json_body = json.dumps([routing_key, body])
        raw = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(db, 'create_glance_rawdata')
        db.create_glance_rawdata(
            deployment="1",
            owner=None,
            json=json_body,
            routing_key=routing_key,
            when=utils.str_time_to_unix("2013-06-20 17:31:57.939614"),
            publisher="glance-api01-r2961.global.preprod-ord.ohthree.com",
            event="image.upload",
            service="glance-api01-r2961",
            host="global.preprod-ord.ohthree.com",
            instance=None,
            request_id='',
            image_type=None,
            status=None,
            uuid=None).AndReturn(raw)
        self.mox.ReplayAll()
        notification = GlanceNotification(body, deployment, routing_key,
                                          json_body)
        self.assertEquals(notification.save(), raw)
        self.mox.VerifyAll()
    def test_save_image_exists(self):
        """save_exists() with no prior usage record stores usage=None."""
        raw = self.mox.CreateMockAnything()
        audit_period_beginning = "2013-05-20 17:31:57.939614"
        audit_period_ending = "2013-06-20 17:31:57.939614"
        size = 123
        uuid = "2df2ccf6-bc1b-4853-aab0-25fda346b3bb"
        body = {
            "event_type": "image.upload",
            "timestamp": "2013-06-20 18:31:57.939614",
            "publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
            "payload": {
                "created_at": str(DUMMY_TIME),
                "status": "saving",
                "audit_period_beginning": audit_period_beginning,
                "audit_period_ending": audit_period_ending,
                "properties": {
                    "image_type": "snapshot",
                    "instance_uuid": INSTANCE_ID_1,
                },
                "size": size,
                "owner": TENANT_ID_1,
                "id": uuid
            }
        }
        deployment = "1"
        routing_key = "glance_monitor.info"
        json_body = json.dumps([routing_key, body])
        self.mox.StubOutWithMock(db, 'create_image_exists')
        self.mox.StubOutWithMock(db, 'get_image_usage')
        db.get_image_usage(uuid=uuid).AndReturn(None)
        db.create_image_exists(
            created_at=utils.str_time_to_unix(str(DUMMY_TIME)),
            owner=TENANT_ID_1,
            raw=raw,
            audit_period_beginning=utils.str_time_to_unix(audit_period_beginning),
            audit_period_ending=utils.str_time_to_unix(audit_period_ending),
            size=size,
            uuid=uuid,
            usage=None).AndReturn(raw)
        self.mox.ReplayAll()
        notification = GlanceNotification(body, deployment, routing_key,
                                          json_body)
        notification.save_exists(raw)
        self.mox.VerifyAll()
    def test_save_image_exists_with_delete_not_none(self):
        """save_exists() with a deleted_at in the payload looks up the
        delete record and stores delete/deleted_at alongside the exists."""
        raw = self.mox.CreateMockAnything()
        delete = self.mox.CreateMockAnything()
        audit_period_beginning = "2013-05-20 17:31:57.939614"
        audit_period_ending = "2013-06-20 17:31:57.939614"
        size = 123
        uuid = "2df2ccf6-bc1b-4853-aab0-25fda346b3bb"
        deleted_at = "2013-06-20 14:31:57.939614"
        body = {
            "event_type": "image.upload",
            "timestamp": "2013-06-20 18:31:57.939614",
            "publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
            "payload": {
                "created_at": str(DUMMY_TIME),
                "status": "saving",
                "audit_period_beginning": audit_period_beginning,
                "audit_period_ending": audit_period_ending,
                "properties": {
                    "image_type": "snapshot",
                    "instance_uuid": INSTANCE_ID_1,
                },
                "deleted_at": deleted_at,
                "size": size,
                "owner": TENANT_ID_1,
                "id": "2df2ccf6-bc1b-4853-aab0-25fda346b3bb",
            }
        }
        deployment = "1"
        routing_key = "glance_monitor.info"
        json_body = json.dumps([routing_key, body])
        self.mox.StubOutWithMock(db, 'create_image_exists')
        self.mox.StubOutWithMock(db, 'get_image_usage')
        self.mox.StubOutWithMock(db, 'get_image_delete')
        db.get_image_usage(uuid=uuid).AndReturn(None)
        db.get_image_delete(uuid=uuid).AndReturn(delete)
        db.create_image_exists(
            created_at=utils.str_time_to_unix(str(DUMMY_TIME)),
            owner=TENANT_ID_1,
            raw=raw,
            audit_period_beginning=utils.str_time_to_unix(audit_period_beginning),
            audit_period_ending=utils.str_time_to_unix(audit_period_ending),
            size=size,
            uuid=uuid,
            usage=None,
            delete=delete,
            deleted_at=utils.str_time_to_unix(str(deleted_at))).AndReturn(raw)
        self.mox.ReplayAll()
        notification = GlanceNotification(body, deployment, routing_key,
                                          json_body)
        notification.save_exists(raw)
        self.mox.VerifyAll()
    def test_save_image_exists_with_usage_not_none(self):
        """save_exists() links an existing usage record when one is found."""
        raw = self.mox.CreateMockAnything()
        usage = self.mox.CreateMockAnything()
        audit_period_beginning = "2013-05-20 17:31:57.939614"
        audit_period_ending = "2013-06-20 17:31:57.939614"
        size = 123
        uuid = "2df2ccf6-bc1b-4853-aab0-25fda346b3bb"
        body = {
            "event_type": "image.upload",
            "timestamp": "2013-06-20 18:31:57.939614",
            "publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
            "payload": {
                "created_at": str(DUMMY_TIME),
                "status": "saving",
                "audit_period_beginning": audit_period_beginning,
                "audit_period_ending": audit_period_ending,
                "properties": {
                    "image_type": "snapshot",
                    "instance_uuid": INSTANCE_ID_1,
                },
                "size": size,
                "owner": TENANT_ID_1,
                "id": "2df2ccf6-bc1b-4853-aab0-25fda346b3bb",
            }
        }
        deployment = "1"
        routing_key = "glance_monitor.info"
        json_body = json.dumps([routing_key, body])
        self.mox.StubOutWithMock(db, 'create_image_exists')
        self.mox.StubOutWithMock(db, 'get_image_usage')
        # NOTE(review): get_image_delete is stubbed but no call is
        # recorded -- presumably no deleted_at means no lookup; VerifyAll
        # would fail if it were called.
        self.mox.StubOutWithMock(db, 'get_image_delete')
        db.get_image_usage(uuid=uuid).AndReturn(usage)
        db.create_image_exists(
            created_at=utils.str_time_to_unix(str(DUMMY_TIME)),
            owner=TENANT_ID_1,
            raw=raw,
            audit_period_beginning=utils.str_time_to_unix(audit_period_beginning),
            audit_period_ending=utils.str_time_to_unix(audit_period_ending),
            size=size,
            uuid=uuid,
            usage=usage).AndReturn(raw)
        self.mox.ReplayAll()
        notification = GlanceNotification(body, deployment, routing_key,
                                          json_body)
        notification.save_exists(raw)
        self.mox.VerifyAll()
    def test_save_usage_should_persist_image_usage(self):
        """save_usage() maps payload fields onto create_image_usage."""
        raw = self.mox.CreateMockAnything()
        size = 123
        uuid = "2df2ccf6-bc1b-4853-aab0-25fda346b3bb"
        body = {
            "event_type": "image.upload",
            "timestamp": "2013-06-20 18:31:57.939614",
            "publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
            "payload": {
                "created_at": str(DUMMY_TIME),
                "size": size,
                "owner": TENANT_ID_1,
                "id": "2df2ccf6-bc1b-4853-aab0-25fda346b3bb",
            }
        }
        deployment = "1"
        routing_key = "glance_monitor.info"
        json_body = json.dumps([routing_key, body])
        self.mox.StubOutWithMock(db, 'create_image_usage')
        db.create_image_usage(
            created_at=utils.str_time_to_unix(str(DUMMY_TIME)),
            owner=TENANT_ID_1,
            last_raw=raw,
            size=size,
            uuid=uuid).AndReturn(raw)
        self.mox.ReplayAll()
        notification = GlanceNotification(body, deployment, routing_key,
                                          json_body)
        notification.save_usage(raw)
        self.mox.VerifyAll()
    def test_save_delete_should_persist_image_delete(self):
        """save_delete() maps payload id/deleted_at onto create_image_delete."""
        raw = self.mox.CreateMockAnything()
        uuid = "2df2ccf6-bc1b-4853-aab0-25fda346b3bb"
        deleted_at = "2013-06-20 14:31:57.939614"
        body = {
            "event_type": "image.delete",
            "publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
            "payload": {
                "id": "2df2ccf6-bc1b-4853-aab0-25fda346b3bb",
                "deleted_at": deleted_at
            }
        }
        deployment = "1"
        routing_key = "glance_monitor.info"
        json_body = json.dumps([routing_key, body])
        self.mox.StubOutWithMock(db, 'create_image_delete')
        db.create_image_delete(
            raw=raw,
            uuid=uuid,
            deleted_at=utils.str_time_to_unix(deleted_at)).AndReturn(raw)
        self.mox.ReplayAll()
        notification = GlanceNotification(body, deployment, routing_key,
                                          json_body)
        notification.save_delete(raw)
        self.mox.VerifyAll()
class NotificationTestCase(StacktachBaseTestCase):
    def setUp(self):
        # Fresh mox instance per test.
        self.mox = mox.Mox()
    def tearDown(self):
        # Restore everything StubOutWithMock replaced.
        self.mox.UnsetStubs()
def test_save_should_persist_generic_rawdata_to_database(self):
body = {
"event_type": "image.upload",
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
'timestamp': '2013-06-12 06:30:52.790476',
'payload': {
"timestamp": TIMESTAMP_1,
"publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
"message_id": MESSAGE_ID_1,
"payload": {
'instance_id': INSTANCE_ID_1,
'state': 'active',
'old_state': 'building',
'old_task_state': 'build',
"new_task_state": 'rebuild_spawning',
'image_meta': {
'image_type': 'base',
}
"status": "saving",
"container_format": "ovf",
"tenant": "5877054"
}
}
kwargs = Notification(message).rawdata_kwargs('1', 'monitor.info', 'json')
deployment = "1"
routing_key = "generic_monitor.info"
json_body = json.dumps([routing_key, body])
raw = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(db, 'create_generic_rawdata')
db.create_generic_rawdata(
deployment="1",
tenant=TENANT_ID_1,
json=json_body,
routing_key=routing_key,
when=utils.str_time_to_unix(TIMESTAMP_1),
publisher="glance-api01-r2961.global.preprod-ord.ohthree.com",
event="image.upload",
service="glance-api01-r2961",
host="global.preprod-ord.ohthree.com",
instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1,
message_id=MESSAGE_ID_1).AndReturn(raw)
self.assertEquals(kwargs['host'], 'cpu1-n01.example.com')
self.assertEquals(kwargs['deployment'], '1')
self.assertEquals(kwargs['routing_key'], 'monitor.info')
self.assertEquals(kwargs['tenant'], TENANT_ID_1)
self.assertEquals(kwargs['json'], 'json')
self.assertEquals(kwargs['state'], 'active')
self.assertEquals(kwargs['old_state'], 'building')
self.assertEquals(kwargs['old_task'], 'build')
self.assertEquals(kwargs['task'], 'rebuild_spawning')
self.assertEquals(kwargs['image_type'], 1)
self.assertEquals(kwargs['when'], Decimal('1371018652.790476'))
self.assertEquals(kwargs['publisher'], 'compute.cpu1-n01.example.com')
self.assertEquals(kwargs['event'], 'compute.instance.create.start')
self.assertEquals(kwargs['request_id'], REQUEST_ID_1)
self.mox.ReplayAll()
def test_rawdata_kwargs_for_message_with_no_host(self):
message = {
'event_type': 'compute.instance.create.start',
'publisher_id': 'compute',
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
'timestamp': '2013-06-12 06:30:52.790476',
'payload': {
'instance_id': INSTANCE_ID_1,
'state': 'active',
'old_state': 'building',
'old_task_state': 'build',
"new_task_state": 'rebuild_spawning',
'image_meta': {
'image_type': 'base',
'org.openstack__1__architecture': 'x64',
'org.openstack__1__os_distro': 'com.microsoft.server',
'org.openstack__1__os_version': '2008.2',
'com.rackspace__1__options': '36'
}
}
}
kwargs = Notification(message).rawdata_kwargs('1', 'monitor.info', 'json')
self.assertEquals(kwargs['host'], None)
self.assertEquals(kwargs['deployment'], '1')
self.assertEquals(kwargs['routing_key'], 'monitor.info')
self.assertEquals(kwargs['tenant'], TENANT_ID_1)
self.assertEquals(kwargs['json'], 'json')
self.assertEquals(kwargs['state'], 'active')
self.assertEquals(kwargs['old_state'], 'building')
self.assertEquals(kwargs['old_task'], 'build')
self.assertEquals(kwargs['task'], 'rebuild_spawning')
self.assertEquals(kwargs['image_type'], 1)
self.assertEquals(kwargs['when'], Decimal('1371018652.790476'))
self.assertEquals(kwargs['publisher'], 'compute')
self.assertEquals(kwargs['event'], 'compute.instance.create.start')
self.assertEquals(kwargs['request_id'], REQUEST_ID_1)
def test_rawdata_kwargs_for_message_with_exception(self):
message = {
'event_type': 'compute.instance.create.start',
'publisher_id': 'compute.cpu1-n01.example.com',
'_context_request_id': REQUEST_ID_1,
'_context_project_id': TENANT_ID_1,
'timestamp': '2013-06-12 06:30:52.790476',
'payload': {
'exception': {'kwargs':{'uuid': INSTANCE_ID_1}},
'instance_id': INSTANCE_ID_1,
'state': 'active',
'old_state': 'building',
'old_task_state': 'build',
"new_task_state": 'rebuild_spawning',
'image_meta': {
'image_type': 'base',
'org.openstack__1__architecture': 'x64',
'org.openstack__1__os_distro': 'com.microsoft.server',
'org.openstack__1__os_version': '2008.2',
'com.rackspace__1__options': '36'
}
}
}
kwargs = Notification(message).rawdata_kwargs('1', 'monitor.info', 'json')
self.assertEquals(kwargs['host'], 'cpu1-n01.example.com')
self.assertEquals(kwargs['deployment'], '1')
self.assertEquals(kwargs['routing_key'], 'monitor.info')
self.assertEquals(kwargs['tenant'], TENANT_ID_1)
self.assertEquals(kwargs['json'], 'json')
self.assertEquals(kwargs['state'], 'active')
self.assertEquals(kwargs['old_state'], 'building')
self.assertEquals(kwargs['old_task'], 'build')
self.assertEquals(kwargs['task'], 'rebuild_spawning')
self.assertEquals(kwargs['image_type'], 1)
self.assertEquals(kwargs['when'], Decimal('1371018652.790476'))
self.assertEquals(kwargs['publisher'], 'compute.cpu1-n01.example.com')
self.assertEquals(kwargs['event'], 'compute.instance.create.start')
self.assertEquals(kwargs['request_id'], REQUEST_ID_1)
notification = Notification(body, deployment, routing_key, json_body)
self.assertEquals(notification.save(), raw)
self.mox.VerifyAll()

View File

@ -0,0 +1,839 @@
# Copyright (c) 2013 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import datetime
import decimal
import json
import uuid
import kombu.common
import kombu.entity
import kombu.pools
import mox
from stacktach import datetime_to_decimal as dt
from stacktach import models
from tests.unit import StacktachBaseTestCase
from utils import make_verifier_config
from utils import INSTANCE_ID_1
from utils import RAX_OPTIONS_1
from utils import RAX_OPTIONS_2
from utils import OS_DISTRO_1
from utils import OS_DISTRO_2
from utils import OS_ARCH_1
from utils import OS_ARCH_2
from utils import OS_VERSION_1
from utils import OS_VERSION_2
from utils import TENANT_ID_1
from utils import TENANT_ID_2
from utils import INSTANCE_TYPE_ID_1
from verifier import nova_verifier
from verifier import AmbiguousResults
from verifier import FieldMismatch
from verifier import NotFound
from verifier import VerificationException
class NovaVerifierTestCase(StacktachBaseTestCase):
    def setUp(self):
        """Replace every stacktach model manager with a mock.

        Each model class is stubbed and given a mock ``objects`` manager
        so the verifier tests never touch the database; the verifier
        itself is then built by _setup_verifier().
        """
        self.mox = mox.Mox()
        self.mox.StubOutWithMock(models, 'RawData', use_mock_anything=True)
        models.RawData.objects = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(models, 'Deployment', use_mock_anything=True)
        models.Deployment.objects = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(models, 'Lifecycle', use_mock_anything=True)
        models.Lifecycle.objects = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(models, 'Timing', use_mock_anything=True)
        models.Timing.objects = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(models, 'RequestTracker',
                                 use_mock_anything=True)
        models.RequestTracker.objects = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(models, 'InstanceUsage',
                                 use_mock_anything=True)
        models.InstanceUsage.objects = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(models, 'InstanceDeletes',
                                 use_mock_anything=True)
        models.InstanceDeletes.objects = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(models, 'InstanceReconcile',
                                 use_mock_anything=True)
        models.InstanceReconcile.objects = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(models, 'InstanceExists',
                                 use_mock_anything=True)
        models.InstanceExists.objects = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(models, 'JsonReport', use_mock_anything=True)
        models.JsonReport.objects = self.mox.CreateMockAnything()
        self._setup_verifier()
    def _setup_verifier(self):
        """Build the NovaVerifier under test with mocked pool/reconciler."""
        self.pool = self.mox.CreateMockAnything()
        self.reconciler = self.mox.CreateMockAnything()
        # make_verifier_config(False): reconciliation disabled by default.
        config = make_verifier_config(False)
        self.verifier = nova_verifier.NovaVerifier(config,
            pool=self.pool, reconciler=self.reconciler)
    def tearDown(self):
        """Unstub the models and drop references created in setUp."""
        self.mox.UnsetStubs()
        self.verifier = None
        self.pool = None
        self.verifier_notif = None
def test_verify_for_launch(self):
exist = self.mox.CreateMockAnything()
exist.launched_at = decimal.Decimal('1.1')
exist.instance_type_id = INSTANCE_TYPE_ID_1
exist.tenant = TENANT_ID_1
exist.usage = self.mox.CreateMockAnything()
exist.usage.launched_at = decimal.Decimal('1.1')
exist.usage.instance_type_id = INSTANCE_TYPE_ID_1
exist.usage.tenant = TENANT_ID_1
self.mox.ReplayAll()
nova_verifier._verify_for_launch(exist)
self.mox.VerifyAll()
def test_verify_for_launch_launched_at_in_range(self):
exist = self.mox.CreateMockAnything()
exist.usage = self.mox.CreateMockAnything()
exist.launched_at = decimal.Decimal('1.0')
exist.instance_type_id = 2
exist.usage.launched_at = decimal.Decimal('1.4')
exist.usage.instance_type_id = 2
self.mox.ReplayAll()
result = nova_verifier._verify_for_launch(exist)
self.assertIsNone(result)
self.mox.VerifyAll()
def test_verify_for_launch_launched_at_missmatch(self):
exist = self.mox.CreateMockAnything()
exist.usage = self.mox.CreateMockAnything()
exist.launched_at = decimal.Decimal('1.1')
exist.instance_type_id = 2
exist.usage.launched_at = decimal.Decimal('2.1')
exist.usage.instance_type_id = 2
self.mox.ReplayAll()
try:
nova_verifier._verify_for_launch(exist)
self.fail()
except FieldMismatch, fm:
self.assertEqual(fm.field_name, 'launched_at')
self.assertEqual(fm.expected, decimal.Decimal('1.1'))
self.assertEqual(fm.actual, decimal.Decimal('2.1'))
self.mox.VerifyAll()
def test_verify_for_launch_instance_type_id_missmatch(self):
exist = self.mox.CreateMockAnything()
exist.usage = self.mox.CreateMockAnything()
exist.launched_at = decimal.Decimal('1.1')
exist.instance_type_id = 2
exist.usage.launched_at = decimal.Decimal('1.1')
exist.usage.instance_type_id = 3
self.mox.ReplayAll()
try:
nova_verifier._verify_for_launch(exist)
self.fail()
except FieldMismatch, fm:
self.assertEqual(fm.field_name, 'instance_type_id')
self.assertEqual(fm.expected, 2)
self.assertEqual(fm.actual, 3)
self.mox.VerifyAll()
def test_verify_for_launch_tenant_id_mismatch(self):
exist = self.mox.CreateMockAnything()
exist.tenant = TENANT_ID_1
exist.usage = self.mox.CreateMockAnything()
exist.usage.tenant = TENANT_ID_2
self.mox.ReplayAll()
with self.assertRaises(FieldMismatch) as cm:
nova_verifier._verify_for_launch(exist)
exception = cm.exception
self.assertEqual(exception.field_name, 'tenant')
self.assertEqual(exception.expected, TENANT_ID_1)
self.assertEqual(exception.actual, TENANT_ID_2)
self.mox.VerifyAll()
def test_verify_for_launch_rax_options_mismatch(self):
exist = self.mox.CreateMockAnything()
exist.rax_options = RAX_OPTIONS_1
exist.usage = self.mox.CreateMockAnything()
exist.usage.rax_options = RAX_OPTIONS_2
self.mox.ReplayAll()
with self.assertRaises(FieldMismatch) as cm:
nova_verifier._verify_for_launch(exist)
exception = cm.exception
self.assertEqual(exception.field_name, 'rax_options')
self.assertEqual(exception.expected, RAX_OPTIONS_1)
self.assertEqual(exception.actual, RAX_OPTIONS_2)
self.mox.VerifyAll()
def test_verify_for_launch_os_distro_mismatch(self):
    """Differing os_distro between exist and usage raises FieldMismatch."""
    exist = self.mox.CreateMockAnything()
    exist.usage = self.mox.CreateMockAnything()
    exist.os_distro = OS_DISTRO_1
    exist.usage.os_distro = OS_DISTRO_2
    self.mox.ReplayAll()
    with self.assertRaises(FieldMismatch) as cm:
        nova_verifier._verify_for_launch(exist)
    mismatch = cm.exception
    self.assertEqual(mismatch.field_name, 'os_distro')
    self.assertEqual(mismatch.expected, OS_DISTRO_1)
    self.assertEqual(mismatch.actual, OS_DISTRO_2)
    self.mox.VerifyAll()
def test_verify_for_launch_os_architecture_mismatch(self):
    """Differing os_architecture between exist and usage raises FieldMismatch."""
    exist = self.mox.CreateMockAnything()
    exist.usage = self.mox.CreateMockAnything()
    exist.os_architecture = OS_ARCH_1
    exist.usage.os_architecture = OS_ARCH_2
    self.mox.ReplayAll()
    with self.assertRaises(FieldMismatch) as cm:
        nova_verifier._verify_for_launch(exist)
    mismatch = cm.exception
    self.assertEqual(mismatch.field_name, 'os_architecture')
    self.assertEqual(mismatch.expected, OS_ARCH_1)
    self.assertEqual(mismatch.actual, OS_ARCH_2)
    self.mox.VerifyAll()
def test_verify_for_launch_os_version_mismatch(self):
    """Differing os_version between exist and usage raises FieldMismatch."""
    exist = self.mox.CreateMockAnything()
    exist.usage = self.mox.CreateMockAnything()
    exist.os_version = OS_VERSION_1
    exist.usage.os_version = OS_VERSION_2
    self.mox.ReplayAll()
    with self.assertRaises(FieldMismatch) as cm:
        nova_verifier._verify_for_launch(exist)
    mismatch = cm.exception
    self.assertEqual(mismatch.field_name, 'os_version')
    self.assertEqual(mismatch.expected, OS_VERSION_1)
    self.assertEqual(mismatch.actual, OS_VERSION_2)
    self.mox.VerifyAll()
def test_verify_for_launch_late_usage(self):
    """When exist.usage is absent, _verify_for_launch looks the usage up
    by instance (then narrowed by launched_at) and verifies against it.
    """
    exist = self.mox.CreateMockAnything()
    exist.usage = None
    exist.instance = INSTANCE_ID_1
    launched_at = decimal.Decimal('1.1')
    exist.launched_at = launched_at
    exist.instance_type_id = 2
    results = self.mox.CreateMockAnything()
    # Two usages match the instance alone, so the verifier narrows the
    # search by launched_at via InstanceUsage.find().
    models.InstanceUsage.objects.filter(instance=INSTANCE_ID_1)\
        .AndReturn(results)
    results.count().AndReturn(2)
    models.InstanceUsage.find(INSTANCE_ID_1, dt.dt_from_decimal(
        launched_at)).AndReturn(results)
    results.count().AndReturn(1)
    usage = self.mox.CreateMockAnything()
    results.__getitem__(0).AndReturn(usage)
    # Fields match the exist -> no exception expected below.
    usage.launched_at = decimal.Decimal('1.1')
    usage.instance_type_id = 2
    self.mox.ReplayAll()
    nova_verifier._verify_for_launch(exist)
    self.mox.VerifyAll()
def test_verify_for_launch_no_usage(self):
    """Zero InstanceUsage rows for the instance raises NotFound."""
    exist = self.mox.CreateMockAnything()
    exist.usage = None
    exist.instance = INSTANCE_ID_1
    exist.launched_at = decimal.Decimal('1.1')
    exist.instance_type_id = 2
    results = self.mox.CreateMockAnything()
    models.InstanceUsage.objects.filter(instance=INSTANCE_ID_1) \
        .AndReturn(results)
    results.count().AndReturn(0)
    self.mox.ReplayAll()
    # assertRaises context manager: consistent with sibling tests and
    # avoids the Python-2-only "except X, e" syntax.
    with self.assertRaises(NotFound) as cm:
        nova_verifier._verify_for_launch(exist)
    nf = cm.exception
    self.assertEqual(nf.object_type, 'InstanceUsage')
    self.assertEqual(nf.search_params, {'instance': INSTANCE_ID_1})
    self.mox.VerifyAll()
def test_verify_for_launch_late_ambiguous_usage(self):
    """Multiple usages matching instance+launched_at raise AmbiguousResults."""
    exist = self.mox.CreateMockAnything()
    exist.usage = None
    exist.instance = INSTANCE_ID_1
    launched_at = decimal.Decimal('1.1')
    exist.launched_at = launched_at
    exist.instance_type_id = 2
    results = self.mox.CreateMockAnything()
    models.InstanceUsage.objects.filter(
        instance=INSTANCE_ID_1).AndReturn(results)
    results.count().AndReturn(1)
    models.InstanceUsage.find(
        INSTANCE_ID_1, dt.dt_from_decimal(launched_at)).AndReturn(results)
    results.count().AndReturn(2)
    self.mox.ReplayAll()
    # assertRaises context manager: consistent with sibling tests and
    # avoids the Python-2-only "except X, e" syntax (the old handler also
    # misleadingly named the AmbiguousResults instance "nf").
    with self.assertRaises(AmbiguousResults) as cm:
        nova_verifier._verify_for_launch(exist)
    exception = cm.exception
    self.assertEqual(exception.object_type, 'InstanceUsage')
    search_params = {'instance': INSTANCE_ID_1,
                     'launched_at': decimal.Decimal('1.1')}
    self.assertEqual(exception.search_params, search_params)
    self.mox.VerifyAll()
def test_verify_for_delete(self):
    """Matching launched_at/deleted_at on exist and its delete passes."""
    launched = decimal.Decimal('1.1')
    deleted = decimal.Decimal('5.1')
    exist = self.mox.CreateMockAnything()
    exist.delete = self.mox.CreateMockAnything()
    exist.launched_at = launched
    exist.deleted_at = deleted
    exist.delete.launched_at = launched
    exist.delete.deleted_at = deleted
    self.mox.ReplayAll()
    nova_verifier._verify_for_delete(exist)
    self.mox.VerifyAll()
def test_verify_for_delete_found_delete(self):
    """When exist.delete is absent, the verifier looks the delete up by
    instance and launched_at and verifies against that record.
    """
    exist = self.mox.CreateMockAnything()
    exist.delete = None
    exist.instance = INSTANCE_ID_1
    exist.launched_at = decimal.Decimal('1.1')
    exist.deleted_at = decimal.Decimal('5.1')
    launched_at = decimal.Decimal('1.1')
    results = self.mox.CreateMockAnything()
    models.InstanceDeletes.find(INSTANCE_ID_1, dt.dt_from_decimal(
        launched_at)).AndReturn(results)
    results.count().AndReturn(1)
    delete = self.mox.CreateMockAnything()
    # Fields match the exist -> verification passes without raising.
    delete.launched_at = decimal.Decimal('1.1')
    delete.deleted_at = decimal.Decimal('5.1')
    results.__getitem__(0).AndReturn(delete)
    self.mox.ReplayAll()
    nova_verifier._verify_for_delete(exist)
    self.mox.VerifyAll()
def test_verify_for_delete_non_delete(self):
    """An exist without a delete passes when no InstanceDeletes rows fall
    inside its audit period.
    """
    launched_at = decimal.Decimal('1.1')
    deleted_at = decimal.Decimal('1.1')
    exist = self.mox.CreateMockAnything()
    exist.delete = None
    exist.instance = INSTANCE_ID_1
    exist.launched_at = launched_at
    exist.deleted_at = None
    # audit_period_ending bounds the delete search when deleted_at is None.
    exist.audit_period_ending = deleted_at
    results = self.mox.CreateMockAnything()
    models.InstanceDeletes.find(
        INSTANCE_ID_1, dt.dt_from_decimal(launched_at),
        dt.dt_from_decimal(deleted_at)).AndReturn(results)
    results.count().AndReturn(0)
    self.mox.ReplayAll()
    nova_verifier._verify_for_delete(exist)
    self.mox.VerifyAll()
def test_verify_for_delete_non_delete_found_deletes(self):
    """An exist without a delete must not have matching InstanceDeletes."""
    exist = self.mox.CreateMockAnything()
    exist.delete = None
    exist.instance = INSTANCE_ID_1
    launched_at = decimal.Decimal('1.1')
    deleted_at = decimal.Decimal('1.3')
    exist.launched_at = launched_at
    exist.deleted_at = None
    exist.audit_period_ending = deleted_at
    results = self.mox.CreateMockAnything()
    models.InstanceDeletes.find(
        INSTANCE_ID_1, dt.dt_from_decimal(launched_at),
        dt.dt_from_decimal(deleted_at)).AndReturn(results)
    results.count().AndReturn(1)
    self.mox.ReplayAll()
    # assertRaises context manager: consistent with sibling tests and
    # avoids the Python-2-only "except X, e" syntax.
    with self.assertRaises(VerificationException) as cm:
        nova_verifier._verify_for_delete(exist)
    msg = 'Found InstanceDeletes for non-delete exist'
    self.assertEqual(cm.exception.reason, msg)
    self.mox.VerifyAll()
def test_verify_for_delete_launched_at_mismatch(self):
    """launched_at differing between exist and delete raises FieldMismatch."""
    exist = self.mox.CreateMockAnything()
    exist.delete = self.mox.CreateMockAnything()
    exist.launched_at = decimal.Decimal('1.1')
    exist.deleted_at = decimal.Decimal('5.1')
    exist.delete.launched_at = decimal.Decimal('2.1')
    exist.delete.deleted_at = decimal.Decimal('5.1')
    self.mox.ReplayAll()
    # assertRaises context manager: consistent with the launch-side
    # mismatch tests; avoids the Python-2-only "except X, e" syntax.
    with self.assertRaises(FieldMismatch) as cm:
        nova_verifier._verify_for_delete(exist)
    exception = cm.exception
    self.assertEqual(exception.field_name, 'launched_at')
    self.assertEqual(exception.expected, decimal.Decimal('1.1'))
    self.assertEqual(exception.actual, decimal.Decimal('2.1'))
    self.mox.VerifyAll()
def test_verify_for_delete_deleted_at_mismatch(self):
    """deleted_at differing between exist and delete raises FieldMismatch."""
    exist = self.mox.CreateMockAnything()
    exist.delete = self.mox.CreateMockAnything()
    exist.launched_at = decimal.Decimal('1.1')
    exist.deleted_at = decimal.Decimal('5.1')
    exist.delete.launched_at = decimal.Decimal('1.1')
    exist.delete.deleted_at = decimal.Decimal('6.1')
    self.mox.ReplayAll()
    # assertRaises context manager: consistent with the launch-side
    # mismatch tests; avoids the Python-2-only "except X, e" syntax.
    with self.assertRaises(FieldMismatch) as cm:
        nova_verifier._verify_for_delete(exist)
    exception = cm.exception
    self.assertEqual(exception.field_name, 'deleted_at')
    self.assertEqual(exception.expected, decimal.Decimal('5.1'))
    self.assertEqual(exception.actual, decimal.Decimal('6.1'))
    self.mox.VerifyAll()
def test_verify_with_reconciled_data(self):
    """A single, not-deleted InstanceReconcile row is used as the launch
    record; delete verification is called with delete=None.
    """
    exists = self.mox.CreateMockAnything()
    exists.instance = INSTANCE_ID_1
    launched_at = decimal.Decimal('1.1')
    exists.launched_at = launched_at
    results = self.mox.CreateMockAnything()
    models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
        .AndReturn(results)
    results.count().AndReturn(1)
    launched_at = dt.dt_from_decimal(decimal.Decimal('1.1'))
    recs = self.mox.CreateMockAnything()
    models.InstanceReconcile.find(INSTANCE_ID_1, launched_at).AndReturn(recs)
    recs.count().AndReturn(1)
    reconcile = self.mox.CreateMockAnything()
    # deleted_at is None -> _verify_for_delete is expected with delete=None.
    reconcile.deleted_at = None
    recs[0].AndReturn(reconcile)
    self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
    nova_verifier._verify_for_launch(exists, launch=reconcile,
                                     launch_type='InstanceReconcile')
    self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
    nova_verifier._verify_for_delete(exists, delete=None,
                                     delete_type='InstanceReconcile')
    self.mox.ReplayAll()
    nova_verifier._verify_with_reconciled_data(exists)
    self.mox.VerifyAll()
def test_verify_with_reconciled_data_deleted(self):
    """A reconcile row carrying a deleted_at is also used as the delete
    record in delete verification.
    """
    exists = self.mox.CreateMockAnything()
    exists.instance = INSTANCE_ID_1
    launched_at = decimal.Decimal('1.1')
    deleted_at = decimal.Decimal('2.1')
    exists.launched_at = launched_at
    exists.deleted_at = deleted_at
    results = self.mox.CreateMockAnything()
    models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
        .AndReturn(results)
    results.count().AndReturn(1)
    launched_at = dt.dt_from_decimal(decimal.Decimal('1.1'))
    recs = self.mox.CreateMockAnything()
    models.InstanceReconcile.find(INSTANCE_ID_1, launched_at).AndReturn(recs)
    recs.count().AndReturn(1)
    reconcile = self.mox.CreateMockAnything()
    reconcile.deleted_at = deleted_at
    recs[0].AndReturn(reconcile)
    self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
    nova_verifier._verify_for_launch(exists, launch=reconcile,
                                     launch_type='InstanceReconcile')
    self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
    # deleted_at set -> the reconcile row doubles as the delete record.
    nova_verifier._verify_for_delete(exists, delete=reconcile,
                                     delete_type='InstanceReconcile')
    self.mox.ReplayAll()
    nova_verifier._verify_with_reconciled_data(exists)
    self.mox.VerifyAll()
def test_verify_with_reconciled_data_not_launched(self):
    """Reconciled verification requires a launched_at on the exist."""
    exists = self.mox.CreateMockAnything()
    exists.instance = INSTANCE_ID_1
    exists.launched_at = None
    self.mox.ReplayAll()
    with self.assertRaises(VerificationException) as cm:
        nova_verifier._verify_with_reconciled_data(exists)
    exception = cm.exception
    # assertEqual: assertEquals is a deprecated alias and inconsistent
    # with the rest of this file.
    self.assertEqual(exception.reason, 'Exists without a launched_at')
    self.mox.VerifyAll()
def test_verify_with_reconciled_data_ambiguous_results(self):
    """Multiple reconcile rows for instance+launched_at raise AmbiguousResults."""
    exists = self.mox.CreateMockAnything()
    exists.instance = INSTANCE_ID_1
    launched_at = decimal.Decimal('1.1')
    deleted_at = decimal.Decimal('2.1')
    exists.launched_at = launched_at
    exists.deleted_at = deleted_at
    results = self.mox.CreateMockAnything()
    models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
        .AndReturn(results)
    results.count().AndReturn(1)
    launched_at = dt.dt_from_decimal(decimal.Decimal('1.1'))
    recs = self.mox.CreateMockAnything()
    models.InstanceReconcile.find(INSTANCE_ID_1, launched_at).AndReturn(recs)
    recs.count().AndReturn(2)
    self.mox.ReplayAll()
    with self.assertRaises(AmbiguousResults) as cm:
        nova_verifier._verify_with_reconciled_data(exists)
    exception = cm.exception
    # assertEqual: assertEquals is a deprecated alias and inconsistent
    # with the rest of this file.
    self.assertEqual(exception.object_type, 'InstanceReconcile')
    self.mox.VerifyAll()
def test_verify_with_reconciled_data_instance_not_found(self):
    """No reconcile rows at all for the instance raises NotFound."""
    exists = self.mox.CreateMockAnything()
    exists.instance = INSTANCE_ID_1
    launched_at = decimal.Decimal('1.1')
    deleted_at = decimal.Decimal('2.1')
    exists.launched_at = launched_at
    exists.deleted_at = deleted_at
    results = self.mox.CreateMockAnything()
    models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
        .AndReturn(results)
    results.count().AndReturn(0)
    self.mox.ReplayAll()
    with self.assertRaises(NotFound) as cm:
        nova_verifier._verify_with_reconciled_data(exists)
    exception = cm.exception
    # assertEqual: assertEquals is a deprecated alias and inconsistent
    # with the rest of this file.
    self.assertEqual(exception.object_type, 'InstanceReconcile')
    self.mox.VerifyAll()
def test_verify_with_reconciled_data_reconcile_not_found(self):
    """Reconcile rows exist for the instance but none match launched_at."""
    exists = self.mox.CreateMockAnything()
    exists.instance = INSTANCE_ID_1
    launched_at = decimal.Decimal('1.1')
    deleted_at = decimal.Decimal('2.1')
    exists.launched_at = launched_at
    exists.deleted_at = deleted_at
    results = self.mox.CreateMockAnything()
    models.InstanceReconcile.objects.filter(instance=INSTANCE_ID_1)\
        .AndReturn(results)
    results.count().AndReturn(1)
    launched_at = dt.dt_from_decimal(decimal.Decimal('1.1'))
    recs = self.mox.CreateMockAnything()
    models.InstanceReconcile.find(INSTANCE_ID_1, launched_at).AndReturn(recs)
    recs.count().AndReturn(0)
    self.mox.ReplayAll()
    with self.assertRaises(NotFound) as cm:
        nova_verifier._verify_with_reconciled_data(exists)
    exception = cm.exception
    # assertEqual: assertEquals is a deprecated alias and inconsistent
    # with the rest of this file.
    self.assertEqual(exception.object_type, 'InstanceReconcile')
    self.mox.VerifyAll()
def test_verify_pass(self):
    """_verify marks the exist verified when launch and delete checks pass."""
    exist = self.mox.CreateMockAnything()
    exist.launched_at = decimal.Decimal('1.1')
    for target in ('_verify_for_launch', '_verify_for_delete'):
        self.mox.StubOutWithMock(nova_verifier, target)
    self.mox.StubOutWithMock(exist, 'mark_verified')
    nova_verifier._verify_for_launch(exist)
    nova_verifier._verify_for_delete(exist)
    exist.mark_verified()
    self.mox.ReplayAll()
    verified, _ = nova_verifier._verify(exist)
    self.assertTrue(verified)
    self.mox.VerifyAll()
def test_verify_no_launched_at(self):
    """An exist lacking launched_at is marked failed when no reconcile
    data is found either."""
    exist = self.mox.CreateMockAnything()
    exist.launched_at = None
    for target in ('_verify_for_launch', '_verify_for_delete'):
        self.mox.StubOutWithMock(nova_verifier, target)
    self.mox.StubOutWithMock(exist, 'mark_failed')
    exist.mark_failed(reason="Exists without a launched_at")
    self.mox.StubOutWithMock(nova_verifier, '_verify_with_reconciled_data')
    nova_verifier._verify_with_reconciled_data(exist)\
        .AndRaise(NotFound('InstanceReconcile', {}))
    self.mox.ReplayAll()
    verified, _ = nova_verifier._verify(exist)
    self.assertFalse(verified)
    self.mox.VerifyAll()
def test_verify_fails_reconciled_verify_uses_second_exception(self):
    """When both the primary and the reconciled verification fail, the
    recorded failure reason comes from the second (reconciled) exception.
    """
    exist = self.mox.CreateMockAnything()
    self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
    ex1 = VerificationException('test1')
    nova_verifier._verify_for_launch(exist).AndRaise(ex1)
    self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
    self.mox.StubOutWithMock(exist, 'mark_failed')
    self.mox.StubOutWithMock(nova_verifier, '_verify_with_reconciled_data')
    nova_verifier._verify_with_reconciled_data(exist)\
        .AndRaise(VerificationException('test2'))
    # 'test2' (not 'test1') must be the recorded failure reason.
    exist.mark_failed(reason='test2')
    self.mox.ReplayAll()
    result, exists = nova_verifier._verify(exist)
    self.assertFalse(result)
    self.mox.VerifyAll()
def test_verify_launch_fail(self):
    """A launch verification failure with no reconcile data marks the
    exist failed with the original exception's reason.
    """
    exist = self.mox.CreateMockAnything()
    exist.launched_at = decimal.Decimal('1.1')
    self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
    self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
    self.mox.StubOutWithMock(exist, 'mark_failed')
    verify_exception = VerificationException('test')
    nova_verifier._verify_for_launch(exist).AndRaise(verify_exception)
    self.mox.StubOutWithMock(nova_verifier, '_verify_with_reconciled_data')
    # The reconciled fallback also fails: no reconcile row found.
    nova_verifier._verify_with_reconciled_data(exist)\
        .AndRaise(NotFound('InstanceReconcile', {}))
    exist.mark_failed(reason='test')
    self.mox.ReplayAll()
    result, exists = nova_verifier._verify(exist)
    self.assertFalse(result)
    self.mox.VerifyAll()
def test_verify_fail_reconcile_success(self):
    """If the primary verification fails but the reconciled data verifies,
    the exist is marked verified with reconciled=True.
    """
    exist = self.mox.CreateMockAnything()
    exist.launched_at = decimal.Decimal('1.1')
    self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
    self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
    self.mox.StubOutWithMock(exist, 'mark_verified')
    verify_exception = VerificationException('test')
    nova_verifier._verify_for_launch(exist).AndRaise(verify_exception)
    self.mox.StubOutWithMock(nova_verifier, '_verify_with_reconciled_data')
    # Reconciled verification succeeds (no exception recorded).
    nova_verifier._verify_with_reconciled_data(exist)
    exist.mark_verified(reconciled=True)
    self.mox.ReplayAll()
    result, exists = nova_verifier._verify(exist)
    self.assertTrue(result)
    self.mox.VerifyAll()
def test_verify_fail_with_reconciled_data_exception(self):
    """An unexpected exception during reconciled verification records the
    exception class name as the failure reason.
    """
    exist = self.mox.CreateMockAnything()
    exist.launched_at = decimal.Decimal('1.1')
    self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
    self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
    self.mox.StubOutWithMock(exist, 'mark_failed')
    verify_exception = VerificationException('test')
    nova_verifier._verify_for_launch(exist).AndRaise(verify_exception)
    self.mox.StubOutWithMock(nova_verifier, '_verify_with_reconciled_data')
    nova_verifier._verify_with_reconciled_data(exist)\
        .AndRaise(Exception())
    # Generic exceptions are reported by class name.
    exist.mark_failed(reason='Exception')
    self.mox.ReplayAll()
    result, exists = nova_verifier._verify(exist)
    self.assertFalse(result)
    self.mox.VerifyAll()
def test_verify_delete_fail(self):
    """A delete verification failure with no reconcile data marks the
    exist failed with the original exception's reason.
    """
    exist = self.mox.CreateMockAnything()
    exist.launched_at = decimal.Decimal('1.1')
    self.mox.StubOutWithMock(nova_verifier, '_verify_for_launch')
    self.mox.StubOutWithMock(nova_verifier, '_verify_for_delete')
    self.mox.StubOutWithMock(exist, 'mark_failed')
    verify_exception = VerificationException('test')
    # Launch verification passes; the delete check raises.
    nova_verifier._verify_for_launch(exist)
    nova_verifier._verify_for_delete(exist).AndRaise(verify_exception)
    self.mox.StubOutWithMock(nova_verifier, '_verify_with_reconciled_data')
    nova_verifier._verify_with_reconciled_data(exist)\
        .AndRaise(NotFound('InstanceReconcile', {}))
    exist.mark_failed(reason='test')
    self.mox.ReplayAll()
    result, exists = nova_verifier._verify(exist)
    self.assertFalse(result)
    self.mox.VerifyAll()
def test_verify_exception_during_launch(self):
    """An unexpected exception in launch verification marks the exist failed."""
    exist = self.mox.CreateMockAnything()
    exist.launched_at = decimal.Decimal('1.1')
    for target in ('_verify_for_launch', '_verify_for_delete'):
        self.mox.StubOutWithMock(nova_verifier, target)
    self.mox.StubOutWithMock(exist, 'mark_failed')
    nova_verifier._verify_for_launch(exist).AndRaise(Exception())
    exist.mark_failed(reason='Exception')
    self.mox.ReplayAll()
    verified, _ = nova_verifier._verify(exist)
    self.assertFalse(verified)
    self.mox.VerifyAll()
def test_verify_exception_during_delete(self):
    """An unexpected exception in delete verification marks the exist failed."""
    exist = self.mox.CreateMockAnything()
    exist.launched_at = decimal.Decimal('1.1')
    for target in ('_verify_for_launch', '_verify_for_delete'):
        self.mox.StubOutWithMock(nova_verifier, target)
    self.mox.StubOutWithMock(exist, 'mark_failed')
    nova_verifier._verify_for_launch(exist)
    nova_verifier._verify_for_delete(exist).AndRaise(Exception())
    exist.mark_failed(reason='Exception')
    self.mox.ReplayAll()
    verified, _ = nova_verifier._verify(exist)
    self.assertFalse(verified)
    self.mox.VerifyAll()
def test_verify_for_range_without_callback(self):
    """verify_for_range marks pending exists as verifying and fans each
    out to the worker pool with no completion callback.
    """
    when_max = datetime.datetime.utcnow()
    results = self.mox.CreateMockAnything()
    models.InstanceExists.PENDING = 'pending'
    models.InstanceExists.VERIFYING = 'verifying'
    models.InstanceExists.find(
        ending_max=when_max, status='pending').AndReturn(results)
    results.count().AndReturn(2)
    exist1 = self.mox.CreateMockAnything()
    exist2 = self.mox.CreateMockAnything()
    # Batching: the verifier slices the queryset 1000 records at a time
    # (__getslice__ is the Python 2 slicing hook).
    results.__getslice__(0, 1000).AndReturn(results)
    results.__iter__().AndReturn([exist1, exist2].__iter__())
    exist1.update_status('verifying')
    exist2.update_status('verifying')
    exist1.save()
    exist2.save()
    self.pool.apply_async(nova_verifier._verify, args=(exist1,),
                          callback=None)
    self.pool.apply_async(nova_verifier._verify, args=(exist2,),
                          callback=None)
    self.mox.ReplayAll()
    self.verifier.verify_for_range(when_max)
    self.mox.VerifyAll()
def test_verify_for_range_with_callback(self):
    """verify_for_range passes the supplied callback through to each
    pool task it schedules.
    """
    callback = self.mox.CreateMockAnything()
    when_max = datetime.datetime.utcnow()
    results = self.mox.CreateMockAnything()
    models.InstanceExists.PENDING = 'pending'
    models.InstanceExists.VERIFYING = 'verifying'
    models.InstanceExists.find(
        ending_max=when_max, status='pending').AndReturn(results)
    results.count().AndReturn(2)
    exist1 = self.mox.CreateMockAnything()
    exist2 = self.mox.CreateMockAnything()
    # Batching: 1000-record slices (__getslice__ is the Python 2 hook).
    results.__getslice__(0, 1000).AndReturn(results)
    results.__iter__().AndReturn([exist1, exist2].__iter__())
    exist1.update_status('verifying')
    exist2.update_status('verifying')
    exist1.save()
    exist2.save()
    self.pool.apply_async(nova_verifier._verify, args=(exist1,),
                          callback=callback)
    self.pool.apply_async(nova_verifier._verify, args=(exist2,),
                          callback=callback)
    self.mox.ReplayAll()
    self.verifier.verify_for_range(when_max, callback=callback)
    self.mox.VerifyAll()
def test_reconcile_failed(self):
    """reconcile_failed forwards each failed exist and clears the list."""
    self.verifier.reconcile = True
    failed_exists = [self.mox.CreateMockAnything(),
                     self.mox.CreateMockAnything()]
    self.verifier.failed = list(failed_exists)
    for failed in failed_exists:
        self.reconciler.failed_validation(failed)
    self.mox.ReplayAll()
    self.verifier.reconcile_failed()
    self.assertEqual(len(self.verifier.failed), 0)
    self.mox.VerifyAll()
def test_send_verified_notification_routing_keys(self):
    """The verified notification is published once per supplied routing
    key, with a fresh message_id and the original id preserved.
    """
    connection = self.mox.CreateMockAnything()
    exchange = self.mox.CreateMockAnything()
    exist = self.mox.CreateMockAnything()
    exist.raw = self.mox.CreateMockAnything()
    # Raw notification payload: [routing_key, body] as stored JSON.
    exist_dict = [
        'monitor.info',
        {
            'event_type': 'test',
            'message_id': 'some_uuid'
        }
    ]
    exist_str = json.dumps(exist_dict)
    exist.raw.json = exist_str
    self.mox.StubOutWithMock(uuid, 'uuid4')
    uuid.uuid4().AndReturn('some_other_uuid')
    self.mox.StubOutWithMock(kombu.pools, 'producers')
    self.mox.StubOutWithMock(kombu.common, 'maybe_declare')
    routing_keys = ['notifications.info', 'monitor.info']
    for key in routing_keys:
        # One acquire/declare/publish cycle is expected per routing key.
        producer = self.mox.CreateMockAnything()
        producer.channel = self.mox.CreateMockAnything()
        kombu.pools.producers[connection].AndReturn(producer)
        producer.acquire(block=True).AndReturn(producer)
        producer.__enter__().AndReturn(producer)
        kombu.common.maybe_declare(exchange, producer.channel)
        message = {'event_type': 'compute.instance.exists.verified.old',
                   'message_id': 'some_other_uuid',
                   'original_message_id': 'some_uuid'}
        producer.publish(message, key)
        producer.__exit__(None, None, None)
    self.mox.ReplayAll()
    self.verifier.send_verified_notification(exist, exchange, connection,
                                             routing_keys=routing_keys)
    self.mox.VerifyAll()
def test_send_verified_notification_default_routing_key(self):
    """Without explicit routing_keys, the notification is published on
    the routing key stored in the raw notification payload.
    """
    connection = self.mox.CreateMockAnything()
    exchange = self.mox.CreateMockAnything()
    exist = self.mox.CreateMockAnything()
    exist.raw = self.mox.CreateMockAnything()
    # Raw notification payload: [routing_key, body] as stored JSON.
    exist_dict = [
        'monitor.info',
        {
            'event_type': 'test',
            'message_id': 'some_uuid'
        }
    ]
    exist_str = json.dumps(exist_dict)
    exist.raw.json = exist_str
    self.mox.StubOutWithMock(kombu.pools, 'producers')
    self.mox.StubOutWithMock(kombu.common, 'maybe_declare')
    producer = self.mox.CreateMockAnything()
    producer.channel = self.mox.CreateMockAnything()
    kombu.pools.producers[connection].AndReturn(producer)
    producer.acquire(block=True).AndReturn(producer)
    producer.__enter__().AndReturn(producer)
    kombu.common.maybe_declare(exchange, producer.channel)
    self.mox.StubOutWithMock(uuid, 'uuid4')
    uuid.uuid4().AndReturn('some_other_uuid')
    message = {'event_type': 'compute.instance.exists.verified.old',
               'message_id': 'some_other_uuid',
               'original_message_id': 'some_uuid'}
    # The default routing key is taken from the stored payload.
    producer.publish(message, exist_dict[0])
    producer.__exit__(None, None, None)
    self.mox.ReplayAll()
    self.verifier.send_verified_notification(exist, exchange, connection)
    self.mox.VerifyAll()

View File

@ -19,7 +19,6 @@
# IN THE SOFTWARE.
import datetime
import unittest
import mox
import requests
@ -30,6 +29,7 @@ from stacktach import utils as stackutils
from stacktach.reconciler import exceptions
from stacktach.reconciler import nova
from stacktach.reconciler import utils as rec_utils
from tests.unit import StacktachBaseTestCase
from tests.unit import utils
from tests.unit.utils import INSTANCE_ID_1
from tests.unit.utils import TENANT_ID_1
@ -45,7 +45,7 @@ DEFAULT_OS_VERSION = "1.1"
DEFAULT_RAX_OPTIONS = "rax_ops"
class ReconcilerTestCase(unittest.TestCase):
class ReconcilerTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
self.client = self.mox.CreateMockAnything()
@ -445,7 +445,7 @@ json_bridge_config = {
}
class NovaJSONBridgeClientTestCase(unittest.TestCase):
class NovaJSONBridgeClientTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
self.client = nova.JSONBridgeClient(json_bridge_config)

100
tests/unit/test_stacklog.py Normal file
View File

@ -0,0 +1,100 @@
import glob
import logging
import os
import mox
from stacktach import stacklog
from stacktach.stacklog import ExchangeLogger
from tests.unit import StacktachBaseTestCase
class StacklogTestCase(StacktachBaseTestCase):
    """Tests for stacklog.get_logger's logger-type selection."""

    def setUp(self):
        self.mox = mox.Mox()

    def tearDown(self):
        self.mox.UnsetStubs()

    def _remove_log_files(self, filename):
        """Delete the real log files the handler created on disk."""
        # 'logfile' (not 'file') avoids shadowing the Python 2 builtin.
        for logfile in glob.glob('{0}.log*'.format(filename)):
            os.remove(logfile)

    def test_get_logger_should_get_exchange_logger_if_exchange_provided(self):
        filename = 'filename'
        logger = stacklog.get_logger(filename, 'nova')
        self.assertIsInstance(logger, ExchangeLogger)
        self._remove_log_files(filename)

    def test_get_logger_should_get_default_logger_if_exchange_not_provided(self):
        filename = 'default_logger'
        logger = stacklog.get_logger(filename)
        self.assertIsInstance(logger, logging.Logger)
        self._remove_log_files(filename)
class ExchangeLoggerTestCase(StacktachBaseTestCase):
    """Tests that ExchangeLogger prefixes every message with the exchange
    name and configures a midnight-rotating file handler.
    """

    def setUp(self):
        self.mox = mox.Mox()

    def tearDown(self):
        self.mox.UnsetStubs()

    def _setup_logger_mocks(self, name='name'):
        """Record the expected logger/handler/formatter construction.

        Returns the mock logger so individual tests can add per-level
        message expectations. Expectation order matters (mox replay).
        """
        mock_logger = self.mox.CreateMockAnything()
        self.mox.StubOutWithMock(logging, 'getLogger')
        logging.getLogger(stacklog.__name__).AndReturn(mock_logger)
        mock_logger.setLevel(logging.DEBUG)
        self.mox.StubOutClassWithMocks(logging.handlers,
                                       'TimedRotatingFileHandler')
        filename = "/tmp/{0}.log".format(name)
        handler = logging.handlers.TimedRotatingFileHandler(
            filename, backupCount=3, interval=1, when='midnight')
        self.mox.StubOutClassWithMocks(logging, 'Formatter')
        mock_formatter = logging.Formatter(
            "%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        handler.setFormatter(mock_formatter)
        mock_logger.addHandler(handler)
        mock_logger.handlers = [handler]
        handler.doRollover()
        return mock_logger

    def test_exchange_logger_should_append_exchange_name_to_info(self):
        mock_logger = self._setup_logger_mocks()
        # The exchange name is prepended to the format string.
        mock_logger.info('exchange: Log %s', 'args', xyz='xyz')
        self.mox.ReplayAll()
        log = ExchangeLogger('exchange', 'name')
        log.info("Log %s", 'args', xyz='xyz')
        self.mox.VerifyAll()

    def test_exchange_logger_should_append_exchange_name_to_warn(self):
        mock_logger = self._setup_logger_mocks()
        mock_logger.warn('exchange: Log %s', 'args', xyz='xyz')
        self.mox.ReplayAll()
        logger = ExchangeLogger('exchange', 'name')
        logger.warn("Log %s", 'args', xyz='xyz')
        self.mox.VerifyAll()

    def test_exchange_logger_should_append_exchange_name_to_error(self):
        mock_logger = self._setup_logger_mocks()
        mock_logger.error('exchange: Log %s', 'args', xyz='xyz')
        self.mox.ReplayAll()
        logger = ExchangeLogger('exchange', 'name')
        logger.error("Log %s", 'args', xyz='xyz')
        self.mox.VerifyAll()

    def test_exchange_logger_should_append_exchange_name_to_exception(self):
        mock_logger = self._setup_logger_mocks()
        # exception() is expected to delegate to error() on the underlying
        # logger, hence the error expectation here.
        mock_logger.error('exchange: Log %s', 'args', xyz='xyz')
        self.mox.ReplayAll()
        logger = ExchangeLogger('exchange', 'name')
        logger.exception("Log %s", 'args', xyz='xyz')
        self.mox.VerifyAll()

    def test_exchange_logger_should_use_default_name_if_not_provided(self):
        # Omitting the name should fall back to 'stacktach-default'.
        self._setup_logger_mocks('stacktach-default')
        self.mox.ReplayAll()
        ExchangeLogger('exchange')
        self.mox.VerifyAll()

View File

@ -20,7 +20,6 @@
import datetime
import json
import unittest
import mox
@ -36,11 +35,14 @@ from utils import TENANT_ID_1
from utils import INSTANCE_TYPE_ID_1
from utils import DUMMY_TIME
from utils import INSTANCE_TYPE_ID_2
from utils import IMAGE_UUID_1
from stacktach import stacklog
from stacktach import notification
from stacktach import views
from tests.unit import StacktachBaseTestCase
class StacktachRawParsingTestCase(unittest.TestCase):
class StacktachRawParsingTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
views.STACKDB = self.mox.CreateMockAnything()
@ -59,56 +61,47 @@ class StacktachRawParsingTestCase(unittest.TestCase):
dict = {
'timestamp': when,
}
args = ('monitor.info', dict)
routing_key = 'monitor.info'
args = (routing_key, dict)
json_args = json.dumps(args)
raw_values = {
'deployment': deployment,
'when': utils.decimal_utc(datetime.datetime.strptime(when, '%Y-%m-%d %H:%M:%S.%f')),
'host': 'api',
'routing_key': 'monitor.info',
'json': json_args
}
old_info_handler = views.NOTIFICATIONS['monitor.info']
mock_record = self.mox.CreateMockAnything()
mock_notification = self.mox.CreateMockAnything()
mock_notification.rawdata_kwargs(deployment, 'monitor.info', json_args).AndReturn(raw_values)
views.NOTIFICATIONS['monitor.info'] = lambda message_body: mock_notification
views.STACKDB.create_rawdata(**raw_values)
mock_notification.save().AndReturn(mock_record)
self.mox.StubOutWithMock(notification, 'notification_factory')
exchange = 'nova'
notification.notification_factory(dict, deployment, routing_key,
json_args, exchange).AndReturn(
mock_notification)
self.mox.ReplayAll()
views.process_raw_data(deployment, args, json_args)
self.mox.VerifyAll()
views.NOTIFICATIONS['monitor.info'] = old_info_handler
self.assertEquals(
views.process_raw_data(deployment, args, json_args, exchange),
(mock_record, mock_notification))
self.mox.VerifyAll()
def test_process_raw_data_old_timestamp(self):
deployment = self.mox.CreateMockAnything()
when = '2013-1-25T13:38:23.123'
dict = {
'_context_timestamp': when,
}
}
routing_key = 'monitor.info'
args = ('monitor.info', dict)
json_args = json.dumps(args[1])
raw_values = {
'deployment': deployment,
'when': utils.decimal_utc(datetime.datetime.strptime(when, '%Y-%m-%dT%H:%M:%S.%f')),
'host': 'api',
'routing_key': 'monitor.info',
'json': json_args
}
old_info_handler = views.NOTIFICATIONS['monitor.info']
mock_notification = self.mox.CreateMockAnything()
mock_notification.rawdata_kwargs(deployment, 'monitor.info', json_args).AndReturn(raw_values)
views.NOTIFICATIONS['monitor.info'] = lambda message_body: mock_notification
views.STACKDB.create_rawdata(**raw_values)
mock_notification = self.mox.CreateMockAnything()
mock_notification.save()
self.mox.StubOutWithMock(notification, 'notification_factory')
exchange = 'nova'
notification.notification_factory(dict, deployment, routing_key,
json_args, exchange).AndReturn(mock_notification)
self.mox.ReplayAll()
views.process_raw_data(deployment, args, json_args)
views.process_raw_data(deployment, args, json_args, exchange)
self.mox.VerifyAll()
views.NOTIFICATIONS['monitor.info'] = old_info_handler
class StacktachLifecycleTestCase(unittest.TestCase):
class StacktachLifecycleTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
views.STACKDB = self.mox.CreateMockAnything()
@ -294,7 +287,7 @@ class StacktachLifecycleTestCase(unittest.TestCase):
self.mox.VerifyAll()
class StacktachUsageParsingTestCase(unittest.TestCase):
class StacktachUsageParsingTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
views.STACKDB = self.mox.CreateMockAnything()
@ -311,15 +304,30 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
stacklog.get_logger(name=name).AndReturn(self.log)
def test_process_usage_for_new_launch_create_start(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1, 'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
event = 'compute.instance.create.start'
raw, usage = self._setup_process_usage_mocks(event, notification)
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
views._process_usage_for_new_launch(raw, notification[1])
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.create.start'
self.assertEquals(usage.instance_type_id, '1')
usage = self.mox.CreateMockAnything()
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_new_launch(raw, notification)
self.assertEquals(usage.instance_type_id, INSTANCE_TYPE_ID_1)
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
@ -329,15 +337,29 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.mox.VerifyAll()
def test_process_usage_for_new_launch_rebuild_start(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1, 'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
event = 'compute.instance.rebuild.start'
raw, usage = self._setup_process_usage_mocks(event, notification)
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
views._process_usage_for_new_launch(raw, notification[1])
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.rebuild.start'
usage = self.mox.CreateMockAnything()
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
self.assertEquals(usage.instance_type_id, '1')
views._process_usage_for_new_launch(raw, notification)
self.assertEquals(usage.instance_type_id, INSTANCE_TYPE_ID_1)
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
@ -346,14 +368,29 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.mox.VerifyAll()
def test_process_usage_for_new_launch_rebuild_start_when_no_launched_at_in_db(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1, 'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
event = 'compute.instance.rebuild.start'
raw, usage = self._setup_process_usage_mocks(event, notification)
usage.launched_at = None
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
views._process_usage_for_new_launch(raw, notification[1])
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.rebuild.start'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_new_launch(raw, notification)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEquals(usage.tenant, TENANT_ID_1)
@ -365,14 +402,31 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.mox.VerifyAll()
def test_process_usage_for_new_launch_resize_prep_start_when_no_launched_at_in_db(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1, 'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
event = 'compute.instance.resize.prep.start'
raw, usage = self._setup_process_usage_mocks(event, notification)
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.resize.prep.start'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
usage.launched_at = None
views._process_usage_for_new_launch(raw, notification[1])
views._process_usage_for_new_launch(raw, notification)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEquals(usage.tenant, TENANT_ID_1)
@ -384,14 +438,29 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.mox.VerifyAll()
def test_process_usage_for_new_launch_resize_revert_start_when_no_launched_at_in_db(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1,'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1, 'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1, **kwargs)
event = 'compute.instance.resize.revert.start'
raw, usage = self._setup_process_usage_mocks(event, notification)
usage.launched_at = None
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
views._process_usage_for_new_launch(raw, notification[1])
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.resize.revert.start'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_new_launch(raw, notification)
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
@ -403,17 +472,30 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.mox.VerifyAll()
def test_process_usage_for_new_launch_resize_prep_start_when_launched_at_in_db(self):
kwargs = {'launched': str(DUMMY_TIME), 'tenant_id': TENANT_ID_1,
'rax_options': RAX_OPTIONS_1, 'os_architecture': OS_ARCH_1,
'os_version': OS_VERSION_1, 'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1,
**kwargs)
event = 'compute.instance.resize.prep.start'
raw, usage = self._setup_process_usage_mocks(event, notification)
orig_launched_at = utils.decimal_utc(DUMMY_TIME - datetime.timedelta(days=1))
usage.launched_at = orig_launched_at
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
views._process_usage_for_new_launch(raw, notification[1])
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.resize.prep.start'
orig_launched_at = utils.decimal_utc(DUMMY_TIME - datetime.timedelta(days=1))
usage = self.mox.CreateMockAnything()
usage.launched_at = orig_launched_at
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_new_launch(raw, notification)
self.assertEqual(usage.launched_at, orig_launched_at)
self.assertEqual(usage.tenant, TENANT_ID_1)
@ -425,16 +507,30 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.mox.VerifyAll()
def test_process_usage_for_updates_create_end(self):
kwargs = {'launched': str(DUMMY_TIME),
'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1,
'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1,
**kwargs)
event = 'compute.instance.create.end'
raw, usage = self._setup_process_usage_mocks(event, notification)
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification.message = None
views._process_usage_for_updates(raw, notification[1])
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.create.end'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_updates(raw, notification)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEqual(usage.tenant, TENANT_ID_1)
@ -446,17 +542,30 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.mox.VerifyAll()
def test_process_usage_for_updates_create_end_success_message(self):
kwargs = {'launched': str(DUMMY_TIME),
'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1,
'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1,
**kwargs)
notification[1]['payload']['message'] = "Success"
event = 'compute.instance.create.end'
raw, usage = self._setup_process_usage_mocks(event, notification)
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification.message = 'Success'
views._process_usage_for_updates(raw, notification[1])
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.create.end'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_updates(raw, notification)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEqual(usage.tenant, TENANT_ID_1)
@ -468,37 +577,42 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.mox.VerifyAll()
def test_process_usage_for_updates_create_end_error_message(self):
kwargs = {'launched': str(DUMMY_TIME),
'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1,
'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1,
**kwargs)
notification[1]['payload']['message'] = "Error"
event = 'compute.instance.create.end'
when_time = DUMMY_TIME
when_decimal = utils.decimal_utc(when_time)
json_str = json.dumps(notification)
raw = utils.create_raw(self.mox, when_decimal, event=event,
json_str=json_str)
notification = self.mox.CreateMockAnything()
notification.message = 'Error'
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.create.end'
self.mox.ReplayAll()
views._process_usage_for_updates(raw, notification[1])
views._process_usage_for_updates(raw, notification)
self.mox.VerifyAll()
def test_process_usage_for_updates_revert_end(self):
kwargs = {'launched': str(DUMMY_TIME),
'type_id': INSTANCE_TYPE_ID_1,
'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1,
'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1,
**kwargs)
event = 'compute.instance.resize.revert.end'
raw, usage = self._setup_process_usage_mocks(event, notification)
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification.message = None
views._process_usage_for_updates(raw, notification[1])
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.resize.revert.end'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_updates(raw, notification)
self.assertEqual(usage.instance_type_id, INSTANCE_TYPE_ID_1)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
@ -511,17 +625,30 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.mox.VerifyAll()
def test_process_usage_for_updates_prep_end(self):
kwargs = {'launched': str(DUMMY_TIME),
'new_type_id': INSTANCE_TYPE_ID_2,
'tenant_id': TENANT_ID_1, 'rax_options': RAX_OPTIONS_1,
'os_architecture': OS_ARCH_1, 'os_version': OS_VERSION_1,
'os_distro': OS_DISTRO_1 }
notification = utils.create_nova_notif(request_id=REQUEST_ID_1,
**kwargs)
event = 'compute.instance.resize.prep.end'
raw, usage = self._setup_process_usage_mocks(event, notification)
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.new_instance_type_id = INSTANCE_TYPE_ID_2
notification.message = None
views._process_usage_for_updates(raw, notification[1])
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.resize.prep.end'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
views._process_usage_for_updates(raw, notification)
self.assertEqual(usage.instance_type_id, INSTANCE_TYPE_ID_2)
self.assertEquals(usage.tenant, TENANT_ID_1)
@ -532,43 +659,29 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
self.mox.VerifyAll()
def _setup_process_usage_mocks(self, event, notification):
when_time = DUMMY_TIME
when_decimal = utils.decimal_utc(when_time)
json_str = json.dumps(notification)
raw = utils.create_raw(self.mox, when_decimal, event=event,
json_str=json_str)
usage = self.mox.CreateMockAnything()
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
views.STACKDB.save(usage)
self.mox.ReplayAll()
return raw, usage
def test_process_delete(self):
delete_time = datetime.datetime.utcnow()
launch_time = delete_time-datetime.timedelta(days=1)
launch_decimal = utils.decimal_utc(launch_time)
delete_decimal = utils.decimal_utc(delete_time)
notif = utils.create_nova_notif(request_id=REQUEST_ID_1,
launched=str(launch_time),
deleted=str(delete_time))
json_str = json.dumps(notif)
event = 'compute.instance.delete.end'
raw = utils.create_raw(self.mox, delete_decimal, event=event,
json_str=json_str)
notification = self.mox.CreateMockAnything()
notification.instance = INSTANCE_ID_1
notification.deleted_at = str(delete_time)
notification.launched_at = str(launch_time)
raw = self.mox.CreateMockAnything()
delete = self.mox.CreateMockAnything()
delete.instance = INSTANCE_ID_1
delete.launched_at = launch_decimal
delete.deleted_at = delete_decimal
views.STACKDB.get_or_create_instance_delete(instance=INSTANCE_ID_1,
deleted_at=delete_decimal)\
.AndReturn((delete, True))
views.STACKDB.get_or_create_instance_delete(
instance=INSTANCE_ID_1, deleted_at=delete_decimal)\
.AndReturn((delete, True))
views.STACKDB.save(delete)
self.mox.ReplayAll()
views._process_delete(raw, notif[1])
views._process_delete(raw, notification)
self.assertEqual(delete.instance, INSTANCE_ID_1)
self.assertEqual(delete.launched_at, launch_decimal)
self.assertEqual(delete.deleted_at, delete_decimal)
@ -576,47 +689,50 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
def test_process_delete_no_launch(self):
delete_time = datetime.datetime.utcnow()
launch_time = delete_time-datetime.timedelta(days=1)
delete_decimal = utils.decimal_utc(delete_time)
notif = utils.create_nova_notif(request_id=REQUEST_ID_1,
deleted=str(delete_time))
json_str = json.dumps(notif)
event = 'compute.instance.delete.end'
raw = utils.create_raw(self.mox, delete_decimal, event=event,
json_str=json_str)
notification = self.mox.CreateMockAnything()
notification.instance = INSTANCE_ID_1
notification.deleted_at = str(delete_time)
notification.launched_at = str(launch_time)
raw = self.mox.CreateMockAnything()
delete = self.mox.CreateMockAnything()
delete.instance = INSTANCE_ID_1
delete.deleted_at = delete_decimal
views.STACKDB.get_or_create_instance_delete(instance=INSTANCE_ID_1,
deleted_at=delete_decimal)\
.AndReturn((delete, True))
views.STACKDB.get_or_create_instance_delete(
instance=INSTANCE_ID_1, deleted_at=delete_decimal)\
.AndReturn((delete, True))
views.STACKDB.save(delete)
self.mox.ReplayAll()
views._process_delete(raw, notif[1])
views._process_delete(raw, notification)
self.assertEqual(delete.instance, INSTANCE_ID_1)
self.assertEqual(delete.deleted_at, delete_decimal)
self.mox.VerifyAll()
def test_process_exists(self):
notification = self.mox.CreateMockAnything()
current_time = datetime.datetime.utcnow()
launch_time = current_time - datetime.timedelta(hours=23)
launch_decimal = utils.decimal_utc(launch_time)
current_decimal = utils.decimal_utc(current_time)
audit_beginning = current_time - datetime.timedelta(hours=20)
audit_beginning_decimal = utils.decimal_utc(audit_beginning)
audit_ending_decimal = utils.decimal_utc(current_time)
notif = utils.create_nova_notif(launched=str(launch_time),
audit_period_beginning=str(audit_beginning),
audit_period_ending=str(current_time),
tenant_id=TENANT_ID_1,
os_architecture=OS_ARCH_1,
os_version=OS_VERSION_1,
os_distro=OS_DISTRO_1,
rax_options=RAX_OPTIONS_1)
json_str = json.dumps(notif)
event = 'compute.instance.exists'
raw = utils.create_raw(self.mox, current_decimal, event=event,
json_str=json_str)
notification.launched_at = str(launch_time)
notification.audit_period_beginning = str(audit_beginning)
notification.audit_period_ending = str(current_time)
notification.tenant = TENANT_ID_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.rax_options = RAX_OPTIONS_1
notification.instance = INSTANCE_ID_1
notification.deleted_at = ''
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification.message_id = MESSAGE_ID_1
raw = self.mox.CreateMockAnything()
usage = self.mox.CreateMockAnything()
launched_range = (launch_decimal, launch_decimal+1)
views.STACKDB.get_instance_usage(instance=INSTANCE_ID_1,
@ -628,7 +744,7 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
'launched_at': launch_decimal,
'audit_period_beginning': audit_beginning_decimal,
'audit_period_ending': audit_ending_decimal,
'instance_type_id': '1',
'instance_type_id': INSTANCE_TYPE_ID_1,
'usage': usage,
'raw': raw,
'tenant': TENANT_ID_1,
@ -641,50 +757,45 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
views.STACKDB.create_instance_exists(**exists_values).AndReturn(exists)
views.STACKDB.save(exists)
self.mox.ReplayAll()
views._process_exists(raw, notif[1])
views._process_exists(raw, notification)
self.mox.VerifyAll()
def test_process_exists_no_launched_at(self):
current_time = datetime.datetime.utcnow()
current_decimal = utils.decimal_utc(current_time)
audit_beginning = current_time - datetime.timedelta(hours=20)
notif = utils.create_nova_notif(audit_period_beginning=str(audit_beginning),
audit_period_ending=str(current_time),
tenant_id=TENANT_ID_1)
json_str = json.dumps(notif)
event = 'compute.instance.exists'
raw = utils.create_raw(self.mox, current_decimal, event=event,
json_str=json_str)
raw.id = 1
notification = self.mox.CreateMockAnything()
notification.instance = INSTANCE_ID_1
notification.launched_at = None
raw = self.mox.CreateMockAnything()
raw.id = '1'
self.setup_mock_log()
self.log.warn('Ignoring exists without launched_at. RawData(1)')
self.mox.ReplayAll()
views._process_exists(raw, notif[1])
views._process_exists(raw, notification)
self.mox.VerifyAll()
def test_process_exists_with_deleted_at(self):
notification = self.mox.CreateMockAnything()
current_time = datetime.datetime.utcnow()
launch_time = current_time - datetime.timedelta(hours=23)
launch_decimal = utils.decimal_utc(launch_time)
deleted_time = current_time - datetime.timedelta(hours=12)
deleted_decimal = utils.decimal_utc(deleted_time)
current_decimal = utils.decimal_utc(current_time)
delete_time = datetime.datetime.utcnow()
deleted_decimal = utils.decimal_utc(delete_time)
audit_beginning = current_time - datetime.timedelta(hours=20)
audit_beginning_decimal = utils.decimal_utc(audit_beginning)
audit_ending_decimal = utils.decimal_utc(current_time)
notif = utils.create_nova_notif(launched=str(launch_time),
deleted=str(deleted_time),
audit_period_beginning=str(audit_beginning),
audit_period_ending=str(current_time),
tenant_id=TENANT_ID_1,
os_architecture=OS_ARCH_1,
os_version=OS_VERSION_1,
os_distro=OS_DISTRO_1,
rax_options=RAX_OPTIONS_1)
json_str = json.dumps(notif)
event = 'compute.instance.exists'
raw = utils.create_raw(self.mox, current_decimal, event=event,
json_str=json_str)
notification.launched_at = str(launch_time)
notification.audit_period_beginning = str(audit_beginning)
notification.audit_period_ending = str(current_time)
notification.tenant = TENANT_ID_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.rax_options = RAX_OPTIONS_1
notification.instance = INSTANCE_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification.message_id = MESSAGE_ID_1
notification.deleted_at = str(delete_time)
raw = self.mox.CreateMockAnything()
usage = self.mox.CreateMockAnything()
launched_range = (launch_decimal, launch_decimal+1)
views.STACKDB.get_instance_usage(instance=INSTANCE_ID_1,
@ -701,7 +812,7 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
'deleted_at': deleted_decimal,
'audit_period_beginning': audit_beginning_decimal,
'audit_period_ending': audit_ending_decimal,
'instance_type_id': '1',
'instance_type_id': INSTANCE_TYPE_ID_1,
'usage': usage,
'delete': delete,
'raw': raw,
@ -715,6 +826,41 @@ class StacktachUsageParsingTestCase(unittest.TestCase):
views.STACKDB.create_instance_exists(**exists_values).AndReturn(exists)
views.STACKDB.save(exists)
self.mox.ReplayAll()
views._process_exists(raw, notif[1])
views._process_exists(raw, notification)
self.mox.VerifyAll()
class StacktachImageUsageParsingTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
views.STACKDB = self.mox.CreateMockAnything()
def tearDown(self):
self.mox.UnsetStubs()
def test_save_image_usage(self):
raw = self.mox.CreateMockAnything()
notification = self.mox.CreateMockAnything()
notification.save_usage(raw)
self.mox.ReplayAll()
views._process_glance_usage(raw, notification)
self.mox.VerifyAll()
def test_save_image_delete(self):
raw = self.mox.CreateMockAnything()
notification = self.mox.CreateMockAnything()
notification.save_delete(raw)
self.mox.ReplayAll()
views._process_glance_delete(raw, notification)
self.mox.VerifyAll()
def test_save_image_exists(self):
raw = self.mox.CreateMockAnything()
notification = self.mox.CreateMockAnything()
notification.save_exists(raw)
self.mox.ReplayAll()
views._process_glance_exists(raw, notification)
self.mox.VerifyAll()

View File

@ -18,17 +18,15 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import datetime
import unittest
import mox
from stacktach import db
from stacktach import stacklog
from stacktach import models
from tests.unit import StacktachBaseTestCase
class StacktachDBTestCase(unittest.TestCase):
class StacktachDBTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
self.log = self.mox.CreateMockAnything()

View File

@ -18,17 +18,16 @@
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import unittest
import mox
from stacktach import utils as stacktach_utils
from utils import INSTANCE_ID_1
from utils import MESSAGE_ID_1
from utils import REQUEST_ID_1
from tests.unit import StacktachBaseTestCase
class StacktachUtilsTestCase(unittest.TestCase):
class StacktachUtilsTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()

View File

@ -21,7 +21,6 @@
import datetime
import decimal
import json
import unittest
import mox
@ -33,13 +32,21 @@ from utils import INSTANCE_ID_1
from utils import INSTANCE_ID_2
from utils import REQUEST_ID_1
from tests.unit import StacktachBaseTestCase
class StackyServerTestCase(unittest.TestCase):
class StackyServerTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
self.mox.StubOutWithMock(models, 'RawData', use_mock_anything=True)
models.RawData.objects = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(models, 'Deployment', use_mock_anything=True)
self.mox.StubOutWithMock(models, 'GlanceRawData',
use_mock_anything=True)
models.GlanceRawData.objects = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(models, 'GenericRawData',
use_mock_anything=True)
models.GenericRawData.objects = self.mox.CreateMockAnything()
models.Deployment.objects = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(models, 'Lifecycle', use_mock_anything=True)
models.Lifecycle.objects = self.mox.CreateMockAnything()
@ -65,7 +72,8 @@ class StackyServerTestCase(unittest.TestCase):
def _create_raw(self):
raw = self.mox.CreateMockAnything()
raw.when = utils.decimal_utc()
raw.when = utils.decimal_utc(datetime.datetime(2013, 7, 17, 10, 16,
10, 717219))
raw.instance = INSTANCE_ID_1
raw.id = 1
raw.routing_key = 'monitor.info'
@ -80,13 +88,19 @@ class StackyServerTestCase(unittest.TestCase):
raw.publisher = "api.example.com"
raw.service = 'api'
raw.host = 'example.com'
raw.status = 'state'
raw.request_id = REQUEST_ID_1
raw.json = '{"key": "value"}'
raw.uuid = 'uuid'
raw.tenant = 'tenant'
return raw
def test_get_event_names(self):
model = self.mox.CreateMockAnything()
result = self.mox.CreateMockAnything()
models.RawData.objects.values('event').AndReturn(result)
self.mox.StubOutWithMock(stacky_server, '_model_factory')
stacky_server._model_factory('nova').AndReturn(model)
model.values('event').AndReturn(result)
result.distinct().AndReturn(result)
self.mox.ReplayAll()
@ -118,10 +132,13 @@ class StackyServerTestCase(unittest.TestCase):
self.mox.VerifyAll()
def test_get_timings_for_uuid_start_only(self):
fake_request = self.mox.CreateMockAnything()
fake_request.GET = {}
lc_result = self.mox.CreateMockAnything()
lifecycle = self.mox.CreateMockAnything()
models.Lifecycle.objects.filter(instance=INSTANCE_ID_1)\
.AndReturn(lc_result)
lc_result[None:50].AndReturn(lc_result)
lc_result.__iter__().AndReturn([lifecycle].__iter__())
t_result = self.mox.CreateMockAnything()
timing = self.mox.CreateMockAnything()
@ -133,7 +150,8 @@ class StackyServerTestCase(unittest.TestCase):
timing.diff = None
self.mox.ReplayAll()
event_names = stacky_server.get_timings_for_uuid(INSTANCE_ID_1)
event_names = stacky_server.get_timings_for_uuid(fake_request,
INSTANCE_ID_1)
self.assertEqual(len(event_names), 2)
self.assertEqual(event_names[0], ['?', 'Event', 'Time (secs)'])
@ -141,10 +159,13 @@ class StackyServerTestCase(unittest.TestCase):
self.mox.VerifyAll()
def test_get_timings_for_uuid_end_only(self):
fake_request = self.mox.CreateMockAnything()
fake_request.GET = {}
lc_result = self.mox.CreateMockAnything()
lifecycle = self.mox.CreateMockAnything()
models.Lifecycle.objects.filter(instance=INSTANCE_ID_1) \
.AndReturn(lc_result)
lc_result[None:50].AndReturn(lc_result)
lc_result.__iter__().AndReturn([lifecycle].__iter__())
t_result = self.mox.CreateMockAnything()
timing = self.mox.CreateMockAnything()
@ -156,7 +177,8 @@ class StackyServerTestCase(unittest.TestCase):
timing.diff = None
self.mox.ReplayAll()
event_names = stacky_server.get_timings_for_uuid(INSTANCE_ID_1)
event_names = stacky_server.get_timings_for_uuid(fake_request,
INSTANCE_ID_1)
self.assertEqual(len(event_names), 2)
self.assertEqual(event_names[0], ['?', 'Event', 'Time (secs)'])
@ -164,10 +186,13 @@ class StackyServerTestCase(unittest.TestCase):
self.mox.VerifyAll()
def test_get_timings_for_uuid(self):
fake_request = self.mox.CreateMockAnything()
fake_request.GET = {}
lc_result = self.mox.CreateMockAnything()
lifecycle = self.mox.CreateMockAnything()
models.Lifecycle.objects.filter(instance=INSTANCE_ID_1) \
.AndReturn(lc_result)
lc_result[None:50].AndReturn(lc_result)
lc_result.__iter__().AndReturn([lifecycle].__iter__())
t_result = self.mox.CreateMockAnything()
timing = self.mox.CreateMockAnything()
@ -178,7 +203,8 @@ class StackyServerTestCase(unittest.TestCase):
timing.end_raw = self.mox.CreateMockAnything()
timing.diff = 20
self.mox.ReplayAll()
event_names = stacky_server.get_timings_for_uuid(INSTANCE_ID_1)
event_names = stacky_server.get_timings_for_uuid(fake_request,
INSTANCE_ID_1)
self.assertEqual(len(event_names), 2)
self.assertEqual(event_names[0], ['?', 'Event', 'Time (secs)'])
@ -248,6 +274,10 @@ class StackyServerTestCase(unittest.TestCase):
self.mox.VerifyAll()
def test_do_uuid(self):
search_result = [["#", "?", "When", "Deployment", "Event", "Host",
"State", "State'", "Task'"], [1, " ",
"2013-07-17 10:16:10.717219", "deployment",
"test.start", "example.com", "active", None, None]]
fake_request = self.mox.CreateMockAnything()
fake_request.GET = {'uuid': INSTANCE_ID_1}
result = self.mox.CreateMockAnything()
@ -255,7 +285,9 @@ class StackyServerTestCase(unittest.TestCase):
result.filter(instance=INSTANCE_ID_1).AndReturn(result)
result.order_by('when').AndReturn(result)
raw = self._create_raw()
result[None:50].AndReturn(result)
result.__iter__().AndReturn([raw].__iter__())
raw.search_results([], mox.IgnoreArg(), ' ').AndReturn(search_result)
self.mox.ReplayAll()
resp = stacky_server.do_uuid(fake_request)
@ -272,6 +304,108 @@ class StackyServerTestCase(unittest.TestCase):
self.assertEqual(json_resp[1], body)
self.mox.VerifyAll()
def test_do_uuid_when_filters(self):
search_result = [["#", "?", "When", "Deployment", "Event", "Host",
"State", "State'", "Task'"], [1, " ",
"2013-07-17 10:16:10.717219", "deployment",
"test.start", "example.com", "active", None, None]]
fake_request = self.mox.CreateMockAnything()
fake_request.GET = {'uuid': INSTANCE_ID_1,
'when_min': '1.1',
'when_max': '2.1'}
result = self.mox.CreateMockAnything()
models.RawData.objects.select_related().AndReturn(result)
result.filter(instance=INSTANCE_ID_1,
when__gte=decimal.Decimal('1.1'),
when__lte=decimal.Decimal('2.1')).AndReturn(result)
result.order_by('when').AndReturn(result)
raw = self._create_raw()
result[None:50].AndReturn(result)
result.__iter__().AndReturn([raw].__iter__())
raw.search_results([], mox.IgnoreArg(), ' ').AndReturn(search_result)
self.mox.ReplayAll()
resp = stacky_server.do_uuid(fake_request)
self.assertEqual(resp.status_code, 200)
json_resp = json.loads(resp.content)
self.assertEqual(len(json_resp), 2)
header = ["#", "?", "When", "Deployment", "Event", "Host",
"State", "State'", "Task'"]
self.assertEqual(json_resp[0], header)
datetime = dt.dt_from_decimal(raw.when)
body = [1, " ", str(datetime), "deployment", "test.start",
"example.com", "active", None, None]
self.assertEqual(json_resp[1], body)
self.mox.VerifyAll()
def test_do_uuid_for_glance(self):
    """?service=glance should route do_uuid to GlanceRawData and filter
    on its `uuid` field (nova uses `instance`)."""
    search_result = [["#", "?", "When", "Deployment", "Event", "Host",
                      "Status"], [1, " ",
                      "2013-07-17 10:16:10.717219", "deployment",
                      "test.start", "example.com", "state"]]
    fake_request = self.mox.CreateMockAnything()
    fake_request.GET = {'uuid': INSTANCE_ID_1, 'service': 'glance'}
    result = self.mox.CreateMockAnything()
    models.GlanceRawData.objects.select_related().AndReturn(result)
    result.filter(uuid=INSTANCE_ID_1).AndReturn(result)
    result.order_by('when').AndReturn(result)
    raw = self._create_raw()
    # Default paging applied by model_search: slice [None:50].
    result[None:50].AndReturn(result)
    result.__iter__().AndReturn([raw].__iter__())
    raw.search_results([], mox.IgnoreArg(), ' ').AndReturn(search_result)
    self.mox.ReplayAll()
    resp = stacky_server.do_uuid(fake_request)
    self.assertEqual(resp.status_code, 200)
    json_resp = json.loads(resp.content)
    self.assertEqual(len(json_resp), 2)
    header = ["#", "?", "When", "Deployment", "Event", "Host",
              "Status"]
    self.assertEqual(json_resp[0], header)
    datetime = dt.dt_from_decimal(raw.when)
    body = [1, " ", str(datetime), "deployment", "test.start",
            "example.com", "state"]
    self.assertEqual(json_resp[1], body)
    self.mox.VerifyAll()
def test_do_uuid_for_glance_when_filters(self):
    """Glance variant of the when-filter test: uuid plus decimal
    when__gte/when__lte filters on GlanceRawData."""
    search_result = [["#", "?", "When", "Deployment", "Event", "Host",
                      "Status"], [1, " ",
                      "2013-07-17 10:16:10.717219", "deployment",
                      "test.start", "example.com", "state"]]
    fake_request = self.mox.CreateMockAnything()
    fake_request.GET = {'uuid': INSTANCE_ID_1,
                        'when_min': '1.1',
                        'when_max': '2.1',
                        'service': 'glance'}
    result = self.mox.CreateMockAnything()
    models.GlanceRawData.objects.select_related().AndReturn(result)
    # String GET params must be converted to Decimal before the query.
    result.filter(uuid=INSTANCE_ID_1,
                  when__gte=decimal.Decimal('1.1'),
                  when__lte=decimal.Decimal('2.1')).AndReturn(result)
    result.order_by('when').AndReturn(result)
    raw = self._create_raw()
    # Default paging applied by model_search: slice [None:50].
    result[None:50].AndReturn(result)
    result.__iter__().AndReturn([raw].__iter__())
    raw.search_results([], mox.IgnoreArg(), ' ').AndReturn(search_result)
    self.mox.ReplayAll()
    resp = stacky_server.do_uuid(fake_request)
    self.assertEqual(resp.status_code, 200)
    json_resp = json.loads(resp.content)
    self.assertEqual(len(json_resp), 2)
    header = ["#", "?", "When", "Deployment", "Event", "Host",
              "Status"]
    self.assertEqual(json_resp[0], header)
    datetime = dt.dt_from_decimal(raw.when)
    body = [1, " ", str(datetime), "deployment", "test.start",
            "example.com", "state"]
    self.assertEqual(json_resp[1], body)
    self.mox.VerifyAll()
def test_do_uuid_bad_uuid(self):
fake_request = self.mox.CreateMockAnything()
fake_request.GET = {'uuid': "obviouslybaduuid"}
@ -318,6 +452,7 @@ class StackyServerTestCase(unittest.TestCase):
timing2.lifecycle = self.mox.CreateMockAnything()
timing2.lifecycle.instance = INSTANCE_ID_2
timing2.diff = 20
results[None:50].AndReturn(results)
results.__iter__().AndReturn([timing1, timing2].__iter__())
self.mox.ReplayAll()
@ -337,9 +472,9 @@ class StackyServerTestCase(unittest.TestCase):
fake_request.GET = {'name': 'test.event', 'end_when_min': '1.1'}
results = self.mox.CreateMockAnything()
models.Timing.objects.select_related().AndReturn(results)
results.filter(name='test.event').AndReturn(results)
results.filter(name='test.event',
end_when__gte=decimal.Decimal('1.1')).AndReturn(results)
results.exclude(mox.IgnoreArg()).AndReturn(results)
results.filter(end_when__gte=decimal.Decimal('1.1')).AndReturn(results)
results.order_by('diff').AndReturn(results)
timing1 = self.mox.CreateMockAnything()
timing1.lifecycle = self.mox.CreateMockAnything()
@ -349,6 +484,7 @@ class StackyServerTestCase(unittest.TestCase):
timing2.lifecycle = self.mox.CreateMockAnything()
timing2.lifecycle.instance = INSTANCE_ID_2
timing2.diff = 20
results[None:50].AndReturn(results)
results.__iter__().AndReturn([timing1, timing2].__iter__())
self.mox.ReplayAll()
@ -368,9 +504,9 @@ class StackyServerTestCase(unittest.TestCase):
fake_request.GET = {'name': 'test.event', 'end_when_max': '1.1'}
results = self.mox.CreateMockAnything()
models.Timing.objects.select_related().AndReturn(results)
results.filter(name='test.event').AndReturn(results)
results.filter(name='test.event',
end_when__lte=decimal.Decimal('1.1')).AndReturn(results)
results.exclude(mox.IgnoreArg()).AndReturn(results)
results.filter(end_when__lte=decimal.Decimal('1.1')).AndReturn(results)
results.order_by('diff').AndReturn(results)
timing1 = self.mox.CreateMockAnything()
timing1.lifecycle = self.mox.CreateMockAnything()
@ -380,6 +516,7 @@ class StackyServerTestCase(unittest.TestCase):
timing2.lifecycle = self.mox.CreateMockAnything()
timing2.lifecycle.instance = INSTANCE_ID_2
timing2.diff = 20
results[None:50].AndReturn(results)
results.__iter__().AndReturn([timing1, timing2].__iter__())
self.mox.ReplayAll()
@ -401,10 +538,10 @@ class StackyServerTestCase(unittest.TestCase):
'end_when_max': '2.1'}
results = self.mox.CreateMockAnything()
models.Timing.objects.select_related().AndReturn(results)
results.filter(name='test.event').AndReturn(results)
results.filter(name='test.event',
end_when__gte=decimal.Decimal('1.1'),
end_when__lte=decimal.Decimal('2.1')).AndReturn(results)
results.exclude(mox.IgnoreArg()).AndReturn(results)
results.filter(end_when__gte=decimal.Decimal('1.1')).AndReturn(results)
results.filter(end_when__lte=decimal.Decimal('2.1')).AndReturn(results)
results.order_by('diff').AndReturn(results)
timing1 = self.mox.CreateMockAnything()
timing1.lifecycle = self.mox.CreateMockAnything()
@ -414,6 +551,7 @@ class StackyServerTestCase(unittest.TestCase):
timing2.lifecycle = self.mox.CreateMockAnything()
timing2.lifecycle.instance = INSTANCE_ID_2
timing2.diff = 20
results[None:50].AndReturn(results)
results.__iter__().AndReturn([timing1, timing2].__iter__())
self.mox.ReplayAll()
@ -446,6 +584,7 @@ class StackyServerTestCase(unittest.TestCase):
timing2.lifecycle = self.mox.CreateMockAnything()
timing2.lifecycle.instance = INSTANCE_ID_2
timing2.diff = 20
results[None:50].AndReturn(results)
results.__len__().AndReturn(2)
results.__iter__().AndReturn([timing1, timing2].__iter__())
self.mox.ReplayAll()
@ -467,6 +606,43 @@ class StackyServerTestCase(unittest.TestCase):
results = self.mox.CreateMockAnything()
models.RawData.objects.filter(request_id=REQUEST_ID_1).AndReturn(results)
results.order_by('when').AndReturn(results)
results[None:50].AndReturn(results)
results.__iter__().AndReturn([raw].__iter__())
self.mox.ReplayAll()
resp = stacky_server.do_request(fake_request)
self.assertEqual(resp.status_code, 200)
json_resp = json.loads(resp.content)
self.assertEqual(len(json_resp), 2)
self.assertEqual(json_resp[0], ["#", "?", "When", "Deployment",
"Event", "Host", "State", "State'",
"Task'"])
self.assertEqual(json_resp[1][0], 1)
self.assertEqual(json_resp[1][1], u' ')
self.assertEqual(json_resp[1][2], str(dt.dt_from_decimal(raw.when)))
self.assertEqual(json_resp[1][3], u'deployment')
self.assertEqual(json_resp[1][4], u'test.start')
self.assertEqual(json_resp[1][5], u'example.com')
self.assertEqual(json_resp[1][6], u'active')
self.assertEqual(json_resp[1][7], None)
self.assertEqual(json_resp[1][8], None)
self.mox.VerifyAll()
def test_do_request_when_filters(self):
fake_request = self.mox.CreateMockAnything()
fake_request.GET = {'request_id': REQUEST_ID_1,
'when_min': '1.1',
'when_max': '2.1'}
raw = self._create_raw()
results = self.mox.CreateMockAnything()
when_min = decimal.Decimal('1.1')
when_max = decimal.Decimal('2.1')
models.RawData.objects.filter(request_id=REQUEST_ID_1,
when__gte=when_min,
when__lte=when_max).AndReturn(results)
results.order_by('when').AndReturn(results)
results[None:50].AndReturn(results)
results.__iter__().AndReturn([raw].__iter__())
self.mox.ReplayAll()
@ -504,7 +680,9 @@ class StackyServerTestCase(unittest.TestCase):
self.assertEqual(resp_json[1], ['Bad Request', msg])
self.mox.VerifyAll()
def _assert_on_show(self, values, raw):
def _assert_on_show_nova(self, json_resp, raw):
self.assertEqual(len(json_resp), 3)
values = json_resp[0]
self.assertEqual(len(values), 12)
self.assertEqual(values[0], ["Key", "Value"])
self.assertEqual(values[1], ["#", raw.id])
@ -517,21 +695,115 @@ class StackyServerTestCase(unittest.TestCase):
self.assertEqual(values[7], ["Event", raw.event])
self.assertEqual(values[8], ["Service", raw.service])
self.assertEqual(values[9], ["Host", raw.host])
self.assertEqual(values[10], ["UUID", raw.instance])
self.assertEqual(values[10],["UUID", raw.instance])
self.assertEqual(values[11], ["Req ID", raw.request_id])
def _assert_on_show_glance(self, json_resp, raw):
    """Verify the key/value rows do_show renders for a Glance raw record.

    ``json_resp[0]`` is expected to be a 12-row list of [label, value]
    pairs drawn from *raw*.
    """
    self.assertEqual(len(json_resp), 3)
    rows = json_resp[0]
    expected = [
        ["Key", "Value"],
        ["#", raw.id],
        ["When", str(dt.dt_from_decimal(raw.when))],
        ["Deployment", raw.deployment.name],
        ["Category", raw.routing_key],
        ["Publisher", raw.publisher],
        ["Status", raw.status],
        ["Event", raw.event],
        ["Service", raw.service],
        ["Host", raw.host],
        ["UUID", raw.uuid],
        ["Req ID", raw.request_id],
    ]
    self.assertEqual(len(rows), len(expected))
    for row, want in zip(rows, expected):
        self.assertEqual(row, want)
def test_do_show(self):
    """With no ?service= param, do_show defaults to the nova RawData model."""
    fake_request = self.mox.CreateMockAnything()
    fake_request.GET = {}
    raw = self._create_raw()
    models.RawData.objects.get(id=1).AndReturn(raw)
    self.mox.ReplayAll()
    resp = stacky_server.do_show(fake_request, 1)
    self.assertEqual(resp.status_code, 200)
    json_resp = json.loads(resp.content)
    self._assert_on_show_nova(json_resp, raw)
    self.mox.VerifyAll()
def test_do_show_for_glance_rawdata(self):
    """?service=glance should make do_show read from GlanceRawData."""
    fake_request = self.mox.CreateMockAnything()
    fake_request.GET = {'service':'glance'}
    raw = self._create_raw()
    models.GlanceRawData.objects.get(id=1).AndReturn(raw)
    self.mox.ReplayAll()
    resp = stacky_server.do_show(fake_request, 1)
    self.assertEqual(resp.status_code, 200)
    json_resp = json.loads(resp.content)
    self._assert_on_show_glance(json_resp, raw)
    self.mox.VerifyAll()
def test_do_show_for_generic_rawdata(self):
    """?service=generic should make do_show read from GenericRawData.

    The generic record renders with the nova-style key/value layout.
    """
    fake_request = self.mox.CreateMockAnything()
    fake_request.GET = {'service':'generic'}
    raw = self._create_raw()
    models.GenericRawData.objects.get(id=1).AndReturn(raw)
    self.mox.ReplayAll()
    resp = stacky_server.do_show(fake_request, 1)
    self.assertEqual(resp.status_code, 200)
    json_resp = json.loads(resp.content)
    self._assert_on_show_nova(json_resp, raw)
    self.mox.VerifyAll()
def test_do_show_should_return_empty_result_on_object_not_found_exception(self):
    """NOTE(review): despite its name, this test never simulates a
    not-found condition -- .get(id=1) is mocked to SUCCEED, making this a
    duplicate of test_do_show. It should presumably AndRaise() the
    object-not-found exception and assert an empty response; confirm the
    intended do_show failure behavior before changing it.
    """
    fake_request = self.mox.CreateMockAnything()
    fake_request.GET = {}
    raw = self._create_raw()
    models.RawData.objects.get(id=1).AndReturn(raw)
    self.mox.ReplayAll()
    resp = stacky_server.do_show(fake_request, 1)
    self.assertEqual(resp.status_code, 200)
    json_resp = json.loads(resp.content)
    self._assert_on_show_nova(json_resp, raw)
    self.mox.VerifyAll()
def test_do_watch_for_glance(self):
fake_request = self.mox.CreateMockAnything()
fake_request.GET = {'service': 'glance'}
self.mox.StubOutWithMock(stacky_server, 'get_deployments')
deployment1 = self.mox.CreateMockAnything()
deployment1.id = 1
deployment1.name = 'dep1'
deployments = [deployment1]
stacky_server.get_deployments().AndReturn(deployments)
self.mox.StubOutWithMock(stacky_server, 'get_event_names')
events = [{'event': 'test.start'}, {'event': 'test.end'}]
stacky_server.get_event_names().AndReturn(events)
results = self.mox.CreateMockAnything()
models.GlanceRawData.objects.order_by('when').AndReturn(results)
results.filter(when__gt=mox.IgnoreArg()).AndReturn(results)
results.filter(when__lte=mox.IgnoreArg()).AndReturn(results)
results.__iter__().AndReturn([self._create_raw()].__iter__())
self.mox.ReplayAll()
resp = stacky_server.do_watch(fake_request, 0)
self.assertEqual(resp.status_code, 200)
json_resp = json.loads(resp.content)
self.assertEqual(len(json_resp), 3)
self._assert_on_show(json_resp[0], raw)
self.assertEqual(json_resp[0], [10, 1, 15, 20, 10, 36])
self.assertEqual(json_resp[1][0][0], 1)
self.assertEqual(json_resp[1][0][1], u' ')
time_str = "%s %s" % (json_resp[1][0][2], json_resp[1][0][3])
datetime.datetime.strptime(time_str, "%Y-%m-%d %H:%M:%S.%f")
self.assertEqual(json_resp[1][0][4], u'dep1')
self.assertEqual(json_resp[1][0][5], u'test.start')
self.assertEqual(json_resp[1][0][6], u'%s' % 'uuid')
self.mox.VerifyAll()
def test_do_watch(self):
@ -547,7 +819,10 @@ class StackyServerTestCase(unittest.TestCase):
events = [{'event': 'test.start'}, {'event': 'test.end'}]
stacky_server.get_event_names().AndReturn(events)
results = self.mox.CreateMockAnything()
models.RawData.objects.order_by('when').AndReturn(results)
model = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(stacky_server, '_model_factory')
stacky_server._model_factory('nova').AndReturn(model)
model.order_by('when').AndReturn(results)
results.filter(when__gt=mox.IgnoreArg()).AndReturn(results)
results.filter(when__lte=mox.IgnoreArg()).AndReturn(results)
results.__iter__().AndReturn([self._create_raw()].__iter__())
@ -558,14 +833,13 @@ class StackyServerTestCase(unittest.TestCase):
json_resp = json.loads(resp.content)
self.assertEqual(len(json_resp), 3)
self.assertEqual(json_resp[0], [10, 1, 15, 20, 10, 36])
print json_resp
self.assertEqual(json_resp[1][0][0], 1)
self.assertEqual(json_resp[1][0][1], u' ')
time_str = "%s %s" % (json_resp[1][0][2], json_resp[1][0][3])
datetime.datetime.strptime(time_str, "%Y-%m-%d %H:%M:%S.%f")
self.assertEqual(json_resp[1][0][4], u'dep1')
self.assertEqual(json_resp[1][0][5], u'test.start')
self.assertEqual(json_resp[1][0][6], u'%s' % INSTANCE_ID_1)
self.assertEqual(json_resp[1][0][6], u'%s' % 'uuid')
self.mox.VerifyAll()
def test_do_watch_with_deployment(self):
@ -581,7 +855,11 @@ class StackyServerTestCase(unittest.TestCase):
events = [{'event': 'test.start'}, {'event': 'test.end'}]
stacky_server.get_event_names().AndReturn(events)
results = self.mox.CreateMockAnything()
models.RawData.objects.order_by('when').AndReturn(results)
model = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(stacky_server, '_model_factory')
stacky_server._model_factory('nova').AndReturn(model)
model.order_by('when').AndReturn(results)
results.filter(deployment=1).AndReturn(results)
results.filter(when__gt=mox.IgnoreArg()).AndReturn(results)
results.filter(when__lte=mox.IgnoreArg()).AndReturn(results)
@ -593,19 +871,18 @@ class StackyServerTestCase(unittest.TestCase):
json_resp = json.loads(resp.content)
self.assertEqual(len(json_resp), 3)
self.assertEqual(json_resp[0], [10, 1, 15, 20, 10, 36])
print json_resp
self.assertEqual(json_resp[1][0][0], 1)
self.assertEqual(json_resp[1][0][1], u' ')
time_str = "%s %s" % (json_resp[1][0][2], json_resp[1][0][3])
datetime.datetime.strptime(time_str, "%Y-%m-%d %H:%M:%S.%f")
self.assertEqual(json_resp[1][0][4], u'dep1')
self.assertEqual(json_resp[1][0][5], u'test.start')
self.assertEqual(json_resp[1][0][6], u'%s' % INSTANCE_ID_1)
self.assertEqual(json_resp[1][0][6], u'%s' % 'uuid')
self.mox.VerifyAll()
def test_do_watch_with_event_name(self):
fake_request = self.mox.CreateMockAnything()
fake_request.GET = {'event_name': 'test.start'}
fake_request.GET = {'event_name': 'test.start','service': 'nova'}
self.mox.StubOutWithMock(stacky_server, 'get_deployments')
deployment1 = self.mox.CreateMockAnything()
deployment1.id = 1
@ -628,14 +905,13 @@ class StackyServerTestCase(unittest.TestCase):
json_resp = json.loads(resp.content)
self.assertEqual(len(json_resp), 3)
self.assertEqual(json_resp[0], [10, 1, 15, 20, 10, 36])
print json_resp
self.assertEqual(json_resp[1][0][0], 1)
self.assertEqual(json_resp[1][0][1], u' ')
time_str = "%s %s" % (json_resp[1][0][2], json_resp[1][0][3])
datetime.datetime.strptime(time_str, "%Y-%m-%d %H:%M:%S.%f")
self.assertEqual(json_resp[1][0][4], u'dep1')
self.assertEqual(json_resp[1][0][5], u'test.start')
self.assertEqual(json_resp[1][0][6], u'%s' % INSTANCE_ID_1)
self.assertEqual(json_resp[1][0][6], u'%s' % 'uuid')
self.mox.VerifyAll()
def test_do_kpi(self):
@ -763,6 +1039,7 @@ class StackyServerTestCase(unittest.TestCase):
usage.instance = INSTANCE_ID_1
usage.launched_at = utils.decimal_utc()
usage.instance_type_id = 1
results[None:50].AndReturn(results)
results.__iter__().AndReturn([usage].__iter__())
self.mox.ReplayAll()
@ -789,6 +1066,7 @@ class StackyServerTestCase(unittest.TestCase):
usage.instance = INSTANCE_ID_1
usage.launched_at = utils.decimal_utc()
usage.instance_type_id = 1
results[None:50].AndReturn(results)
results.__iter__().AndReturn([usage].__iter__())
self.mox.ReplayAll()
@ -829,6 +1107,7 @@ class StackyServerTestCase(unittest.TestCase):
usage.instance = INSTANCE_ID_1
usage.launched_at = utils.decimal_utc()
usage.deleted_at = usage.launched_at + 10
results[None:50].AndReturn(results)
results.__iter__().AndReturn([usage].__iter__())
self.mox.ReplayAll()
@ -855,6 +1134,7 @@ class StackyServerTestCase(unittest.TestCase):
usage.instance = INSTANCE_ID_1
usage.launched_at = utils.decimal_utc()
usage.deleted_at = usage.launched_at + 10
results[None:50].AndReturn(results)
results.__iter__().AndReturn([usage].__iter__())
self.mox.ReplayAll()
@ -898,6 +1178,7 @@ class StackyServerTestCase(unittest.TestCase):
usage.instance_type_id = 1
usage.message_id = 'someid'
usage.status = 'pending'
results[None:50].AndReturn(results)
results.__iter__().AndReturn([usage].__iter__())
self.mox.ReplayAll()
@ -928,6 +1209,7 @@ class StackyServerTestCase(unittest.TestCase):
usage.instance_type_id = 1
usage.message_id = 'someid'
usage.status = 'pending'
results[None:50].AndReturn(results)
results.__iter__().AndReturn([usage].__iter__())
self.mox.ReplayAll()
@ -960,3 +1242,193 @@ class StackyServerTestCase(unittest.TestCase):
self.assertEqual(resp_json[1], ['Bad Request', msg])
self.mox.VerifyAll()
def test_model_factory_for_nova(self):
    """_model_factory('nova') should wrap the nova RawData model."""
    self.mox.UnsetStubs()
    nova_model = stacky_server._model_factory('nova')
    self.assertEqual(nova_model.model, models.RawData)

# Bug fix: all three of these tests were named test_model_factory_for_nova,
# so the later definitions shadowed the earlier ones and only the 'generic'
# case ever ran. Each service case now has its own uniquely named test.
def test_model_factory_for_glance(self):
    """_model_factory('glance') should wrap the GlanceRawData model."""
    self.mox.UnsetStubs()
    glance_model = stacky_server._model_factory('glance')
    self.assertEqual(glance_model.model, models.GlanceRawData)

def test_model_factory_for_generic(self):
    """_model_factory('generic') should wrap the GenericRawData model."""
    self.mox.UnsetStubs()
    generic_model = stacky_server._model_factory('generic')
    self.assertEqual(generic_model.model, models.GenericRawData)
def _assert_on_search_nova(self, json_resp, raw):
    """Verify header/row pairs of a nova search response against *raw*.

    Checks columns 0-7; column 8 ("Task'") is deliberately left
    unchecked, matching the original assertions.
    """
    header = json_resp[0]
    row = json_resp[1]
    self.assertEqual(len(row), 9)
    expected = [
        ("#", raw.id),
        ("?", " "),
        ("When", str(dt.dt_from_decimal(raw.when))),
        ("Deployment", raw.deployment.name),
        ("Event", raw.event),
        ("Host", raw.host),
        ("State", raw.state),
        ("State'", raw.old_state),
    ]
    for idx, (label, value) in enumerate(expected):
        self.assertEqual([header[idx], row[idx]], [label, value])
def test_search_by_field_for_nova(self):
    """search should filter RawData by the field/value GET params and
    render each match via its search_results() helper."""
    search_result = [["#", "?", "When", "Deployment", "Event", "Host",
                      "State", "State'", "Task'"], [1, " ",
                      "2013-07-17 10:16:10.717219", "deployment",
                      "test.start", "example.com", "active", None, None]]
    fake_request = self.mox.CreateMockAnything()
    fake_request.GET = {'field': 'tenant', 'value': 'tenant'}
    raw = self._create_raw()
    # 'field'/'value' become a single keyword filter: tenant='tenant'.
    models.RawData.objects.filter(tenant='tenant').AndReturn([raw])
    raw.search_results([], mox.IgnoreArg(), ' ').AndReturn(search_result)
    self.mox.ReplayAll()
    resp = stacky_server.search(fake_request)
    self.assertEqual(resp.status_code, 200)
    json_resp = json.loads(resp.content)
    self._assert_on_search_nova(json_resp, raw)
    self.mox.VerifyAll()
def test_search_by_field_for_nova_when_filters(self):
    """search should combine the field filter with decimal
    when__gte/when__lte bounds from when_min/when_max."""
    search_result = [["#", "?", "When", "Deployment", "Event", "Host",
                      "State", "State'", "Task'"], [1, " ",
                      "2013-07-17 10:16:10.717219", "deployment",
                      "test.start", "example.com", "active", None, None]]
    fake_request = self.mox.CreateMockAnything()
    fake_request.GET = {'field': 'tenant', 'value': 'tenant',
                        'when_min': '1.1',
                        'when_max': '2.1'}
    raw = self._create_raw()
    models.RawData.objects.filter(tenant='tenant',
                                  when__gte=decimal.Decimal('1.1'),
                                  when__lte=decimal.Decimal('2.1')).AndReturn([raw])
    raw.search_results([], mox.IgnoreArg(), ' ').AndReturn(search_result)
    self.mox.ReplayAll()
    resp = stacky_server.search(fake_request)
    self.assertEqual(resp.status_code, 200)
    json_resp = json.loads(resp.content)
    self._assert_on_search_nova(json_resp, raw)
    self.mox.VerifyAll()
def test_search_by_field_for_nova_with_limit(self):
    """With limit=2, search should render only the first two of three
    matching rows (raw3's search_results is never expected/called)."""
    search_result = [["#", "?", "When", "Deployment", "Event", "Host",
                      "State", "State'", "Task'"], [1, " ",
                      "2013-07-17 10:16:10.717219", "deployment",
                      "test.start", "example.com", "active", None, None]]
    # Accumulated output after the second row is appended.
    search_result_2 = [["#", "?", "When", "Deployment", "Event", "Host",
                       "State", "State'", "Task'"], [1, " ",
                       "2013-07-17 10:16:10.717219", "deployment",
                       "test.start", "example.com", "active", None, None],[2, " ",
                       "2013-07-17 10:16:10.717219", "deployment",
                       "test.start", "example.com", "active", None, None]]
    fake_request = self.mox.CreateMockAnything()
    fake_request.GET = {'field': 'tenant', 'value': 'tenant', 'limit': '2',
                        'service': 'nova'}
    raw1 = self._create_raw()
    raw2 = self._create_raw()
    raw3 = self._create_raw()
    raw2.id = 2
    raw3.id = 3
    models.RawData.objects.filter(tenant='tenant').AndReturn([raw1, raw2,
                                                              raw3])
    # Each row folds its output into the accumulated result list.
    raw1.search_results([], mox.IgnoreArg(), ' ').AndReturn(search_result)
    raw2.search_results(search_result, mox.IgnoreArg(),' ').AndReturn(search_result_2)
    self.mox.ReplayAll()
    resp = stacky_server.search(fake_request)
    self.assertEqual(resp.status_code, 200)
    json_resp = json.loads(resp.content)
    self.assertEqual(len(json_resp), 3)
    self._assert_on_search_nova(json_resp, raw1)
    self.mox.VerifyAll()
def test_model_search_default_limit(self):
    """With no paging params, model_search slices [None:50] (default page)."""
    fake_request = self.mox.CreateMockAnything()
    fake_request.GET = {}
    fake_model = self.mox.CreateMockAnything()
    filters = {'field': 'value'}
    results = self.mox.CreateMockAnything()
    fake_model.filter(**filters).AndReturn(results)
    results[None:50].AndReturn(results)
    self.mox.ReplayAll()
    actual_results = stacky_server.model_search(fake_request, fake_model,
                                                filters)
    self.assertEqual(actual_results, results)
    self.mox.VerifyAll()
def test_model_search_default_limit_with_offset(self):
    """offset=1 with the default limit should slice [1:51]
    (offset shifts both ends of the 50-row window)."""
    fake_request = self.mox.CreateMockAnything()
    fake_request.GET = {'offset': '1'}
    fake_model = self.mox.CreateMockAnything()
    filters = {'field': 'value'}
    results = self.mox.CreateMockAnything()
    fake_model.filter(**filters).AndReturn(results)
    results[1:51].AndReturn(results)
    self.mox.ReplayAll()
    actual_results = stacky_server.model_search(fake_request, fake_model,
                                                filters)
    self.assertEqual(actual_results, results)
    self.mox.VerifyAll()
def test_model_search_default_with_limit(self):
    """limit=1 with no offset should slice [None:1]."""
    fake_request = self.mox.CreateMockAnything()
    fake_request.GET = {'limit': '1'}
    fake_model = self.mox.CreateMockAnything()
    filters = {'field': 'value'}
    results = self.mox.CreateMockAnything()
    fake_model.filter(**filters).AndReturn(results)
    results[None:1].AndReturn(results)
    self.mox.ReplayAll()
    actual_results = stacky_server.model_search(fake_request, fake_model,
                                                filters)
    self.assertEqual(actual_results, results)
    self.mox.VerifyAll()
def test_model_search_default_with_limit_and_offset(self):
    """limit=5, offset=10 should slice [10:15] (offset to offset+limit)."""
    fake_request = self.mox.CreateMockAnything()
    fake_request.GET = {'limit': '5',
                        'offset': '10'}
    fake_model = self.mox.CreateMockAnything()
    filters = {'field': 'value'}
    results = self.mox.CreateMockAnything()
    fake_model.filter(**filters).AndReturn(results)
    results[10:15].AndReturn(results)
    self.mox.ReplayAll()
    actual_results = stacky_server.model_search(fake_request, fake_model,
                                                filters)
    self.assertEqual(actual_results, results)
    self.mox.VerifyAll()
def test_model_search_related(self):
    """related=True should call select_related() before filtering."""
    fake_request = self.mox.CreateMockAnything()
    fake_request.GET = {}
    fake_model = self.mox.CreateMockAnything()
    filters = {'field': 'value'}
    results = self.mox.CreateMockAnything()
    fake_model.select_related().AndReturn(results)
    results.filter(**filters).AndReturn(results)
    results[None:50].AndReturn(results)
    self.mox.ReplayAll()
    actual_results = stacky_server.model_search(fake_request, fake_model,
                                                filters, related=True)
    self.assertEqual(actual_results, results)
    self.mox.VerifyAll()
def test_model_order_by(self):
    """order_by='when' should be applied after filtering, before paging."""
    fake_request = self.mox.CreateMockAnything()
    fake_request.GET = {}
    fake_model = self.mox.CreateMockAnything()
    filters = {'field': 'value'}
    results = self.mox.CreateMockAnything()
    fake_model.filter(**filters).AndReturn(results)
    results.order_by('when').AndReturn(results)
    results[None:50].AndReturn(results)
    self.mox.ReplayAll()
    actual_results = stacky_server.model_search(fake_request, fake_model,
                                                filters, order_by='when')
    self.assertEqual(actual_results, results)
    self.mox.VerifyAll()

File diff suppressed because it is too large Load Diff

View File

@ -19,24 +19,29 @@
# IN THE SOFTWARE.
import json
import unittest
import kombu
import kombu.entity
import kombu.connection
import mox
from stacktach import db, views
from stacktach import db
from stacktach import views
import worker.worker as worker
from tests.unit import StacktachBaseTestCase
class NovaConsumerTestCase(unittest.TestCase):
class ConsumerTestCase(StacktachBaseTestCase):
def setUp(self):
    # Fresh mox instance per test; each test creates its own mocks/stubs.
    self.mox = mox.Mox()
def tearDown(self):
    # Undo any StubOutWithMock patches so tests stay isolated.
    self.mox.UnsetStubs()
def _test_topics(self):
return [
dict(queue="queue1", routing_key="monitor.info"),
dict(queue="queue2", routing_key="monitor.error")
]
def test_get_consumers(self):
created_queues = []
created_callbacks = []
@ -47,16 +52,17 @@ class NovaConsumerTestCase(unittest.TestCase):
consumer = self.mox.CreateMockAnything()
created_consumers.append(consumer)
return consumer
self.mox.StubOutWithMock(worker.NovaConsumer, '_create_exchange')
self.mox.StubOutWithMock(worker.NovaConsumer, '_create_queue')
consumer = worker.NovaConsumer('test', None, None, True, {})
self.mox.StubOutWithMock(worker.Consumer, '_create_exchange')
self.mox.StubOutWithMock(worker.Consumer, '_create_queue')
consumer = worker.Consumer('test', None, None, True, {}, "nova",
self._test_topics())
exchange = self.mox.CreateMockAnything()
consumer._create_exchange('nova', 'topic').AndReturn(exchange)
info_queue = self.mox.CreateMockAnything()
error_queue = self.mox.CreateMockAnything()
consumer._create_queue('monitor.info', exchange, 'monitor.info')\
consumer._create_queue('queue1', exchange, 'monitor.info')\
.AndReturn(info_queue)
consumer._create_queue('monitor.error', exchange, 'monitor.error')\
consumer._create_queue('queue2', exchange, 'monitor.error')\
.AndReturn(error_queue)
self.mox.ReplayAll()
consumers = consumer.get_consumers(Consumer, None)
@ -71,7 +77,8 @@ class NovaConsumerTestCase(unittest.TestCase):
def test_create_exchange(self):
args = {'key': 'value'}
consumer = worker.NovaConsumer('test', None, None, True, args)
consumer = worker.Consumer('test', None, None, True, args, 'nova',
self._test_topics())
self.mox.StubOutClassWithMocks(kombu.entity, 'Exchange')
exchange = kombu.entity.Exchange('nova', type='topic', exclusive=False,
@ -87,7 +94,8 @@ class NovaConsumerTestCase(unittest.TestCase):
queue = kombu.Queue('name', exchange, auto_delete=False, durable=True,
exclusive=False, routing_key='routing.key',
queue_arguments={})
consumer = worker.NovaConsumer('test', None, None, True, {})
consumer = worker.Consumer('test', None, None, True, {}, 'nova',
self._test_topics())
self.mox.ReplayAll()
actual_queue = consumer._create_queue('name', exchange, 'routing.key',
exclusive=False,
@ -103,7 +111,8 @@ class NovaConsumerTestCase(unittest.TestCase):
queue = kombu.Queue('name', exchange, auto_delete=False, durable=True,
exclusive=False, routing_key='routing.key',
queue_arguments=queue_args)
consumer = worker.NovaConsumer('test', None, None, True, queue_args)
consumer = worker.Consumer('test', None, None, True, queue_args,
'nova', self._test_topics())
self.mox.ReplayAll()
actual_queue = consumer._create_queue('name', exchange, 'routing.key',
exclusive=False,
@ -114,21 +123,30 @@ class NovaConsumerTestCase(unittest.TestCase):
def test_process(self):
deployment = self.mox.CreateMockAnything()
raw = self.mox.CreateMockAnything()
raw.get_name().AndReturn('RawData')
message = self.mox.CreateMockAnything()
consumer = worker.NovaConsumer('test', None, deployment, True, {})
exchange = 'nova'
consumer = worker.Consumer('test', None, deployment, True, {},
exchange, self._test_topics())
routing_key = 'monitor.info'
message.delivery_info = {'routing_key': routing_key}
body_dict = {u'key': u'value'}
message.body = json.dumps(body_dict)
mock_notification = self.mox.CreateMockAnything()
mock_post_process_method = self.mox.CreateMockAnything()
mock_post_process_method(raw, mock_notification)
old_handler = worker.POST_PROCESS_METHODS
worker.POST_PROCESS_METHODS["RawData"] = mock_post_process_method
self.mox.StubOutWithMock(views, 'process_raw_data',
use_mock_anything=True)
args = (routing_key, body_dict)
views.process_raw_data(deployment, args, json.dumps(args))\
.AndReturn(raw)
views.process_raw_data(deployment, args, json.dumps(args), exchange) \
.AndReturn((raw, mock_notification))
message.ack()
self.mox.StubOutWithMock(views, 'post_process')
views.post_process(raw, body_dict)
self.mox.StubOutWithMock(consumer, '_check_memory',
use_mock_anything=True)
consumer._check_memory()
@ -136,29 +154,7 @@ class NovaConsumerTestCase(unittest.TestCase):
consumer._process(message)
self.assertEqual(consumer.processed, 1)
self.mox.VerifyAll()
def test_process_no_raw_dont_ack(self):
deployment = self.mox.CreateMockAnything()
raw = self.mox.CreateMockAnything()
message = self.mox.CreateMockAnything()
consumer = worker.NovaConsumer('test', None, deployment, True, {})
routing_key = 'monitor.info'
message.delivery_info = {'routing_key': routing_key}
body_dict = {u'key': u'value'}
message.body = json.dumps(body_dict)
self.mox.StubOutWithMock(views, 'process_raw_data',
use_mock_anything=True)
args = (routing_key, body_dict)
views.process_raw_data(deployment, args, json.dumps(args))\
.AndReturn(None)
self.mox.StubOutWithMock(consumer, '_check_memory',
use_mock_anything=True)
consumer._check_memory()
self.mox.ReplayAll()
consumer._process(message)
self.assertEqual(consumer.processed, 0)
self.mox.VerifyAll()
worker.POST_PROCESS_METHODS["RawData"] = old_handler
def test_run(self):
config = {
@ -168,7 +164,9 @@ class NovaConsumerTestCase(unittest.TestCase):
'rabbit_port': 5672,
'rabbit_userid': 'rabbit',
'rabbit_password': 'rabbit',
'rabbit_virtual_host': '/'
'rabbit_virtual_host': '/',
"services": ["nova"],
"topics": {"nova": self._test_topics()}
}
self.mox.StubOutWithMock(db, 'get_or_create_deployment')
deployment = self.mox.CreateMockAnything()
@ -187,13 +185,15 @@ class NovaConsumerTestCase(unittest.TestCase):
kombu.connection.BrokerConnection(**params).AndReturn(conn)
conn.__enter__().AndReturn(conn)
conn.__exit__(None, None, None).AndReturn(None)
self.mox.StubOutClassWithMocks(worker, 'NovaConsumer')
consumer = worker.NovaConsumer(config['name'], conn, deployment,
config['durable_queue'], {})
self.mox.StubOutClassWithMocks(worker, 'Consumer')
exchange = 'nova'
consumer = worker.Consumer(config['name'], conn, deployment,
config['durable_queue'], {}, exchange,
self._test_topics())
consumer.run()
worker.continue_running().AndReturn(False)
self.mox.ReplayAll()
worker.run(config)
worker.run(config, exchange)
self.mox.VerifyAll()
def test_run_queue_args(self):
@ -205,7 +205,10 @@ class NovaConsumerTestCase(unittest.TestCase):
'rabbit_userid': 'rabbit',
'rabbit_password': 'rabbit',
'rabbit_virtual_host': '/',
'queue_arguments': {'x-ha-policy': 'all'}
'queue_arguments': {'x-ha-policy': 'all'},
'queue_name_prefix': "test_name_",
"services": ["nova"],
"topics": {"nova": self._test_topics()}
}
self.mox.StubOutWithMock(db, 'get_or_create_deployment')
deployment = self.mox.CreateMockAnything()
@ -224,12 +227,14 @@ class NovaConsumerTestCase(unittest.TestCase):
kombu.connection.BrokerConnection(**params).AndReturn(conn)
conn.__enter__().AndReturn(conn)
conn.__exit__(None, None, None).AndReturn(None)
self.mox.StubOutClassWithMocks(worker, 'NovaConsumer')
consumer = worker.NovaConsumer(config['name'], conn, deployment,
config['durable_queue'],
config['queue_arguments'])
self.mox.StubOutClassWithMocks(worker, 'Consumer')
exchange = 'nova'
consumer = worker.Consumer(config['name'], conn, deployment,
config['durable_queue'],
config['queue_arguments'], exchange,
self._test_topics())
consumer.run()
worker.continue_running().AndReturn(False)
self.mox.ReplayAll()
worker.run(config)
self.mox.VerifyAll()
worker.run(config, exchange)
self.mox.VerifyAll()

View File

@ -25,6 +25,8 @@ TENANT_ID_2 = 'testtenantid2'
from stacktach import datetime_to_decimal as dt
IMAGE_UUID_1 = "12345678-6352-4dbc-8271-96cc54bf14cd"
INSTANCE_ID_1 = "08f685d9-6352-4dbc-8271-96cc54bf14cd"
INSTANCE_ID_2 = "515adf96-41d3-b86d-5467-e584edc61dab"
@ -32,6 +34,7 @@ INSTANCE_TYPE_ID_1 = "12345"
INSTANCE_TYPE_ID_2 = '54321'
DUMMY_TIME = datetime.datetime.utcnow()
DECIMAL_DUMMY_TIME = dt.dt_to_decimal(DUMMY_TIME)
MESSAGE_ID_1 = "7f28f81b-29a2-43f2-9ba1-ccb3e53ab6c8"
MESSAGE_ID_2 = "4d596126-0f04-4329-865f-7b9a7bd69bcf"
@ -52,6 +55,15 @@ OS_ARCH_2 = "x64"
OS_VERSION_1 = "1"
OS_VERSION_2 = "2"
TIMESTAMP_1 = "2013-06-20 17:31:57.939614"
SETTLE_TIME = 5
SETTLE_UNITS = "minutes"
TICK_TIME = 10
HOST = '10.0.0.1'
PORT = '5672'
VIRTUAL_HOST = '/'
USERID = 'rabbit'
PASSWORD = 'password'
def decimal_utc(t=None):
    """Convert *t* (default: the current UTC time) to a decimal timestamp.

    Bug fix: the previous signature used ``t=datetime.datetime.utcnow()``,
    which Python evaluates once at import time, so every defaulted call
    returned the module-load time instead of "now".
    """
    if t is None:
        t = datetime.datetime.utcnow()
    return dt.dt_to_decimal(t)
@ -133,4 +145,29 @@ def create_tracker(mox, request_id, lifecycle, start, last_timing=None,
tracker.start=start
tracker.last_timing=last_timing
tracker.duration=duration
return tracker
return tracker
class FakeVerifierConfig(object):
    """Test double for the verifier config module.

    The real config exposes each setting as a zero-argument function, so
    every setting here is bound as a no-arg callable attribute.
    """

    def __init__(self, host, port, virtual_host, userid, password, tick_time,
                 settle_time, settle_units, durable_queue, topics, notifs):
        settings = {
            'host': host,
            'port': port,
            'virtual_host': virtual_host,
            'userid': userid,
            'password': password,
            'pool_size': 5,
            'tick_time': tick_time,
            'settle_time': settle_time,
            'settle_units': settle_units,
            'durable_queue': durable_queue,
            'topics': topics,
            'enable_notifications': notifs,
        }
        for name, value in settings.items():
            # Bind the value as a default argument to avoid late binding.
            setattr(self, name, lambda value=value: value)
def make_verifier_config(notifs):
    """Build a FakeVerifierConfig from the module-level test constants,
    with notifications toggled by *notifs*."""
    return FakeVerifierConfig(HOST, PORT, VIRTUAL_HOST, USERID, PASSWORD,
                              TICK_TIME, SETTLE_TIME, SETTLE_UNITS, True,
                              {'exchange': ['notifications.info']}, notifs)

145
util/glance_usage_seed.py Normal file
View File

@ -0,0 +1,145 @@
# Copyright (c) 2013 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Usage: python glance_usage_seed.py [period_length] [sql_connection]
python glance_usage_seed.py hour mysql://user:password@nova-db.example
.com/nova?charset=utf8
The idea behind glance_usage seeding is to take the current state of all
active, deleted and pending_delete images from glance and insert that
data into Stacktach's image_usage and image_deletes tables.
"""
import __builtin__
setattr(__builtin__, '_', lambda x: x)
import datetime
import os
import sys
from oslo.config import cfg
CONF = cfg.CONF
if __name__ == '__main__':
if len(sys.argv) != 3:
print "Proper Usage: glance_usage_seed.py [period_length] [" \
"sql_connection]"
sys.exit(1)
CONF.sql_connection = sys.argv[2]
import glance.context
import glance.db.sqlalchemy.api as db_api
from sqlalchemy import or_
from sqlalchemy import and_
import glance.db.sqlalchemy.api as db_api
from glance.db.sqlalchemy import models as glancemodels
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir, os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
sys.path.insert(0, POSSIBLE_TOPDIR)
from stacktach import datetime_to_decimal as dt
from stacktach import models
# start yanked from reports/nova_usage_audit.py
def get_period_start(time, period_length):
    """Return the start of the audit period immediately before *time*.

    ``period_length`` may be 'day' or 'hour'; any other value yields None.
    """
    if period_length == 'day':
        prev = time - datetime.timedelta(days=1)
        return datetime.datetime(prev.year, prev.month, prev.day)
    elif period_length == 'hour':
        prev = time - datetime.timedelta(hours=1)
        return datetime.datetime(prev.year, prev.month, prev.day, prev.hour)
# end yanked from reports/nova_usage_audit.py
def _usage_for_image(image):
    """Map a glance Image row to ImageUsage model kwargs.

    Fix: the dict literal listed the 'owner' key twice; the duplicate
    has been removed (the last occurrence won, so behavior is unchanged).
    last_raw_id is None because seeded rows have no originating RawData.
    """
    return {
        'uuid': image.id,
        'owner': image.owner,
        'created_at': dt.dt_to_decimal(image.created_at),
        'size': image.size,
        'last_raw_id': None
    }
def _delete_for_image(image):
    """Map a glance Image row to ImageDeletes model kwargs.

    raw_id is None because seeded rows have no originating RawData.
    """
    deleted_at = dt.dt_to_decimal(image.deleted_at)
    return {'uuid': image.id, 'deleted_at': deleted_at, 'raw_id': None}
def _get_usages(start, session):
    """Fetch images that count as usage: currently active, OR deleted
    after *start* (they were still in use during the period)."""
    criteria = or_(glancemodels.Image.status == 'active',
                   glancemodels.Image.deleted_at > start)
    rows = session.query(glancemodels.Image).filter(criteria).all()
    return [_usage_for_image(row) for row in rows]
def _get_deletes(start, session):
    """Fetch images with status 'deleted' whose deleted_at falls after
    *start*, for seeding ImageDeletes."""
    criteria = and_(glancemodels.Image.status == 'deleted',
                    glancemodels.Image.deleted_at > start)
    rows = session.query(glancemodels.Image).filter(criteria).all()
    return [_delete_for_image(row) for row in rows]
def seed(period_length):
start = get_period_start(datetime.datetime.utcnow(), period_length)
db_api.configure_db()
session = db_api.get_session()
print "Populating active image usages"
usages = _get_usages(start, session)
if usages:
print "Saving active image images"
active_images = map(lambda x: models.ImageUsage(**x), usages)
models.ImageUsage.objects.bulk_create(active_images, batch_size=100)
print "Populating image deletes"
deletes = _get_deletes(start, session)
if deletes:
print "Saving image deletes"
deleted_images = map(lambda x: models.ImageDeletes(**x), deletes)
models.ImageDeletes.objects.bulk_create(deleted_images, batch_size=100)
print "Seeding completed"
return len(usages), len(deletes)
if __name__ == '__main__':
    period = sys.argv[1]
    # seed() returns (active_count, deleted_count).
    counts = seed(period)
    print ("Seeded system with: \n"
           "%s Active images \n"
           "%s Deleted images \n") % counts

View File

@ -0,0 +1,63 @@
# Copyright (c) 2013 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import os
import sys
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir, os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
sys.path.insert(0, POSSIBLE_TOPDIR)
from stacktach import models
if __name__ != '__main__':
sys.exit(1)
seed_usage = models.InstanceUsage.objects.filter(request_id=None)
deleted_instances = models.InstanceDeletes.objects.values('instance').distinct()
deleted = set()
for instance in deleted_instances:
deleted.add(instance['instance'])
fixed = 0
for usage in seed_usage:
if usage.instance not in deleted and usage.launched_at is not None and \
usage.launched_at is not '':
filters = {
'instance': usage.instance,
'launched_at__gte': int(usage.launched_at),
'launched_at__lt': int(usage.launched_at) + 1,
'status': models.InstanceExists.VERIFIED
}
exists = models.InstanceExists.objects.filter(**filters)
if exists.count() > 0:
fixed += 1
usage.os_architecture = exists[0].os_architecture
usage.os_distro = exists[0].os_distro
usage.os_version = exists[0].os_version
usage.rax_options = exists[0].rax_options
usage.save()
else:
print "Couldn't find verified exists for instance %s" % usage.instance
print "Populated %s usage records" % fixed

153
verifier/base_verifier.py Normal file
View File

@ -0,0 +1,153 @@
# Copyright (c) 2012 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import datetime
import os
import sys
import time
import multiprocessing
from django.db import transaction
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir, os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
sys.path.insert(0, POSSIBLE_TOPDIR)
from stacktach import stacklog, message_service
LOG = stacklog.get_logger('verifier')
def _has_field(d1, d2, field1, field2=None):
if not field2:
field2 = field1
return d1.get(field1) is not None and d2.get(field2) is not None
def _verify_simple_field(d1, d2, field1, field2=None):
if not field2:
field2 = field1
if not _has_field(d1, d2, field1, field2):
return False
else:
if d1[field1] != d2[field2]:
return False
return True
def _verify_date_field(d1, d2, same_second=False):
if d1 and d2:
if d1 == d2:
return True
elif same_second and int(d1) == int(d2):
return True
return False
class Verifier(object):
    """Base class for exists-record verifiers (nova, glance).

    Pulls pending exists rows, fans verification out to a worker-process
    pool, and optionally republishes verified notifications to rabbit.
    Subclasses implement verify_for_range(), exchange(),
    reconcile_failed(), and send_verified_notification().
    """

    def __init__(self, config, pool=None, reconciler=None):
        self.config = config
        # config.pool_size() worker processes verify records in parallel.
        self.pool = pool or multiprocessing.Pool(config.pool_size())
        self.enable_notifications = config.enable_notifications()
        self.reconciler = reconciler
        self.results = []   # outstanding AsyncResults from the pool
        self.failed = []    # exists rows that failed verification

    def clean_results(self):
        """Harvest finished async results.

        Returns (pending_count, successful, errored). When a reconciler
        is configured, finished-but-unverified exists are queued on
        self.failed for reconciliation.
        """
        pending = []
        finished = 0
        successful = 0

        for result in self.results:
            if result.ready():
                finished += 1
                if result.successful():
                    (verified, exists) = result.get()
                    if self.reconciler and not verified:
                        self.failed.append(exists)
                    successful += 1
            else:
                pending.append(result)

        self.results = pending
        errored = finished - successful
        return len(self.results), successful, errored

    def _keep_running(self):
        # Overridable hook so tests can stop the loop.
        return True

    def _utcnow(self):
        # Overridable for tests.
        return datetime.datetime.utcnow()

    def _run(self, callback=None):
        tick_time = self.config.tick_time()
        settle_units = self.config.settle_units()
        settle_time = self.config.settle_time()
        while self._keep_running():
            with transaction.commit_on_success():
                now = self._utcnow()
                # Only verify records older than the settle window so
                # late-arriving launch/delete data has time to land.
                kwargs = {settle_units: settle_time}
                ending_max = now - datetime.timedelta(**kwargs)
                new = self.verify_for_range(ending_max, callback=callback)
                values = ((self.exchange(), new,) + self.clean_results())
                if self.reconciler:
                    self.reconcile_failed()
                msg = "%s: N: %s, P: %s, S: %s, E: %s" % values
                LOG.info(msg)
            time.sleep(tick_time)

    def run(self):
        """Run the verification loop; when notifications are enabled,
        republish each verified exists on the subclass's exchange."""
        if self.enable_notifications:
            exchange_name = self.exchange()
            exchange = message_service.create_exchange(
                exchange_name, 'topic',
                durable=self.config.durable_queue())
            routing_keys = self.config.topics()[exchange_name]

            with message_service.create_connection(
                    self.config.host(), self.config.port(),
                    self.config.userid(), self.config.password(),
                    "librabbitmq", self.config.virtual_host()) as conn:
                def callback(result):
                    # Pool callback: publish a notification for each
                    # successfully verified exists record.
                    (verified, exist) = result
                    if verified:
                        self.send_verified_notification(
                            exist, conn, exchange, routing_keys=routing_keys)

                try:
                    self._run(callback=callback)
                except Exception, e:
                    print e
                    raise e
        else:
            self._run()

    def verify_for_range(self, ending_max, callback=None):
        # Subclass responsibility.
        pass

    def reconcile_failed(self):
        # Subclass responsibility.
        pass

    def exchange(self):
        # Subclass responsibility.
        pass

89
verifier/config.py Normal file
View File

@ -0,0 +1,89 @@
# Copyright (c) 2013 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import json
import os
# Path to the verifier config JSON; overridable via the
# STACKTACH_VERIFIER_CONFIG environment variable or local_settings.py.
config_filename = os.environ.get('STACKTACH_VERIFIER_CONFIG',
                                 'stacktach_verifier_config.json')
try:
    from local_settings import *
    config_filename = STACKTACH_VERIFIER_CONFIG
except ImportError:
    pass

# Parsed once at import time; the accessor functions below read from it.
config = None
with open(config_filename, "r") as f:
    config = json.load(f)


def enable_notifications():
    # Whether verified notifications should be republished to rabbit.
    return config['enable_notifications']


def topics():
    # Mapping of exchange name -> list of routing keys.
    return config['rabbit']['topics']


def tick_time():
    # Seconds to sleep between verification passes.
    return config['tick_time']


def settle_units():
    # timedelta keyword for the settle window (e.g. 'minutes').
    return config['settle_units']


def settle_time():
    # How long records must settle before they are verified.
    return config['settle_time']


def reconcile():
    # Whether failed exists should be reconciled; defaults to False.
    return config.get('reconcile', False)


def reconciler_config():
    # Path to the reconciler's own config file.
    return config.get(
        'reconciler_config', '/etc/stacktach/reconciler_config.json')


def pool_size():
    # Number of verifier worker processes.
    return config['pool_size']


def durable_queue():
    # Whether rabbit exchanges/queues are declared durable.
    return config['rabbit']['durable_queue']


# Rabbit connection settings.
def host():
    return config['rabbit']['host']


def port():
    return config['rabbit']['port']


def userid():
    return config['rabbit']['userid']


def password():
    return config['rabbit']['password']


def virtual_host():
    return config['rabbit']['virtual_host']

View File

@ -1,529 +0,0 @@
# Copyright (c) 2012 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import argparse
import datetime
import json
import os
import sys
import time
import uuid
from django.db import transaction
import kombu.common
import kombu.entity
import kombu.pools
import multiprocessing
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir, os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
sys.path.insert(0, POSSIBLE_TOPDIR)
from stacktach import stacklog
stacklog.set_default_logger_name('verifier')
LOG = stacklog.get_logger()
from stacktach import models
from stacktach import datetime_to_decimal as dt
from stacktach import reconciler
from verifier import AmbiguousResults
from verifier import FieldMismatch
from verifier import NotFound
from verifier import VerificationException
def _list_exists(ending_max=None, status=None):
    """Query InstanceExists rows ordered by id, optionally bounded by
    audit-period end and filtered by status."""
    params = {}
    if ending_max:
        params['audit_period_ending__lte'] = dt.dt_to_decimal(ending_max)
    if status:
        params['status'] = status
    query = models.InstanceExists.objects.select_related()
    return query.filter(**params).order_by('id')
def _find_launch(instance, launched):
    """Find InstanceUsage rows whose launched_at falls within the same
    whole second as *launched*."""
    second_start = launched - datetime.timedelta(
        microseconds=launched.microsecond)
    second_end = second_start + datetime.timedelta(microseconds=999999)
    return models.InstanceUsage.objects.filter(
        instance=instance,
        launched_at__gte=dt.dt_to_decimal(second_start),
        launched_at__lte=dt.dt_to_decimal(second_end))
def _find_reconcile(instance, launched):
    """Find InstanceReconcile rows whose launched_at falls within the
    same whole second as *launched*."""
    second_start = launched - datetime.timedelta(
        microseconds=launched.microsecond)
    second_end = second_start + datetime.timedelta(microseconds=999999)
    return models.InstanceReconcile.objects.filter(
        instance=instance,
        launched_at__gte=dt.dt_to_decimal(second_start),
        launched_at__lte=dt.dt_to_decimal(second_end))
def _find_delete(instance, launched, deleted_max=None):
    """Find InstanceDeletes rows launched within the same whole second
    as *launched*; with *deleted_max*, only deletes at or before it."""
    second_start = launched - datetime.timedelta(
        microseconds=launched.microsecond)
    second_end = second_start + datetime.timedelta(microseconds=999999)
    params = {'instance': instance,
              'launched_at__gte': dt.dt_to_decimal(second_start),
              'launched_at__lte': dt.dt_to_decimal(second_end)}
    if deleted_max:
        params['deleted_at__lte'] = dt.dt_to_decimal(deleted_max)
    return models.InstanceDeletes.objects.filter(**params)
def _mark_exist_verified(exist, reconciled=False, reason=None):
    """Flag *exist* VERIFIED (or RECONCILED when *reconciled*), record
    an optional reason, and persist it."""
    exist.status = (models.InstanceExists.RECONCILED if reconciled
                    else models.InstanceExists.VERIFIED)
    if reason is not None:
        exist.fail_reason = reason
    exist.save()
def _mark_exist_failed(exist, reason=None):
    """Flag *exist* FAILED, optionally recording why, and persist it."""
    exist.status = models.InstanceExists.FAILED
    if reason:
        exist.fail_reason = reason
    exist.save()
def _has_field(d1, d2, field1, field2=None):
if not field2:
field2 = field1
return d1.get(field1) is not None and d2.get(field2) is not None
def _verify_simple_field(d1, d2, field1, field2=None):
if not field2:
field2 = field1
if not _has_field(d1, d2, field1, field2):
return False
else:
if d1[field1] != d2[field2]:
return False
return True
def _verify_date_field(d1, d2, same_second=False):
if d1 and d2:
if d1 == d2:
return True
elif same_second and int(d1) == int(d2):
return True
return False
def _verify_field_mismatch(exists, launch):
    """Raise FieldMismatch for the first field where *launch* disagrees
    with the *exists* record.

    launched_at compares to whole-second precision; the remaining fields
    must match exactly, checked in a fixed order so the first mismatch
    reported is deterministic.
    """
    if not _verify_date_field(launch.launched_at, exists.launched_at,
                              same_second=True):
        raise FieldMismatch('launched_at', exists.launched_at,
                            launch.launched_at)

    for field in ('instance_type_id', 'tenant', 'rax_options',
                  'os_architecture', 'os_version', 'os_distro'):
        expected = getattr(exists, field)
        actual = getattr(launch, field)
        if actual != expected:
            raise FieldMismatch(field, expected, actual)
def _verify_for_launch(exist, launch=None, launch_type="InstanceUsage"):
    """Verify *exist* against a launch record.

    Uses exist.usage when linked; otherwise looks the launch up by
    instance and launched_at (whole-second window). Raises
    AmbiguousResults/NotFound when the lookup fails, and FieldMismatch
    (via _verify_field_mismatch) when field values disagree.
    """
    if not launch and exist.usage:
        # The exists row is already linked to its usage record.
        launch = exist.usage
    elif not launch:
        if models.InstanceUsage.objects\
                .filter(instance=exist.instance).count() > 0:
            launches = _find_launch(exist.instance,
                                    dt.dt_from_decimal(exist.launched_at))
            count = launches.count()
            query = {
                'instance': exist.instance,
                'launched_at': exist.launched_at
            }
            if count > 1:
                raise AmbiguousResults(launch_type, query)
            elif count == 0:
                raise NotFound(launch_type, query)
            launch = launches[0]
        else:
            raise NotFound(launch_type, {'instance': exist.instance})

    _verify_field_mismatch(exist, launch)
def _verify_for_delete(exist, delete=None, delete_type="InstanceDelete"):
    """Verify *exist* against delete records.

    Raises NotFound when an expected delete is missing,
    VerificationException when a delete exists for an exist that claims
    none, and FieldMismatch when timestamps disagree.
    """
    if not delete and exist.delete:
        # We know we have a delete and we have it's id
        delete = exist.delete
    elif not delete:
        if exist.deleted_at:
            # We received this exists before the delete, go find it
            deletes = _find_delete(exist.instance,
                                   dt.dt_from_decimal(exist.launched_at))
            if deletes.count() == 1:
                delete = deletes[0]
            else:
                query = {
                    'instance': exist.instance,
                    'launched_at': exist.launched_at
                }
                raise NotFound(delete_type, query)
        else:
            # We don't know if this is supposed to have a delete or not.
            # Thus, we need to check if we have a delete for this instance.
            # We need to be careful though, since we could be verifying an
            # exist event that we got before the delete. So, we restrict the
            # search to only deletes before this exist's audit period ended.
            # If we find any, we fail validation
            launched_at = dt.dt_from_decimal(exist.launched_at)
            deleted_at_max = dt.dt_from_decimal(exist.audit_period_ending)
            deletes = _find_delete(exist.instance, launched_at, deleted_at_max)
            if deletes.count() > 0:
                reason = 'Found %ss for non-delete exist' % delete_type
                raise VerificationException(reason)

    if delete:
        # Both timestamps must agree with the exist to the whole second.
        if not _verify_date_field(delete.launched_at, exist.launched_at,
                                  same_second=True):
            raise FieldMismatch('launched_at', exist.launched_at,
                                delete.launched_at)

        if not _verify_date_field(delete.deleted_at, exist.deleted_at,
                                  same_second=True):
            raise FieldMismatch('deleted_at', exist.deleted_at,
                                delete.deleted_at)
def _verify_with_reconciled_data(exist):
    """Verify *exist* against InstanceReconcile rows instead of the
    normal usage/delete tables.

    Raises VerificationException/NotFound/AmbiguousResults on failure.
    """
    if not exist.launched_at:
        raise VerificationException("Exists without a launched_at")

    query = models.InstanceReconcile.objects.filter(instance=exist.instance)
    if query.count() > 0:
        recs = _find_reconcile(exist.instance,
                               dt.dt_from_decimal(exist.launched_at))
        search_query = {'instance': exist.instance,
                        'launched_at': exist.launched_at}
        count = recs.count()
        if count > 1:
            raise AmbiguousResults('InstanceReconcile', search_query)
        elif count == 0:
            raise NotFound('InstanceReconcile', search_query)
        reconcile = recs[0]
    else:
        raise NotFound('InstanceReconcile', {'instance': exist.instance})

    _verify_for_launch(exist, launch=reconcile,
                       launch_type="InstanceReconcile")
    # The reconcile row doubles as the delete record when it carries a
    # deleted_at value.
    delete = None
    if reconcile.deleted_at is not None:
        delete = reconcile
    _verify_for_delete(exist, delete=delete,
                       delete_type="InstanceReconcile")
def _attempt_reconciled_verify(exist, orig_e):
    """Second-chance verification against reconciled data.

    Marks *exist* verified or failed accordingly and returns whether it
    verified. *orig_e* is the original verification failure, reused as
    the fail reason when no reconciled data exists.
    """
    verified = False
    try:
        # Attempt to verify against reconciled data
        _verify_with_reconciled_data(exist)
        verified = True
        _mark_exist_verified(exist)
    except NotFound, rec_e:
        # No reconciled data, just mark it failed
        _mark_exist_failed(exist, reason=str(orig_e))
    except VerificationException, rec_e:
        # Verification failed against reconciled data, mark it failed
        # using the second failure.
        _mark_exist_failed(exist, reason=str(rec_e))
    except Exception, rec_e:
        _mark_exist_failed(exist, reason=rec_e.__class__.__name__)
        LOG.exception(rec_e)
    return verified
def _verify(exist):
    """Pool-worker entry point: verify a single exists record.

    Returns (verified, exist) so the parent process can track failures
    for reconciliation.
    """
    verified = False
    try:
        if not exist.launched_at:
            raise VerificationException("Exists without a launched_at")

        _verify_for_launch(exist)
        _verify_for_delete(exist)

        verified = True
        _mark_exist_verified(exist)
    except VerificationException, orig_e:
        # Something is wrong with the InstanceUsage record
        verified = _attempt_reconciled_verify(exist, orig_e)
    except Exception, e:
        _mark_exist_failed(exist, reason=e.__class__.__name__)
        LOG.exception(e)
    return verified, exist
def _send_notification(message, routing_key, connection, exchange):
    """Publish *message* on *exchange* using a pooled producer for the
    given connection."""
    producer_pool = kombu.pools.producers[connection]
    with producer_pool.acquire(block=True) as producer:
        kombu.common.maybe_declare(exchange, producer.channel)
        producer.publish(message, routing_key)
def send_verified_notification(exist, connection, exchange, routing_keys=None):
    """Re-emit the exist's original raw notification as
    compute.instance.exists.verified.old.

    The payload gets a fresh message_id, with the original preserved in
    original_message_id. With no *routing_keys*, the raw message's own
    routing key is reused; otherwise one publish per key.
    """
    body = exist.raw.json
    json_body = json.loads(body)
    # json_body is a [routing_key, payload] pair.
    json_body[1]['event_type'] = 'compute.instance.exists.verified.old'
    json_body[1]['original_message_id'] = json_body[1]['message_id']
    json_body[1]['message_id'] = str(uuid.uuid4())
    if routing_keys is None:
        _send_notification(json_body[1], json_body[0], connection, exchange)
    else:
        for key in routing_keys:
            _send_notification(json_body[1], key, connection, exchange)
def _create_exchange(name, type, exclusive=False, auto_delete=False,
                     durable=True):
    """Build a kombu Exchange.

    Bug fix: the ``exclusive`` and ``auto_delete`` arguments were being
    passed to kombu swapped (exclusive=auto_delete and vice versa); they
    are now forwarded to the matching parameters.
    """
    return kombu.entity.Exchange(name, type=type, exclusive=exclusive,
                                 auto_delete=auto_delete, durable=durable)
def _create_connection(config):
    """Open a kombu BrokerConnection from the 'rabbit' section of
    *config*, using the librabbitmq transport."""
    rabbit = config['rabbit']
    return kombu.connection.BrokerConnection(
        hostname=rabbit['host'],
        port=rabbit['port'],
        userid=rabbit['userid'],
        password=rabbit['password'],
        transport="librabbitmq",
        virtual_host=rabbit['virtual_host'])
class Verifier(object):
    """Polls PENDING InstanceExists records and verifies them against
    launch/delete data using a worker-process pool, optionally
    reconciling failures and republishing verified notifications."""

    def __init__(self, config, pool=None, rec=None):
        self.config = config
        self.pool = pool or multiprocessing.Pool(self.config['pool_size'])
        self.reconcile = self.config.get('reconcile', False)
        self.reconciler = self._load_reconciler(config, rec=rec)
        self.results = []   # outstanding AsyncResults from the pool
        self.failed = []    # exists rows that failed verification

    def _load_reconciler(self, config, rec=None):
        # An injected reconciler (tests) wins; otherwise build one from
        # the reconciler config file when reconciliation is enabled.
        if rec:
            return rec

        if self.reconcile:
            config_loc = config.get('reconciler_config',
                                    '/etc/stacktach/reconciler_config.json')
            with open(config_loc, 'r') as rec_config_file:
                rec_config = json.load(rec_config_file)
                return reconciler.Reconciler(rec_config)

    def clean_results(self):
        """Harvest finished async results.

        Returns (pending_count, successful, errored); failed-but-finished
        verifications are queued for reconciliation when enabled.
        """
        pending = []
        finished = 0
        successful = 0

        for result in self.results:
            if result.ready():
                finished += 1
                if result.successful():
                    (verified, exists) = result.get()
                    if self.reconcile and not verified:
                        self.failed.append(exists)
                    successful += 1
            else:
                pending.append(result)

        self.results = pending
        errored = finished - successful
        return len(self.results), successful, errored

    def verify_for_range(self, ending_max, callback=None):
        """Queue every PENDING exists with audit_period_ending at or
        before *ending_max* for verification; returns the queued count."""
        exists = _list_exists(ending_max=ending_max,
                              status=models.InstanceExists.PENDING)
        count = exists.count()
        added = 0
        update_interval = datetime.timedelta(seconds=30)
        next_update = datetime.datetime.utcnow() + update_interval
        LOG.info("Adding %s exists to queue." % count)
        while added < count:
            # Work in batches of 1000 to bound memory usage.
            for exist in exists[0:1000]:
                exist.status = models.InstanceExists.VERIFYING
                exist.save()
                result = self.pool.apply_async(_verify, args=(exist,),
                                               callback=callback)
                self.results.append(result)
                added += 1
                if datetime.datetime.utcnow() > next_update:
                    # Periodic progress logging while queueing.
                    values = ((added,) + self.clean_results())
                    msg = "N: %s, P: %s, S: %s, E: %s" % values
                    LOG.info(msg)
                    next_update = datetime.datetime.utcnow() + update_interval
        return count

    def reconcile_failed(self):
        # Give failed exists a second chance through the reconciler.
        for failed_exist in self.failed:
            if self.reconciler.failed_validation(failed_exist):
                _mark_exist_verified(failed_exist, reconciled=True)
        self.failed = []

    def _keep_running(self):
        # Overridable hook so tests can stop the loop.
        return True

    def _utcnow(self):
        # Overridable for tests.
        return datetime.datetime.utcnow()

    def _run(self, callback=None):
        tick_time = self.config['tick_time']
        settle_units = self.config['settle_units']
        settle_time = self.config['settle_time']
        while self._keep_running():
            with transaction.commit_on_success():
                now = self._utcnow()
                # Only verify records older than the settle window.
                kwargs = {settle_units: settle_time}
                ending_max = now - datetime.timedelta(**kwargs)
                new = self.verify_for_range(ending_max,
                                            callback=callback)
                values = ((new,) + self.clean_results())
                if self.reconcile:
                    self.reconcile_failed()
                msg = "N: %s, P: %s, S: %s, E: %s" % values
                LOG.info(msg)
            time.sleep(tick_time)

    def run(self):
        """Loop forever; republish verified notifications when enabled."""
        if self.config['enable_notifications']:
            exchange = _create_exchange(self.config['rabbit']['exchange_name'],
                                        'topic',
                                        durable=self.config['rabbit']['durable_queue'])
            routing_keys = None
            if self.config['rabbit'].get('routing_keys') is not None:
                routing_keys = self.config['rabbit']['routing_keys']

            with _create_connection(self.config) as conn:
                def callback(result):
                    # Pool callback: publish for each verified exists.
                    (verified, exist) = result
                    if verified:
                        send_verified_notification(exist, conn, exchange,
                                                   routing_keys=routing_keys)

                self._run(callback=callback)
        else:
            self._run()

    def _run_once(self, callback=None):
        tick_time = self.config['tick_time']
        settle_units = self.config['settle_units']
        settle_time = self.config['settle_time']
        now = self._utcnow()
        kwargs = {settle_units: settle_time}
        ending_max = now - datetime.timedelta(**kwargs)
        new = self.verify_for_range(ending_max, callback=callback)

        # Drain the pool before returning.
        LOG.info("Verifying %s exist events" % new)
        while len(self.results) > 0:
            LOG.info("P: %s, F: %s, E: %s" % self.clean_results())
            if self.reconcile:
                self.reconcile_failed()
            time.sleep(tick_time)

    def run_once(self):
        """Single verification pass; republish notifications when
        enabled."""
        if self.config['enable_notifications']:
            exchange = _create_exchange(self.config['rabbit']['exchange_name'],
                                        'topic',
                                        durable=self.config['rabbit']['durable_queue'])
            routing_keys = None
            if self.config['rabbit'].get('routing_keys') is not None:
                routing_keys = self.config['rabbit']['routing_keys']

            with _create_connection(self.config) as conn:
                def callback(result):
                    (verified, exist) = result
                    if verified:
                        send_verified_notification(exist, conn, exchange,
                                                   routing_keys=routing_keys)

                self._run_once(callback=callback)
        else:
            self._run_once()
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description=
                                     "Stacktach Instance Exists Verifier")
    # NOTE(review): --tick-time and --settle-time have int defaults but no
    # type=, so values supplied on the command line arrive as strings --
    # confirm downstream arithmetic/sleep handles that.
    parser.add_argument('--tick-time',
                        help='Time in seconds the verifier will sleep before'
                             'it will check for new exists records.',
                        default=30)
    parser.add_argument('--run-once',
                        help='Check database once and verify all returned'
                             'exists records, then stop',
                        type=bool,
                        default=False)
    parser.add_argument('--settle-time',
                        help='Time the verifier will wait for records to'
                             'settle before it will verify them.',
                        default=10)
    parser.add_argument('--settle-units',
                        help='Units for settle time',
                        default='minutes')
    parser.add_argument('--pool-size',
                        help='Number of processes created to verify records',
                        type=int,
                        default=10)
    args = parser.parse_args()
    config = {'tick_time': args.tick_time, 'settle_time': args.settle_time,
              'settle_units': args.settle_units, 'pool_size': args.pool_size}

    # NOTE(review): Verifier.run()/run_once() also read
    # config['enable_notifications'] and config['rabbit'], which this
    # dict does not provide -- looks like it would raise KeyError when
    # run this way; confirm intended usage.
    verifier = Verifier(config)
    if args.run_once:
        verifier.run_once()
    else:
        verifier.run()

172
verifier/glance_verifier.py Normal file
View File

@ -0,0 +1,172 @@
# Copyright (c) 2012 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import json
import os
import sys
import uuid
from verifier.base_verifier import Verifier
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir, os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
sys.path.insert(0, POSSIBLE_TOPDIR)
from stacktach import models
from verifier import FieldMismatch, VerificationException, base_verifier
from verifier import NotFound
from stacktach import datetime_to_decimal as dt
import datetime
from stacktach import stacklog, message_service
LOG = stacklog.get_logger('verifier')
def _verify_field_mismatch(exists, usage):
    """Raise FieldMismatch if *usage* disagrees with its *exists* row."""
    # Timestamps only need to agree to the second.
    if not base_verifier._verify_date_field(
            usage.created_at, exists.created_at, same_second=True):
        raise FieldMismatch('created_at', exists.created_at,
                            usage.created_at)

    # Remaining fields must match exactly; checked in this order.
    for field in ('owner', 'size'):
        expected = getattr(exists, field)
        actual = getattr(usage, field)
        if actual != expected:
            raise FieldMismatch(field, expected, actual)
def _verify_for_usage(exist, usage=None):
    """Locate the ImageUsage record for *exist* and verify it matches."""
    usage_type = "ImageUsage"
    if not usage and exist.usage:
        # The exist row already references its usage record.
        usage = exist.usage
    elif not usage:
        # Fall back to looking the usage up by image uuid.
        usages = models.ImageUsage.objects.filter(uuid=exist.uuid)
        if usages.count() == 0:
            raise NotFound(usage_type, {'uuid': exist.uuid})
        usage = usages[0]
    _verify_field_mismatch(exist, usage)
def _verify_for_delete(exist, delete=None):
    """Find and verify the ImageDelete (if any) for this exist record.

    Raises NotFound when a delete should exist but cannot be found,
    VerificationException when a delete is found for a non-deleted image,
    and FieldMismatch when the delete's fields disagree with the exist.
    """
    delete_type = "ImageDelete"
    if not delete and exist.delete:
        # We know we have a delete and we have its id
        delete = exist.delete
    elif not delete:
        if exist.deleted_at:
            # We received this exists before the delete, go find it
            deletes = models.ImageDeletes.find(uuid=exist.uuid)
            if deletes.count() == 1:
                delete = deletes[0]
            else:
                # Bug fix: ImageExists rows are keyed by image uuid, not by
                # (instance, launched_at) as in the nova verifier this code
                # was derived from; referencing those nova-only attributes
                # raised AttributeError here instead of NotFound.
                query = {
                    'uuid': exist.uuid,
                    'deleted_at': exist.deleted_at
                }
                raise NotFound(delete_type, query)
        else:
            # We don't know if this is supposed to have a delete or not.
            # Thus, we need to check if we have a delete for this image.
            # We need to be careful though, since we could be verifying an
            # exist event that we got before the delete. So, we restrict the
            # search to only deletes before this exist's audit period ended.
            # If we find any, we fail validation
            deleted_at_max = dt.dt_from_decimal(exist.audit_period_ending)
            deletes = models.ImageDeletes.find(
                exist.uuid, deleted_at_max)
            if deletes.count() > 0:
                reason = 'Found %ss for non-delete exist' % delete_type
                raise VerificationException(reason)

    if delete:
        # Timestamps only need to agree to the second.
        if not base_verifier._verify_date_field(
                delete.created_at, exist.created_at, same_second=True):
            raise FieldMismatch('created_at', exist.created_at,
                                delete.created_at)
        if not base_verifier._verify_date_field(
                delete.deleted_at, exist.deleted_at, same_second=True):
            raise FieldMismatch('deleted_at', exist.deleted_at,
                                delete.deleted_at)
def _verify(exist):
    """Verify one ImageExists row; mark it verified or failed.

    Returns a (verified, exist) tuple so pool callbacks can report
    per-record results.  Any exception (including verification failures)
    marks the record failed with the exception class name as the reason.
    """
    verified = False
    try:
        _verify_for_usage(exist)
        _verify_for_delete(exist)
        # Flag flips before mark_verified so a failure inside the model
        # save path is still caught below.
        verified = True
        exist.mark_verified()
    except Exception, e:
        exist.mark_failed(reason=e.__class__.__name__)
        LOG.exception("glance: %s" % e)

    return verified, exist
class GlanceVerifier(Verifier):
    """Verifier for glance image exists events.

    Pulls PENDING ImageExists rows, fans verification out to the process
    pool, and can re-publish verified events as
    'image.exists.verified.old' notifications.
    """

    def __init__(self, config, pool=None):
        super(GlanceVerifier, self).__init__(config, pool=pool)

    def verify_for_range(self, ending_max, callback=None):
        """Queue every PENDING exist ending on/before *ending_max*.

        Returns the number of exists queued.  *callback*, if given, is
        invoked by the pool with each (verified, exist) result.
        """
        exists = models.ImageExists.find(
            ending_max=ending_max, status=models.ImageExists.PENDING)
        count = exists.count()
        added = 0
        update_interval = datetime.timedelta(seconds=30)
        next_update = datetime.datetime.utcnow() + update_interval
        LOG.info("glance: Adding %s exists to queue." % count)
        while added < count:
            # NOTE(review): slicing re-evaluates the PENDING queryset each
            # pass; rows just flipped to VERIFYING presumably drop out so
            # each iteration picks up the next batch of <=1000 — confirm
            # against models.ImageExists.find's queryset semantics.
            for exist in exists[0:1000]:
                exist.status = models.ImageExists.VERIFYING
                exist.save()
                result = self.pool.apply_async(_verify, args=(exist,),
                                               callback=callback)
                self.results.append(result)
                added += 1
                if datetime.datetime.utcnow() > next_update:
                    # Periodic progress log: N queued plus the running
                    # tallies from clean_results().
                    values = ((added,) + self.clean_results())
                    msg = "glance: N: %s, P: %s, S: %s, E: %s" % values
                    LOG.info(msg)
                    next_update = datetime.datetime.utcnow() + update_interval
        return count

    def send_verified_notification(self, exist, connection, exchange,
                                   routing_keys=None):
        """Re-publish the original exists message as *.verified.old.

        The stored raw message is JSON of the form [routing_key, body];
        the body is republished with a fresh message_id and the original
        id preserved in original_message_id.
        """
        body = exist.raw.json
        json_body = json.loads(body)
        json_body[1]['event_type'] = 'image.exists.verified.old'
        json_body[1]['original_message_id'] = json_body[1]['message_id']
        json_body[1]['message_id'] = str(uuid.uuid4())
        if routing_keys is None:
            # No explicit keys: reuse the original routing key.
            message_service.send_notification(json_body[1], json_body[0],
                                              connection, exchange)
        else:
            for key in routing_keys:
                message_service.send_notification(json_body[1], key,
                                                  connection, exchange)

    def exchange(self):
        # Exchange name this verifier consumes/publishes on.
        return 'glance'

268
verifier/nova_verifier.py Normal file
View File

@ -0,0 +1,268 @@
# Copyright (c) 2012 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import argparse
import datetime
import json
import os
import sys
import uuid
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir, os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
sys.path.insert(0, POSSIBLE_TOPDIR)
from verifier import base_verifier
from stacktach import models
from stacktach import datetime_to_decimal as dt
from verifier import FieldMismatch
from verifier import AmbiguousResults
from verifier import NotFound
from verifier import VerificationException
from stacktach import stacklog, message_service
LOG = stacklog.get_logger('verifier')
def _verify_field_mismatch(exists, launch):
    """Raise FieldMismatch if *launch* disagrees with its *exists* row."""
    # launched_at is a timestamp; it only has to agree to the second.
    if not base_verifier._verify_date_field(
            launch.launched_at, exists.launched_at, same_second=True):
        raise FieldMismatch('launched_at', exists.launched_at,
                            launch.launched_at)

    # Remaining fields must match exactly; checked in this order.
    fields = ('instance_type_id', 'tenant', 'rax_options',
              'os_architecture', 'os_version', 'os_distro')
    for name in fields:
        expected = getattr(exists, name)
        actual = getattr(launch, name)
        if actual != expected:
            raise FieldMismatch(name, expected, actual)
def _verify_for_launch(exist, launch=None,
                       launch_type="InstanceUsage"):
    """Locate the launch (usage) record for *exist* and verify it.

    Raises NotFound when no matching launch exists and AmbiguousResults
    when more than one candidate matches.
    """
    if not launch:
        if exist.usage:
            # The exist row already references its usage record.
            launch = exist.usage
        elif models.InstanceUsage.objects \
                .filter(instance=exist.instance).count() == 0:
            # No usage rows at all for this instance.
            raise NotFound(launch_type, {'instance': exist.instance})
        else:
            launches = models.InstanceUsage.find(
                exist.instance, dt.dt_from_decimal(exist.launched_at))
            count = launches.count()
            query = {
                'instance': exist.instance,
                'launched_at': exist.launched_at
            }
            if count == 0:
                raise NotFound(launch_type, query)
            if count > 1:
                raise AmbiguousResults(launch_type, query)
            launch = launches[0]
    _verify_field_mismatch(exist, launch)
def _verify_for_delete(exist, delete=None,
                       delete_type="InstanceDeletes"):
    """Find and verify the delete record (if any) for this exist.

    Raises NotFound when a delete should exist but cannot be found,
    VerificationException when a delete is found for a non-deleted
    instance, and FieldMismatch when the delete's fields disagree with
    the exist.
    """
    if not delete and exist.delete:
        # We know we have a delete and we have its id
        delete = exist.delete
    elif not delete:
        if exist.deleted_at:
            # We received this exists before the delete, go find it
            deletes = models.InstanceDeletes.find(
                exist.instance, dt.dt_from_decimal(exist.launched_at))
            if deletes.count() == 1:
                delete = deletes[0]
            else:
                query = {
                    'instance': exist.instance,
                    'launched_at': exist.launched_at
                }
                raise NotFound(delete_type, query)
        else:
            # We don't know if this is supposed to have a delete or not.
            # Thus, we need to check if we have a delete for this instance.
            # We need to be careful though, since we could be verifying an
            # exist event that we got before the delete. So, we restrict the
            # search to only deletes before this exist's audit period ended.
            # If we find any, we fail validation
            launched_at = dt.dt_from_decimal(exist.launched_at)
            deleted_at_max = dt.dt_from_decimal(exist.audit_period_ending)
            deletes = models.InstanceDeletes.find(exist.instance, launched_at,
                                                  deleted_at_max)
            if deletes.count() > 0:
                reason = 'Found %s for non-delete exist' % delete_type
                raise VerificationException(reason)

    if delete:
        # Timestamps only need to agree to the second.
        if not base_verifier._verify_date_field(
                delete.launched_at, exist.launched_at, same_second=True):
            raise FieldMismatch('launched_at', exist.launched_at,
                                delete.launched_at)
        if not base_verifier._verify_date_field(
                delete.deleted_at, exist.deleted_at, same_second=True):
            raise FieldMismatch(
                'deleted_at', exist.deleted_at, delete.deleted_at)
def _verify_with_reconciled_data(exist):
    """Verify *exist* against InstanceReconcile data instead of usage rows.

    Used as a fallback after a primary verification failure.  Raises
    VerificationException, NotFound, or AmbiguousResults when suitable
    reconciled data cannot be found or does not match.
    """
    if not exist.launched_at:
        raise VerificationException("Exists without a launched_at")

    query = models.InstanceReconcile.objects.filter(instance=exist.instance)
    if query.count() > 0:
        recs = models.InstanceReconcile.find(exist.instance,
                                             dt.dt_from_decimal((
                                                 exist.launched_at)))
        search_query = {'instance': exist.instance,
                        'launched_at': exist.launched_at}
        count = recs.count()
        if count > 1:
            raise AmbiguousResults('InstanceReconcile', search_query)
        elif count == 0:
            raise NotFound('InstanceReconcile', search_query)
        reconcile = recs[0]
    else:
        raise NotFound('InstanceReconcile', {'instance': exist.instance})

    _verify_for_launch(exist, launch=reconcile,
                       launch_type="InstanceReconcile")
    # The reconcile row doubles as the delete record when it carries a
    # deleted_at; otherwise verify that no delete should exist.
    delete = None
    if reconcile.deleted_at is not None:
        delete = reconcile
    _verify_for_delete(exist, delete=delete, delete_type="InstanceReconcile")
def _attempt_reconciled_verify(exist, orig_e):
    """Retry a failed verification against reconciled data.

    *orig_e* is the exception from the primary verification; it becomes
    the stored failure reason when no reconciled data is found.  Returns
    True only if the reconciled data verifies cleanly.
    """
    verified = False
    try:
        # Attempt to verify against reconciled data
        _verify_with_reconciled_data(exist)
        verified = True
        exist.mark_verified(reconciled=True)
    except NotFound, rec_e:
        # No reconciled data, just mark it failed
        exist.mark_failed(reason=str(orig_e))
    except VerificationException, rec_e:
        # Verification failed against reconciled data, mark it failed
        # using the second failure.
        exist.mark_failed(reason=str(rec_e))
    except Exception, rec_e:
        exist.mark_failed(reason=rec_e.__class__.__name__)
        LOG.exception("nova: %s" % rec_e)
    return verified
def _verify(exist):
    """Verify one InstanceExists row; mark it verified or failed.

    Verification failures trigger a fallback against reconciled data;
    any other exception marks the record failed with the exception class
    name.  Returns a (verified, exist) tuple for pool callbacks.
    """
    verified = False
    try:
        if not exist.launched_at:
            raise VerificationException("Exists without a launched_at")

        _verify_for_launch(exist)
        _verify_for_delete(exist)

        # Flag flips before mark_verified so a failure inside the model
        # save path is still caught below.
        verified = True
        exist.mark_verified()
    except VerificationException, orig_e:
        # Something is wrong with the InstanceUsage record
        verified = _attempt_reconciled_verify(exist, orig_e)
    except Exception, e:
        exist.mark_failed(reason=e.__class__.__name__)
        LOG.exception("nova: %s" % e)

    return verified, exist
class NovaVerifier(base_verifier.Verifier):
    """Verifier for nova instance exists events.

    Pulls PENDING InstanceExists rows, fans verification out to the
    process pool, re-publishes verified events, and hands failed records
    to a reconciler when one is configured.
    """

    def __init__(self, config, pool=None, reconciler=None):
        super(NovaVerifier, self).__init__(config,
                                           pool=pool,
                                           reconciler=reconciler)

    def send_verified_notification(self, exist, connection, exchange,
                                   routing_keys=None):
        """Re-publish the original exists message as *.verified.old.

        The stored raw message is JSON of the form [routing_key, body];
        the body is republished with a fresh message_id and the original
        id preserved in original_message_id.
        """
        body = exist.raw.json
        json_body = json.loads(body)
        json_body[1]['event_type'] = 'compute.instance.exists.verified.old'
        json_body[1]['original_message_id'] = json_body[1]['message_id']
        json_body[1]['message_id'] = str(uuid.uuid4())
        if routing_keys is None:
            # No explicit keys: reuse the original routing key.
            message_service.send_notification(
                json_body[1], json_body[0], connection, exchange)
        else:
            for key in routing_keys:
                message_service.send_notification(
                    json_body[1], key, connection, exchange)

    def verify_for_range(self, ending_max, callback=None):
        """Queue every PENDING exist ending on/before *ending_max*.

        Returns the number of exists queued.  *callback*, if given, is
        invoked by the pool with each (verified, exist) result.
        """
        exists = models.InstanceExists.find(
            ending_max=ending_max, status=models.InstanceExists.PENDING)
        count = exists.count()
        added = 0
        update_interval = datetime.timedelta(seconds=30)
        next_update = datetime.datetime.utcnow() + update_interval
        LOG.info("nova: Adding %s exists to queue." % count)
        while added < count:
            # NOTE(review): slicing re-evaluates the PENDING queryset each
            # pass; rows just flipped to VERIFYING presumably drop out so
            # each iteration picks up the next batch of <=1000 — confirm
            # against models.InstanceExists.find's queryset semantics.
            for exist in exists[0:1000]:
                exist.update_status(models.InstanceExists.VERIFYING)
                exist.save()
                result = self.pool.apply_async(
                    _verify, args=(exist,),
                    callback=callback)
                self.results.append(result)
                added += 1
                if datetime.datetime.utcnow() > next_update:
                    # Periodic progress log: N queued plus the running
                    # tallies from clean_results().
                    values = ((added,) + self.clean_results())
                    msg = "nova: N: %s, P: %s, S: %s, E: %s" % values
                    LOG.info(msg)
                    next_update = datetime.datetime.utcnow() + update_interval
        return count

    def reconcile_failed(self):
        # Hand each failed exist to the reconciler, then reset the list.
        for failed_exist in self.failed:
            self.reconciler.failed_validation(failed_exist)
        self.failed = []

    def exchange(self):
        # Exchange name this verifier consumes/publishes on.
        return 'nova'

View File

@ -17,8 +17,8 @@
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import json
import os
import signal
import sys
@ -30,10 +30,11 @@ POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
sys.path.insert(0, POSSIBLE_TOPDIR)
from verifier import dbverifier
from stacktach import reconciler
from verifier import nova_verifier
from verifier import glance_verifier
import verifier.config as verifier_config
config_filename = os.environ.get('STACKTACH_VERIFIER_CONFIG',
'stacktach_verifier_config.json')
try:
from local_settings import *
config_filename = STACKTACH_VERIFIER_CONFIG
@ -42,31 +43,47 @@ except ImportError:
process = None
processes = []
def kill_time(signal, frame):
print "dying ..."
if process:
for process in processes:
process.terminate()
print "rose"
if process:
for process in processes:
process.join()
print "bud"
sys.exit(0)
if __name__ == '__main__':
config = None
with open(config_filename, "r") as f:
config = json.load(f)
def _load_nova_reconciler():
config_loc = verifier_config.reconciler_config()
with open(config_loc, 'r') as rec_config_file:
rec_config = json.load(rec_config_file)
return reconciler.Reconciler(rec_config)
def make_and_start_verifier(config):
if __name__ == '__main__':
def make_and_start_verifier(exchange):
# Gotta create it and run it this way so things don't get
# lost when the process is forked.
verifier = dbverifier.Verifier(config)
verifier = None
if exchange == "nova":
reconcile = verifier_config.reconcile()
reconciler = None
if reconcile:
reconciler = _load_nova_reconciler()
verifier = nova_verifier.NovaVerifier(verifier_config,
reconciler=reconciler)
elif exchange == "glance":
verifier = glance_verifier.GlanceVerifier(verifier_config)
verifier.run()
process = Process(target=make_and_start_verifier, args=(config,))
process.start()
for exchange in verifier_config.topics().keys():
process = Process(target=make_and_start_verifier, args=(exchange,))
process.start()
processes.append(process)
signal.signal(signal.SIGINT, kill_time)
signal.signal(signal.SIGTERM, kill_time)
signal.pause()

41
worker/config.py Normal file
View File

@ -0,0 +1,41 @@
# Copyright (c) 2013 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import json
import os
# Worker configuration: loaded once at import time from a JSON file.
# The path comes from the STACKTACH_DEPLOYMENTS_FILE env var, and may be
# overridden by a STACKTACH_DEPLOYMENTS_FILE name in local_settings.
config_filename = os.environ.get('STACKTACH_DEPLOYMENTS_FILE',
                                 'stacktach_worker_config.json')
try:
    from local_settings import *
    config_filename = STACKTACH_DEPLOYMENTS_FILE
except ImportError:
    # No local_settings module; keep the env/default filename.
    pass

config = None
with open(config_filename, "r") as f:
    config = json.load(f)


def deployments():
    """Return the list of deployment configs from the worker config file."""
    return config['deployments']


def topics():
    """Return the 'topics' mapping (exchange name -> topic definitions)."""
    return config['topics']

View File

@ -1,4 +1,3 @@
import json
import os
import signal
import sys
@ -11,14 +10,7 @@ if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
sys.path.insert(0, POSSIBLE_TOPDIR)
import worker.worker as worker
config_filename = os.environ.get('STACKTACH_DEPLOYMENTS_FILE',
'stacktach_worker_config.json')
try:
from local_settings import *
config_filename = STACKTACH_DEPLOYMENTS_FILE
except ImportError:
pass
from worker import config
processes = []
@ -35,18 +27,15 @@ def kill_time(signal, frame):
if __name__ == '__main__':
config = None
with open(config_filename, "r") as f:
config = json.load(f)
deployments = config['deployments']
for deployment in deployments:
for deployment in config.deployments():
if deployment.get('enabled', True):
process = Process(target=worker.run, args=(deployment,))
process.daemon = True
process.start()
processes.append(process)
for exchange in deployment.get('topics').keys():
process = Process(target=worker.run, args=(deployment,
exchange,))
process.daemon = True
process.start()
processes.append(process)
signal.signal(signal.SIGINT, kill_time)
signal.signal(signal.SIGTERM, kill_time)
signal.pause()

View File

@ -17,12 +17,13 @@
# to set TENANT_ID and URL to point to your StackTach web server.
import datetime
import kombu
import kombu.entity
import kombu.mixins
import sys
import time
import kombu
import kombu.mixins
try:
import ujson as json
except ImportError:
@ -33,7 +34,7 @@ except ImportError:
from pympler.process import ProcessMemoryInfo
from stacktach import db
from stacktach import db, message_service
from stacktach import stacklog
from stacktach import views
@ -41,8 +42,9 @@ stacklog.set_default_logger_name('worker')
LOG = stacklog.get_logger()
class NovaConsumer(kombu.mixins.ConsumerMixin):
def __init__(self, name, connection, deployment, durable, queue_arguments):
class Consumer(kombu.mixins.ConsumerMixin):
def __init__(self, name, connection, deployment, durable, queue_arguments,
exchange, topics):
self.connection = connection
self.deployment = deployment
self.durable = durable
@ -52,28 +54,29 @@ class NovaConsumer(kombu.mixins.ConsumerMixin):
self.pmi = None
self.processed = 0
self.total_processed = 0
self.topics = topics
self.exchange = exchange
def _create_exchange(self, name, type, exclusive=False, auto_delete=False):
return kombu.entity.Exchange(name, type=type, exclusive=exclusive,
return message_service.create_exchange(name, exchange_type=type, exclusive=exclusive,
durable=self.durable,
auto_delete=auto_delete)
def _create_queue(self, name, nova_exchange, routing_key, exclusive=False,
auto_delete=False):
return kombu.Queue(name, nova_exchange, durable=self.durable,
auto_delete=exclusive, exclusive=auto_delete,
queue_arguments=self.queue_arguments,
routing_key=routing_key)
return message_service.create_queue(
name, nova_exchange, durable=self.durable, auto_delete=exclusive,
exclusive=auto_delete, queue_arguments=self.queue_arguments,
routing_key=routing_key)
def get_consumers(self, Consumer, channel):
nova_exchange = self._create_exchange("nova", "topic")
exchange = self._create_exchange(self.exchange, "topic")
nova_queues = [
self._create_queue('monitor.info', nova_exchange, 'monitor.info'),
self._create_queue('monitor.error', nova_exchange, 'monitor.error')
]
queues = [self._create_queue(topic['queue'], exchange,
topic['routing_key'])
for topic in self.topics]
return [Consumer(queues=nova_queues, callbacks=[self.on_nova])]
return [Consumer(queues=queues, callbacks=[self.on_nova])]
def _process(self, message):
routing_key = message.delivery_info['routing_key']
@ -81,14 +84,13 @@ class NovaConsumer(kombu.mixins.ConsumerMixin):
body = str(message.body)
args = (routing_key, json.loads(body))
asJson = json.dumps(args)
# save raw and ack the message
raw = views.process_raw_data(self.deployment, args, asJson)
raw, notif = views.process_raw_data(
self.deployment, args, asJson, self.exchange)
if raw:
self.processed += 1
message.ack()
views.post_process(raw, args[1])
self.processed += 1
message.ack()
POST_PROCESS_METHODS[raw.get_name()](raw, notif)
self._check_memory()
@ -113,9 +115,9 @@ class NovaConsumer(kombu.mixins.ConsumerMixin):
per_message = 0
if self.total_processed:
per_message = idiff / self.total_processed
LOG.debug("%20s %6dk/%6dk ram, "
LOG.debug("%20s %20s %6dk/%6dk ram, "
"%3d/%4d msgs @ %6dk/msg" %
(self.name, diff, idiff, self.processed,
(self.name, self.exchange, diff, idiff, self.processed,
self.total_processed, per_message))
self.last_vsz = self.pmi.vsz
self.processed = 0
@ -140,7 +142,7 @@ def exit_or_sleep(exit=False):
time.sleep(5)
def run(deployment_config):
def run(deployment_config, exchange):
name = deployment_config['name']
host = deployment_config.get('rabbit_host', 'localhost')
port = deployment_config.get('rabbit_port', 5672)
@ -150,11 +152,13 @@ def run(deployment_config):
durable = deployment_config.get('durable_queue', True)
queue_arguments = deployment_config.get('queue_arguments', {})
exit_on_exception = deployment_config.get('exit_on_exception', False)
topics = deployment_config.get('topics', {})
deployment, new = db.get_or_create_deployment(name)
print "Starting worker for '%s'" % name
LOG.info("%s: %s %s %s %s" % (name, host, port, user_id, virtual_host))
print "Starting worker for '%s %s'" % (name, exchange)
LOG.info("%s: %s %s %s %s %s" % (name, exchange, host, port, user_id,
virtual_host))
params = dict(hostname=host,
port=port,
@ -166,21 +170,30 @@ def run(deployment_config):
# continue_running() is used for testing
while continue_running():
try:
LOG.debug("Processing on '%s'" % name)
LOG.debug("Processing on '%s %s'" % (name, exchange))
with kombu.connection.BrokerConnection(**params) as conn:
try:
consumer = NovaConsumer(name, conn, deployment, durable,
queue_arguments)
consumer = Consumer(name, conn, deployment, durable,
queue_arguments, exchange,
topics[exchange])
consumer.run()
except Exception as e:
LOG.error("!!!!Exception!!!!")
LOG.exception("name=%s, exception=%s. Reconnecting in 5s" %
(name, e))
LOG.exception("name=%s, exchange=%s, exception=%s. "
"Reconnecting in 5s" %
(name, exchange, e))
exit_or_sleep(exit_on_exception)
LOG.debug("Completed processing on '%s'" % name)
LOG.debug("Completed processing on '%s %s'" % (name, exchange))
except:
LOG.error("!!!!Exception!!!!")
e = sys.exc_info()[0]
msg = "Uncaught exception: deployment=%s, exception=%s. Retrying in 5s"
LOG.exception(msg % (name, e))
msg = "Uncaught exception: deployment=%s, exchange=%s, " \
"exception=%s. Retrying in 5s"
LOG.exception(msg % (name, exchange, e))
exit_or_sleep(exit_on_exception)
POST_PROCESS_METHODS = {
'RawData': views.post_process_rawdata,
'GlanceRawData': views.post_process_glancerawdata,
'GenericRawData': views.post_process_genericrawdata
}