Merge branch 'master' of https://github.com/rackerlabs/stacktach into stable_2013_12_11

Conflicts:
	verifier/base_verifier.py
This commit is contained in:
Andrew Melton 2013-11-12 14:01:59 -05:00
commit 1620c998a9
43 changed files with 2419 additions and 1342 deletions

View File

@@ -34,9 +34,9 @@ StackTach has three primary components:
Of course, this is only suitable for playing around. If you want to get serious about deploying StackTach you should set up a proper webserver and database on standalone servers. There is a lot of data that gets collected by StackTach (depending on your deployment size) ... be warned. Keep an eye on DB size.
#### The Config Files
There are two config files for StackTach. The first one tells us where the second one is. A sample of these two files is in `./etc/sample_*`
There are two config files for StackTach. The first one tells us where the second one is. A sample of these two files is in `./etc/sample_*`. Create a local copy of these files and populate them with the appropriate config values as described below.
The `sample_stacktach_config.sh` shell script defines the necessary environment variables StackTach needs. Most of these are just information about the database (assuming MySql) but some are a little different.
The `sample_stacktach_config.sh` shell script defines the necessary environment variables StackTach needs. Most of these are just information about the database (assuming MySql) but some are a little different. **Remember to source the local copy of the `sample_stacktach_config.sh` shell script to set up the necessary environment variables.**
If your db host is not on the same machine, you'll need to set this flag. Otherwise the empty string is fine.
@@ -72,7 +72,7 @@ But that's not much fun. A deployment entry would look like this:
where, *name* is whatever you want to call your deployment, and *rabbit_<>* are the connectivity details for your rabbit server. It should be the same information in your `nova.conf` file that OpenStack is using. Note, json has no concept of comments, so using `#`, `//` or `/* */` as a comment won't work.
By default, Nova uses emphemeral queues. If you are using durable queues, be sure to change the necessary flag here.
By default, Nova uses ephemeral queues. If you are using durable queues, be sure to change the necessary flag here.
You can add as many deployments as you like.

View File

@ -28,7 +28,7 @@
{
"queue": "stacktach_monitor_glance.error",
"routing_key": "monitor_glance.error"
},
}
]
}
},

0
reports/__init__.py Normal file
View File

View File

@ -0,0 +1,221 @@
import argparse
import datetime
import json
import os
import sys
sys.path.append(os.environ.get('STACKTACH_INSTALL_DIR', '/stacktach'))
from django.db.models import F
from reports import usage_audit
from stacktach import models
from stacktach import datetime_to_decimal as dt
OLD_IMAGES_QUERY = """
select * from stacktach_imageusage left join stacktach_imagedeletes
on (stacktach_imageusage.uuid = stacktach_imagedeletes.uuid and
deleted_at < %s)
where stacktach_imagedeletes.id IS NULL
and created_at is not null and created_at < %s;"""
def audit_usages_to_exists(exists, usages):
    """Report usages that have no corresponding exists record.

    exists: dict keyed by image uuid.
    usages: dict mapping uuid -> list of usage dicts (each with an 'id').
    Returns a list of [object-type, id, message] failure rows, one per
    usage uuid that is missing from `exists`.
    """
    return [['Usage', rows[0]['id'], "No exists for usage (%s)" % uuid]
            for uuid, rows in usages.items()
            if uuid not in exists]
def _get_new_images(beginning, ending):
    """Query image usage rows created within the period [beginning, ending]."""
    return models.ImageUsage.objects.filter(created_at__gte=beginning,
                                            created_at__lte=ending)
def _get_exists(beginning, ending):
    """Query exists rows whose audit period starts at `beginning` and
    ends no later than `ending`."""
    return models.ImageExists.objects.filter(
        audit_period_beginning=beginning,
        audit_period_ending__gte=beginning,
        audit_period_ending__lte=ending)
def valid_datetime(d):
    """argparse type-converter: parse d as 'YYYY-MM-DD HH:MM:SS'.

    Returns a datetime on success; raises argparse.ArgumentTypeError on
    malformed input so argparse reports a clean usage error.
    """
    try:
        return datetime.datetime.strptime(d, "%Y-%m-%d %H:%M:%S")
    except ValueError:
        # strptime signals bad input with ValueError; the previous
        # `except Exception, e` was Python-2-only syntax, caught too
        # broadly, and never used `e`.
        raise argparse.ArgumentTypeError(
            "'%s' is not in YYYY-MM-DD HH:MM:SS format." % d)
def audit_for_period(beginning, ending):
    """Run the glance usage audit for the window [beginning, ending].

    Returns (summary, details): summary aggregates verifier and image
    counts; details lists the individual failure rows.
    """
    begin_dec = dt.dt_to_decimal(beginning)
    end_dec = dt.dt_to_decimal(ending)
    verify_summary, verify_detail = _verifier_audit_for_day(
        begin_dec, end_dec, models.ImageExists)
    image_fails, new_count, old_count = _image_audit_for_period(begin_dec,
                                                                end_dec)
    summary = {
        'verifier': verify_summary,
        'image_summary': {
            'new_images': new_count,
            'old_images': old_count,
            'failures': len(image_fails),
        },
    }
    details = {
        'exist_fails': verify_detail,
        'image_fails': image_fails,
    }
    return summary, details
def _verifier_audit_for_day(beginning, ending, exists_model,
                            period_length=None):
    """Summarize verifier status for exists rows in [beginning, ending].

    Returns (summary, detail): summary['exists'] is the breakdown from
    _audit_for_exists() for rows whose audit period matches the expected
    length; detail lists failed rows as ['Exist', id, fail_reason].

    period_length: 'hour' or 'day'. Defaults to the module-global
    `args.period_length` for backward compatibility — previously this
    function always read that global, which raised NameError when the
    module was imported rather than run as a script.
    """
    if period_length is None:
        # Fall back to the CLI args parsed in __main__ (legacy behavior).
        period_length = args.period_length
    # The tiny epsilon is subtracted so the period comparison below
    # matches rows whose audit period is exactly one hour/day long —
    # presumably compensating for decimal timestamp rounding; TODO confirm.
    if period_length == 'hour':
        period = 60 * 60 - 0.000001
    else:
        period = 60 * 60 * 24 - 0.000001
    summary = {}
    filters = {
        'raw__when__gte': beginning,
        'raw__when__lte': ending,
        'audit_period_ending': F('audit_period_beginning') + period
    }
    exists = exists_model.objects.filter(**filters)
    summary['exists'] = _audit_for_exists(exists)
    filters = {
        'raw__when__gte': beginning,
        'raw__when__lte': ending,
        'status': exists_model.FAILED
    }
    failed = exists_model.objects.filter(**filters)
    detail = [['Exist', exist.id, exist.fail_reason] for exist in failed]
    return summary, detail
def _audit_for_exists(exists_query):
    """Break an exists queryset down by verification status and, for the
    verified subset, by HTTP send status."""
    (verified, reconciled,
     fail, pending, verifying) = usage_audit._status_queries(exists_query)
    (success, unsent, redirect,
     client_error, server_error) = usage_audit._send_status_queries(verified)
    send_status = {
        'success': success.count(),
        'unsent': unsent.count(),
        'redirect': redirect.count(),
        'client_error': client_error.count(),
        'server_error': server_error.count(),
    }
    return {
        'count': exists_query.count(),
        'verified': verified.count(),
        'failed': fail.count(),
        'pending': pending.count(),
        'verifying': verifying.count(),
        'send_status': send_status,
    }
def _image_audit_for_period(beginning, ending):
    """Cross-check this period's image usages against exists records.

    Returns (failures, new_image_count, old_image_count) where failures
    is the list produced by audit_usages_to_exists().
    """
    # Group the period's new usage rows by image uuid; one uuid can have
    # several usage rows, hence the list values.
    images_dict = {}
    new_images = _get_new_images(beginning, ending)
    for image in new_images:
        uuid = image.uuid
        l = {'id': image.id, 'created_at': image.created_at}
        if uuid in images_dict:
            images_dict[uuid].append(l)
        else:
            images_dict[uuid] = [l, ]
    # Images created before the period start and not deleted by then
    # (see OLD_IMAGES_QUERY). Both placeholders take the same value:
    # Django's safe substitution doesn't allow dict substitution...
    # Thus, we send it 'beginning' two times...
    old_images = models.ImageUsage.objects\
        .raw(OLD_IMAGES_QUERY,
             [beginning, beginning])
    old_images_dict = {}
    for image in old_images:
        uuid = image.uuid
        l = {'id': image.id, 'created_at': image.created_at}
        old_images_dict[uuid] = l
    # Group the period's exists rows by uuid, mirroring images_dict.
    exists_dict = {}
    exists = _get_exists(beginning, ending)
    for exist in exists:
        uuid = exist.uuid
        e = {'id': exist.id,
             'created_at': exist.created_at,
             'deleted_at': exist.deleted_at}
        if uuid in exists_dict:
            exists_dict[uuid].append(e)
        else:
            exists_dict[uuid] = [e, ]
    # Only new usages are audited against exists; old images are merely
    # counted for the summary.
    image_to_exists_fails = audit_usages_to_exists(exists_dict,images_dict)
    return image_to_exists_fails, new_images.count(), len(old_images_dict)
def store_results(start, end, summary, details):
    """Persist the audit as a JsonReport row covering [start, end]."""
    report = models.JsonReport(
        json=make_json_report(summary, details),
        created=dt.dt_to_decimal(datetime.datetime.utcnow()),
        period_start=start,
        period_end=end,
        version=4,
        name='glance usage audit')
    report.save()
def make_json_report(summary, details):
    """Serialize the audit into a JSON string: a summary row, a header
    row, then every exists failure followed by every image failure."""
    rows = [{'summary': summary},
            ['Object', 'ID', 'Error Description']]
    rows += details['exist_fails']
    rows += details['image_fails']
    return json.dumps(rows)
if __name__ == '__main__':
    def _str_to_bool(value):
        # argparse `type=bool` is a trap: bool("False") is True because
        # any non-empty string is truthy. Parse the spelling instead so
        # `--store False` actually means False.
        return str(value).strip().lower() in ('1', 'true', 'yes')

    # NOTE(review): the old description said "Nova", but this script
    # audits glance image usage (the stored report is named
    # 'glance usage audit').
    parser = argparse.ArgumentParser('StackTach Glance Usage Audit Report')
    parser.add_argument('--period_length',
                        choices=['hour', 'day'], default='day')
    parser.add_argument('--utcdatetime',
                        help="Override the end time used to generate report.",
                        type=valid_datetime, default=None)
    parser.add_argument('--store',
                        help="If set to true, report will be stored. "
                             "Otherwise, it will just be printed",
                        type=_str_to_bool, default=False)
    args = parser.parse_args()

    # Audit the period immediately preceding the chosen end time.
    if args.utcdatetime is not None:
        time = args.utcdatetime
    else:
        time = datetime.datetime.utcnow()
    start, end = usage_audit.get_previous_period(time, args.period_length)
    summary, details = audit_for_period(start, end)
    if args.store:
        store_results(start, end, summary, details)
    else:
        print(make_json_report(summary, details))

View File

@ -0,0 +1,157 @@
# Copyright (c) 2013 - Rackspace Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import argparse
import datetime
import json
import sys
import os
sys.path.append(os.environ.get('STACKTACH_INSTALL_DIR', '/stacktach'))
from stacktach import datetime_to_decimal as dt
from stacktach import models
def __get_image_activate_count(beginning, ending):
    """Count glance image events in [beginning, ending].

    Despite the name, this returns a dict of counts for upload,
    activate, delete, exists and verified-and-sent exists events.
    """
    image_upload = models.GlanceRawData.objects.filter(
        when__gte=beginning,
        when__lte=ending,
        event='image.upload').count()
    image_activate = models.ImageUsage.objects.filter(
        last_raw__when__gte=beginning,
        last_raw__when__lte=ending).count()
    # Deletes, exists and verified-exists all filter on the raw event's
    # timestamp falling inside the period.
    in_period = {
        'raw__when__gte': beginning,
        'raw__when__lte': ending,
    }
    image_delete = models.ImageDeletes.objects.filter(**in_period).count()
    image_exists = models.ImageExists.objects.filter(**in_period).count()
    image_exists_verified = models.ImageExists.objects.filter(
        status=models.ImageExists.VERIFIED,
        send_status__gte=200,
        send_status__lt=300,
        **in_period).count()
    return {
        'image.upload': image_upload,
        'image.activate': image_activate,
        'image.delete': image_delete,
        'image.exists': image_exists,
        'image.exists.verified': image_exists_verified,
    }
def audit_for_period(beginning, ending):
    """Return image event counts for the datetime window
    [beginning, ending], converting to decimal timestamps first."""
    return __get_image_activate_count(dt.dt_to_decimal(beginning),
                                      dt.dt_to_decimal(ending))
def get_previous_period(time, period_length):
    """Return (start, end) of the whole audit period preceding `time`.

    'day'  -> the previous calendar day (midnight to midnight).
    'hour' -> the previous clock hour.

    Raises ValueError for any other period_length; the old code fell
    through and implicitly returned None, which made callers fail later
    with an opaque unpacking error.
    """
    if period_length == 'day':
        start = (time - datetime.timedelta(days=1)).replace(
            hour=0, minute=0, second=0, microsecond=0)
        end = time.replace(hour=0, minute=0, second=0, microsecond=0)
    elif period_length == 'hour':
        start = (time - datetime.timedelta(hours=1)).replace(
            minute=0, second=0, microsecond=0)
        end = time.replace(minute=0, second=0, microsecond=0)
    else:
        raise ValueError("period_length must be 'day' or 'hour', got %r"
                         % (period_length,))
    return start, end
def __make_json_report(report):
    """Serialize the report structure to a JSON string."""
    return json.dumps(report)
def __store_results(start, end, report):
    """Persist the event counts as a JsonReport row for [start, end]."""
    row = models.JsonReport(
        json=__make_json_report(report),
        created=dt.dt_to_decimal(datetime.datetime.utcnow()),
        period_start=start,
        period_end=end,
        version=1,
        name='image events audit')
    row.save()
def valid_datetime(d):
    """argparse type-converter: parse d as 'YYYY-MM-DD HH:MM:SS'.

    Returns a datetime on success; raises argparse.ArgumentTypeError on
    malformed input so argparse reports a clean usage error.
    """
    try:
        return datetime.datetime.strptime(d, "%Y-%m-%d %H:%M:%S")
    except ValueError:
        # strptime signals bad input with ValueError; the previous
        # `except Exception, e` was Python-2-only syntax, caught too
        # broadly, and never used `e`.
        raise argparse.ArgumentTypeError(
            "'%s' is not in YYYY-MM-DD HH:MM:SS format." % d)
if __name__ == '__main__':
    def _str_to_bool(value):
        # argparse `type=bool` is a trap: bool("False") is True because
        # any non-empty string is truthy. Parse the spelling instead so
        # `--store False` actually means False.
        return str(value).strip().lower() in ('1', 'true', 'yes')

    parser = argparse.ArgumentParser('StackTach Image Events Audit Report')
    parser.add_argument('--period_length',
                        choices=['hour', 'day'], default='day')
    parser.add_argument('--utcdatetime',
                        help="Override the end time used to generate report.",
                        type=valid_datetime, default=None)
    parser.add_argument('--store',
                        help="If set to true, report will be stored. "
                             "Otherwise, it will just be printed",
                        type=_str_to_bool, default=False)
    args = parser.parse_args()

    # Audit the period immediately preceding the chosen end time.
    if args.utcdatetime is not None:
        time = args.utcdatetime
    else:
        time = datetime.datetime.utcnow()
    start, end = get_previous_period(time, args.period_length)
    event_counts = audit_for_period(start, end)
    if args.store:
        __store_results(start, end, event_counts)
    else:
        print(event_counts)

View File

@ -26,11 +26,12 @@ import os
sys.path.append(os.environ.get('STACKTACH_INSTALL_DIR', '/stacktach'))
from django.db.models import F
import usage_audit
from stacktach import datetime_to_decimal as dt
from stacktach import models
from stacktach.reconciler import Reconciler
from stacktach import stacklog
OLD_LAUNCHES_QUERY = """
select stacktach_instanceusage.id,
@ -123,98 +124,6 @@ def _audit_launches_to_exists(launches, exists, beginning):
return fails
def _status_queries(exists_query):
verified = exists_query.filter(status=models.InstanceExists.VERIFIED)
reconciled = exists_query.filter(status=models.InstanceExists.RECONCILED)
fail = exists_query.filter(status=models.InstanceExists.FAILED)
pending = exists_query.filter(status=models.InstanceExists.PENDING)
verifying = exists_query.filter(status=models.InstanceExists.VERIFYING)
return verified, reconciled, fail, pending, verifying
def _send_status_queries(exists_query):
unsent = exists_query.filter(send_status=0)
success = exists_query.filter(send_status__gte=200,
send_status__lt=300)
redirect = exists_query.filter(send_status__gte=300,
send_status__lt=400)
client_error = exists_query.filter(send_status__gte=400,
send_status__lt=500)
server_error = exists_query.filter(send_status__gte=500,
send_status__lt=600)
return success, unsent, redirect, client_error, server_error
def _audit_for_exists(exists_query):
(verified, reconciled,
fail, pending, verifying) = _status_queries(exists_query)
(success, unsent, redirect,
client_error, server_error) = _send_status_queries(verified)
(success_rec, unsent_rec, redirect_rec,
client_error_rec, server_error_rec) = _send_status_queries(reconciled)
report = {
'count': exists_query.count(),
'verified': verified.count(),
'reconciled': reconciled.count(),
'failed': fail.count(),
'pending': pending.count(),
'verifying': verifying.count(),
'send_status': {
'success': success.count(),
'unsent': unsent.count(),
'redirect': redirect.count(),
'client_error': client_error.count(),
'server_error': server_error.count(),
},
'send_status_rec': {
'success': success_rec.count(),
'unsent': unsent_rec.count(),
'redirect': redirect_rec.count(),
'client_error': client_error_rec.count(),
'server_error': server_error_rec.count(),
}
}
return report
def _verifier_audit_for_day(beginning, ending):
summary = {}
filters = {
'raw__when__gte': beginning,
'raw__when__lte': ending,
'audit_period_ending': F('audit_period_beginning') + (60*60*24)
}
periodic_exists = models.InstanceExists.objects.filter(**filters)
summary['periodic'] = _audit_for_exists(periodic_exists)
filters = {
'raw__when__gte': beginning,
'raw__when__lte': ending,
'audit_period_ending__lt': F('audit_period_beginning') + (60*60*24)
}
instant_exists = models.InstanceExists.objects.filter(**filters)
summary['instantaneous'] = _audit_for_exists(instant_exists)
filters = {
'raw__when__gte': beginning,
'raw__when__lte': ending,
'status': models.InstanceExists.FAILED
}
failed = models.InstanceExists.objects.filter(**filters)
detail = []
for exist in failed:
detail.append(['Exist', exist.id, exist.fail_reason])
return summary, detail
def _launch_audit_for_period(beginning, ending):
launches_dict = {}
new_launches = _get_new_launches(beginning, ending)
@ -278,7 +187,6 @@ def _launch_audit_for_period(beginning, ending):
launch_to_exists_fails = _audit_launches_to_exists(launches_dict,
exists_dict,
beginning)
return launch_to_exists_fails, new_launches.count(), len(old_launches_dict)
@ -287,8 +195,9 @@ def audit_for_period(beginning, ending):
ending_decimal = dt.dt_to_decimal(ending)
(verify_summary,
verify_detail) = _verifier_audit_for_day(beginning_decimal,
ending_decimal)
verify_detail) = usage_audit._verifier_audit_for_day(beginning_decimal,
ending_decimal,
models.InstanceExists)
detail, new_count, old_count = _launch_audit_for_period(beginning_decimal,
ending_decimal)
@ -309,29 +218,6 @@ def audit_for_period(beginning, ending):
return summary, details
def get_previous_period(time, period_length):
if period_length == 'day':
last_period = time - datetime.timedelta(days=1)
start = datetime.datetime(year=last_period.year,
month=last_period.month,
day=last_period.day)
end = datetime.datetime(year=time.year,
month=time.month,
day=time.day)
return start, end
elif period_length == 'hour':
last_period = time - datetime.timedelta(hours=1)
start = datetime.datetime(year=last_period.year,
month=last_period.month,
day=last_period.day,
hour=last_period.hour)
end = datetime.datetime(year=time.year,
month=time.month,
day=time.day,
hour=time.hour)
return start, end
def store_results(start, end, summary, details):
values = {
'json': make_json_report(summary, details),
@ -383,6 +269,11 @@ if __name__ == '__main__':
default='/etc/stacktach/reconciler-config.json')
args = parser.parse_args()
stacklog.set_default_logger_name('nova_usage_audit')
parent_logger = stacklog.get_logger('nova_usage_audit', is_parent=True)
log_listener = stacklog.LogListener(parent_logger)
log_listener.start()
if args.reconcile:
with open(args.reconciler_config) as f:
reconciler_config = json.load(f)
@ -393,7 +284,7 @@ if __name__ == '__main__':
else:
time = datetime.datetime.utcnow()
start, end = get_previous_period(time, args.period_length)
start, end = usage_audit.get_previous_period(time, args.period_length)
summary, details = audit_for_period(start, end)

117
reports/usage_audit.py Normal file
View File

@ -0,0 +1,117 @@
import datetime
from django.db.models import F
from stacktach import models
def _status_queries(exists_query):
    """Split an exists queryset into one queryset per verification status.

    Returns (verified, reconciled, fail, pending, verifying).
    """
    def with_status(status):
        return exists_query.filter(status=status)

    return (with_status(models.InstanceExists.VERIFIED),
            with_status(models.InstanceExists.RECONCILED),
            with_status(models.InstanceExists.FAILED),
            with_status(models.InstanceExists.PENDING),
            with_status(models.InstanceExists.VERIFYING))
def _send_status_queries(exists_query):
    """Split a queryset by HTTP send_status class.

    Returns (success, unsent, redirect, client_error, server_error):
    2xx, exactly 0 (never sent), 3xx, 4xx and 5xx respectively.
    """
    def in_range(low, high):
        return exists_query.filter(send_status__gte=low,
                                   send_status__lt=high)

    unsent = exists_query.filter(send_status=0)
    return (in_range(200, 300), unsent, in_range(300, 400),
            in_range(400, 500), in_range(500, 600))
def _audit_for_exists(exists_query):
    """Summarize an exists queryset: verification-status counts plus
    send-status breakdowns for the verified and reconciled subsets."""
    (verified, reconciled,
     fail, pending, verifying) = _status_queries(exists_query)

    def send_report(query):
        # Break one status subset down by HTTP send result.
        (success, unsent, redirect,
         client_error, server_error) = _send_status_queries(query)
        return {
            'success': success.count(),
            'unsent': unsent.count(),
            'redirect': redirect.count(),
            'client_error': client_error.count(),
            'server_error': server_error.count(),
        }

    return {
        'count': exists_query.count(),
        'verified': verified.count(),
        'reconciled': reconciled.count(),
        'failed': fail.count(),
        'pending': pending.count(),
        'verifying': verifying.count(),
        'send_status': send_report(verified),
        'send_status_rec': send_report(reconciled),
    }
def _verifier_audit_for_day(beginning, ending, exists_model):
    """Audit verifier output for exists rows in [beginning, ending].

    Returns (summary, detail): summary has 'periodic' (audit period of
    exactly one day) and 'instantaneous' (shorter period) breakdowns
    from _audit_for_exists(); detail lists failed rows as
    ['Exist', id, fail_reason].
    """
    one_day = 60 * 60 * 24
    in_window = {
        'raw__when__gte': beginning,
        'raw__when__lte': ending,
    }
    summary = {}
    periodic = exists_model.objects.filter(
        audit_period_ending=F('audit_period_beginning') + one_day,
        **in_window)
    summary['periodic'] = _audit_for_exists(periodic)
    instant = exists_model.objects.filter(
        audit_period_ending__lt=F('audit_period_beginning') + one_day,
        **in_window)
    summary['instantaneous'] = _audit_for_exists(instant)
    failed = exists_model.objects.filter(status=exists_model.FAILED,
                                         **in_window)
    detail = [['Exist', exist.id, exist.fail_reason] for exist in failed]
    return summary, detail
def get_previous_period(time, period_length):
    """Return (start, end) of the whole audit period preceding `time`.

    'day'  -> the previous calendar day (midnight to midnight).
    'hour' -> the previous clock hour.

    Raises ValueError for any other period_length; the old code fell
    through and implicitly returned None, which made callers fail later
    with an opaque unpacking error.
    """
    if period_length == 'day':
        start = (time - datetime.timedelta(days=1)).replace(
            hour=0, minute=0, second=0, microsecond=0)
        end = time.replace(hour=0, minute=0, second=0, microsecond=0)
    elif period_length == 'hour':
        start = (time - datetime.timedelta(hours=1)).replace(
            minute=0, second=0, microsecond=0)
        end = time.replace(minute=0, second=0, microsecond=0)
    else:
        raise ValueError("period_length must be 'day' or 'hour', got %r"
                         % (period_length,))
    return start, end

View File

@ -0,0 +1,56 @@
#!/usr/bin/python
import os
import sys
sys.path.append(os.environ.get('STACKTACH_INSTALL_DIR', '/stacktach'))
import csv
from stacktach import models
def migrate_forwards(csv_file_path):
    """For each (old_flavor, new_flavor) CSV row, stamp
    instance_flavor_id=new_flavor onto usage/exists/reconcile rows that
    carry the old instance_type_id and have no flavor id yet."""
    with open(csv_file_path, "r") as f:
        for old_flavor, new_flavor in csv.reader(f):
            for model in (models.InstanceUsage,
                          models.InstanceExists,
                          models.InstanceReconcile):
                model.objects.filter(
                    instance_type_id=old_flavor,
                    instance_flavor_id=None
                ).update(instance_flavor_id=new_flavor)
def migrate_backwards(csv_file_path):
    """Undo migrate_forwards: clear instance_flavor_id wherever it was
    set to one of the CSV's new_flavor values. The old_flavor column is
    read but unused in this direction."""
    with open(csv_file_path, "r") as f:
        for _old_flavor, new_flavor in csv.reader(f):
            for model in (models.InstanceUsage,
                          models.InstanceExists,
                          models.InstanceReconcile):
                model.objects.filter(
                    instance_flavor_id=new_flavor
                ).update(instance_flavor_id=None)
if __name__ == '__main__':
    USAGE = ("""usage: migrate_flavor_id.py <csv_file_absolute_path>"""
             """ <forwards | backwards>"""
             """\n\nThe input file for this script can be generated"""
             """ using the following SQL query:"""
             """\nSELECT id, flavorid"""
             """\nINTO OUTFILE '/tmp/flavors.csv'"""
             """\nFIELDS TERMINATED BY ','"""
             """\nENCLOSED BY '"'"""
             """\nLINES TERMINATED BY '\\n'"""
             """\nFROM instance_types;""")
    # Validate argv explicitly instead of catching a broad Exception
    # around the indexing.
    if len(sys.argv) < 3:
        print(USAGE)
        sys.exit(2)
    csv_file_path = sys.argv[1]
    action = sys.argv[2]
    if action == "forwards":
        migrate_forwards(csv_file_path)
    elif action == "backwards":
        migrate_backwards(csv_file_path)
    else:
        # Previously an unrecognized action silently did nothing.
        print(USAGE)
        sys.exit(2)

View File

@ -1,7 +1,6 @@
import calendar
import datetime
import decimal
import time
def dt_to_decimal(utc):

View File

@ -16,6 +16,10 @@ def _safe_get(Model, **kwargs):
return object
def get_deployment(id):
return _safe_get(models.Deployment, id=id)
def get_or_create_deployment(name):
return models.Deployment.objects.get_or_create(name=name)
@ -132,4 +136,4 @@ def get_image_delete(**kwargs):
def get_image_usage(**kwargs):
return _safe_get(models.ImageUsage, **kwargs)
return _safe_get(models.ImageUsage, **kwargs)

View File

@ -0,0 +1,244 @@
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add the nullable, indexed
    ``instance_flavor_id`` CharField(100) column to the
    InstanceReconcile, InstanceExists and InstanceUsage tables.

    NOTE: this file is South-generated boilerplate; the ``models`` dict
    below is a frozen snapshot of the whole app's ORM at migration time,
    not something to edit by hand.
    """

    def forwards(self, orm):
        """Apply: add ``instance_flavor_id`` to the three usage tables."""
        # Adding field 'InstanceReconcile.instance_flavor_id'
        db.add_column(u'stacktach_instancereconcile', 'instance_flavor_id',
                      self.gf('django.db.models.fields.CharField')(db_index=True, max_length=100, null=True, blank=True),
                      keep_default=False)

        # Adding field 'InstanceExists.instance_flavor_id'
        db.add_column(u'stacktach_instanceexists', 'instance_flavor_id',
                      self.gf('django.db.models.fields.CharField')(db_index=True, max_length=100, null=True, blank=True),
                      keep_default=False)

        # Adding field 'InstanceUsage.instance_flavor_id'
        db.add_column(u'stacktach_instanceusage', 'instance_flavor_id',
                      self.gf('django.db.models.fields.CharField')(db_index=True, max_length=100, null=True, blank=True),
                      keep_default=False)

    def backwards(self, orm):
        """Revert: drop ``instance_flavor_id`` from the three usage tables."""
        # Deleting field 'InstanceReconcile.instance_flavor_id'
        db.delete_column(u'stacktach_instancereconcile', 'instance_flavor_id')

        # Deleting field 'InstanceExists.instance_flavor_id'
        db.delete_column(u'stacktach_instanceexists', 'instance_flavor_id')

        # Deleting field 'InstanceUsage.instance_flavor_id'
        db.delete_column(u'stacktach_instanceusage', 'instance_flavor_id')

    # Frozen ORM snapshot used by South to build the `orm` object passed
    # to forwards()/backwards(). Auto-generated; do not edit manually.
    models = {
        u'stacktach.deployment': {
            'Meta': {'object_name': 'Deployment'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'stacktach.genericrawdata': {
            'Meta': {'object_name': 'GenericRawData'},
            'deployment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Deployment']"}),
            'event': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'host': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'json': ('django.db.models.fields.TextField', [], {}),
            'message_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'publisher': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'routing_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'service': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'when': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
        },
        u'stacktach.glancerawdata': {
            'Meta': {'object_name': 'GlanceRawData'},
            'deployment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Deployment']"}),
            'event': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'host': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'json': ('django.db.models.fields.TextField', [], {}),
            'owner': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'publisher': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'routing_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'service': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'db_index': 'True'}),
            'uuid': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '36', 'null': 'True', 'blank': 'True'}),
            'when': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
        },
        u'stacktach.imagedeletes': {
            'Meta': {'object_name': 'ImageDeletes'},
            'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.GlanceRawData']", 'null': 'True'}),
            'uuid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
        },
        u'stacktach.imageexists': {
            'Meta': {'object_name': 'ImageExists'},
            'audit_period_beginning': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'audit_period_ending': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'created_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'delete': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.ImageDeletes']"}),
            'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'fail_reason': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'owner': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'db_index': 'True'}),
            'raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': u"orm['stacktach.GlanceRawData']"}),
            'send_status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
            'size': ('django.db.models.fields.BigIntegerField', [], {'max_length': '20'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '50', 'db_index': 'True'}),
            'usage': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.ImageUsage']"}),
            'uuid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'db_index': 'True'})
        },
        u'stacktach.imageusage': {
            'Meta': {'object_name': 'ImageUsage'},
            'created_at': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.GlanceRawData']", 'null': 'True'}),
            'owner': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'db_index': 'True'}),
            'size': ('django.db.models.fields.BigIntegerField', [], {'max_length': '20'}),
            'uuid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
        },
        u'stacktach.instancedeletes': {
            'Meta': {'object_name': 'InstanceDeletes'},
            'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'})
        },
        u'stacktach.instanceexists': {
            'Meta': {'object_name': 'InstanceExists'},
            'audit_period_beginning': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'audit_period_ending': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'bandwidth_public_out': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
            'delete': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceDeletes']"}),
            'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'fail_reason': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '300', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'instance_flavor_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'message_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
            'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'send_status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '50', 'db_index': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'usage': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.InstanceUsage']"})
        },
        u'stacktach.instancereconcile': {
            'Meta': {'object_name': 'InstanceReconcile'},
            'deleted_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'instance_flavor_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'row_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'row_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'source': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '150', 'null': 'True', 'blank': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'stacktach.instanceusage': {
            'Meta': {'object_name': 'InstanceUsage'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'instance_flavor_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'instance_type_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'launched_at': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'stacktach.jsonreport': {
            'Meta': {'object_name': 'JsonReport'},
            'created': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'json': ('django.db.models.fields.TextField', [], {}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'period_end': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
            'period_start': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
            'version': ('django.db.models.fields.IntegerField', [], {'default': '1'})
        },
        u'stacktach.lifecycle': {
            'Meta': {'object_name': 'Lifecycle'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'last_raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']", 'null': 'True'}),
            'last_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'last_task_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'})
        },
        u'stacktach.rawdata': {
            'Meta': {'object_name': 'RawData'},
            'deployment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Deployment']"}),
            'event': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'host': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
            'instance': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'json': ('django.db.models.fields.TextField', [], {}),
            'old_state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'old_task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
            'publisher': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'request_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'routing_key': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'service': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'state': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'task': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '30', 'null': 'True', 'blank': 'True'}),
            'tenant': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'when': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
        },
        u'stacktach.rawdataimagemeta': {
            'Meta': {'object_name': 'RawDataImageMeta'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'os_architecture': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_distro': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'os_version': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'raw': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.RawData']"}),
            'rax_options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
        },
        u'stacktach.requesttracker': {
            'Meta': {'object_name': 'RequestTracker'},
            'completed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'duration': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_timing': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Timing']", 'null': 'True'}),
            'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
            'request_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'start': ('django.db.models.fields.DecimalField', [], {'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'})
        },
        u'stacktach.timing': {
            'Meta': {'object_name': 'Timing'},
            'diff': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6', 'db_index': 'True'}),
            'end_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
            'end_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lifecycle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['stacktach.Lifecycle']"}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
            'start_raw': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'null': 'True', 'to': u"orm['stacktach.RawData']"}),
            'start_when': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '20', 'decimal_places': '6'})
        }
    }

    complete_apps = ['stacktach']

View File

@ -16,6 +16,7 @@ import datetime
import copy
from django.db import models
from django.db.models import Q
from stacktach import datetime_to_decimal as dt
@ -164,6 +165,8 @@ class InstanceUsage(models.Model):
null=True,
blank=True,
db_index=True)
instance_flavor_id = models.CharField(max_length=100, null=True,
blank=True, db_index=True)
tenant = models.CharField(max_length=50, null=True, blank=True,
db_index=True)
os_architecture = models.TextField(null=True, blank=True)
@ -226,6 +229,8 @@ class InstanceReconcile(models.Model):
null=True,
blank=True,
db_index=True)
instance_flavor_id = models.CharField(max_length=100, null=True,
blank=True, db_index=True)
tenant = models.CharField(max_length=50, null=True, blank=True,
db_index=True)
os_architecture = models.TextField(null=True, blank=True)
@ -292,6 +297,8 @@ class InstanceExists(models.Model):
os_version = models.TextField(null=True, blank=True)
rax_options = models.TextField(null=True, blank=True)
bandwidth_public_out = models.BigIntegerField(default=0)
instance_flavor_id = models.CharField(max_length=100, null=True,
blank=True, db_index=True)
def deployment(self):
return self.raw.deployment
@ -485,10 +492,19 @@ class ImageExists(models.Model):
self.status = new_status
@staticmethod
def find(ending_max, status):
def find_and_group_by_owner_and_raw_id(ending_max, status):
params = {'audit_period_ending__lte': dt.dt_to_decimal(ending_max),
'status': status}
return ImageExists.objects.select_related().filter(**params).order_by('id')
ordered_exists = ImageExists.objects.select_related().\
filter(**params).order_by('owner')
result = {}
for exist in ordered_exists:
key = "%s-%s" % (exist.owner, exist.raw_id)
if key in result:
result[key].append(exist)
else:
result[key] = [exist]
return result
def mark_verified(self):
self.status = InstanceExists.VERIFIED

View File

@ -103,14 +103,6 @@ class GlanceNotification(Notification):
self.size = self.payload.get('size', None)
created_at = self.payload.get('created_at', None)
self.created_at = created_at and utils.str_time_to_unix(created_at)
audit_period_beginning = self.payload.get(
'audit_period_beginning', None)
self.audit_period_beginning = audit_period_beginning and\
utils.str_time_to_unix(audit_period_beginning)
audit_period_ending = self.payload.get(
'audit_period_ending', None)
self.audit_period_ending = audit_period_ending and \
utils.str_time_to_unix(audit_period_ending)
else:
self.properties = {}
self.image_type = None
@ -118,8 +110,6 @@ class GlanceNotification(Notification):
self.uuid = None
self.size = None
self.created_at = None
self.audit_period_beginning = None
self.audit_period_ending = None
@property
def owner(self):
@ -131,6 +121,7 @@ class GlanceNotification(Notification):
    @property
    def instance(self):
        """Instance UUID from the image properties, or None if absent."""
        return self.properties.get('instance_uuid', None)
@property
def deleted_at(self):
deleted_at = self.body.get('deleted_at', None)
@ -157,27 +148,51 @@ class GlanceNotification(Notification):
uuid=self.uuid)
def save_exists(self, raw):
if self.created_at:
values = {
'uuid': self.uuid,
'audit_period_beginning': self.audit_period_beginning,
'audit_period_ending': self.audit_period_ending,
'owner': self.owner,
'size': self.size,
'raw': raw
}
usage = db.get_image_usage(uuid=self.uuid)
values['usage'] = usage
values['created_at'] = self.created_at
if self.deleted_at:
delete = db.get_image_delete(uuid=self.uuid)
values['delete'] = delete
values['deleted_at'] = self.deleted_at
db.create_image_exists(**values)
if isinstance(self.payload, dict):
audit_period_beginning = self.payload.get(
'audit_period_beginning', None)
audit_period_beginning = audit_period_beginning and\
utils.str_time_to_unix(audit_period_beginning)
audit_period_ending = self.payload.get(
'audit_period_ending', None)
audit_period_ending = audit_period_ending and \
utils.str_time_to_unix(audit_period_ending)
images = self.payload.get('images', [])
else:
stacklog.warn("Ignoring exists without created_at. GlanceRawData(%s)"
% raw.id)
stacklog.warn("Received exists with invalid payload "
"GlanceRawData(%s)" % raw.id)
audit_period_beginning = None
audit_period_ending = None
images = []
for image in images:
created_at = image['created_at']
created_at = created_at and utils.str_time_to_unix(created_at)
uuid = image['id']
deleted_at = image['deleted_at']
deleted_at = deleted_at and utils.str_time_to_unix(deleted_at)
if created_at:
values = {
'uuid': uuid,
'audit_period_beginning': audit_period_beginning,
'audit_period_ending': audit_period_ending,
'owner': self.owner,
'size': image['size'],
'raw': raw
}
usage = db.get_image_usage(uuid=uuid)
values['usage'] = usage
values['created_at'] = created_at
if deleted_at:
delete = db.get_image_delete(uuid=uuid)
values['delete'] = delete
values['deleted_at'] = deleted_at
db.create_image_exists(**values)
else:
stacklog.warn("Ignoring exists without created_at. GlanceRawData(%s)"
% raw.id)
def save_usage(self, raw):
values = {
@ -214,6 +229,7 @@ class NovaNotification(Notification):
self.os_version = image_meta.get('org.openstack__1__os_version', '')
self.rax_options = image_meta.get('com.rackspace__1__options', '')
self.instance_type_id = self.payload.get('instance_type_id', None)
self.instance_flavor_id = self.payload.get('instance_flavor_id', None)
self.new_instance_type_id = \
self.payload.get('new_instance_type_id', None)
self.launched_at = self.payload.get('launched_at', None)

View File

@ -79,6 +79,7 @@ class Reconciler(object):
'launched_at': usage.launched_at,
'deleted_at': deleted_at,
'instance_type_id': usage.instance_type_id,
'instance_flavor_id': usage.instance_flavor_id,
'source': 'reconciler:%s' % src,
'tenant': usage.tenant,
'os_architecture': usage.os_architecture,
@ -93,6 +94,7 @@ class Reconciler(object):
if (exists.launched_at != instance['launched_at'] or
exists.instance_type_id != instance['instance_type_id'] or
exists.instance_flavor_id != instance['instance_flavor_id'] or
exists.tenant != instance['tenant'] or
exists.os_architecture != instance['os_architecture'] or
exists.os_distro != instance['os_distro'] or

View File

@ -4,7 +4,9 @@ from stacktach import utils as stackutils
from stacktach.reconciler import exceptions
from stacktach.reconciler.utils import empty_reconciler_instance
GET_INSTANCE_QUERY = "SELECT * FROM instances where uuid ='%s';"
GET_INSTANCE_QUERY = \
"SELECT i.*, it.flavorid FROM instances i INNER JOIN " \
"instance_types it on i.instance_type_id = it.id where i.uuid ='%s';"
METADATA_MAPPING = {
'image_org.openstack__1__architecture': 'os_architecture',
@ -51,6 +53,7 @@ class JSONBridgeClient(object):
'id': instance['uuid'],
'tenant': instance['project_id'],
'instance_type_id': str(instance['instance_type_id']),
'instance_flavor_id': str(instance['flavorid']),
})
if instance['launched_at'] is not None:

View File

@ -6,6 +6,7 @@ def empty_reconciler_instance():
'deleted': False,
'deleted_at': None,
'instance_type_id': None,
'instance_flavor_id': None,
'os_architecture': '',
'os_distro': '',
'os_version': '',

View File

@ -20,8 +20,13 @@
import logging
import logging.handlers
import multiprocessing
import threading
import traceback
import sys
LOGGERS = {}
LOGGER_QUEUE_MAP = {}
default_logger_location = '/var/log/stacktach/%s.log'
default_logger_name = 'stacktach-default'
@ -36,84 +41,148 @@ def set_default_logger_name(name):
default_logger_name = name
def _logger_factory(exchange, name):
if exchange:
return ExchangeLogger(exchange, name)
class ParentLoggerDoesNotExist(Exception):
    """Raised when a child logger is requested for a parent logger that
    has not been created yet."""

    def __init__(self, parent_logger_name):
        # Bug fix: the two implicitly-concatenated string fragments were
        # missing a joining space, producing "...with thename %s...".
        self.reason = "Cannot create child logger as parent logger with " \
                      "the name %s does not exist." % parent_logger_name
        # Forward the message to Exception so str(exc) is meaningful too.
        super(ParentLoggerDoesNotExist, self).__init__(self.reason)
def _create_parent_logger(parent_logger_name):
    """Return (creating on first use) the file-backed parent logger.

    First call for a name also allocates the multiprocessing queue that
    child loggers in worker processes push their records onto.
    """
    if parent_logger_name not in LOGGERS:
        logger = _create_timed_rotating_logger(parent_logger_name)
        LOGGERS[parent_logger_name] = logger
        # -1 => unbounded queue shared by this parent's child loggers.
        LOGGER_QUEUE_MAP[parent_logger_name] = multiprocessing.Queue(-1)
    return LOGGERS[parent_logger_name]
def _create_child_logger(parent_logger_name):
child_logger_name = "child_%s" % parent_logger_name
if child_logger_name in LOGGERS:
return LOGGERS[child_logger_name]
if parent_logger_name in LOGGERS:
queue = LOGGER_QUEUE_MAP[parent_logger_name]
logger = _create_queue_logger(child_logger_name, queue)
LOGGERS[child_logger_name] = logger
else:
logger = logging.getLogger(__name__)
_configure(logger, name)
return logger
raise ParentLoggerDoesNotExist(parent_logger_name)
return LOGGERS[child_logger_name]
def _make_logger(name, exchange=None):
log = _logger_factory(exchange, name)
return log
def _logger_factory(parent_logger_name, is_parent):
    """Build/fetch a parent or child logger, defaulting the name when None."""
    if parent_logger_name is None:
        parent_logger_name = default_logger_name
    builder = _create_parent_logger if is_parent else _create_child_logger
    return builder(parent_logger_name)
def init_logger(name=None, exchange=None):
global LOGGERS
if name is None:
name = default_logger_name
if name not in LOGGERS:
LOGGERS[name] = _make_logger(name, exchange)
def get_logger(name=None, exchange=None):
global LOGGERS
if name is None:
name = default_logger_name
init_logger(name=name, exchange=exchange)
return LOGGERS[name]
def get_logger(name=None, is_parent=True):
    """Public accessor: parent loggers write to rotating files, child
    loggers enqueue records for a listener to replay."""
    return _logger_factory(name, is_parent)
def warn(msg, name=None):
if name is None:
name = default_logger_name
get_logger(name=name).warn(msg)
get_logger(name=name, is_parent=False).warn(msg)
def error(msg, name=None):
if name is None:
name = default_logger_name
get_logger(name=name).error(msg)
get_logger(name=name, is_parent=False).error(msg)
def info(msg, name=None):
if name is None:
name = default_logger_name
get_logger(name=name).info(msg)
get_logger(name=name, is_parent=False).info(msg)
def _configure(logger, name):
logger.setLevel(logging.DEBUG)
handler = logging.handlers.TimedRotatingFileHandler(
default_logger_location % name,
when='midnight', interval=1, backupCount=3)
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.handlers[0].doRollover()
def _create_timed_rotating_logger(name):
    """Build a DEBUG-level logger rotating nightly, keeping 3 backups.

    The log path comes from `default_logger_location % name`.
    """
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    handler = logging.handlers.TimedRotatingFileHandler(
        default_logger_location % name,
        when='midnight', interval=1, backupCount=3)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    # Roll over immediately so each run starts on a fresh log file.
    logger.handlers[0].doRollover()
    return logger
class ExchangeLogger():
def __init__(self, exchange, name='stacktach-default'):
self.logger = logging.getLogger(__name__)
_configure(self.logger, name)
self.exchange = exchange
def _create_queue_logger(name, queue):
    """Build a DEBUG-level logger whose only handler enqueues records.

    Used by child loggers: records are put on `queue` instead of being
    written, so the process owning the real file logger can emit them.
    """
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    handler = QueueHandler(queue)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    return logger
def info(self, msg, *args, **kwargs):
msg = self.exchange + ': ' + msg
self.logger.info(msg, *args, **kwargs)
def warn(self, msg, *args, **kwargs):
msg = self.exchange + ': ' + msg
self.logger.warn(msg, *args, **kwargs)
class QueueHandler(logging.Handler):
    """logging.Handler that enqueues records instead of writing them,
    letting another process/thread perform the actual output."""

    def __init__(self, queue):
        logging.Handler.__init__(self)
        # Queue shared with the consumer (see LOGGER_QUEUE_MAP).
        self.queue = queue
def error(self, msg, *args, **kwargs):
msg = self.exchange + ': ' + msg
self.logger.error(msg, *args, **kwargs)
    def emit(self, record):
        """Flatten `record` to a picklable form and enqueue it.

        Never raises into the logging machinery: errors are routed to
        handleError(), per logging.Handler convention.
        """
        try:
            # ensure that exc_info and args
            # have been stringified. Removes any chance of
            # unpickleable things inside and possibly reduces
            # message size sent over the pipe
            if record.exc_info:
                # just to get traceback text into record.exc_text
                self.format(record)
                # remove exception info as it's not needed any more
                record.exc_info = None
            if record.args:
                record.msg = record.msg % record.args
                record.args = None
            self.queue.put_nowait(record)
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)
def exception(self, msg, *args, **kwargs):
msg = self.exchange + ': ' + msg
self.logger.error(msg, *args, **kwargs)
class LogListener:
    """Drains a parent logger's queue on a daemon thread.

    QueueHandler-based child loggers enqueue LogRecords; this listener
    replays each record on the real (file-backed) parent logger via
    Logger.handle(). Stop it with end(), which sends a None sentinel.
    """

    def __init__(self, logger):
        # `logger` is the parent logger whose registered queue we consume.
        self.logger = logger
        self.queue = get_queue(logger.name)

    def start(self):
        """Start the drain thread (daemonized so it never blocks exit)."""
        self.thread = threading.Thread(target=self._receive)
        self.thread.daemon = True
        self.thread.start()

    def _receive(self):
        # Consume records until the sentinel arrives or the queue's pipe
        # is torn down (EOFError).
        while True:
            try:
                record = self.queue.get()
                # None is sent as a sentinel to tell the listener to quit
                if record is None:
                    break
                self.logger.handle(record)
            except (KeyboardInterrupt, SystemExit):
                raise
            except EOFError:
                break
            except:
                # Keep draining even if one record fails to be handled.
                traceback.print_exc(file=sys.stderr)

    def end(self):
        """Signal shutdown, wait for the drain thread, close handlers."""
        self.queue.put_nowait(None)
        self.thread.join()
        for handler in self.logger.handlers:
            handler.close()
def get_queue(logger_name):
    """Return the record queue registered for `logger_name`.

    Raises KeyError if no parent logger of that name has been created.
    """
    return LOGGER_QUEUE_MAP[logger_name]

View File

@ -90,12 +90,12 @@ def get_all_event_names():
events.extend(get_event_names(service))
return events
def get_host_names():
def get_host_names(service):
# TODO: We need to upgrade to Django 1.4 so we can get tenent id and
# host and just do distinct on host name.
# like: values('host', 'tenant_id').distinct('host')
# This will be more meaningful. Host by itself isn't really.
return models.RawData.objects.values('host').distinct()
return _model_factory(service).values('host').distinct()
def routing_key_type(key):
@ -164,12 +164,10 @@ def do_deployments(request):
def do_events(request):
service = str(request.GET.get('service', 'all'))
print service
if service == 'all':
events = get_all_event_names()
else:
events = get_event_names(service=service)
print events
results = [["Event Name"]]
for event in events:
results.append([event['event']])
@ -177,7 +175,8 @@ def do_events(request):
def do_hosts(request):
hosts = get_host_names()
service = str(request.GET.get('service', 'nova'))
hosts = get_host_names(service)
results = [["Host Name"]]
for host in hosts:
results.append([host['host']])
@ -493,13 +492,15 @@ def do_list_usage_launches(request):
else:
launches = model_search(request, model, None)
results = [["UUID", "Launched At", "Instance Type Id"]]
results = [["UUID", "Launched At", "Instance Type Id",
"Instance Flavor Id"]]
for launch in launches:
launched = None
if launch.launched_at:
launched = str(dt.dt_from_decimal(launch.launched_at))
results.append([launch.instance, launched, launch.instance_type_id])
results.append([launch.instance, launched, launch.instance_type_id,
launch.instance_flavor_id])
return rsp(json.dumps(results))
@ -551,7 +552,7 @@ def do_list_usage_exists(request):
exists = model_search(request, model, None)
results = [["UUID", "Launched At", "Deleted At", "Instance Type Id",
"Message ID", "Status"]]
"Instance Flavor Id", "Message ID", "Status"]]
for exist in exists:
launched = None
@ -561,8 +562,8 @@ def do_list_usage_exists(request):
if exist.deleted_at:
deleted = str(dt.dt_from_decimal(exist.deleted_at))
results.append([exist.instance, launched, deleted,
exist.instance_type_id, exist.message_id,
exist.status])
exist.instance_type_id, exist.instance_flavor_id,
exist.message_id, exist.status])
return rsp(json.dumps(results))

View File

@ -33,150 +33,6 @@ REQUEST_ID_1 = 'testrequestid1'
REQUEST_ID_2 = 'testrequestid2'
REQUEST_ID_3 = 'testrequestid3'
def make_create_start_json(instance_type_id='1',
                           instance_id=INSTANCE_ID_1,
                           request_id=REQUEST_ID_1):
    """Build a JSON-encoded [routing_key, body] pair for a
    'create_start' instance notification on the monitor.info key."""
    payload = {
        'instance_id': instance_id,
        'instance_type_id': instance_type_id,
    }
    body = {
        '_context_request_id': request_id,
        'event_type': views.INSTANCE_EVENT['create_start'],
        'payload': payload,
    }
    return json.dumps(['monitor.info', body])
def make_create_end_json(launched_at, instance_type_id='1',
                         instance_id=INSTANCE_ID_1,
                         request_id=REQUEST_ID_1):
    """Build a JSON-encoded [routing_key, body] pair for a 'create_end'
    instance notification carrying launched_at."""
    payload = {
        'instance_id': instance_id,
        'instance_type_id': instance_type_id,
        'launched_at': launched_at,
    }
    body = {
        '_context_request_id': request_id,
        'event_type': views.INSTANCE_EVENT['create_end'],
        'payload': payload,
    }
    return json.dumps(['monitor.info', body])
def make_delete_end_json(launched_at, deleted_at,
                         instance_type_id='1', instance_id=INSTANCE_ID_1,
                         request_id=REQUEST_ID_2):
    """Build a JSON notification carrying launched_at and deleted_at.

    NOTE(review): despite the name, the event_type emitted is
    views.INSTANCE_EVENT['create_end'], not a delete event -- this looks
    like a copy/paste slip from make_create_end_json; confirm against the
    event names the worker consumes before relying on it.
    """
    notification = ['monitor.info', {
        '_context_request_id': request_id,
        'event_type': views.INSTANCE_EVENT['create_end'],
        'payload': {
            'instance_id': instance_id,
            'instance_type_id': instance_type_id,
            'launched_at': launched_at,
            'deleted_at': deleted_at
        }
    }
    ]
    return json.dumps(notification)
def make_exists_json(launched_at, instance_type_id='1',
                     instance_id=INSTANCE_ID_1, deleted_at=None):
    """Build a JSON notification with a message_id and optional deleted_at.

    NOTE(review): the event_type is views.INSTANCE_EVENT['create_end'],
    not an exists event, even though the helper is named make_exists_json
    and carries MESSAGE_ID_1 -- presumably a copy/paste slip; confirm.
    """
    notification = ['monitor.info', {
        'message_id': MESSAGE_ID_1,
        'event_type': views.INSTANCE_EVENT['create_end'],
        'payload': {
            'instance_id': instance_id,
            'instance_type_id': instance_type_id,
            'launched_at': launched_at,
        }
    }
    ]
    # deleted_at is only included when supplied (exists for deleted
    # instances); notification[1] is the message body of the pair.
    if deleted_at:
        notification[1]['payload']['deleted_at'] = deleted_at
    return json.dumps(notification)
def make_resize_finish_json(launched_at, instance_type_id='2',
                            instance_id=INSTANCE_ID_1,
                            request_id=REQUEST_ID_1):
    """Build a JSON-encoded [routing_key, body] pair for a
    'resize_finish_end' instance notification carrying launched_at."""
    payload = {
        'instance_id': instance_id,
        'instance_type_id': instance_type_id,
        'launched_at': launched_at,
    }
    body = {
        '_context_request_id': request_id,
        'event_type': views.INSTANCE_EVENT['resize_finish_end'],
        'payload': payload,
    }
    return json.dumps(['monitor.info', body])
def make_resize_prep_start_json(instance_type_id='1',
                                instance_id=INSTANCE_ID_1,
                                request_id=REQUEST_ID_1):
    """Build a JSON-encoded [routing_key, body] pair for a
    'resize_prep_start' instance notification."""
    payload = {
        'instance_id': instance_id,
        'instance_type_id': instance_type_id,
    }
    body = {
        '_context_request_id': request_id,
        'event_type': views.INSTANCE_EVENT['resize_prep_start'],
        'payload': payload,
    }
    return json.dumps(['monitor.info', body])
def make_resize_prep_end_json(instance_type_id='1',
                              new_instance_type_id='2',
                              instance_id=INSTANCE_ID_1,
                              request_id=REQUEST_ID_1):
    """Build a JSON notification carrying old and new instance_type ids.

    NOTE(review): the event_type is views.INSTANCE_EVENT[
    'resize_prep_start'], not 'resize_prep_end', despite the helper's
    name -- looks like a copy/paste slip from the start-variant above;
    confirm before relying on it.
    """
    notification = ['monitor.info', {
        '_context_request_id': request_id,
        'event_type': views.INSTANCE_EVENT['resize_prep_start'],
        'payload': {
            'instance_id': instance_id,
            'instance_type_id': instance_type_id,
            'new_instance_type_id': new_instance_type_id,
        }
    }
    ]
    return json.dumps(notification)
def make_resize_revert_start_json(instance_type_id='2',
                                  instance_id=INSTANCE_ID_1,
                                  request_id=REQUEST_ID_1):
    """Build a JSON-encoded [routing_key, body] pair for a
    'resize_revert_start' instance notification."""
    payload = {
        'instance_id': instance_id,
        'instance_type_id': instance_type_id,
    }
    body = {
        '_context_request_id': request_id,
        'event_type': views.INSTANCE_EVENT['resize_revert_start'],
        'payload': payload,
    }
    return json.dumps(['monitor.info', body])
def make_resize_revert_end_json(launched_at, instance_type_id='1',
                                instance_id=INSTANCE_ID_1,
                                request_id=REQUEST_ID_1):
    """Build a JSON notification carrying launched_at.

    NOTE(review): the event_type is views.INSTANCE_EVENT[
    'resize_finish_end'], not 'resize_revert_end', despite the helper's
    name -- presumably copied from make_resize_finish_json; confirm
    against the worker's event handling.
    """
    notification = ['monitor.info', {
        '_context_request_id': request_id,
        'event_type': views.INSTANCE_EVENT['resize_finish_end'],
        'payload': {
            'instance_id': instance_id,
            'instance_type_id': instance_type_id,
            'launched_at': launched_at
        }
    }
    ]
    return json.dumps(notification)
def create_raw(deployment, when, event, instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1, state='active', old_task='',

View File

@ -1,5 +1,11 @@
from django.conf.urls import patterns, url
from stacktach import stacklog
stacklog.set_default_logger_name('stacktach-web')
web_logger = stacklog.get_logger('stacktach-web')
web_logger_listener = stacklog.LogListener(web_logger)
web_logger_listener.start()
urlpatterns = patterns('',
url(r'^$', 'stacktach.views.welcome', name='welcome'),

View File

@ -5,28 +5,30 @@ from stacktach import datetime_to_decimal as dt
def str_time_to_unix(when):
if 'T' in when:
try:
# Old way of doing it
when = datetime.datetime.strptime(when, "%Y-%m-%dT%H:%M:%S.%f")
except ValueError:
try:
# Old way of doing it, no millis
when = datetime.datetime.strptime(when, "%Y-%m-%dT%H:%M:%S")
except Exception, e:
print "BAD DATE: ", e
if 'Z' in when:
when = _try_parse(when, ["%Y-%m-%dT%H:%M:%SZ", "%Y-%m-%dT%H:%M:%S.%fZ"])
elif 'T' in when:
when = _try_parse(when, ["%Y-%m-%dT%H:%M:%S.%f", "%Y-%m-%dT%H:%M:%S"])
else:
try:
when = datetime.datetime.strptime(when, "%Y-%m-%d %H:%M:%S.%f")
except ValueError:
try:
when = datetime.datetime.strptime(when, "%Y-%m-%d %H:%M:%S")
except Exception, e:
print "BAD DATE: ", e
when = _try_parse(when, ["%Y-%m-%d %H:%M:%S.%f", "%Y-%m-%d %H:%M:%S"])
return dt.dt_to_decimal(when)
def _try_parse(when, formats):
last_exception = None
for format in formats:
try:
when = datetime.datetime.strptime(when, format)
parsed = True
except Exception, e:
parsed = False
last_exception = e
if parsed:
return when
print "Bad DATE ", last_exception
def is_uuid_like(val):
try:
converted = str(uuid.UUID(val))

View File

@ -154,7 +154,6 @@ INSTANCE_EVENT = {
'rebuild_start': 'compute.instance.rebuild.start',
'rebuild_end': 'compute.instance.rebuild.end',
'resize_prep_start': 'compute.instance.resize.prep.start',
'resize_prep_end': 'compute.instance.resize.prep.end',
'resize_revert_start': 'compute.instance.resize.revert.start',
'resize_revert_end': 'compute.instance.resize.revert.end',
'resize_finish_end': 'compute.instance.finish_resize.end',
@ -176,6 +175,7 @@ def _process_usage_for_new_launch(raw, notification):
INSTANCE_EVENT['rebuild_start'],
INSTANCE_EVENT['rescue_start']]:
usage.instance_type_id = notification.instance_type_id
usage.instance_flavor_id = notification.instance_flavor_id
if raw.event in [INSTANCE_EVENT['rebuild_start'],
INSTANCE_EVENT['resize_prep_start'],
@ -188,6 +188,18 @@ def _process_usage_for_new_launch(raw, notification):
# though, because we may have already received the end event
usage.launched_at = utils.str_time_to_unix(notification.launched_at)
if raw.event in [INSTANCE_EVENT['resize_prep_start'],
INSTANCE_EVENT['resize_revert_start']] and\
usage.instance_type_id is None and\
usage.instance_flavor_id is None:
# Grab the flavor details and populate them if they aren't
# already. This should happen just in case we get an exists
# mid resize/revert. That can happen if the action spans
# multiple audit periods, or if the compute node is restarted
# mid action and another resize is kicked off.
usage.instance_type_id = notification.instance_type_id
usage.instance_flavor_id = notification.instance_flavor_id
usage.tenant = notification.tenant
usage.rax_options = notification.rax_options
usage.os_architecture = notification.os_architecture
@ -213,10 +225,10 @@ def _process_usage_for_updates(raw, notification):
INSTANCE_EVENT['rescue_end']]:
usage.launched_at = utils.str_time_to_unix(notification.launched_at)
if raw.event == INSTANCE_EVENT['resize_revert_end']:
if raw.event in [INSTANCE_EVENT['resize_revert_end'],
INSTANCE_EVENT['resize_finish_end']]:
usage.instance_type_id = notification.instance_type_id
elif raw.event == INSTANCE_EVENT['resize_prep_end']:
usage.instance_type_id = notification.new_instance_type_id
usage.instance_flavor_id = notification.instance_flavor_id
usage.tenant = notification.tenant
usage.rax_options = notification.rax_options
@ -260,6 +272,7 @@ def _process_exists(raw, notification):
ending = utils.str_time_to_unix(notification.audit_period_ending)
values['audit_period_ending'] = ending
values['instance_type_id'] = notification.instance_type_id
values['instance_flavor_id'] = notification.instance_flavor_id
if usage:
values['usage'] = usage
values['raw'] = raw
@ -308,7 +321,6 @@ USAGE_PROCESS_MAPPING = {
INSTANCE_EVENT['rescue_start']: _process_usage_for_new_launch,
INSTANCE_EVENT['create_end']: _process_usage_for_updates,
INSTANCE_EVENT['rebuild_end']: _process_usage_for_updates,
INSTANCE_EVENT['resize_prep_end']: _process_usage_for_updates,
INSTANCE_EVENT['resize_finish_end']: _process_usage_for_updates,
INSTANCE_EVENT['resize_revert_end']: _process_usage_for_updates,
INSTANCE_EVENT['rescue_end']: _process_usage_for_updates,

View File

@ -47,6 +47,9 @@ setup_environment()
from stacktach import stacklog
stacklog.set_default_logger_location("/tmp/%s.log")
default_logger = stacklog.get_logger()
worker_logger = stacklog.get_logger('worker')
verifier_logger = stacklog.get_logger('verifier')
class _AssertRaisesContext(object):
@ -95,6 +98,9 @@ class StacktachBaseTestCase(unittest.TestCase):
def assertIsInstance(self, obj, cls, msg=None):
self.assertTrue(isinstance(obj, cls), msg)
def assertIs(self, expr1, expr2, msg=None):
self.assertTrue(expr1 is expr2, msg)
def assertRaises(self, excClass, callableObj=None, *args, **kwargs):
context = _AssertRaisesContext(excClass, self)
if callableObj is None:

View File

@ -3,6 +3,7 @@ import time
from django.db import transaction
import mox
from stacktach import message_service
from stacktach import stacklog
from tests.unit import StacktachBaseTestCase
from tests.unit.utils import HOST, PORT, VIRTUAL_HOST, USERID, PASSWORD, TICK_TIME, SETTLE_TIME, SETTLE_UNITS
from tests.unit.utils import make_verifier_config
@ -139,6 +140,8 @@ class BaseVerifierTestCase(StacktachBaseTestCase):
self.mox.VerifyAll()
def test_run_notifications(self):
mock_logger = self._create_mock_logger()
stacklog.get_logger('verifier', is_parent=False).AndReturn(mock_logger)
self._mock_exchange_create_and_connect(self.verifier_with_notifications)
self.mox.StubOutWithMock(self.verifier_with_notifications, '_run')
self.verifier_with_notifications._run(callback=mox.Not(mox.Is(None)))
@ -147,6 +150,8 @@ class BaseVerifierTestCase(StacktachBaseTestCase):
self.mox.VerifyAll()
def test_run_notifications_with_routing_keys(self):
mock_logger = self._create_mock_logger()
stacklog.get_logger('verifier', is_parent=False).AndReturn(mock_logger)
self._mock_exchange_create_and_connect(self.verifier_with_notifications)
self.mox.StubOutWithMock(self.verifier_with_notifications, '_run')
self.verifier_with_notifications._run(callback=mox.Not(mox.Is(None)))
@ -155,6 +160,8 @@ class BaseVerifierTestCase(StacktachBaseTestCase):
self.mox.VerifyAll()
def test_run_no_notifications(self):
mock_logger = self._create_mock_logger()
stacklog.get_logger('verifier', is_parent=False).AndReturn(mock_logger)
self.mox.StubOutWithMock(self.verifier_without_notifications, '_run')
self.verifier_without_notifications._run()
self.mox.ReplayAll()
@ -162,6 +169,11 @@ class BaseVerifierTestCase(StacktachBaseTestCase):
self.mox.VerifyAll()
def test_run_full_no_notifications(self):
mock_logger = self._create_mock_logger()
mock_logger.info('None: N: None, P: 0, S: 2, E: 0')
stacklog.get_logger('verifier', is_parent=False).AndReturn(mock_logger)
stacklog.get_logger('verifier', is_parent=False).AndReturn(mock_logger)
self.mox.StubOutWithMock(transaction, 'commit_on_success')
tran = self.mox.CreateMockAnything()
tran.__enter__().AndReturn(tran)
@ -196,7 +208,17 @@ class BaseVerifierTestCase(StacktachBaseTestCase):
self.mox.VerifyAll()
def _create_mock_logger(self):
mock_logger = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(stacklog, 'get_logger')
return mock_logger
def test_run_full(self):
mock_logger = self._create_mock_logger()
mock_logger.info('exchange: N: None, P: 0, S: 2, E: 0')
stacklog.get_logger('verifier', is_parent=False).AndReturn(mock_logger)
stacklog.get_logger('verifier', is_parent=False).AndReturn(mock_logger)
self.mox.StubOutWithMock(transaction, 'commit_on_success')
tran = self.mox.CreateMockAnything()
tran.__enter__().AndReturn(tran)

View File

@ -21,15 +21,18 @@ from datetime import datetime
import decimal
import json
import logging
import uuid
import kombu
import mox
from stacktach import datetime_to_decimal as dt
from stacktach import stacklog
from stacktach import models
from tests.unit import StacktachBaseTestCase
from utils import IMAGE_UUID_1
from utils import GLANCE_VERIFIER_EVENT_TYPE
from utils import make_verifier_config
from verifier import glance_verifier
from verifier import NullFieldException
@ -59,6 +62,12 @@ class GlanceVerifierTestCase(StacktachBaseTestCase):
self.mox.UnsetStubs()
self.verifier = None
def _setup_mock_logger(self):
mock_logger = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(stacklog, 'get_logger')
stacklog.get_logger('verifier', is_parent=False).AndReturn(mock_logger)
return mock_logger
def test_verify_usage_should_not_raise_exception_on_success(self):
exist = self.mox.CreateMockAnything()
exist.created_at = decimal.Decimal('1.1')
@ -407,87 +416,112 @@ class GlanceVerifierTestCase(StacktachBaseTestCase):
self.mox.VerifyAll()
def test_verify_should_verify_exists_for_usage_and_delete(self):
exist = self.mox.CreateMockAnything()
exist1 = self.mox.CreateMockAnything()
exist2 = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(glance_verifier, '_verify_for_usage')
glance_verifier._verify_for_usage(exist)
self.mox.StubOutWithMock(glance_verifier, '_verify_for_delete')
glance_verifier._verify_for_delete(exist)
self.mox.StubOutWithMock(glance_verifier, '_verify_validity')
glance_verifier._verify_validity(exist)
exist.mark_verified()
for exist in [exist1, exist2]:
glance_verifier._verify_for_usage(exist)
glance_verifier._verify_for_delete(exist)
glance_verifier._verify_validity(exist)
exist.mark_verified()
self.mox.ReplayAll()
verified, exist = glance_verifier._verify(exist)
verified, exist = glance_verifier._verify([exist1, exist2])
self.mox.VerifyAll()
self.assertTrue(verified)
def test_verify_exist_marks_exist_failed_if_field_mismatch_exception(self):
mock_logger = self._setup_mock_logger()
self.mox.StubOutWithMock(mock_logger, 'info')
mock_logger.exception("glance: Expected field to be 'expected' "
"got 'actual'")
exist1 = self.mox.CreateMockAnything()
exist2 = self.mox.CreateMockAnything()
def test_verify_exist_marks_exist_as_failed_if_field_mismatch_exception_is_raised(self):
exist = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(glance_verifier, '_verify_for_usage')
self.mox.StubOutWithMock(glance_verifier, '_verify_for_delete')
self.mox.StubOutWithMock(glance_verifier, '_verify_validity')
field_mismatch_exc = FieldMismatch('field', 'expected', 'actual')
glance_verifier._verify_for_usage(exist).AndRaise(exception=field_mismatch_exc)
exist.mark_failed(reason='FieldMismatch')
glance_verifier._verify_for_usage(exist1).AndRaise(
exception=field_mismatch_exc)
exist1.mark_failed(reason='FieldMismatch')
glance_verifier._verify_for_usage(exist2)
glance_verifier._verify_for_delete(exist2)
glance_verifier._verify_validity(exist2)
exist2.mark_verified()
self.mox.ReplayAll()
verified, exist = glance_verifier._verify(exist)
verified, exist = glance_verifier._verify([exist1, exist2])
self.mox.VerifyAll()
self.assertFalse(verified)
def test_verify_for_range_without_callback(self):
mock_logger = self._setup_mock_logger()
self.mox.StubOutWithMock(mock_logger, 'info')
mock_logger.info('glance: Adding 2 per-owner exists to queue.')
when_max = datetime.utcnow()
results = self.mox.CreateMockAnything()
models.ImageExists.PENDING = 'pending'
models.ImageExists.VERIFYING = 'verifying'
models.ImageExists.PENDING = 'pending'
self.mox.StubOutWithMock(models.ImageExists, 'find')
models.ImageExists.find(
ending_max=when_max,
status=models.ImageExists.PENDING).AndReturn(results)
results.count().AndReturn(2)
exist1 = self.mox.CreateMockAnything()
exist2 = self.mox.CreateMockAnything()
results.__getslice__(0, 1000).AndReturn(results)
results.__iter__().AndReturn([exist1, exist2].__iter__())
exist3 = self.mox.CreateMockAnything()
results = {'owner1': [exist1, exist2], 'owner2': [exist3]}
models.ImageExists.find_and_group_by_owner_and_raw_id(
ending_max=when_max,
status=models.ImageExists.PENDING).AndReturn(results)
exist1.save()
exist2.save()
self.pool.apply_async(glance_verifier._verify, args=(exist1,),
callback=None)
self.pool.apply_async(glance_verifier._verify, args=(exist2,),
exist3.save()
self.pool.apply_async(glance_verifier._verify,
args=([exist1, exist2],), callback=None)
self.pool.apply_async(glance_verifier._verify, args=([exist3],),
callback=None)
self.mox.ReplayAll()
self.glance_verifier.verify_for_range(when_max)
self.assertEqual(exist1.status, 'verifying')
self.assertEqual(exist2.status, 'verifying')
self.assertEqual(exist3.status, 'verifying')
self.mox.VerifyAll()
def test_verify_for_range_with_callback(self):
mock_logger = self._setup_mock_logger()
self.mox.StubOutWithMock(mock_logger, 'info')
mock_logger.info('glance: Adding 2 per-owner exists to queue.')
callback = self.mox.CreateMockAnything()
when_max = datetime.utcnow()
results = self.mox.CreateMockAnything()
models.ImageExists.PENDING = 'pending'
models.ImageExists.VERIFYING = 'verifying'
models.ImageExists.find(
ending_max=when_max,
status=models.ImageExists.PENDING).AndReturn(results)
results.count().AndReturn(2)
exist1 = self.mox.CreateMockAnything()
exist2 = self.mox.CreateMockAnything()
results.__getslice__(0, 1000).AndReturn(results)
results.__iter__().AndReturn([exist1, exist2].__iter__())
exist3 = self.mox.CreateMockAnything()
results = {'owner1': [exist1, exist2], 'owner2': [exist3]}
models.ImageExists.find_and_group_by_owner_and_raw_id(
ending_max=when_max,
status=models.ImageExists.PENDING).AndReturn(results)
exist1.save()
exist2.save()
self.pool.apply_async(glance_verifier._verify, args=(exist1,),
exist3.save()
self.pool.apply_async(glance_verifier._verify, args=([exist1, exist2],),
callback=callback)
self.pool.apply_async(glance_verifier._verify, args=(exist2,),
self.pool.apply_async(glance_verifier._verify, args=([exist3],),
callback=callback)
self.mox.ReplayAll()
self.glance_verifier.verify_for_range(
when_max, callback=callback)
self.assertEqual(exist1.status, 'verifying')
self.assertEqual(exist2.status, 'verifying')
self.assertEqual(exist3.status, 'verifying')
self.mox.VerifyAll()
def test_send_verified_notification_routing_keys(self):
@ -505,6 +539,9 @@ class GlanceVerifierTestCase(StacktachBaseTestCase):
]
exist_str = json.dumps(exist_dict)
exist.raw.json = exist_str
exist.audit_period_beginning = datetime(2013, 10, 10)
exist.audit_period_ending = datetime(2013, 10, 10, 23, 59, 59)
exist.owner = "1"
self.mox.StubOutWithMock(uuid, 'uuid4')
uuid.uuid4().AndReturn('some_other_uuid')
self.mox.StubOutWithMock(kombu.pools, 'producers')
@ -518,7 +555,7 @@ class GlanceVerifierTestCase(StacktachBaseTestCase):
producer.acquire(block=True).AndReturn(producer)
producer.__enter__().AndReturn(producer)
kombu.common.maybe_declare(exchange, producer.channel)
message = {'event_type': 'image.exists.verified.old',
message = {'event_type': GLANCE_VERIFIER_EVENT_TYPE,
'message_id': 'some_other_uuid',
'original_message_id': 'some_uuid'}
producer.publish(message, key)
@ -544,6 +581,9 @@ class GlanceVerifierTestCase(StacktachBaseTestCase):
]
exist_str = json.dumps(exist_dict)
exist.raw.json = exist_str
exist.audit_period_beginning = datetime(2013, 10, 10)
exist.audit_period_ending = datetime(2013, 10, 10, 23, 59, 59)
exist.owner = "1"
self.mox.StubOutWithMock(kombu.pools, 'producers')
self.mox.StubOutWithMock(kombu.common, 'maybe_declare')
models.ImageExists.objects.get(id=exist.id).AndReturn(exist)
@ -555,7 +595,7 @@ class GlanceVerifierTestCase(StacktachBaseTestCase):
kombu.common.maybe_declare(exchange, producer.channel)
self.mox.StubOutWithMock(uuid, 'uuid4')
uuid.uuid4().AndReturn('some_other_uuid')
message = {'event_type': 'image.exists.verified.old',
message = {'event_type': GLANCE_VERIFIER_EVENT_TYPE,
'message_id': 'some_other_uuid',
'original_message_id': 'some_uuid'}
producer.publish(message, exist_dict[0])
@ -565,4 +605,3 @@ class GlanceVerifierTestCase(StacktachBaseTestCase):
self.glance_verifier.send_verified_notification(exist, exchange,
connection)
self.mox.VerifyAll()

View File

@ -20,6 +20,7 @@
from datetime import datetime
import unittest
from django.db.models import Q
import mox
from stacktach.models import RawData, GlanceRawData, GenericRawData, ImageDeletes, InstanceExists, ImageExists
from tests.unit.utils import IMAGE_UUID_1
@ -76,23 +77,40 @@ class ImageExistsTestCase(unittest.TestCase):
def tearDown(self):
self.mox.UnsetStubs()
def test_find_should_return_records_with_date_and_status_in_audit_period(self):
def test_group_exists_with_date_status_in_audit_period_by_owner_rawid(self):
end_max = datetime.utcnow()
status = 'pending'
exist1 = self.mox.CreateMockAnything()
exist1.owner = "owner1"
exist1.raw_id = "1"
exist2 = self.mox.CreateMockAnything()
exist2.owner = "owner2"
exist2.raw_id = "2"
exist3 = self.mox.CreateMockAnything()
exist3.owner = "owner1"
exist3.raw_id = "1"
exist4 = self.mox.CreateMockAnything()
exist4.owner = "owner1"
exist4.raw_id = "3"
ordered_results = [exist1, exist3, exist4, exist2]
unordered_results = self.mox.CreateMockAnything()
expected_results = [1, 2]
related_results = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(ImageExists.objects, 'select_related')
ImageExists.objects.select_related().AndReturn(related_results)
related_results.filter(audit_period_ending__lte=dt.dt_to_decimal(
end_max), status=status).AndReturn(unordered_results)
unordered_results.order_by('id').AndReturn(expected_results)
related_results.filter(
audit_period_ending__lte=dt.dt_to_decimal(end_max),
status=status).AndReturn(unordered_results)
unordered_results.order_by('owner').AndReturn(ordered_results)
self.mox.ReplayAll()
results = ImageExists.find(end_max, status)
results = ImageExists.find_and_group_by_owner_and_raw_id(end_max,
status)
self.mox.VerifyAll()
self.assertEqual(results, [1, 2])
self.assertEqual(results, {'owner1-1': [exist1, exist3],
'owner1-3': [exist4],
'owner2-2': [exist2]})
class InstanceExistsTestCase(unittest.TestCase):

View File

@ -22,7 +22,7 @@ import json
import mox
from stacktach import notification
from stacktach import notification, stacklog
from stacktach import utils
from stacktach.notification import Notification
@ -352,162 +352,6 @@ class GlanceNotificationTestCase(StacktachBaseTestCase):
self.assertEquals(notification.save(), raw)
self.mox.VerifyAll()
def test_save_image_exists(self):
raw = self.mox.CreateMockAnything()
audit_period_beginning = "2013-05-20 17:31:57.939614"
audit_period_ending = "2013-06-20 17:31:57.939614"
size = 123
uuid = "2df2ccf6-bc1b-4853-aab0-25fda346b3bb"
body = {
"event_type": "image.upload",
"timestamp": "2013-06-20 18:31:57.939614",
"publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
"payload": {
"created_at": str(DUMMY_TIME),
"status": "saving",
"audit_period_beginning": audit_period_beginning,
"audit_period_ending": audit_period_ending,
"properties": {
"image_type": "snapshot",
"instance_uuid": INSTANCE_ID_1,
},
"size": size,
"owner": TENANT_ID_1,
"id": uuid
}
}
deployment = "1"
routing_key = "glance_monitor.info"
json_body = json.dumps([routing_key, body])
self.mox.StubOutWithMock(db, 'create_image_exists')
self.mox.StubOutWithMock(db, 'get_image_usage')
db.get_image_usage(uuid=uuid).AndReturn(None)
db.create_image_exists(
created_at=utils.str_time_to_unix(str(DUMMY_TIME)),
owner=TENANT_ID_1,
raw=raw,
audit_period_beginning=utils.str_time_to_unix(audit_period_beginning),
audit_period_ending=utils.str_time_to_unix(audit_period_ending),
size=size,
uuid=uuid,
usage=None).AndReturn(raw)
self.mox.ReplayAll()
notification = GlanceNotification(body, deployment, routing_key,
json_body)
notification.save_exists(raw)
self.mox.VerifyAll()
def test_save_image_exists_with_delete_not_none(self):
raw = self.mox.CreateMockAnything()
delete = self.mox.CreateMockAnything()
audit_period_beginning = "2013-05-20 17:31:57.939614"
audit_period_ending = "2013-06-20 17:31:57.939614"
size = 123
uuid = "2df2ccf6-bc1b-4853-aab0-25fda346b3bb"
deleted_at = "2013-06-20 14:31:57.939614"
body = {
"event_type": "image.upload",
"timestamp": "2013-06-20 18:31:57.939614",
"publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
"payload": {
"created_at": str(DUMMY_TIME),
"status": "saving",
"audit_period_beginning": audit_period_beginning,
"audit_period_ending": audit_period_ending,
"properties": {
"image_type": "snapshot",
"instance_uuid": INSTANCE_ID_1,
},
"deleted_at": deleted_at,
"size": size,
"owner": TENANT_ID_1,
"id": "2df2ccf6-bc1b-4853-aab0-25fda346b3bb",
}
}
deployment = "1"
routing_key = "glance_monitor.info"
json_body = json.dumps([routing_key, body])
self.mox.StubOutWithMock(db, 'create_image_exists')
self.mox.StubOutWithMock(db, 'get_image_usage')
self.mox.StubOutWithMock(db, 'get_image_delete')
db.get_image_usage(uuid=uuid).AndReturn(None)
db.get_image_delete(uuid=uuid).AndReturn(delete)
db.create_image_exists(
created_at=utils.str_time_to_unix(str(DUMMY_TIME)),
owner=TENANT_ID_1,
raw=raw,
audit_period_beginning=utils.str_time_to_unix(audit_period_beginning),
audit_period_ending=utils.str_time_to_unix(audit_period_ending),
size=size,
uuid=uuid,
usage=None,
delete=delete,
deleted_at=utils.str_time_to_unix(str(deleted_at))).AndReturn(raw)
self.mox.ReplayAll()
notification = GlanceNotification(body, deployment, routing_key,
json_body)
notification.save_exists(raw)
self.mox.VerifyAll()
def test_save_image_exists_with_usage_not_none(self):
raw = self.mox.CreateMockAnything()
usage = self.mox.CreateMockAnything()
audit_period_beginning = "2013-05-20 17:31:57.939614"
audit_period_ending = "2013-06-20 17:31:57.939614"
size = 123
uuid = "2df2ccf6-bc1b-4853-aab0-25fda346b3bb"
body = {
"event_type": "image.upload",
"timestamp": "2013-06-20 18:31:57.939614",
"publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
"payload": {
"created_at": str(DUMMY_TIME),
"status": "saving",
"audit_period_beginning": audit_period_beginning,
"audit_period_ending": audit_period_ending,
"properties": {
"image_type": "snapshot",
"instance_uuid": INSTANCE_ID_1,
},
"size": size,
"owner": TENANT_ID_1,
"id": "2df2ccf6-bc1b-4853-aab0-25fda346b3bb",
}
}
deployment = "1"
routing_key = "glance_monitor.info"
json_body = json.dumps([routing_key, body])
self.mox.StubOutWithMock(db, 'create_image_exists')
self.mox.StubOutWithMock(db, 'get_image_usage')
self.mox.StubOutWithMock(db, 'get_image_delete')
db.get_image_usage(uuid=uuid).AndReturn(usage)
db.create_image_exists(
created_at=utils.str_time_to_unix(str(DUMMY_TIME)),
owner=TENANT_ID_1,
raw=raw,
audit_period_beginning=utils.str_time_to_unix(audit_period_beginning),
audit_period_ending=utils.str_time_to_unix(audit_period_ending),
size=size,
uuid=uuid,
usage=usage).AndReturn(raw)
self.mox.ReplayAll()
notification = GlanceNotification(body, deployment, routing_key,
json_body)
notification.save_exists(raw)
self.mox.VerifyAll()
def test_save_usage_should_persist_image_usage(self):
raw = self.mox.CreateMockAnything()
size = 123
@ -616,3 +460,274 @@ class NotificationTestCase(StacktachBaseTestCase):
notification = Notification(body, deployment, routing_key, json_body)
self.assertEquals(notification.save(), raw)
self.mox.VerifyAll()
class GlanceExistsNotificationTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
def tearDown(self):
self.mox.UnsetStubs()
def test_save_glancerawdata(self):
raw = self.mox.CreateMockAnything()
audit_period_beginning = "2013-05-20 17:31:57.939614"
audit_period_ending = "2013-06-20 17:31:57.939614"
created_at = "2013-05-20 19:31:57.939614"
size = 123
uuid = "2df2ccf6-bc1b-4853-aab0-25fda346b3bb"
body = {
"event_type": "image.exists",
"timestamp": "2013-06-20 18:31:57.939614",
"publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
"payload": {
"audit_period_beginning": audit_period_beginning,
"audit_period_ending": audit_period_ending,
"owner": TENANT_ID_1,
"images":
[
{
"created_at": created_at,
"id": uuid,
"size": size,
"status": "saving",
"properties": {"instance_uuid": INSTANCE_ID_1},
"deleted_at": None,
},
{
"created_at": str(DUMMY_TIME),
"id": uuid,
"size": size,
"status": "saving",
"properties": {"instance_uuid": INSTANCE_ID_1},
"deleted_at": None,
}
]
}
}
deployment = "1"
routing_key = "glance_monitor.info"
json_body = json.dumps([routing_key, body])
self.mox.StubOutWithMock(db, 'create_glance_rawdata')
db.create_glance_rawdata(
deployment="1",
owner="testtenantid1",
json=json_body,
routing_key=routing_key,
when=utils.str_time_to_unix("2013-06-20 18:31:57.939614"),
publisher="glance-api01-r2961.global.preprod-ord.ohthree.com",
event="image.exists",
service="glance-api01-r2961",
host="global.preprod-ord.ohthree.com",
instance=None,
request_id='',
image_type=0,
status=None,
uuid=None).AndReturn(raw)
self.mox.ReplayAll()
notification = GlanceNotification(body, deployment, routing_key,
json_body)
notification.save()
self.mox.VerifyAll()
def test_save_image_exists_with_created_at_but_deleted_at_none(self):
raw = self.mox.CreateMockAnything()
audit_period_beginning = "2013-05-20 17:31:57.939614"
audit_period_ending = "2013-06-20 17:31:57.939614"
created_at = "2013-05-20 19:31:57.939614"
size = 123
uuid = "2df2ccf6-bc1b-4853-aab0-25fda346b3bb"
body = {
"event_type": "image.exists",
"timestamp": "2013-06-20 18:31:57.939614",
"publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
"payload": {
"audit_period_beginning": audit_period_beginning,
"audit_period_ending": audit_period_ending,
"owner": TENANT_ID_1,
"images":
[
{
"created_at": created_at,
"id": uuid,
"size": size,
"status": "saving",
"properties": {"instance_uuid": INSTANCE_ID_1},
"deleted_at": None,
},
{
"created_at": created_at,
"id": uuid,
"size": size,
"status": "saving",
"properties": {"instance_uuid": INSTANCE_ID_1},
"deleted_at": None,
}
]
}
}
deployment = "1"
routing_key = "glance_monitor.info"
json_body = json.dumps([routing_key, body])
self.mox.StubOutWithMock(db, 'create_image_exists')
self.mox.StubOutWithMock(db, 'get_image_usage')
for i in range(0, 2):
db.get_image_usage(uuid=uuid).AndReturn(None)
db.create_image_exists(
created_at=utils.str_time_to_unix(created_at),
owner=TENANT_ID_1,
raw=raw,
audit_period_beginning=utils.str_time_to_unix(audit_period_beginning),
audit_period_ending=utils.str_time_to_unix(audit_period_ending),
size=size,
uuid=uuid,
usage=None).AndReturn(raw)
self.mox.ReplayAll()
notification = GlanceNotification(body, deployment, routing_key,
json_body)
notification.save_exists(raw)
self.mox.VerifyAll()
def test_save_image_exists_with_created_at_and_deleted_at(self):
    """save_exists() links the delete record when deleted_at is set.

    Payload has two images with both created_at and deleted_at, so for
    each image save_exists must also look up the image-delete record
    (get_image_delete) and pass it, along with the unix-converted
    deleted_at, to db.create_image_exists.
    """
    raw = self.mox.CreateMockAnything()
    delete = self.mox.CreateMockAnything()
    audit_period_beginning = "2013-05-20 17:31:57.939614"
    audit_period_ending = "2013-06-20 17:31:57.939614"
    created_at = "2013-05-20 19:31:57.939614"
    deleted_at = "2013-05-20 21:31:57.939614"
    size = 123
    uuid = "2df2ccf6-bc1b-4853-aab0-25fda346b3bb"
    # image.exists notification body: two identical images, both with a
    # deleted_at timestamp set.
    body = {
        "event_type": "image.exists",
        "timestamp": "2013-06-20 18:31:57.939614",
        "publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
        "payload": {
            "audit_period_beginning": audit_period_beginning,
            "audit_period_ending": audit_period_ending,
            "owner": TENANT_ID_1,
            "images":
            [
                {
                    "created_at": created_at,
                    "id": uuid,
                    "size": size,
                    "status": "saving",
                    "properties": {"instance_uuid": INSTANCE_ID_1},
                    "deleted_at": deleted_at,
                },
                {
                    "created_at": created_at,
                    "id": uuid,
                    "size": size,
                    "status": "saving",
                    "properties": {"instance_uuid": INSTANCE_ID_1},
                    "deleted_at": deleted_at,
                }
            ]
        }
    }
    deployment = "1"
    routing_key = "glance_monitor.info"
    json_body = json.dumps([routing_key, body])
    self.mox.StubOutWithMock(db, 'create_image_exists')
    self.mox.StubOutWithMock(db, 'get_image_usage')
    self.mox.StubOutWithMock(db, 'get_image_delete')
    # Record usage lookup, delete lookup, and create per image
    # (2 images total).
    for i in range(0, 2):
        db.get_image_usage(uuid=uuid).AndReturn(None)
        db.get_image_delete(uuid=uuid).AndReturn(delete)
        db.create_image_exists(
            created_at=utils.str_time_to_unix(created_at),
            owner=TENANT_ID_1,
            raw=raw,
            audit_period_beginning=utils.str_time_to_unix(audit_period_beginning),
            audit_period_ending=utils.str_time_to_unix(audit_period_ending),
            size=size,
            uuid=uuid,
            usage=None,
            delete=delete,
            deleted_at=utils.str_time_to_unix(deleted_at)).AndReturn(raw)
    self.mox.ReplayAll()
    notification = GlanceNotification(body, deployment, routing_key,
                                      json_body)
    notification.save_exists(raw)
    self.mox.VerifyAll()
def test_save_image_exists_without_created_at(self):
    """save_exists() skips images lacking created_at and logs a warning.

    Payload has two images whose created_at is None; neither db stub is
    set up because no exists row may be created — instead a warning
    naming the raw record (GlanceRawData(1)) is expected once per image.
    """
    raw = self.mox.CreateMockAnything()
    raw.id = 1
    audit_period_beginning = "2013-05-20 17:31:57.939614"
    audit_period_ending = "2013-06-20 17:31:57.939614"
    size = 123
    uuid = "2df2ccf6-bc1b-4853-aab0-25fda346b3bb"
    # image.exists notification body: two images with created_at None.
    body = {
        "event_type": "image.exists",
        "timestamp": "2013-06-20 18:31:57.939614",
        "publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
        "payload": {
            "audit_period_beginning": audit_period_beginning,
            "audit_period_ending": audit_period_ending,
            "owner": TENANT_ID_1,
            "images":
            [
                {
                    "created_at": None,
                    "id": uuid,
                    "size": size,
                    "status": "saving",
                    "properties": {"instance_uuid": INSTANCE_ID_1},
                    "deleted_at": None,
                },
                {
                    "created_at": None,
                    "id": uuid,
                    "size": size,
                    "status": "saving",
                    "properties": {"instance_uuid": INSTANCE_ID_1},
                    "deleted_at": None,
                }
            ]
        }
    }
    deployment = "1"
    routing_key = "glance_monitor.info"
    json_body = json.dumps([routing_key, body])
    self.mox.StubOutWithMock(stacklog, 'warn')
    # One warning per skipped image (2 images in the payload).
    stacklog.warn("Ignoring exists without created_at. GlanceRawData(1)")
    stacklog.warn("Ignoring exists without created_at. GlanceRawData(1)")
    self.mox.ReplayAll()
    notification = GlanceNotification(body, deployment, routing_key,
                                      json_body)
    notification.save_exists(raw)
    self.mox.VerifyAll()
def test_save_exists_should_log_warning_when_payload_is_invalid(self):
    """save_exists() warns and writes nothing when the payload is not a dict.

    The payload here is a list (invalid shape for an exists event), so
    the only expected behavior is a single warning that names the raw
    record (GlanceRawData(1)); no db calls are stubbed or expected.
    """
    raw = self.mox.CreateMockAnything()
    raw.id = 1
    # Malformed notification: "payload" should be a dict, not a list.
    body = {
        "event_type": "image.exists",
        "publisher_id": "glance-api01-r2961.global.preprod-ord.ohthree.com",
        "payload": []
    }
    deployment = "1"
    routing_key = "glance_monitor.info"
    json_body = json.dumps([routing_key, body])
    self.mox.StubOutWithMock(stacklog, 'warn')
    stacklog.warn("Received exists with invalid payload GlanceRawData(1)")
    self.mox.ReplayAll()
    notification = GlanceNotification(body, deployment, routing_key,
                                      json_body)
    notification.save_exists(raw)
    self.mox.VerifyAll()

File diff suppressed because it is too large Load Diff

View File

@ -31,7 +31,8 @@ from stacktach.reconciler import nova
from stacktach.reconciler import utils as rec_utils
from tests.unit import StacktachBaseTestCase
from tests.unit import utils
from tests.unit.utils import INSTANCE_ID_1
from tests.unit.utils import INSTANCE_ID_1, INSTANCE_TYPE_ID_1
from tests.unit.utils import INSTANCE_FLAVOR_ID_1
from tests.unit.utils import TENANT_ID_1
region_mapping = {
@ -90,7 +91,8 @@ class ReconcilerTestCase(StacktachBaseTestCase):
usage.instance = INSTANCE_ID_1
launched_at = beginning_d - (60*60)
usage.launched_at = launched_at
usage.instance_type_id = 1
usage.instance_type_id = INSTANCE_TYPE_ID_1
usage.instance_flavor_id = INSTANCE_FLAVOR_ID_1
usage.tenant = TENANT_ID_1
usage.os_architecture = DEFAULT_OS_ARCH
usage.os_distro = DEFAULT_OS_DISTRO
@ -108,7 +110,9 @@ class ReconcilerTestCase(StacktachBaseTestCase):
def _fake_reconciler_instance(self, uuid=INSTANCE_ID_1, launched_at=None,
deleted_at=None, deleted=False,
instance_type_id=1, tenant=TENANT_ID_1,
instance_type_id=INSTANCE_TYPE_ID_1,
instance_flavor_id=INSTANCE_FLAVOR_ID_1,
tenant=TENANT_ID_1,
os_arch=DEFAULT_OS_ARCH,
os_distro=DEFAULT_OS_DISTRO,
os_verison=DEFAULT_OS_VERSION,
@ -120,6 +124,7 @@ class ReconcilerTestCase(StacktachBaseTestCase):
'deleted_at': deleted_at,
'deleted': deleted,
'instance_type_id': instance_type_id,
'instance_flavor_id': instance_flavor_id,
'tenant': tenant,
'os_architecture': os_arch,
'os_distro': os_distro,
@ -194,6 +199,7 @@ class ReconcilerTestCase(StacktachBaseTestCase):
'launched_at': launch.launched_at,
'deleted_at': deleted_at,
'instance_type_id': launch.instance_type_id,
'instance_flavor_id': launch.instance_flavor_id,
'source': 'reconciler:mocked_client',
'tenant': TENANT_ID_1,
'os_architecture': DEFAULT_OS_ARCH,
@ -216,7 +222,7 @@ class ReconcilerTestCase(StacktachBaseTestCase):
launch = self.mox.CreateMockAnything()
launch.instance = INSTANCE_ID_1
launch.launched_at = beginning_d - (60*60)
launch.instance_type_id = 1
launch.instance_flavor_id = INSTANCE_FLAVOR_ID_1
models.InstanceUsage.objects.get(id=launch_id).AndReturn(launch)
deployment = self.mox.CreateMockAnything()
launch.deployment().AndReturn(deployment)
@ -233,9 +239,9 @@ class ReconcilerTestCase(StacktachBaseTestCase):
launch_id = 1
beginning_d = utils.decimal_utc()
launch = self.mox.CreateMockAnything()
launch.instance = INSTANCE_ID_1
launch.instance = INSTANCE_FLAVOR_ID_1
launch.launched_at = beginning_d - (60*60)
launch.instance_type_id = 1
launch.instance_flavor_id = 1
models.InstanceUsage.objects.get(id=launch_id).AndReturn(launch)
launch.deployment().AndReturn(None)
self.mox.ReplayAll()
@ -255,6 +261,7 @@ class ReconcilerTestCase(StacktachBaseTestCase):
'launched_at': exists.launched_at,
'deleted_at': exists.deleted_at,
'instance_type_id': exists.instance_type_id,
'instance_flavor_id': exists.instance_flavor_id,
'source': 'reconciler:mocked_client',
'tenant': TENANT_ID_1,
'os_architecture': DEFAULT_OS_ARCH,
@ -285,6 +292,7 @@ class ReconcilerTestCase(StacktachBaseTestCase):
'launched_at': exists.launched_at,
'deleted_at': exists.deleted_at,
'instance_type_id': exists.instance_type_id,
'instance_flavor_id': exists.instance_flavor_id,
'source': 'reconciler:mocked_client',
'tenant': TENANT_ID_1,
'os_architecture': DEFAULT_OS_ARCH,
@ -306,7 +314,7 @@ class ReconcilerTestCase(StacktachBaseTestCase):
exists.instance = INSTANCE_ID_1
launched_at = beginning_d - (60*60)
exists.launched_at = launched_at
exists.instance_type_id = 1
exists.instance_flavor_id = INSTANCE_FLAVOR_ID_1
exists.deleted_at = beginning_d
deployment = self.mox.CreateMockAnything()
exists.deployment().AndReturn(deployment)
@ -327,7 +335,7 @@ class ReconcilerTestCase(StacktachBaseTestCase):
exists.instance = INSTANCE_ID_1
launched_at = beginning_d - (60*60)
exists.launched_at = launched_at
exists.instance_type_id = 1
exists.instance_flavor_id = INSTANCE_FLAVOR_ID_1
exists.deleted_at = beginning_d
deployment = self.mox.CreateMockAnything()
exists.deployment().AndReturn(deployment)
@ -346,7 +354,7 @@ class ReconcilerTestCase(StacktachBaseTestCase):
exists.instance = INSTANCE_ID_1
launched_at = beginning_d - (60*60)
exists.launched_at = launched_at
exists.instance_type_id = 1
exists.instance_flavor_id = INSTANCE_FLAVOR_ID_1
exists.deleted_at = None
deployment = self.mox.CreateMockAnything()
exists.deployment().AndReturn(deployment)
@ -365,7 +373,7 @@ class ReconcilerTestCase(StacktachBaseTestCase):
exists.instance = INSTANCE_ID_1
launched_at = beginning_d - (60*60)
exists.launched_at = launched_at
exists.instance_type_id = 1
exists.instance_flavor_id = INSTANCE_FLAVOR_ID_1
exists.deleted_at = None
exists.deployment().AndReturn(None)
ex = exceptions.NotFound()
@ -465,14 +473,17 @@ class NovaJSONBridgeClientTestCase(StacktachBaseTestCase):
response.json().AndReturn(result)
def _fake_instance(self, uuid=INSTANCE_ID_1, launched_at=None,
terminated_at=None, deleted=0, instance_type_id=1,
project_id=TENANT_ID_1):
terminated_at=None, deleted=0,
instance_type_id=INSTANCE_TYPE_ID_1,
instance_flavor_id=INSTANCE_FLAVOR_ID_1,
project_id=TENANT_ID_1):
return {
'uuid': uuid,
'launched_at': launched_at,
'terminated_at': terminated_at,
'deleted': deleted,
'instance_type_id': instance_type_id,
'flavorid': instance_flavor_id,
'project_id': project_id
}
@ -488,8 +499,9 @@ class NovaJSONBridgeClientTestCase(StacktachBaseTestCase):
self.mox.ReplayAll()
instance = self.client.get_instance('RegionOne', INSTANCE_ID_1)
self.assertIsNotNone(instance)
self.assertEqual(instance['id'], INSTANCE_ID_1)
self.assertEqual(instance['instance_type_id'], '1')
self.assertEqual(instance['id'], INSTANCE_ID_1 )
self.assertEqual(instance['instance_type_id'], INSTANCE_TYPE_ID_1)
self.assertEqual(instance['instance_flavor_id'], INSTANCE_FLAVOR_ID_1)
launched_at_dec = stackutils.str_time_to_unix(launched_at)
self.assertEqual(instance['launched_at'], launched_at_dec)
terminated_at_dec = stackutils.str_time_to_unix(terminated_at)
@ -528,7 +540,7 @@ class NovaJSONBridgeClientTestCase(StacktachBaseTestCase):
get_metadata=True)
self.assertIsNotNone(instance)
self.assertEqual(instance['id'], INSTANCE_ID_1)
self.assertEqual(instance['instance_type_id'], '1')
self.assertEqual(instance['instance_flavor_id'], INSTANCE_FLAVOR_ID_1)
launched_at_dec = stackutils.str_time_to_unix(launched_at)
self.assertEqual(instance['launched_at'], launched_at_dec)
terminated_at_dec = stackutils.str_time_to_unix(terminated_at)

View File

@ -1,11 +1,9 @@
import glob
import logging
import os
import mox
from stacktach import stacklog
from stacktach.stacklog import ExchangeLogger
from tests.unit import StacktachBaseTestCase
class StacklogTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
@ -13,88 +11,47 @@ class StacklogTestCase(StacktachBaseTestCase):
def tearDown(self):
self.mox.UnsetStubs()
def test_get_logger_should_get_exchange_logger_if_exchange_provided(self):
filename = 'filename'
logger = stacklog.get_logger(filename, 'nova')
self.assertIsInstance(logger, ExchangeLogger)
for file in glob.glob('{0}.log*'.format(filename)):
os.remove(file)
def test_get_logger_should_create_timed_rotating_logger_for_parent(self):
logger_name = 'logger'
logger = stacklog.get_logger(logger_name, is_parent=True)
self.assertIsInstance(
logger.handlers[0], logging.handlers.TimedRotatingFileHandler)
self.assertEquals(logger.handlers[0].when, 'MIDNIGHT')
self.assertEquals(logger.handlers[0].interval, 86400)
self.assertEquals(logger.handlers[0].backupCount, 3)
self.assertEqual(logger.name, 'logger')
self.assertEquals(logger.level, logging.DEBUG)
def test_get_logger_should_get_default_logger_if_exchange_not_provided(self):
filename = 'default_logger'
logger = stacklog.get_logger(filename)
self.assertIsInstance(logger, logging.Logger)
for file in glob.glob('{0}.log*'.format(filename)):
os.remove(file)
def test_get_logger_should_create_queue_logger_for_child(self):
logger_name = 'logger'
stacklog.get_logger(logger_name, is_parent=True)
child_logger = stacklog.get_logger(logger_name, is_parent=False)
self.assertIsInstance(
child_logger.handlers[0], stacklog.QueueHandler)
self.assertEqual(child_logger.name, 'child_logger')
self.assertEquals(child_logger.level, logging.DEBUG)
def test_get_logger_should_use_default_name_when_name_not_specified(self):
logger = stacklog.get_logger(None, is_parent=True)
self.assertEquals(logger.name, stacklog.default_logger_name)
class ExchangeLoggerTestCase(StacktachBaseTestCase):
def setUp(self):
self.mox = mox.Mox()
stacklog.set_default_logger_name('default')
logger = stacklog.get_logger(None, is_parent=True)
self.assertEquals(logger.name, 'default')
def tearDown(self):
self.mox.UnsetStubs()
def test_get_logger_raise_exception_when_child_created_before_parent(self):
with self.assertRaises(stacklog.ParentLoggerDoesNotExist):
stacklog.get_logger('logger', is_parent=False)
def _setup_logger_mocks(self, name='name'):
mock_logger = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(logging, 'getLogger')
logging.getLogger(stacklog.__name__).AndReturn(mock_logger)
mock_logger.setLevel(logging.DEBUG)
self.mox.StubOutClassWithMocks(logging.handlers,
'TimedRotatingFileHandler')
filename = "/tmp/{0}.log".format(name)
handler = logging.handlers.TimedRotatingFileHandler(
filename, backupCount=3, interval=1, when='midnight')
self.mox.StubOutClassWithMocks(logging, 'Formatter')
mock_formatter = logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s")
handler.setFormatter(mock_formatter)
mock_logger.addHandler(handler)
mock_logger.handlers = [handler]
handler.doRollover()
return mock_logger
def test_get_logger_should_return_existing_parent_logger_if_present(self):
logger_1 = stacklog.get_logger('logger', is_parent=True)
logger_2 = stacklog.get_logger('logger', is_parent=True)
def test_exchange_logger_should_append_exchange_name_to_info(self):
mock_logger = self._setup_logger_mocks()
mock_logger.info('exchange: Log %s', 'args', xyz='xyz')
self.mox.ReplayAll()
log = ExchangeLogger('exchange', 'name')
log.info("Log %s", 'args', xyz='xyz')
self.mox.VerifyAll()
def test_exchange_logger_should_append_exchange_name_to_warn(self):
mock_logger = self._setup_logger_mocks()
mock_logger.warn('exchange: Log %s', 'args', xyz='xyz')
self.mox.ReplayAll()
logger = ExchangeLogger('exchange', 'name')
logger.warn("Log %s", 'args', xyz='xyz')
self.mox.VerifyAll()
def test_exchange_logger_should_append_exchange_name_to_error(self):
mock_logger = self._setup_logger_mocks()
mock_logger.error('exchange: Log %s', 'args', xyz='xyz')
self.mox.ReplayAll()
logger = ExchangeLogger('exchange', 'name')
logger.error("Log %s", 'args', xyz='xyz')
self.mox.VerifyAll()
def test_exchange_logger_should_append_exchange_name_to_exception(self):
mock_logger = self._setup_logger_mocks()
mock_logger.error('exchange: Log %s', 'args', xyz='xyz')
self.mox.ReplayAll()
logger = ExchangeLogger('exchange', 'name')
logger.exception("Log %s", 'args', xyz='xyz')
self.mox.VerifyAll()
def test_exchange_logger_should_use_default_name_if_not_provided(self):
self._setup_logger_mocks('stacktach-default')
self.mox.ReplayAll()
ExchangeLogger('exchange')
self.mox.VerifyAll()
self.assertIs(logger_1, logger_2)
def test_get_logger_should_return_existing_child_logger_if_present(self):
stacklog.get_logger('logger', is_parent=True)
child_logger_1 = stacklog.get_logger('logger', is_parent=False)
child_logger_2 = stacklog.get_logger('logger', is_parent=False)
self.assertIs(child_logger_1, child_logger_2)

View File

@ -25,6 +25,8 @@ import mox
import utils
from utils import BANDWIDTH_PUBLIC_OUTBOUND
from utils import INSTANCE_FLAVOR_ID_1
from utils import INSTANCE_FLAVOR_ID_2
from utils import INSTANCE_ID_1
from utils import OS_VERSION_1
from utils import OS_ARCH_1
@ -300,16 +302,18 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
def setup_mock_log(self, name=None):
if name is None:
stacklog.get_logger(name=mox.IgnoreArg()).AndReturn(self.log)
stacklog.get_logger(name=mox.IgnoreArg(),
is_parent=False).AndReturn(self.log)
else:
stacklog.get_logger(name=name).AndReturn(self.log)
stacklog.get_logger(name=name,
is_parent=False).AndReturn(self.log)
def test_all_instance_events_have_mapping(self):
for key, value in views.INSTANCE_EVENT.items():
msg = "'%s' does not have a process function mapping." % value
self.assertTrue(value in views.USAGE_PROCESS_MAPPING, msg)
def test_process_usage_for_new_launch_create_start(self):
def _create_mock_notification(self):
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
@ -320,6 +324,12 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification.instance_flavor_id = INSTANCE_FLAVOR_ID_1
return notification
def test_process_usage_for_new_launch_create_start(self):
notification = self._create_mock_notification()
notification.instance_flavor_id = INSTANCE_FLAVOR_ID_1
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.create.start'
@ -339,20 +349,12 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.assertEquals(usage.instance_flavor_id, INSTANCE_FLAVOR_ID_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_rescue_start(self):
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.rescue.start'
@ -376,17 +378,8 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
self.mox.VerifyAll()
def test_process_usage_for_new_launch_rebuild_start(self):
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification = self._create_mock_notification()
notification.instance_flavor_id = INSTANCE_FLAVOR_ID_1
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.rebuild.start'
usage = self.mox.CreateMockAnything()
@ -404,20 +397,12 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.assertEquals(usage.instance_flavor_id, INSTANCE_FLAVOR_ID_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_rebuild_start_when_no_launched_at_in_db(self):
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification = self._create_mock_notification()
notification.instance_flavor_id = INSTANCE_FLAVOR_ID_1
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.rebuild.start'
@ -437,21 +422,12 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
self.assertEquals(usage.os_version, OS_VERSION_1)
self.assertEquals(usage.os_distro, OS_DISTRO_1)
self.assertEquals(usage.rax_options, RAX_OPTIONS_1)
self.assertEquals(usage.instance_flavor_id, INSTANCE_FLAVOR_ID_1)
self.mox.VerifyAll()
def test_process_usage_for_new_launch_resize_prep_start_when_no_launched_at_in_db(self):
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.resize.prep.start'
@ -477,17 +453,7 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
self.mox.VerifyAll()
def test_process_usage_for_new_launch_resize_revert_start_when_no_launched_at_in_db(self):
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.resize.revert.start'
@ -511,17 +477,7 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
self.mox.VerifyAll()
def test_process_usage_for_new_launch_resize_prep_start_when_launched_at_in_db(self):
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.resize.prep.start'
@ -546,17 +502,7 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
self.mox.VerifyAll()
def test_process_usage_for_new_launch_rescue_start_when_launched_at_in_db(self):
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.rescue.start'
@ -581,18 +527,8 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
self.mox.VerifyAll()
def test_process_usage_for_updates_create_end(self):
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification.message = None
notification = self._create_mock_notification()
notification.message = 'Success'
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.create.end'
@ -616,18 +552,7 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
self.mox.VerifyAll()
def test_process_usage_for_updates_rescue_end(self):
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification.message = None
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.rescue.end'
@ -651,18 +576,8 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
self.mox.VerifyAll()
def test_process_usage_for_updates_create_end_success_message(self):
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification = self._create_mock_notification()
notification.message = 'Success'
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.create.end'
@ -675,7 +590,6 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
self.mox.ReplayAll()
views._process_usage_for_updates(raw, notification)
self.assertEqual(usage.launched_at, utils.decimal_utc(DUMMY_TIME))
self.assertEqual(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
@ -698,18 +612,7 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
self.mox.VerifyAll()
def test_process_usage_for_updates_revert_end(self):
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification.message = None
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.resize.revert.end'
@ -733,24 +636,15 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
self.mox.VerifyAll()
def test_process_usage_for_updates_prep_end(self):
notification = self.mox.CreateMockAnything()
notification.launched_at = str(DUMMY_TIME)
notification.tenant = TENANT_ID_1
notification.rax_options = RAX_OPTIONS_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.instance = INSTANCE_ID_1
notification.request_id = REQUEST_ID_1
notification.new_instance_type_id = INSTANCE_TYPE_ID_2
notification.message = None
def test_process_usage_for_updates_finish_resize_end(self):
notification = self._create_mock_notification()
raw = self.mox.CreateMockAnything()
raw.event = 'compute.instance.resize.prep.end'
raw.event = 'compute.instance.finish_resize.end'
usage = self.mox.CreateMockAnything()
usage.launched_at = None
usage.instance_type_id = INSTANCE_TYPE_ID_2
usage.instance_flavor_id = INSTANCE_FLAVOR_ID_2
views.STACKDB.get_or_create_instance_usage(instance=INSTANCE_ID_1,
request_id=REQUEST_ID_1) \
.AndReturn((usage, True))
@ -759,7 +653,8 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
views._process_usage_for_updates(raw, notification)
self.assertEqual(usage.instance_type_id, INSTANCE_TYPE_ID_2)
self.assertEqual(usage.instance_type_id, INSTANCE_TYPE_ID_1)
self.assertEqual(usage.instance_flavor_id, INSTANCE_FLAVOR_ID_1)
self.assertEquals(usage.tenant, TENANT_ID_1)
self.assertEquals(usage.os_architecture, OS_ARCH_1)
self.assertEquals(usage.os_version, OS_VERSION_1)
@ -811,15 +706,11 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
self.mox.VerifyAll()
def test_process_exists(self):
def _create_exists_notification(self, audit_beginning, current_time,
launch_time, deleted_time):
notification = self.mox.CreateMockAnything()
current_time = datetime.datetime.utcnow()
launch_time = current_time - datetime.timedelta(hours=23)
launch_decimal = utils.decimal_utc(launch_time)
audit_beginning = current_time - datetime.timedelta(hours=20)
audit_beginning_decimal = utils.decimal_utc(audit_beginning)
audit_ending_decimal = utils.decimal_utc(current_time)
notification.launched_at = str(launch_time)
notification.deleted_at = str(deleted_time)
notification.audit_period_beginning = str(audit_beginning)
notification.audit_period_ending = str(current_time)
notification.tenant = TENANT_ID_1
@ -828,16 +719,27 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
notification.os_distro = OS_DISTRO_1
notification.rax_options = RAX_OPTIONS_1
notification.instance = INSTANCE_ID_1
notification.deleted_at = ''
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification.instance_flavor_id = INSTANCE_FLAVOR_ID_1
notification.message_id = MESSAGE_ID_1
notification.bandwidth_public_out = BANDWIDTH_PUBLIC_OUTBOUND
return notification
def test_process_exists(self):
current_time = datetime.datetime.utcnow()
launch_time = current_time - datetime.timedelta(hours=23)
launch_decimal = utils.decimal_utc(launch_time)
audit_beginning = current_time - datetime.timedelta(hours=20)
audit_beginning_decimal = utils.decimal_utc(audit_beginning)
audit_ending_decimal = utils.decimal_utc(current_time)
notification = self._create_exists_notification(
audit_beginning, current_time, launch_time, deleted_time='')
raw = self.mox.CreateMockAnything()
usage = self.mox.CreateMockAnything()
launched_range = (launch_decimal, launch_decimal+1)
views.STACKDB.get_instance_usage(instance=INSTANCE_ID_1,
launched_at__range=launched_range)\
.AndReturn(usage)
views.STACKDB.get_instance_usage(
instance=INSTANCE_ID_1,
launched_at__range=launched_range).AndReturn(usage)
exists_values = {
'message_id': MESSAGE_ID_1,
'instance': INSTANCE_ID_1,
@ -845,6 +747,7 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
'audit_period_beginning': audit_beginning_decimal,
'audit_period_ending': audit_ending_decimal,
'instance_type_id': INSTANCE_TYPE_ID_1,
'instance_flavor_id': INSTANCE_FLAVOR_ID_1,
'usage': usage,
'raw': raw,
'tenant': TENANT_ID_1,
@ -874,7 +777,6 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
self.mox.VerifyAll()
def test_process_exists_with_deleted_at(self):
notification = self.mox.CreateMockAnything()
current_time = datetime.datetime.utcnow()
launch_time = current_time - datetime.timedelta(hours=23)
launch_decimal = utils.decimal_utc(launch_time)
@ -883,20 +785,8 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
audit_beginning = current_time - datetime.timedelta(hours=20)
audit_beginning_decimal = utils.decimal_utc(audit_beginning)
audit_ending_decimal = utils.decimal_utc(current_time)
notification.launched_at = str(launch_time)
notification.audit_period_beginning = str(audit_beginning)
notification.audit_period_ending = str(current_time)
notification.tenant = TENANT_ID_1
notification.os_architecture = OS_ARCH_1
notification.os_version = OS_VERSION_1
notification.os_distro = OS_DISTRO_1
notification.rax_options = RAX_OPTIONS_1
notification.instance = INSTANCE_ID_1
notification.instance_type_id = INSTANCE_TYPE_ID_1
notification.message_id = MESSAGE_ID_1
notification.deleted_at = str(delete_time)
notification.bandwidth_public_out = BANDWIDTH_PUBLIC_OUTBOUND
notification = self._create_exists_notification(
audit_beginning, current_time, launch_time, delete_time)
raw = self.mox.CreateMockAnything()
usage = self.mox.CreateMockAnything()
launched_range = (launch_decimal, launch_decimal+1)
@ -915,6 +805,7 @@ class StacktachUsageParsingTestCase(StacktachBaseTestCase):
'audit_period_beginning': audit_beginning_decimal,
'audit_period_ending': audit_ending_decimal,
'instance_type_id': INSTANCE_TYPE_ID_1,
'instance_flavor_id': INSTANCE_FLAVOR_ID_1,
'usage': usage,
'delete': delete,
'raw': raw,

View File

@ -59,9 +59,10 @@ class StacktachDBTestCase(StacktachBaseTestCase):
def setup_mock_log(self, name=None):
if name is None:
stacklog.get_logger(name=mox.IgnoreArg()).AndReturn(self.log)
stacklog.get_logger(name=mox.IgnoreArg(),
is_parent=False).AndReturn(self.log)
else:
stacklog.get_logger(name=name).AndReturn(self.log)
stacklog.get_logger(name=name, is_parent=False).AndReturn(self.log)
def test_safe_get(self):
Model = self.mox.CreateMockAnything()

View File

@ -19,6 +19,7 @@
# IN THE SOFTWARE.
import mox
import decimal
from stacktach import utils as stacktach_utils
from utils import INSTANCE_ID_1
@ -60,4 +61,33 @@ class StacktachUtilsTestCase(StacktachBaseTestCase):
def test_is_message_id_like_invalid(self):
uuid = "$-^&#$"
self.assertFalse(stacktach_utils.is_request_id_like(uuid))
self.assertFalse(stacktach_utils.is_request_id_like(uuid))
def test_str_time_to_unix(self):
self.assertEqual(
stacktach_utils.str_time_to_unix("2013-05-15T11:51:11Z"),
decimal.Decimal('1368618671'))
self.assertEqual(
stacktach_utils.str_time_to_unix("2013-05-15T11:51:11.123Z"),
decimal.Decimal('1368618671.123'))
self.assertEqual(
stacktach_utils.str_time_to_unix("2013-05-15T11:51:11"),
decimal.Decimal('1368618671'))
self.assertEqual(
stacktach_utils.str_time_to_unix("2013-05-15T11:51:11.123"),
decimal.Decimal('1368618671.123'))
self.assertEqual(
stacktach_utils.str_time_to_unix("2013-05-15 11:51:11"),
decimal.Decimal('1368618671'))
self.assertEqual(
stacktach_utils.str_time_to_unix("2013-05-15 11:51:11.123"),
decimal.Decimal('1368618671.123'))
with self.assertRaises(Exception):
stacktach_utils.str_time_to_unix("invalid date"),
decimal.Decimal('1368618671')

View File

@ -29,7 +29,8 @@ from stacktach import datetime_to_decimal as dt
from stacktach import models
from stacktach import stacky_server
import utils
from utils import INSTANCE_ID_1
from utils import INSTANCE_ID_1, INSTANCE_TYPE_ID_1
from utils import INSTANCE_FLAVOR_ID_1
from utils import INSTANCE_ID_2
from utils import REQUEST_ID_1
@ -110,13 +111,24 @@ class StackyServerTestCase(StacktachBaseTestCase):
self.mox.VerifyAll()
def test_get_host_names(self):
def test_get_host_names_for_nova(self):
result = self.mox.CreateMockAnything()
models.RawData.objects.values('host').AndReturn(result)
result.distinct().AndReturn(result)
self.mox.ReplayAll()
event_names = stacky_server.get_host_names()
event_names = stacky_server.get_host_names('nova')
self.assertEqual(event_names, result)
self.mox.VerifyAll()
def test_get_host_names_for_glance(self):
result = self.mox.CreateMockAnything()
models.GlanceRawData.objects.values('host').AndReturn(result)
result.distinct().AndReturn(result)
self.mox.ReplayAll()
event_names = stacky_server.get_host_names('glance')
self.assertEqual(event_names, result)
self.mox.VerifyAll()
@ -280,11 +292,12 @@ class StackyServerTestCase(StacktachBaseTestCase):
def test_do_hosts(self):
fake_request = self.mox.CreateMockAnything()
fake_request.GET = {'service': 'service'}
host1 = {'host': 'www.demo.com'}
host2 = {'host': 'www.example.com'}
hosts = [host1, host2]
self.mox.StubOutWithMock(stacky_server, 'get_host_names')
stacky_server.get_host_names().AndReturn(hosts)
stacky_server.get_host_names('service').AndReturn(hosts)
self.mox.ReplayAll()
resp = stacky_server.do_hosts(fake_request)
@ -1062,7 +1075,8 @@ class StackyServerTestCase(StacktachBaseTestCase):
usage = self.mox.CreateMockAnything()
usage.instance = INSTANCE_ID_1
usage.launched_at = utils.decimal_utc()
usage.instance_type_id = 1
usage.instance_type_id = INSTANCE_TYPE_ID_1
usage.instance_flavor_id = INSTANCE_FLAVOR_ID_1
results[None:50].AndReturn(results)
results.__iter__().AndReturn([usage].__iter__())
self.mox.ReplayAll()
@ -1072,11 +1086,13 @@ class StackyServerTestCase(StacktachBaseTestCase):
resp_json = json.loads(resp.content)
self.assertEqual(len(resp_json), 2)
self.assertEqual(resp_json[0], ["UUID", "Launched At",
"Instance Type Id"])
"Instance Type Id",
"Instance Flavor Id"])
self.assertEqual(resp_json[1][0], INSTANCE_ID_1)
time_str = dt.dt_from_decimal(usage.launched_at)
self.assertEqual(resp_json[1][1], str(time_str))
self.assertEqual(resp_json[1][2], 1)
self.assertEqual(resp_json[1][2], INSTANCE_TYPE_ID_1)
self.assertEqual(resp_json[1][3], INSTANCE_FLAVOR_ID_1)
self.mox.VerifyAll()
@ -1089,7 +1105,8 @@ class StackyServerTestCase(StacktachBaseTestCase):
usage = self.mox.CreateMockAnything()
usage.instance = INSTANCE_ID_1
usage.launched_at = utils.decimal_utc()
usage.instance_type_id = 1
usage.instance_type_id = INSTANCE_TYPE_ID_1
usage.instance_flavor_id = INSTANCE_FLAVOR_ID_1
results[None:50].AndReturn(results)
results.__iter__().AndReturn([usage].__iter__())
self.mox.ReplayAll()
@ -1099,11 +1116,13 @@ class StackyServerTestCase(StacktachBaseTestCase):
resp_json = json.loads(resp.content)
self.assertEqual(len(resp_json), 2)
self.assertEqual(resp_json[0], ["UUID", "Launched At",
"Instance Type Id"])
"Instance Type Id",
"Instance Flavor Id"])
self.assertEqual(resp_json[1][0], INSTANCE_ID_1)
time_str = dt.dt_from_decimal(usage.launched_at)
self.assertEqual(resp_json[1][1], str(time_str))
self.assertEqual(resp_json[1][2], 1)
self.assertEqual(resp_json[1][2], INSTANCE_TYPE_ID_1)
self.assertEqual(resp_json[1][3], INSTANCE_FLAVOR_ID_1)
self.mox.VerifyAll()
@ -1199,7 +1218,8 @@ class StackyServerTestCase(StacktachBaseTestCase):
usage.instance = INSTANCE_ID_1
usage.launched_at = utils.decimal_utc()
usage.deleted_at = usage.launched_at + 10
usage.instance_type_id = 1
usage.instance_type_id = INSTANCE_TYPE_ID_1
usage.instance_flavor_id = INSTANCE_FLAVOR_ID_1
usage.message_id = 'someid'
usage.status = 'pending'
results[None:50].AndReturn(results)
@ -1211,13 +1231,18 @@ class StackyServerTestCase(StacktachBaseTestCase):
resp_json = json.loads(resp.content)
self.assertEqual(len(resp_json), 2)
self.assertEqual(resp_json[0], ["UUID", "Launched At", "Deleted At",
"Instance Type Id", "Message ID",
"Instance Type Id",
"Instance Flavor Id", "Message ID",
"Status"])
self.assertEqual(resp_json[1][0], INSTANCE_ID_1)
launch_time_str = dt.dt_from_decimal(usage.launched_at)
self.assertEqual(resp_json[1][1], str(launch_time_str))
delete_time_str = dt.dt_from_decimal(usage.deleted_at)
self.assertEqual(resp_json[1][2], str(delete_time_str))
self.assertEqual(resp_json[1][3], INSTANCE_TYPE_ID_1)
self.assertEqual(resp_json[1][4], INSTANCE_FLAVOR_ID_1)
self.assertEqual(resp_json[1][5], 'someid')
self.assertEqual(resp_json[1][6], 'pending')
self.mox.VerifyAll()
def test_do_list_usage_exists_with_instance(self):
@ -1230,7 +1255,8 @@ class StackyServerTestCase(StacktachBaseTestCase):
usage.instance = INSTANCE_ID_1
usage.launched_at = utils.decimal_utc()
usage.deleted_at = usage.launched_at + 10
usage.instance_type_id = 1
usage.instance_type_id = INSTANCE_TYPE_ID_1
usage.instance_flavor_id = INSTANCE_FLAVOR_ID_1
usage.message_id = 'someid'
usage.status = 'pending'
results[None:50].AndReturn(results)
@ -1242,13 +1268,18 @@ class StackyServerTestCase(StacktachBaseTestCase):
resp_json = json.loads(resp.content)
self.assertEqual(len(resp_json), 2)
self.assertEqual(resp_json[0], ["UUID", "Launched At", "Deleted At",
"Instance Type Id", "Message ID",
"Instance Type Id",
"Instance Flavor Id", "Message ID",
"Status"])
self.assertEqual(resp_json[1][0], INSTANCE_ID_1)
launch_time_str = dt.dt_from_decimal(usage.launched_at)
self.assertEqual(resp_json[1][1], str(launch_time_str))
delete_time_str = dt.dt_from_decimal(usage.deleted_at)
self.assertEqual(resp_json[1][2], str(delete_time_str))
self.assertEqual(resp_json[1][3], INSTANCE_TYPE_ID_1)
self.assertEqual(resp_json[1][4], INSTANCE_FLAVOR_ID_1)
self.assertEqual(resp_json[1][5], 'someid')
self.assertEqual(resp_json[1][6], 'pending')
self.mox.VerifyAll()
def test_do_list_usage_exists_bad_instance(self):

View File

@ -23,7 +23,7 @@ import json
import kombu
import mox
from stacktach import db
from stacktach import db, stacklog
from stacktach import views
import worker.worker as worker
from tests.unit import StacktachBaseTestCase
@ -36,6 +36,12 @@ class ConsumerTestCase(StacktachBaseTestCase):
def tearDown(self):
self.mox.UnsetStubs()
def _setup_mock_logger(self):
mock_logger = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(stacklog, 'get_logger')
stacklog.get_logger('worker', is_parent=False).AndReturn(mock_logger)
return mock_logger
def _test_topics(self):
return [
dict(queue="queue1", routing_key="monitor.info"),
@ -103,7 +109,6 @@ class ConsumerTestCase(StacktachBaseTestCase):
self.assertEqual(actual_queue, queue)
self.mox.VerifyAll()
def test_create_queue_with_queue_args(self):
self.mox.StubOutClassWithMocks(kombu, 'Queue')
exchange = self.mox.CreateMockAnything()
@ -157,6 +162,14 @@ class ConsumerTestCase(StacktachBaseTestCase):
worker.POST_PROCESS_METHODS["RawData"] = old_handler
def test_run(self):
mock_logger = self._setup_mock_logger()
self.mox.StubOutWithMock(mock_logger, 'info')
mock_logger.info('east_coast.prod.global: nova 10.0.0.1 5672 rabbit /')
self.mox.StubOutWithMock(mock_logger, 'debug')
mock_logger.debug("Processing on 'east_coast.prod.global nova'")
mock_logger.debug("Completed processing on "
"'east_coast.prod.global nova'")
config = {
'name': 'east_coast.prod.global',
'durable_queue': False,
@ -168,10 +181,10 @@ class ConsumerTestCase(StacktachBaseTestCase):
"services": ["nova"],
"topics": {"nova": self._test_topics()}
}
self.mox.StubOutWithMock(db, 'get_or_create_deployment')
self.mox.StubOutWithMock(db, 'get_deployment')
deployment = self.mox.CreateMockAnything()
db.get_or_create_deployment(config['name'])\
.AndReturn((deployment, True))
deployment.id = 1
db.get_deployment(deployment.id).AndReturn(deployment)
self.mox.StubOutWithMock(kombu.connection, 'BrokerConnection')
params = dict(hostname=config['rabbit_host'],
port=config['rabbit_port'],
@ -193,10 +206,18 @@ class ConsumerTestCase(StacktachBaseTestCase):
consumer.run()
worker.continue_running().AndReturn(False)
self.mox.ReplayAll()
worker.run(config, exchange)
worker.run(config, deployment.id, exchange)
self.mox.VerifyAll()
def test_run_queue_args(self):
mock_logger = self._setup_mock_logger()
self.mox.StubOutWithMock(mock_logger, 'info')
mock_logger.info("east_coast.prod.global: nova 10.0.0.1 5672 rabbit /")
self.mox.StubOutWithMock(mock_logger, 'debug')
mock_logger.debug("Processing on 'east_coast.prod.global nova'")
mock_logger.debug("Completed processing on "
"'east_coast.prod.global nova'")
config = {
'name': 'east_coast.prod.global',
'durable_queue': False,
@ -210,10 +231,10 @@ class ConsumerTestCase(StacktachBaseTestCase):
"services": ["nova"],
"topics": {"nova": self._test_topics()}
}
self.mox.StubOutWithMock(db, 'get_or_create_deployment')
self.mox.StubOutWithMock(db, 'get_deployment')
deployment = self.mox.CreateMockAnything()
db.get_or_create_deployment(config['name'])\
.AndReturn((deployment, True))
deployment.id = 1
db.get_deployment(deployment.id).AndReturn(deployment)
self.mox.StubOutWithMock(kombu.connection, 'BrokerConnection')
params = dict(hostname=config['rabbit_host'],
port=config['rabbit_port'],
@ -236,5 +257,5 @@ class ConsumerTestCase(StacktachBaseTestCase):
consumer.run()
worker.continue_running().AndReturn(False)
self.mox.ReplayAll()
worker.run(config, exchange)
worker.run(config, deployment.id, exchange)
self.mox.VerifyAll()

View File

@ -30,6 +30,9 @@ IMAGE_UUID_1 = "12345678-6352-4dbc-8271-96cc54bf14cd"
INSTANCE_ID_1 = "08f685d9-6352-4dbc-8271-96cc54bf14cd"
INSTANCE_ID_2 = "515adf96-41d3-b86d-5467-e584edc61dab"
INSTANCE_FLAVOR_ID_1 = "performance1-120"
INSTANCE_FLAVOR_ID_2 = "performance2-120"
INSTANCE_TYPE_ID_1 = "12345"
INSTANCE_TYPE_ID_2 = '54321'
@ -66,6 +69,9 @@ PORT = '5672'
VIRTUAL_HOST = '/'
USERID = 'rabbit'
PASSWORD = 'password'
NOVA_VERIFIER_EVENT_TYPE = 'compute.instance.exists.verified.old'
GLANCE_VERIFIER_EVENT_TYPE = 'image.exists.verified.old'
FLAVOR_FIELD_NAME = 'flavor_field_name'
def decimal_utc(t = datetime.datetime.utcnow()):
return dt.dt_to_decimal(t)
@ -152,7 +158,8 @@ def create_tracker(mox, request_id, lifecycle, start, last_timing=None,
class FakeVerifierConfig(object):
def __init__(self, host, port, virtual_host, userid, password, tick_time,
settle_time, settle_units, durable_queue, topics, notifs):
settle_time, settle_units, durable_queue, topics, notifs,
nova_event_type, glance_event_type, flavor_field_name):
self.host = lambda: host
self.port = lambda: port
self.virtual_host = lambda: virtual_host
@ -166,11 +173,17 @@ class FakeVerifierConfig(object):
self.topics = lambda: topics
self.enable_notifications = lambda: notifs
self.validation_level = lambda: 'all'
self.nova_event_type = lambda: nova_event_type
self.glance_event_type = lambda: glance_event_type
self.flavor_field_name = lambda: flavor_field_name
def make_verifier_config(notifs):
topics = {'exchange': ['notifications.info']}
config = FakeVerifierConfig(HOST, PORT, VIRTUAL_HOST, USERID,
PASSWORD, TICK_TIME, SETTLE_TIME,
SETTLE_UNITS, True, topics, notifs)
SETTLE_UNITS, True, topics, notifs,
NOVA_VERIFIER_EVENT_TYPE,
GLANCE_VERIFIER_EVENT_TYPE,
FLAVOR_FIELD_NAME)
return config

View File

@ -27,6 +27,7 @@ import time
import multiprocessing
from django.db import transaction
from stacktach import message_service
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
@ -34,10 +35,18 @@ POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
sys.path.insert(0, POSSIBLE_TOPDIR)
from verifier import WrongTypeException
from django.db import close_connection
from django.db import reset_queries
from django.core import exceptions
from stacktach import stacklog, message_service
LOG = stacklog.get_logger('verifier')
from verifier import WrongTypeException
from stacktach import stacklog
stacklog.set_default_logger_name('verifier')
def _get_child_logger():
return stacklog.get_logger('verifier', is_parent=False)
def _has_field(d1, d2, field1, field2=None):
@ -151,10 +160,11 @@ class Verifier(object):
if self.reconciler:
self.reconcile_failed()
msg = "%s: N: %s, P: %s, S: %s, E: %s" % values
LOG.info(msg)
_get_child_logger().info(msg)
time.sleep(tick_time)
def run(self):
logger = _get_child_logger()
if self.enable_notifications:
exchange_name = self.exchange()
exchange = message_service.create_exchange(
@ -167,15 +177,31 @@ class Verifier(object):
self.config.userid(), self.config.password(),
"librabbitmq", self.config.virtual_host()) as conn:
def callback(result):
try:
(verified, exist) = result
if verified:
self.send_verified_notification(
exist, conn, exchange,
routing_keys=routing_keys)
except Exception, e:
msg = "ERROR in Callback %s: %s" % (exchange_name, e)
LOG.exception(msg, e)
attempt = 0
while attempt < 2:
try:
(verified, exist) = result
if verified:
self.send_verified_notification(
exist, conn, exchange,
routing_keys=routing_keys)
break
except exceptions.ObjectDoesNotExist:
if attempt < 1:
logger.warn("ObjectDoesNotExist in callback, "
"attempting to reconnect and try "
"again.")
close_connection()
reset_queries()
else:
logger.error("ObjectDoesNotExist in callback "
"again, giving up.")
except Exception, e:
msg = "ERROR in Callback %s: %s" % (exchange_name,
e)
logger.exception(msg)
break
attempt += 1
try:
self._run(callback=callback)
except Exception, e:

View File

@ -29,8 +29,12 @@ except ImportError:
pass
config = None
with open(config_filename, "r") as f:
config = json.load(f)
def load():
global config
with open(config_filename, "r") as f:
config = json.load(f)
def enable_notifications():
@ -91,3 +95,15 @@ def virtual_host():
def validation_level():
return config['validation_level']
def nova_event_type():
return config.get('nova_event_type', 'compute.instance.exists.verified.old')
def glance_event_type():
return config.get('glance_event_type', 'image.exists.verified.old')
def flavor_field_name():
return config['flavor_field_name']

View File

@ -37,9 +37,15 @@ from verifier import base_verifier
from verifier import NullFieldException
from verifier import NotFound
from stacktach import datetime_to_decimal as dt
from stacktach import stacklog
from stacktach import message_service
import datetime
from stacktach import stacklog, message_service
LOG = stacklog.get_logger('verifier')
stacklog.set_default_logger_name('verifier')
def _get_child_logger():
return stacklog.get_logger('verifier', is_parent=False)
def _verify_field_mismatch(exists, usage):
@ -115,31 +121,27 @@ def _verify_for_delete(exist, delete=None):
raise VerificationException(reason)
if delete:
if not base_verifier._verify_date_field(
delete.created_at, exist.created_at, same_second=True):
raise FieldMismatch('created_at', exist.created_at,
delete.created_at)
if not base_verifier._verify_date_field(
delete.deleted_at, exist.deleted_at, same_second=True):
raise FieldMismatch('deleted_at', exist.deleted_at,
delete.deleted_at)
def _verify(exist):
verified = False
try:
_verify_for_usage(exist)
_verify_for_delete(exist)
_verify_validity(exist)
def _verify(exists):
verified = True
for exist in exists:
try:
_verify_for_usage(exist)
_verify_for_delete(exist)
_verify_validity(exist)
verified = True
exist.mark_verified()
except Exception, e:
exist.mark_failed(reason=e.__class__.__name__)
LOG.exception("glance: %s" % e)
exist.mark_verified()
except Exception, e:
verified = False
exist.mark_failed(reason=e.__class__.__name__)
_get_child_logger().exception("glance: %s" % e)
return verified, exist
return verified, exists[0]
class GlanceVerifier(Verifier):
@ -147,25 +149,28 @@ class GlanceVerifier(Verifier):
super(GlanceVerifier, self).__init__(config, pool=pool)
def verify_for_range(self, ending_max, callback=None):
exists = models.ImageExists.find(
ending_max=ending_max, status=models.ImageExists.PENDING)
count = exists.count()
exists_grouped_by_owner_and_rawid = \
models.ImageExists.find_and_group_by_owner_and_raw_id(
ending_max=ending_max,
status=models.ImageExists.PENDING)
count = len(exists_grouped_by_owner_and_rawid)
added = 0
update_interval = datetime.timedelta(seconds=30)
next_update = datetime.datetime.utcnow() + update_interval
LOG.info("glance: Adding %s exists to queue." % count)
_get_child_logger().info("glance: Adding %s per-owner exists to queue." % count)
while added < count:
for exist in exists[0:1000]:
exist.status = models.ImageExists.VERIFYING
exist.save()
result = self.pool.apply_async(_verify, args=(exist,),
for exists in exists_grouped_by_owner_and_rawid.values():
for exist in exists:
exist.status = models.ImageExists.VERIFYING
exist.save()
result = self.pool.apply_async(_verify, args=(exists,),
callback=callback)
self.results.append(result)
added += 1
if datetime.datetime.utcnow() > next_update:
values = ((added,) + self.clean_results())
msg = "glance: N: %s, P: %s, S: %s, E: %s" % values
LOG.info(msg)
_get_child_logger().info(msg)
next_update = datetime.datetime.utcnow() + update_interval
return count
@ -178,7 +183,7 @@ class GlanceVerifier(Verifier):
# So, grab a new InstanceExists object from the database and use it.
body = models.ImageExists.objects.get(id=exist.id).raw.json
json_body = json.loads(body)
json_body[1]['event_type'] = 'image.exists.verified.old'
json_body[1]['event_type'] = self.config.glance_event_type()
json_body[1]['original_message_id'] = json_body[1]['message_id']
json_body[1]['message_id'] = str(uuid.uuid4())
if routing_keys is None:

View File

@ -31,26 +31,36 @@ if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
sys.path.insert(0, POSSIBLE_TOPDIR)
from verifier import base_verifier
from verifier import config
from verifier import NullFieldException
from stacktach import models
from stacktach import stacklog
from stacktach import datetime_to_decimal as dt
from verifier import FieldMismatch
from verifier import AmbiguousResults
from verifier import NotFound
from verifier import VerificationException
from stacktach import stacklog, message_service
LOG = stacklog.get_logger('verifier')
from stacktach import message_service
stacklog.set_default_logger_name('verifier')
def _get_child_logger():
return stacklog.get_logger('verifier', is_parent=False)
def _verify_field_mismatch(exists, launch):
flavor_field_name = config.flavor_field_name()
if not base_verifier._verify_date_field(
launch.launched_at, exists.launched_at, same_second=True):
raise FieldMismatch('launched_at', exists.launched_at,
launch.launched_at)
if launch.instance_type_id != exists.instance_type_id:
raise FieldMismatch('instance_type_id', exists.instance_type_id,
launch.instance_type_id)
if getattr(launch, flavor_field_name) != \
getattr(exists, flavor_field_name):
raise FieldMismatch(flavor_field_name,
getattr(exists, flavor_field_name),
getattr(launch, flavor_field_name))
if launch.tenant != exists.tenant:
raise FieldMismatch('tenant', exists.tenant,
@ -146,10 +156,13 @@ def _verify_for_delete(exist, delete=None,
def _verify_basic_validity(exist):
fields = {exist.tenant: 'tenant',
exist.launched_at: 'launched_at',
exist.instance_type_id: 'instance_type_id'}
for (field_value, field_name) in fields.items():
flavor_field_name = config.flavor_field_name()
fields = {
'tenant': exist.tenant,
'launched_at': exist.launched_at,
flavor_field_name: getattr(exist, flavor_field_name)
}
for (field_name, field_value) in fields.items():
if field_value is None:
raise NullFieldException(field_name, exist.id)
base_verifier._is_hex_owner_id('tenant', exist.tenant, exist.id)
@ -171,13 +184,6 @@ def _verify_optional_validity(exist):
base_verifier._is_alphanumeric('os_distro', exist.os_distro, exist.id)
base_verifier._is_alphanumeric('os_version', exist.os_version, exist.id)
def verify_fields_not_null(exist_id, null_value, fields):
for (field_value, field_name) in fields.items():
print "value: %s, name = %s" % (field_value, field_name)
if field_value == null_value:
raise NullFieldException(field_name, exist_id)
def _verify_validity(exist, validation_level):
if validation_level == 'none':
@ -233,7 +239,7 @@ def _attempt_reconciled_verify(exist, orig_e):
exist.mark_failed(reason=str(rec_e))
except Exception, rec_e:
exist.mark_failed(reason=rec_e.__class__.__name__)
LOG.exception("nova: %s" % rec_e)
_get_child_logger().exception("nova: %s" % rec_e)
return verified
@ -242,7 +248,6 @@ def _verify(exist, validation_level):
try:
if not exist.launched_at:
raise VerificationException("Exists without a launched_at")
_verify_validity(exist, validation_level)
_verify_for_launch(exist)
_verify_for_delete(exist)
@ -254,7 +259,7 @@ def _verify(exist, validation_level):
verified = _attempt_reconciled_verify(exist, orig_e)
except Exception, e:
exist.mark_failed(reason=e.__class__.__name__)
LOG.exception("nova: %s" % e)
_get_child_logger().exception("nova: %s" % e)
return verified, exist
@ -274,7 +279,7 @@ class NovaVerifier(base_verifier.Verifier):
# So, grab a new InstanceExists object from the database and use it.
body = models.InstanceExists.objects.get(id=exist.id).raw.json
json_body = json.loads(body)
json_body[1]['event_type'] = 'compute.instance.exists.verified.old'
json_body[1]['event_type'] = self.config.nova_event_type()
json_body[1]['original_message_id'] = json_body[1]['message_id']
json_body[1]['message_id'] = str(uuid.uuid4())
if routing_keys is None:
@ -292,7 +297,7 @@ class NovaVerifier(base_verifier.Verifier):
added = 0
update_interval = datetime.timedelta(seconds=30)
next_update = datetime.datetime.utcnow() + update_interval
LOG.info("nova: Adding %s exists to queue." % count)
_get_child_logger().info("nova: Adding %s exists to queue." % count)
while added < count:
for exist in exists[0:1000]:
exist.update_status(models.InstanceExists.VERIFYING)
@ -306,7 +311,7 @@ class NovaVerifier(base_verifier.Verifier):
if datetime.datetime.utcnow() > next_update:
values = ((added,) + self.clean_results())
msg = "nova: N: %s, P: %s, S: %s, E: %s" % values
LOG.info(msg)
_get_child_logger().info(msg)
next_update = datetime.datetime.utcnow() + update_interval
return count

View File

@ -29,7 +29,7 @@ POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir, os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
sys.path.insert(0, POSSIBLE_TOPDIR)
from stacktach import stacklog
from stacktach import reconciler
from verifier import nova_verifier
from verifier import glance_verifier
@ -42,11 +42,16 @@ except ImportError:
pass
process = None
log_listener = None
processes = []
stacklog.set_default_logger_name('verifier')
def _get_parent_logger():
return stacklog.get_logger('verifier', is_parent=True)
def kill_time(signal, frame):
log_listener.end()
print "dying ..."
for process in processes:
process.terminate()
@ -80,6 +85,9 @@ if __name__ == '__main__':
verifier.run()
verifier_config.load()
log_listener = stacklog.LogListener(_get_parent_logger())
log_listener.start()
for exchange in verifier_config.topics().keys():
process = Process(target=make_and_start_verifier, args=(exchange,))
process.start()

View File

@ -9,13 +9,23 @@ POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'stacktach')):
sys.path.insert(0, POSSIBLE_TOPDIR)
from stacktach import db, stacklog
from django.db import close_connection
import worker.worker as worker
from worker import config
processes = []
log_listener = None
stacklog.set_default_logger_name('worker')
def _get_parent_logger():
return stacklog.get_logger('worker', is_parent=True)
def kill_time(signal, frame):
log_listener.end()
print "dying ..."
for process in processes:
process.terminate()
@ -27,11 +37,19 @@ def kill_time(signal, frame):
if __name__ == '__main__':
log_listener = stacklog.LogListener(_get_parent_logger())
log_listener.start()
for deployment in config.deployments():
if deployment.get('enabled', True):
db_deployment, new = db.get_or_create_deployment(deployment['name'])
# NOTE (apmelton)
# Close the connection before spinning up the child process,
# otherwise the child process will attempt to use the connection
# the parent process opened up to get/create the deployment.
close_connection()
for exchange in deployment.get('topics').keys():
process = Process(target=worker.run, args=(deployment,
db_deployment.id,
exchange,))
process.daemon = True
process.start()

View File

@ -34,12 +34,16 @@ except ImportError:
from pympler.process import ProcessMemoryInfo
from stacktach import db, message_service
from stacktach import db
from stacktach import message_service
from stacktach import stacklog
from stacktach import views
stacklog.set_default_logger_name('worker')
LOG = stacklog.get_logger()
def _get_child_logger():
return stacklog.get_logger('worker', is_parent=False)
class Consumer(kombu.mixins.ConsumerMixin):
@ -58,9 +62,10 @@ class Consumer(kombu.mixins.ConsumerMixin):
self.exchange = exchange
def _create_exchange(self, name, type, exclusive=False, auto_delete=False):
return message_service.create_exchange(name, exchange_type=type, exclusive=exclusive,
durable=self.durable,
auto_delete=auto_delete)
return message_service.create_exchange(name, exchange_type=type,
exclusive=exclusive,
durable=self.durable,
auto_delete=auto_delete)
def _create_queue(self, name, nova_exchange, routing_key, exclusive=False,
auto_delete=False):
@ -115,7 +120,7 @@ class Consumer(kombu.mixins.ConsumerMixin):
per_message = 0
if self.total_processed:
per_message = idiff / self.total_processed
LOG.debug("%20s %20s %6dk/%6dk ram, "
_get_child_logger().debug("%20s %20s %6dk/%6dk ram, "
"%3d/%4d msgs @ %6dk/msg" %
(self.name, self.exchange, diff, idiff, self.processed,
self.total_processed, per_message))
@ -126,9 +131,8 @@ class Consumer(kombu.mixins.ConsumerMixin):
try:
self._process(message)
except Exception, e:
LOG.debug("Problem: %s\nFailed message body:\n%s" %
(e, json.loads(str(message.body)))
)
_get_child_logger().debug("Problem: %s\nFailed message body:\n%s" %
(e, json.loads(str(message.body))))
raise
@ -142,7 +146,7 @@ def exit_or_sleep(exit=False):
time.sleep(5)
def run(deployment_config, exchange):
def run(deployment_config, deployment_id, exchange):
name = deployment_config['name']
host = deployment_config.get('rabbit_host', 'localhost')
port = deployment_config.get('rabbit_port', 5672)
@ -153,12 +157,13 @@ def run(deployment_config, exchange):
queue_arguments = deployment_config.get('queue_arguments', {})
exit_on_exception = deployment_config.get('exit_on_exception', False)
topics = deployment_config.get('topics', {})
logger = _get_child_logger()
deployment, new = db.get_or_create_deployment(name)
deployment = db.get_deployment(deployment_id)
print "Starting worker for '%s %s'" % (name, exchange)
LOG.info("%s: %s %s %s %s %s" % (name, exchange, host, port, user_id,
virtual_host))
logger.info("%s: %s %s %s %s %s" %
(name, exchange, host, port, user_id, virtual_host))
params = dict(hostname=host,
port=port,
@ -170,7 +175,7 @@ def run(deployment_config, exchange):
# continue_running() is used for testing
while continue_running():
try:
LOG.debug("Processing on '%s %s'" % (name, exchange))
logger.debug("Processing on '%s %s'" % (name, exchange))
with kombu.connection.BrokerConnection(**params) as conn:
try:
consumer = Consumer(name, conn, deployment, durable,
@ -178,18 +183,19 @@ def run(deployment_config, exchange):
topics[exchange])
consumer.run()
except Exception as e:
LOG.error("!!!!Exception!!!!")
LOG.exception("name=%s, exchange=%s, exception=%s. "
"Reconnecting in 5s" %
(name, exchange, e))
logger.error("!!!!Exception!!!!")
logger.exception(
"name=%s, exchange=%s, exception=%s. "
"Reconnecting in 5s" % (name, exchange, e))
exit_or_sleep(exit_on_exception)
LOG.debug("Completed processing on '%s %s'" % (name, exchange))
except:
LOG.error("!!!!Exception!!!!")
logger.debug("Completed processing on '%s %s'" %
(name, exchange))
except Exception:
logger.error("!!!!Exception!!!!")
e = sys.exc_info()[0]
msg = "Uncaught exception: deployment=%s, exchange=%s, " \
"exception=%s. Retrying in 5s"
LOG.exception(msg % (name, exchange, e))
logger.exception(msg % (name, exchange, e))
exit_or_sleep(exit_on_exception)
POST_PROCESS_METHODS = {