Merge pull request #265 from ramielrowe/notif_scrubber

Adding notification scrubber script
Andrew Melton 2014-02-20 14:39:31 -05:00
commit 1c2042c8bf
3 changed files with 155 additions and 0 deletions


@@ -0,0 +1,10 @@
{
"host": "devstack.example.com",
"port": 5672,
"userid": "guest",
"password": "password",
"durable_queue": false,
"exchange": "nova",
"virtual_host": "/",
"routing_key": "monitor.info"
}
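
For reference, a minimal sketch (not part of the commit) of how these fields are consumed: the scrubber script below opens the file named by --rabbit_config (defaulting to rabbit_config.json) and builds the connection tuple it hands to message_service.create_connection().

    import json

    # Load the sample config and build the same (host, port, userid, password,
    # transport, virtual_host) tuple that scrub_with_notifications() constructs.
    with open('rabbit_config.json') as fp:
        cfg = json.load(fp)

    conn_conf = (cfg['host'], cfg['port'], cfg['userid'], cfg['password'],
                 'librabbitmq', cfg['virtual_host'])
    print(conn_conf)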


@@ -0,0 +1,75 @@
import argparse
import json
import os
import sys
import time

sys.path.append(os.environ.get('STACKTACH_INSTALL_DIR', '/stacktach'))

from stacktach import message_service as msg
from stacktach import utils

import scrubbers
def scrub(args, send_notif=lambda x: None):
    print "Starting scrub."
    start = utils.str_time_to_unix(args.start)
    end = utils.str_time_to_unix(args.end)

    if hasattr(scrubbers, args.scrubber):
        Scrubber = getattr(scrubbers, args.scrubber)
        scrubber = Scrubber(start, end)

        count = 0
        for raw in scrubber.raws():
            matches, body = scrubber.filter(raw)
            # filter() may report a match without a parsed body; parse it here
            # so scrub() always receives a dictionary.
            if matches and not body:
                body = json.loads(raw['json'])[1]

            if matches and body:
                scrubbed = scrubber.scrub(body)
                count += 1
                send_notif(scrubbed)
        return count
    else:
        print "No scrubber class %s." % args.scrubber
        return 0
def scrub_with_notifications(args):
    print "!!!!!! WARNING: SENDING TO RABBIT !!!!!!"
    print "!!!!!! Sleeping for 30 seconds !!!!!!"
    print "!!!!!! before proceeding !!!!!!"
    # Give the operator a chance to abort before anything is re-published.
    time.sleep(30)

    with open(args.rabbit_config) as fp:
        rabbit_config = json.load(fp)

    exchange = msg.create_exchange(rabbit_config['exchange'],
                                   'topic',
                                   durable=rabbit_config['durable_queue'])
    conn_conf = (rabbit_config['host'], rabbit_config['port'],
                 rabbit_config['userid'], rabbit_config['password'],
                 'librabbitmq', rabbit_config['virtual_host'])

    with msg.create_connection(*conn_conf) as conn:
        def send_notif(notif):
            msg.send_notification(notif, rabbit_config['routing_key'],
                                  conn, exchange)

        count = scrub(args, send_notif=send_notif)

    return count
if __name__ == '__main__':
    parser = argparse.ArgumentParser('Stacktach Notification Scrubber')
    parser.add_argument('--rabbit', action='store_true')
    parser.add_argument('--rabbit_config', default='rabbit_config.json')
    parser.add_argument('--scrubber', required=True)
    parser.add_argument('--start', required=True)
    parser.add_argument('--end', required=True)
    args = parser.parse_args()

    if args.rabbit:
        print "%s Events Scrubbed" % scrub_with_notifications(args)
    else:
        print "%s Events Scrubbed" % scrub(args)

70  scripts/scrubbers.py  Normal file

@@ -0,0 +1,70 @@
import json
import uuid

from django.db.models import F

from stacktach import models
class ScrubberBase(object):

    def __init__(self, start, end):
        self.start = start
        self.end = end

    def raws(self):
        """ Returns an iterable of Raws to scrub.
        """
        return [].__iter__()

    def filter(self, raw_data):
        """ Returns whether or not the provided RawData needs to be scrubbed.

        If the implementing function parses the json body to decide whether
        it needs to be scrubbed, the parsed body should be returned as the
        second return value so that it does not need to be parsed a second
        time for scrubbing. Negative matches need not return parsed json
        bodies.

        @raw_data: a RawData dictionary
        """
        return True, None

    def scrub(self, body):
        """ Returns the scrubbed json body of the RawData.

        @body: Dictionary version of the RawData's json.
        """
        return body
class ExistsCreatedAt(ScrubberBase):

    def raws(self):
        # Exists events in the requested window whose audit period covers
        # less than a full day.
        filters = {
            'raw__when__gte': self.start,
            'raw__when__lte': self.end,
            'audit_period_ending__lt': F('audit_period_beginning') + (60*60*24)
        }
        exists = models.InstanceExists.objects.filter(**filters)
        exists = exists.select_related('raw')
        for exist in exists.iterator():
            rawdata = exist.raw
            yield {'json': rawdata.json}

    def filter(self, raw_data):
        # Cheap substring check first; only parse the json when it might match.
        if '+00:00' in raw_data['json']:
            body = json.loads(raw_data['json'])[1]
            created_at = body.get('payload', {}).get('created_at')
            if created_at and '+00:00' in created_at:
                return True, body
            else:
                return False, None
        else:
            return False, None

    def scrub(self, body):
        # Drop the '+00:00' offset from created_at and give the re-sent
        # notification a fresh message_id.
        created_at = body['payload']['created_at']
        scrubbed_created_at = created_at.replace('+00:00', '')
        body['payload']['created_at'] = scrubbed_created_at
        body['message_id'] = str(uuid.uuid4())
        return body
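
For reference, a minimal sketch of another scrubber following the raws()/filter()/scrub() contract above. The class, its deleted_at handling, and the query are hypothetical illustrations, not part of this commit:

    class DeletedAtMicroseconds(ScrubberBase):
        # Hypothetical example: trims fractional seconds from payload deleted_at.

        def raws(self):
            # Assumed query: exists events inside the requested window.
            exists = models.InstanceExists.objects.filter(
                raw__when__gte=self.start, raw__when__lte=self.end)
            for exist in exists.select_related('raw').iterator():
                yield {'json': exist.raw.json}

        def filter(self, raw_data):
            body = json.loads(raw_data['json'])[1]
            deleted_at = body.get('payload', {}).get('deleted_at')
            if deleted_at and '.' in deleted_at:
                # Return the parsed body so scrub() does not reparse the json.
                return True, body
            return False, None

        def scrub(self, body):
            payload = body['payload']
            payload['deleted_at'] = payload['deleted_at'].split('.')[0]
            body['message_id'] = str(uuid.uuid4())
            return body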