db host config, summary fix, durable queues

This commit is contained in:
Sandy Walsh 2012-11-05 11:02:16 -04:00
parent 693f7d56ce
commit 87f58e982c
6 changed files with 25 additions and 15 deletions

View File

@ -1,4 +1,5 @@
# StackTach environment configuration — source this before starting the app.
export STACKTACH_DB_NAME="stacktach"
# An empty host string means "connect to localhost" (Django convention).
export STACKTACH_DB_HOST=""
export STACKTACH_DB_USERNAME="root"
export STACKTACH_DB_PASSWORD="password"
# Directory the StackTach code is deployed into.
export STACKTACH_INSTALL_DIR="/srv/www/stacktach/"

View File

@ -1,6 +1,7 @@
{"deployments": [
{
"name": "east_coast.prod.global",
"durable_queue": false,
"rabbit_host": "10.0.0.1",
"rabbit_port": 5672,
"rabbit_userid": "rabbit",
@ -9,6 +10,7 @@
},
{
"name": "east_coast.prod.cell1",
"durable_queue": false,
"rabbit_host": "10.0.1.1",
"rabbit_port": 5672,
"rabbit_userid": "rabbit",

View File

@ -2,6 +2,7 @@
import os
# Database and install-path settings pulled from the environment so each
# deployment can configure them without editing this settings file.
db_name = os.environ['STACKTACH_DB_NAME']
# Optional: defaults to "" which Django treats as "use localhost".
db_host = os.environ.get('STACKTACH_DB_HOST', "")
db_username = os.environ['STACKTACH_DB_USERNAME']
db_password = os.environ['STACKTACH_DB_PASSWORD']
install_dir = os.environ['STACKTACH_INSTALL_DIR']
@ -21,7 +22,7 @@ DATABASES = {
'NAME': db_name,
'USER': db_username,
'PASSWORD': db_password,
'HOST': '', # Set to empty string for localhost.
'HOST': db_host, # Set to empty string for localhost.
'PORT': '', # Set to empty string for default.
}
}

View File

@ -149,14 +149,15 @@ def do_summary(request):
for name in interesting:
timings = models.Timing.objects.filter(name=name) \
.exclude(Q(start_raw=None) | Q(end_raw=None))
.exclude(Q(start_raw=None) | Q(end_raw=None)) \
.exclude(diff__lt=0)
if not timings:
continue
total, _min, _max = 0.0, None, None
num = len(timings)
for t in timings:
seconds = seconds_from_timing(t)
seconds = float(t.diff)
total += seconds
if _min is None:
_min = seconds

View File

@ -81,6 +81,9 @@ def aggregate(raw):
We can use this for summarized timing reports.
"""
if not raw.instance:
return
# While we hope only one lifecycle ever exists it's quite
# likely we get multiple due to the workers and threads.
lifecycle = None

View File

@ -34,18 +34,6 @@ handler = logging.handlers.TimedRotatingFileHandler('worker.log',
when='h', interval=6, backupCount=4)
LOG.addHandler(handler)
nova_exchange = kombu.entity.Exchange("nova", type="topic", exclusive=False,
durable=True, auto_delete=False)
nova_queues = [
kombu.Queue("monitor.info", nova_exchange, durable=True,
auto_delete=False,
exclusive=False, routing_key='monitor.info'),
kombu.Queue("monitor.error", nova_exchange, durable=True,
auto_delete=False,
exclusive=False, routing_key='monitor.error'),
]
class NovaConsumer(kombu.mixins.ConsumerMixin):
def __init__(self, name, connection, deployment):
@ -54,6 +42,20 @@ class NovaConsumer(kombu.mixins.ConsumerMixin):
self.name = name
def get_consumers(self, Consumer, channel):
    """Declare the nova topic exchange and its monitor queues, then
    return the consumer list kombu's ConsumerMixin will drive.

    Queue/exchange durability is taken from the per-deployment config
    ('durable_queue', defaulting to True) so each deployment can match
    how its rabbit broker was originally declared.
    """
    is_durable = self.deployment_config.get('durable_queue', True)
    exchange = kombu.entity.Exchange("nova", type="topic",
                                     exclusive=False, durable=is_durable,
                                     auto_delete=False)
    # Queue name and routing key are identical for both monitor queues.
    queues = [kombu.Queue(topic, exchange, durable=is_durable,
                          auto_delete=False, exclusive=False,
                          routing_key=topic)
              for topic in ("monitor.info", "monitor.error")]
    return [Consumer(queues=queues, callbacks=[self.on_nova])]
def _process(self, body, message):