Merge pull request #38 from ramielrowe/range_query_args
Adding range query args
Commit d7beacdbb4
stacktach/dbapi.py

@@ -1,76 +1,148 @@
 import decimal
+import functools
 import json

+from django.db.models import FieldDoesNotExist
 from django.forms.models import model_to_dict
 from django.http import HttpResponse
+from django.http import HttpResponseBadRequest
+from django.http import HttpResponseServerError
 from django.shortcuts import get_object_or_404

-import datetime_to_decimal as dt
-import models
+from stacktach import datetime_to_decimal as dt
+from stacktach import models
+from stacktach import utils


+class APIException(Exception):
+    def __init__(self):
+        self.status = 500
+        self.message = "Internal Server Error"
+
+    def to_dict(self):
+        return {'message': self.message,
+                'status': self.status}
+
+
+class BadRequestException(APIException):
+    def __init__(self, message="Bad Request"):
+        self.status = 400
+        self.message = message
+
+
 def rsp(data):
     return HttpResponse(json.dumps(data), content_type="application/json")


+def api_call(func):
+
+    @functools.wraps(func)
+    def handled(*args, **kwargs):
+        try:
+            return rsp(func(*args, **kwargs))
+        except BadRequestException, e:
+            return HttpResponseBadRequest(json.dumps(e.to_dict()),
+                                          content_type="application/json")
+        except APIException, e:
+            return HttpResponseServerError(json.dumps(e.to_dict()),
+                                           content_type="application/json")
+
+    return handled
+
+
+@api_call
+def list_usage_launches(request):
+    filter_args = _get_filter_args(models.InstanceUsage, request)
+
+    if len(filter_args) > 0:
+        objects = models.InstanceUsage.objects.filter(**filter_args)
+    else:
+        objects = models.InstanceUsage.objects.all()
+
+    dicts = _convert_model_list(objects.order_by("launched_at"))
+    return {'launches': dicts}
+
+
+@api_call
+def get_usage_launch(request, launch_id):
+    return {'launch': _get_model_by_id(models.InstanceUsage, launch_id)}
+
+
+@api_call
+def list_usage_deletes(request):
+    filter_args = _get_filter_args(models.InstanceDeletes, request)
+
+    if len(filter_args) > 0:
+        objects = models.InstanceDeletes.objects.filter(**filter_args)
+    else:
+        objects = models.InstanceDeletes.objects.all()
+
+    dicts = _convert_model_list(objects.order_by("launched_at"))
+    return {'deletes': dicts}
+
+
+@api_call
+def get_usage_delete(request, delete_id):
+    return {'delete': _get_model_by_id(models.InstanceDeletes, delete_id)}
+
+
+@api_call
+def list_usage_exists(request):
+    filter_args = _get_filter_args(models.InstanceExists, request)
+
+    if len(filter_args) > 0:
+        objects = models.InstanceExists.objects.filter(**filter_args)
+    else:
+        objects = models.InstanceExists.objects.all()
+
+    dicts = _convert_model_list(objects.order_by("id"))
+    return {'exists': dicts}
+
+
+@api_call
+def get_usage_exist(request, exist_id):
+    return {'exist': _get_model_by_id(models.InstanceExists, exist_id)}
+
+
 def _get_model_by_id(klass, model_id):
     model = get_object_or_404(klass, id=model_id)
     model_dict = _convert_model(model)
     return model_dict


-def list_usage_launches(request):
+def _check_has_field(klass, field_name):
+    try:
+        klass._meta.get_field_by_name(field_name)
+    except FieldDoesNotExist:
+        msg = "No such field '%s'." % field_name
+        raise BadRequestException(msg)
+
+
+def _get_filter_args(klass, request):
     filter_args = {}
     if 'instance' in request.GET:
         filter_args['instance'] = request.GET['instance']

-    if len(filter_args) > 0:
-        objects = models.InstanceUsage.objects.filter(**filter_args)
-    else:
-        objects = models.InstanceUsage.objects.all()
-
-    dicts = _convert_model_list(objects.order_by("launched_at"))
-    return rsp({'launches': dicts})
-
-
-def get_usage_launch(request, launch_id):
-    return rsp({'launch': _get_model_by_id(models.InstanceUsage, launch_id)})
-
-
-def list_usage_deletes(request):
-    filter_args = {}
-    if 'instance' in request.GET:
-        filter_args['instance'] = request.GET['instance']
-
-    if len(filter_args) > 0:
-        objects = models.InstanceDeletes.objects.filter(**filter_args)
-    else:
-        objects = models.InstanceDeletes.objects.all()
-
-    dicts = _convert_model_list(objects.order_by("launched_at"))
-    return rsp({'deletes': dicts})
-
-
-def get_usage_delete(request, delete_id):
-    return rsp({'delete': _get_model_by_id(models.InstanceDeletes, delete_id)})
-
-
-def list_usage_exists(request):
-    filter_args = {}
-    if 'instance' in request.GET:
-        filter_args['instance'] = request.GET['instance']
-
-    if len(filter_args) > 0:
-        objects = models.InstanceExists.objects.filter(**filter_args)
-    else:
-        objects = models.InstanceExists.objects.all()
-
-    dicts = _convert_model_list(objects.order_by("id"))
-    return rsp({'exists': dicts})
-
-
-def get_usage_exist(request, exist_id):
-    return rsp({'exist': _get_model_by_id(models.InstanceExists, exist_id)})
+    for (key, value) in request.GET.items():
+        if key.endswith('_min'):
+            k = key[0:-4]
+            _check_has_field(klass, k)
+            try:
+                filter_args['%s__gte' % k] = utils.str_time_to_unix(value)
+            except AttributeError:
+                msg = "Range filters must be dates."
+                raise BadRequestException(message=msg)
+        elif key.endswith('_max'):
+            k = key[0:-4]
+            _check_has_field(klass, k)
+            try:
+                filter_args['%s__lte' % k] = utils.str_time_to_unix(value)
+            except AttributeError:
+                msg = "Range filters must be dates."
+                raise BadRequestException(message=msg)
+
+    return filter_args


 def _convert_model(model):
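The new _get_filter_args/_check_has_field helpers mean that every list endpoint wrapped in api_call now accepts optional <field>_min and <field>_max query parameters, which are validated against the model and translated into __gte/__lte lookups after the value is parsed by utils.str_time_to_unix. A rough usage sketch with Django's test client (the /db/usage/launches/ path is only illustrative; URL wiring is not part of this diff):

    from django.test.client import Client

    client = Client()
    # launched_at_min / launched_at_max become launched_at__gte / launched_at__lte
    # filter kwargs inside _get_filter_args before the queryset is built.
    response = client.get('/db/usage/launches/',   # hypothetical route
                          {'instance': 'some-instance-uuid',
                           'launched_at_min': '2012-12-21 12:00:00',
                           'launched_at_max': '2012-12-22 12:00:00'})
    print response.content   # JSON body: {"launches": [...]}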
tests/unit/test_views.py

@@ -14,13 +14,14 @@ from test_utils import REQUEST_ID_1
 from test_utils import REQUEST_ID_2
 from test_utils import REQUEST_ID_3
 from test_utils import create_raw
+import utils
 import views


 class ViewsUtilsTestCase(unittest.TestCase):

     def test_srt_time_to_unix(self):
-        unix = views.str_time_to_unix('2012-12-21 12:34:56.123')
+        unix = utils.str_time_to_unix('2012-12-21 12:34:56.123')
         self.assertEqual(unix, decimal.Decimal('1356093296.123'))


@@ -30,9 +31,9 @@ class ViewsLifecycleWorkflowTestCase(unittest.TestCase):
         self.deployment = Deployment(name='TestDeployment')
         self.deployment.save()

-        when1 = views.str_time_to_unix('2012-12-21 12:34:50.123')
-        when2 = views.str_time_to_unix('2012-12-21 12:34:56.123')
-        when3 = views.str_time_to_unix('2012-12-21 12:36:56.124')
+        when1 = utils.str_time_to_unix('2012-12-21 12:34:50.123')
+        when2 = utils.str_time_to_unix('2012-12-21 12:34:56.123')
+        when3 = utils.str_time_to_unix('2012-12-21 12:36:56.124')
         self.update_raw = create_raw(self.deployment, when1,
                                      'compute.instance.update',
                                      host='api', service='api')
@@ -98,9 +99,9 @@ class ViewsLifecycleWorkflowTestCase(unittest.TestCase):
                          expected_diff)

     def test_multiple_instance_lifecycles(self):
-        when1 = views.str_time_to_unix('2012-12-21 13:32:50.123')
-        when2 = views.str_time_to_unix('2012-12-21 13:34:50.123')
-        when3 = views.str_time_to_unix('2012-12-21 13:37:50.124')
+        when1 = utils.str_time_to_unix('2012-12-21 13:32:50.123')
+        when2 = utils.str_time_to_unix('2012-12-21 13:34:50.123')
+        when3 = utils.str_time_to_unix('2012-12-21 13:37:50.124')
         update_raw2 = create_raw(self.deployment, when1,
                                  'compute.instance.update',
                                  instance=INSTANCE_ID_2,
@@ -151,9 +152,9 @@ class ViewsLifecycleWorkflowTestCase(unittest.TestCase):


     def test_same_instance_multiple_timings(self):
-        when1 = views.str_time_to_unix('2012-12-21 13:32:50.123')
-        when2 = views.str_time_to_unix('2012-12-21 13:34:50.123')
-        when3 = views.str_time_to_unix('2012-12-21 13:37:50.124')
+        when1 = utils.str_time_to_unix('2012-12-21 13:32:50.123')
+        when2 = utils.str_time_to_unix('2012-12-21 13:34:50.123')
+        when3 = utils.str_time_to_unix('2012-12-21 13:37:50.124')
         update_raw2 = create_raw(self.deployment, when1,
                                  'compute.instance.update',
                                  request_id=REQUEST_ID_2,
@@ -220,9 +221,9 @@ class ViewsLifecycleWorkflowTestCase(unittest.TestCase):
                          self.update_raw.when, expected_diff)

     def test_multiple_instance_kpi(self):
-        when1 = views.str_time_to_unix('2012-12-21 13:32:50.123')
-        when2 = views.str_time_to_unix('2012-12-21 13:34:50.123')
-        when3 = views.str_time_to_unix('2012-12-21 13:37:50.124')
+        when1 = utils.str_time_to_unix('2012-12-21 13:32:50.123')
+        when2 = utils.str_time_to_unix('2012-12-21 13:34:50.123')
+        when3 = utils.str_time_to_unix('2012-12-21 13:37:50.124')
         update_raw2 = create_raw(self.deployment, when1,
                                  'compute.instance.update',
                                  instance=INSTANCE_ID_2,
@@ -264,9 +265,9 @@ class ViewsLifecycleWorkflowTestCase(unittest.TestCase):
                          update_raw2.when, expected_diff2)

     def test_single_instance_multiple_kpi(self):
-        when1 = views.str_time_to_unix('2012-12-21 13:32:50.123')
-        when2 = views.str_time_to_unix('2012-12-21 13:34:50.123')
-        when3 = views.str_time_to_unix('2012-12-21 13:37:50.124')
+        when1 = utils.str_time_to_unix('2012-12-21 13:32:50.123')
+        when2 = utils.str_time_to_unix('2012-12-21 13:34:50.123')
+        when3 = utils.str_time_to_unix('2012-12-21 13:37:50.124')
         update_raw2 = create_raw(self.deployment, when1,
                                  'compute.instance.update',
                                  request_id=REQUEST_ID_2,
@@ -303,9 +304,9 @@ class ViewsLifecycleWorkflowTestCase(unittest.TestCase):
                          update_raw2.when, expected_diff2)

     def test_single_instance_multiple_kpi_out_of_order(self):
-        when1 = views.str_time_to_unix('2012-12-21 13:32:50.123')
-        when2 = views.str_time_to_unix('2012-12-21 13:34:50.123')
-        when3 = views.str_time_to_unix('2012-12-21 13:37:50.124')
+        when1 = utils.str_time_to_unix('2012-12-21 13:32:50.123')
+        when2 = utils.str_time_to_unix('2012-12-21 13:34:50.123')
+        when3 = utils.str_time_to_unix('2012-12-21 13:37:50.124')
         update_raw2 = create_raw(self.deployment, when1,
                                  'compute.instance.update',
                                  request_id=REQUEST_ID_2,
@@ -357,7 +358,7 @@ class ViewsUsageTestCase(unittest.TestCase):
         InstanceExists.objects.all().delete()

     def test_process_new_launch_create_start(self):
-        when = views.str_time_to_unix('2012-12-21 12:34:50.123')
+        when = utils.str_time_to_unix('2012-12-21 12:34:50.123')
         json = test_utils.make_create_start_json()
         raw = create_raw(self.deployment, when,
                          views.INSTANCE_EVENT['create_start'], json=json)
@@ -372,7 +373,7 @@ class ViewsUsageTestCase(unittest.TestCase):
         self.assertEqual(usage.request_id, REQUEST_ID_1)

     def test_process_new_launch_resize_prep_start(self):
-        when = views.str_time_to_unix('2012-12-21 12:34:50.123')
+        when = utils.str_time_to_unix('2012-12-21 12:34:50.123')
         json = test_utils.make_resize_prep_start_json()
         raw = create_raw(self.deployment, when,
                          views.INSTANCE_EVENT['resize_prep_start'], json=json)
@@ -389,7 +390,7 @@ class ViewsUsageTestCase(unittest.TestCase):
         self.assertIsNone(usage.instance_type_id)

     def test_process_new_launch_resize_revert_start(self):
-        when = views.str_time_to_unix('2012-12-21 12:34:50.123')
+        when = utils.str_time_to_unix('2012-12-21 12:34:50.123')
         json = test_utils.make_resize_revert_start_json()
         raw = create_raw(self.deployment, when,
                          views.INSTANCE_EVENT['resize_revert_start'],
@@ -415,7 +416,7 @@ class ViewsUsageTestCase(unittest.TestCase):
         InstanceUsage(**values).save()

         sent = '2012-12-21 12:34:50.123'
-        when = views.str_time_to_unix(sent)
+        when = utils.str_time_to_unix(sent)
         json = test_utils.make_create_end_json(sent)
         raw = create_raw(self.deployment, when,
                          views.INSTANCE_EVENT['create_end'], json=json)
@@ -436,7 +437,7 @@ class ViewsUsageTestCase(unittest.TestCase):
         InstanceUsage(**values).save()

         sent = '2012-12-21 12:34:50.123'
-        when = views.str_time_to_unix(sent)
+        when = utils.str_time_to_unix(sent)
         json = test_utils.make_resize_finish_json(sent)
         raw = create_raw(self.deployment, when,
                          views.INSTANCE_EVENT['resize_finish_end'], json=json)
@@ -456,7 +457,7 @@ class ViewsUsageTestCase(unittest.TestCase):
         InstanceUsage(**values).save()

         sent = '2012-12-21 12:34:50.123'
-        when = views.str_time_to_unix(sent)
+        when = utils.str_time_to_unix(sent)
         json = test_utils.make_resize_revert_end_json(sent)
         raw = create_raw(self.deployment, when,
                          views.INSTANCE_EVENT['resize_revert_end'], json=json)
@@ -477,7 +478,7 @@ class ViewsUsageTestCase(unittest.TestCase):
         InstanceUsage(**values).save()

         sent = '2012-12-21 12:34:50.123'
-        when = views.str_time_to_unix(sent)
+        when = utils.str_time_to_unix(sent)
         json = test_utils.make_resize_prep_end_json(sent)
         raw = create_raw(self.deployment, when,
                          views.INSTANCE_EVENT['resize_prep_end'], json=json)
@@ -491,9 +492,9 @@ class ViewsUsageTestCase(unittest.TestCase):

     def test_process_delete(self):
         launched_str = '2012-12-21 06:34:50.123'
-        launched = views.str_time_to_unix(launched_str)
+        launched = utils.str_time_to_unix(launched_str)
         deleted_str = '2012-12-21 12:34:50.123'
-        deleted = views.str_time_to_unix(deleted_str)
+        deleted = utils.str_time_to_unix(deleted_str)
         json = test_utils.make_delete_end_json(launched_str, deleted_str)
         raw = create_raw(self.deployment, deleted,
                          views.INSTANCE_EVENT['delete_end'], json=json)
@@ -510,7 +511,7 @@ class ViewsUsageTestCase(unittest.TestCase):

     def test_process_exists(self):
         launched_str = '2012-12-21 06:34:50.123'
-        launched = views.str_time_to_unix(launched_str)
+        launched = utils.str_time_to_unix(launched_str)
         values = {
             'instance': INSTANCE_ID_1,
             'request_id': REQUEST_ID_1,
@@ -520,7 +521,7 @@ class ViewsUsageTestCase(unittest.TestCase):
         InstanceUsage(**values).save()

         exists_str = '2012-12-21 23:30:00.000'
-        exists_time = views.str_time_to_unix(exists_str)
+        exists_time = utils.str_time_to_unix(exists_str)
         json = test_utils.make_exists_json(launched_str)
         raw = create_raw(self.deployment, exists_time,
                          views.INSTANCE_EVENT['exists'], json=json)
@@ -543,9 +544,9 @@ class ViewsUsageTestCase(unittest.TestCase):

     def test_process_exists_with_deleted_at(self):
         launched_str = '2012-12-21 06:34:50.123'
-        launched = views.str_time_to_unix(launched_str)
+        launched = utils.str_time_to_unix(launched_str)
         deleted_str = '2012-12-21 06:36:50.123'
-        deleted = views.str_time_to_unix(deleted_str)
+        deleted = utils.str_time_to_unix(deleted_str)
         values = {
             'instance': INSTANCE_ID_1,
             'request_id': REQUEST_ID_1,
@@ -555,7 +556,7 @@ class ViewsUsageTestCase(unittest.TestCase):
         InstanceUsage(**values).save()

         exists_str = '2012-12-21 23:30:00.000'
-        exists_time = views.str_time_to_unix(exists_str)
+        exists_time = utils.str_time_to_unix(exists_str)
         json = test_utils.make_exists_json(launched_str, deleted_at=deleted_str)
         raw = create_raw(self.deployment, exists_time,
                          views.INSTANCE_EVENT['exists'], json=json)
@@ -595,9 +596,9 @@ class ViewsUsageWorkflowTestCase(unittest.TestCase):

     def test_create_workflow(self):
         created_str = '2012-12-21 06:30:50.123'
-        created = views.str_time_to_unix(created_str)
+        created = utils.str_time_to_unix(created_str)
         launched_str = '2012-12-21 06:34:50.123'
-        launched = views.str_time_to_unix(launched_str)
+        launched = utils.str_time_to_unix(launched_str)
         create_start_json = test_utils.make_create_start_json()
         create_end_json = test_utils.make_create_end_json(launched_str)
         create_start_raw = create_raw(self.deployment, created,
@@ -617,9 +618,9 @@ class ViewsUsageWorkflowTestCase(unittest.TestCase):

     def test_create_workflow_start_late(self):
         created_str = '2012-12-21 06:30:50.123'
-        created = views.str_time_to_unix(created_str)
+        created = utils.str_time_to_unix(created_str)
         launched_str = '2012-12-21 06:34:50.123'
-        launched = views.str_time_to_unix(launched_str)
+        launched = utils.str_time_to_unix(launched_str)
         create_start_json = test_utils.make_create_start_json()
         create_end_json = test_utils.make_create_end_json(launched_str)
         create_start_raw = create_raw(self.deployment, created,
@@ -639,7 +640,7 @@ class ViewsUsageWorkflowTestCase(unittest.TestCase):

     def test_resize_workflow(self):
         launched_str = '2012-12-21 06:34:50.123'
-        launched = views.str_time_to_unix(launched_str)
+        launched = utils.str_time_to_unix(launched_str)
         values = {
             'instance': INSTANCE_ID_1,
             'request_id': REQUEST_ID_1,
@@ -649,11 +650,11 @@ class ViewsUsageWorkflowTestCase(unittest.TestCase):
         InstanceUsage(**values).save()

         started_str = '2012-12-22 06:34:50.123'
-        started_time = views.str_time_to_unix(started_str)
+        started_time = utils.str_time_to_unix(started_str)
         pre_end_str = '2012-12-22 06:36:50.123'
-        prep_end_time = views.str_time_to_unix(pre_end_str)
+        prep_end_time = utils.str_time_to_unix(pre_end_str)
         finish_str = '2012-12-22 06:38:50.123'
-        finish_time = views.str_time_to_unix(finish_str)
+        finish_time = utils.str_time_to_unix(finish_str)
         prep_start_json = test_utils\
             .make_resize_prep_start_json(request_id=REQUEST_ID_2)
         prep_end_json = test_utils\
@@ -690,7 +691,7 @@ class ViewsUsageWorkflowTestCase(unittest.TestCase):

     def test_resize_workflow_out_of_order(self):
         launched_str = '2012-12-21 06:34:50.123'
-        launched = views.str_time_to_unix(launched_str)
+        launched = utils.str_time_to_unix(launched_str)
         values = {
             'instance': INSTANCE_ID_1,
             'request_id': REQUEST_ID_1,
@@ -700,11 +701,11 @@ class ViewsUsageWorkflowTestCase(unittest.TestCase):
         InstanceUsage(**values).save()

         started_str = '2012-12-22 06:34:50.123'
-        started_time = views.str_time_to_unix(started_str)
+        started_time = utils.str_time_to_unix(started_str)
         pre_end_str = '2012-12-22 06:36:50.123'
-        prep_end_time = views.str_time_to_unix(pre_end_str)
+        prep_end_time = utils.str_time_to_unix(pre_end_str)
         finish_str = '2012-12-22 06:38:50.123'
-        finish_time = views.str_time_to_unix(finish_str)
+        finish_time = utils.str_time_to_unix(finish_str)
         prep_start_json = test_utils\
             .make_resize_prep_start_json(request_id=REQUEST_ID_2)
         prep_end_json = test_utils\
@@ -744,7 +745,7 @@ class ViewsUsageWorkflowTestCase(unittest.TestCase):

     def test_resize_workflow_start_late(self):
         launched_str = '2012-12-21 06:34:50.123'
-        launched = views.str_time_to_unix(launched_str)
+        launched = utils.str_time_to_unix(launched_str)
         values = {
             'instance': INSTANCE_ID_1,
             'request_id': REQUEST_ID_1,
@@ -754,11 +755,11 @@ class ViewsUsageWorkflowTestCase(unittest.TestCase):
         InstanceUsage(**values).save()

         started_str = '2012-12-22 06:34:50.123'
-        started_time = views.str_time_to_unix(started_str)
+        started_time = utils.str_time_to_unix(started_str)
         pre_end_str = '2012-12-22 06:36:50.123'
-        prep_end_time = views.str_time_to_unix(pre_end_str)
+        prep_end_time = utils.str_time_to_unix(pre_end_str)
         finish_str = '2012-12-22 06:38:50.123'
-        finish_time = views.str_time_to_unix(finish_str)
+        finish_time = utils.str_time_to_unix(finish_str)
         prep_start_json = test_utils\
             .make_resize_prep_start_json(request_id=REQUEST_ID_2)
         prep_end_json = test_utils\
@@ -795,7 +796,7 @@ class ViewsUsageWorkflowTestCase(unittest.TestCase):

     def test_resize_revert_workflow(self):
         launched_str = '2012-12-21 06:34:50.123'
-        launched = views.str_time_to_unix(launched_str)
+        launched = utils.str_time_to_unix(launched_str)
         values = {
             'instance': INSTANCE_ID_1,
             'request_id': REQUEST_ID_1,
@@ -804,7 +805,7 @@ class ViewsUsageWorkflowTestCase(unittest.TestCase):
         }
         InstanceUsage(**values).save()
         resize_launched_str = '2012-12-22 06:34:50.123'
-        resize_launched = views.str_time_to_unix(resize_launched_str)
+        resize_launched = utils.str_time_to_unix(resize_launched_str)
         values = {
             'instance': INSTANCE_ID_1,
             'request_id': REQUEST_ID_2,
@@ -814,9 +815,9 @@ class ViewsUsageWorkflowTestCase(unittest.TestCase):
         InstanceUsage(**values).save()

         started_str = '2012-12-22 06:34:50.123'
-        started_time = views.str_time_to_unix(started_str)
+        started_time = utils.str_time_to_unix(started_str)
         end_str = '2012-12-22 06:36:50.123'
-        end_time = views.str_time_to_unix(end_str)
+        end_time = utils.str_time_to_unix(end_str)
         start_json = test_utils\
             .make_resize_revert_start_json(request_id=REQUEST_ID_3)
         end_json = test_utils\
@@ -846,7 +847,7 @@ class ViewsUsageWorkflowTestCase(unittest.TestCase):

     def test_resize_revert_workflow_start_late(self):
         launched_str = '2012-12-21 06:34:50.123'
-        launched = views.str_time_to_unix(launched_str)
+        launched = utils.str_time_to_unix(launched_str)
         values = {
             'instance': INSTANCE_ID_1,
             'request_id': REQUEST_ID_1,
@@ -855,7 +856,7 @@ class ViewsUsageWorkflowTestCase(unittest.TestCase):
         }
         InstanceUsage(**values).save()
         resize_launched_str = '2012-12-22 06:34:50.123'
-        resize_launched = views.str_time_to_unix(resize_launched_str)
+        resize_launched = utils.str_time_to_unix(resize_launched_str)
         values = {
             'instance': INSTANCE_ID_1,
             'request_id': REQUEST_ID_2,
@@ -865,9 +866,9 @@ class ViewsUsageWorkflowTestCase(unittest.TestCase):
         InstanceUsage(**values).save()

         started_str = '2012-12-22 06:34:50.123'
-        started_time = views.str_time_to_unix(started_str)
+        started_time = utils.str_time_to_unix(started_str)
         end_str = '2012-12-22 06:36:50.123'
-        end_time = views.str_time_to_unix(end_str)
+        end_time = utils.str_time_to_unix(end_str)
         start_json = test_utils\
             .make_resize_revert_start_json(request_id=REQUEST_ID_3)
         end_json = test_utils\
stacktach/utils.py (new file, 25 lines)

@@ -0,0 +1,25 @@
+import datetime
+
+from stacktach import datetime_to_decimal as dt
+
+def str_time_to_unix(when):
+    if 'T' in when:
+        try:
+            # Old way of doing it
+            when = datetime.datetime.strptime(when, "%Y-%m-%dT%H:%M:%S.%f")
+        except ValueError:
+            try:
+                # Old way of doing it, no millis
+                when = datetime.datetime.strptime(when, "%Y-%m-%dT%H:%M:%S")
+            except Exception, e:
+                print "BAD DATE: ", e
+    else:
+        try:
+            when = datetime.datetime.strptime(when, "%Y-%m-%d %H:%M:%S.%f")
+        except ValueError:
+            try:
+                when = datetime.datetime.strptime(when, "%Y-%m-%d %H:%M:%S")
+            except Exception, e:
+                print "BAD DATE: ", e
+
+    return dt.dt_to_decimal(when)
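For reference, the helper accepts both the space-separated format and the older 'T'-separated format and returns a decimal unix timestamp; the value below matches the assertion in test_srt_time_to_unix earlier in this diff (a minimal sketch):

    from stacktach import utils

    print utils.str_time_to_unix('2012-12-21 12:34:56.123')
    # Decimal('1356093296.123')
    print utils.str_time_to_unix('2012-12-21T12:34:56.123')
    # same value; the 'T' form takes the "old way" strptime branch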
stacktach/views.py

@@ -5,14 +5,13 @@ import json
 import pprint

 from django import db
-from django import http
 from django.shortcuts import render_to_response
-from django import template

 from stacktach import datetime_to_decimal as dt
 from stacktach import db as stackdb
 from stacktach import models
 from stacktach import image_type
+from stacktach import utils


 STACKDB = stackdb
@@ -243,7 +242,7 @@ def _process_usage_for_updates(raw):
     if raw.event in [INSTANCE_EVENT['create_end'],
                      INSTANCE_EVENT['resize_finish_end'],
                      INSTANCE_EVENT['resize_revert_end']]:
-        usage.launched_at = str_time_to_unix(payload['launched_at'])
+        usage.launched_at = utils.str_time_to_unix(payload['launched_at'])

     if raw.event == INSTANCE_EVENT['resize_revert_end']:
         usage.instance_type_id = payload['instance_type_id']
@@ -257,7 +256,7 @@ def _process_delete(raw):
     notif = json.loads(raw.json)
     payload = notif[1]['payload']
     instance_id = payload['instance_id']
-    deleted_at = str_time_to_unix(payload['deleted_at'])
+    deleted_at = utils.str_time_to_unix(payload['deleted_at'])
     values = {
         'instance': instance_id,
         'deleted_at': deleted_at,
@@ -266,7 +265,7 @@ def _process_delete(raw):

     launched_at = payload.get('launched_at')
     if launched_at and launched_at != '':
-        launched_at = str_time_to_unix(launched_at)
+        launched_at = utils.str_time_to_unix(launched_at)
         values['launched_at'] = launched_at

     delete = STACKDB.create_instance_delete(**values)
@@ -277,7 +276,7 @@ def _process_exists(raw):
     notif = json.loads(raw.json)
     payload = notif[1]['payload']
     instance_id = payload['instance_id']
-    launched_at = str_time_to_unix(payload['launched_at'])
+    launched_at = utils.str_time_to_unix(payload['launched_at'])
     launched_range = (launched_at, launched_at+1)
     usage = STACKDB.get_instance_usage(instance=instance_id,
                                        launched_at__range=launched_range)
@@ -296,7 +295,7 @@ def _process_exists(raw):

     deleted_at = payload.get('deleted_at')
     if deleted_at and deleted_at != '':
-        deleted_at = str_time_to_unix(deleted_at)
+        deleted_at = utils.str_time_to_unix(deleted_at)
         values['deleted_at'] = deleted_at

     exists = STACKDB.create_instance_exists(**values)
@@ -324,28 +323,6 @@ def aggregate_usage(raw):
     USAGE_PROCESS_MAPPING[raw.event](raw)


-def str_time_to_unix(when):
-    if 'T' in when:
-        try:
-            # Old way of doing it
-            when = datetime.datetime.strptime(when, "%Y-%m-%dT%H:%M:%S.%f")
-        except ValueError:
-            try:
-                # Old way of doing it, no millis
-                when = datetime.datetime.strptime(when, "%Y-%m-%dT%H:%M:%S")
-            except Exception, e:
-                print "BAD DATE: ", e
-    else:
-        try:
-            when = datetime.datetime.strptime(when, "%Y-%m-%d %H:%M:%S.%f")
-        except ValueError:
-            try:
-                when = datetime.datetime.strptime(when, "%Y-%m-%d %H:%M:%S")
-            except Exception, e:
-                print "BAD DATE: ", e
-    return dt.dt_to_decimal(when)
-
-
 def process_raw_data(deployment, args, json_args):
     """This is called directly by the worker to add the event to the db."""
     db.reset_queries()
@@ -363,7 +340,7 @@ def process_raw_data(deployment, args, json_args):
             when = body['timestamp']
         except KeyError:
             when = body['_context_timestamp']  # Old way of doing it
-        values['when'] = str_time_to_unix(when)
+        values['when'] = utils.str_time_to_unix(when)
         values['routing_key'] = routing_key
         values['json'] = json_args
         record = STACKDB.create_rawdata(**values)
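With the helper relocated to stacktach/utils.py, the notification handlers above convert payload timestamps through utils.str_time_to_unix before storing them or using them in range lookups. A small sketch of that flow (the payload values are illustrative, borrowed from the test data in this diff):

    from stacktach import utils

    payload = {'launched_at': '2012-12-21 06:34:50.123',
               'deleted_at': '2012-12-21 12:34:50.123'}
    launched_at = utils.str_time_to_unix(payload['launched_at'])
    deleted_at = utils.str_time_to_unix(payload['deleted_at'])
    # _process_exists then looks up the matching usage row with
    # launched_at__range=(launched_at, launched_at + 1)
    launched_range = (launched_at, launched_at + 1)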
tests/unit/test_dbapi.py (new file, 84 lines)

@@ -0,0 +1,84 @@
+import datetime
+import unittest
+
+from django.db.models import FieldDoesNotExist
+import mox
+
+from stacktach import dbapi
+import utils
+from utils import INSTANCE_ID_1
+
+
+class StacktachRawParsingTestCase(unittest.TestCase):
+    def setUp(self):
+        self.mox = mox.Mox()
+
+    def tearDown(self):
+        self.mox.UnsetStubs()
+
+    def make_fake_model(self):
+        fake_model = self.mox.CreateMockAnything()
+        fake_meta = self.mox.CreateMockAnything()
+        fake_model._meta = fake_meta
+        return fake_model
+
+    def test_get_filter_args(self):
+        start_time = datetime.datetime.utcnow()
+        start_decimal = utils.decimal_utc(start_time)
+        end_time = start_time + datetime.timedelta(days=1)
+        end_decimal = utils.decimal_utc(end_time)
+        fake_request = self.mox.CreateMockAnything()
+        fake_model = self.make_fake_model()
+        fake_model._meta.get_field_by_name('launched_at')
+        fake_model._meta.get_field_by_name('launched_at')
+        fake_request.GET = {'instance': INSTANCE_ID_1,
+                            'launched_at_min': str(start_time),
+                            'launched_at_max': str(end_time)}
+        self.mox.ReplayAll()
+
+        filter_args = dbapi._get_filter_args(fake_model, fake_request)
+
+        self.mox.VerifyAll()
+        self.assertEquals(filter_args['instance'], INSTANCE_ID_1)
+        self.assertEquals(filter_args.get('launched_at__gte'),
+                          start_decimal)
+        self.assertEquals(filter_args.get('launched_at__lte'),
+                          end_decimal)
+
+    def test_get_filter_args_bad_min_value(self):
+        fake_request = self.mox.CreateMockAnything()
+        fake_request.GET = {'launched_at_min': 'obviouslybaddatetime'}
+        fake_model = self.make_fake_model()
+        fake_model._meta.get_field_by_name('launched_at')
+        self.mox.ReplayAll()
+
+        self.assertRaises(dbapi.BadRequestException, dbapi._get_filter_args,
+                          fake_model, fake_request)
+
+        self.mox.VerifyAll()
+
+    def test_get_filter_args_bad_max_value(self):
+        fake_request = self.mox.CreateMockAnything()
+        fake_request.GET = {'launched_at_max': 'obviouslybaddatetime'}
+        fake_model = self.make_fake_model()
+        fake_model._meta.get_field_by_name('launched_at')
+        self.mox.ReplayAll()
+
+        self.assertRaises(dbapi.BadRequestException, dbapi._get_filter_args,
+                          fake_model, fake_request)
+
+        self.mox.VerifyAll()
+
+    def test_get_filter_args_bad_range_key(self):
+        start_time = datetime.datetime.utcnow()
+        fake_request = self.mox.CreateMockAnything()
+        fake_request.GET = {'somebadfield_max': str(start_time)}
+        fake_model = self.make_fake_model()
+        fake_model._meta.get_field_by_name('somebadfield')\
+            .AndRaise(FieldDoesNotExist())
+        self.mox.ReplayAll()
+
+        self.assertRaises(dbapi.BadRequestException, dbapi._get_filter_args,
+                          fake_model, fake_request)
+
+        self.mox.VerifyAll()
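The failure modes these tests cover reach API clients as HTTP 400 responses: an unknown field makes _check_has_field raise BadRequestException, an unparseable date makes the decimal conversion fail with AttributeError (which _get_filter_args maps to "Range filters must be dates."), and the api_call decorator serializes either case into a JSON error body. A hedged sketch (the URL and field name are illustrative):

    from django.test.client import Client

    client = Client()
    # Unknown range field -> _check_has_field raises BadRequestException,
    # which api_call turns into a 400 response.
    response = client.get('/db/usage/launches/',   # hypothetical route
                          {'somebadfield_max': '2012-12-22 12:00:00'})
    print response.status_code   # 400
    print response.content       # e.g. {"status": 400, "message": "No such field 'somebadfield'."}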