From a9b4bedbcffadeb8d51200a307e1752185afc069 Mon Sep 17 00:00:00 2001 From: Manali Latkar Date: Wed, 6 Nov 2013 15:37:42 +0530 Subject: [PATCH 01/53] stacktach picks up all exists with status sent_unverified and verifies them without sending them to yagi --- stacktach/models.py | 22 +++++++++++-- tests/unit/test_glance_verifier.py | 30 +++++++++++++++-- tests/unit/test_nova_verifier.py | 53 +++++++++++++++++++++++++++--- verifier/glance_verifier.py | 28 +++++++++++----- verifier/nova_verifier.py | 20 ++++++++--- 5 files changed, 131 insertions(+), 22 deletions(-) diff --git a/stacktach/models.py b/stacktach/models.py index b30686e..c1a4c49 100644 --- a/stacktach/models.py +++ b/stacktach/models.py @@ -256,12 +256,18 @@ class InstanceExists(models.Model): VERIFIED = 'verified' RECONCILED = 'reconciled' FAILED = 'failed' + SENT_UNVERIFIED = 'sent_unverified' + SENT_FAILED = 'sent_failed' + SENT_VERIFYING = 'sent_verifying' STATUS_CHOICES = [ (PENDING, 'Pending Verification'), (VERIFYING, 'Currently Being Verified'), (VERIFIED, 'Passed Verification'), (RECONCILED, 'Passed Verification After Reconciliation'), (FAILED, 'Failed Verification'), + (SENT_UNVERIFIED, 'Unverified but sent by Yagi'), + (SENT_FAILED, 'Failed Verification but sent by Yagi'), + (SENT_VERIFYING, 'Currently being verified but sent by Yagi') ] instance = models.CharField(max_length=50, null=True, @@ -321,7 +327,10 @@ class InstanceExists(models.Model): self.save() def mark_failed(self, reason=None): - self.status = InstanceExists.FAILED + if self.status == InstanceExists.SENT_VERIFYING: + self.status = InstanceExists.SENT_FAILED + else: + self.status = InstanceExists.FAILED if reason: self.fail_reason = reason self.save() @@ -458,11 +467,17 @@ class ImageExists(models.Model): VERIFYING = 'verifying' VERIFIED = 'verified' FAILED = 'failed' + SENT_UNVERIFIED = 'sent_unverified' + SENT_FAILED = 'sent_failed' + SENT_VERIFYING = 'sent_verifying' STATUS_CHOICES = [ (PENDING, 'Pending Verification'), (VERIFYING, 'Currently Being Verified'), (VERIFIED, 'Passed Verification'), (FAILED, 'Failed Verification'), + (SENT_UNVERIFIED, 'Unverified but sent by Yagi'), + (SENT_FAILED, 'Failed Verification but sent by Yagi'), + (SENT_VERIFYING, 'Currently being verified but sent by Yagi') ] uuid = models.CharField(max_length=50, db_index=True, null=True) @@ -511,7 +526,10 @@ class ImageExists(models.Model): self.save() def mark_failed(self, reason=None): - self.status = InstanceExists.FAILED + if self.status == ImageExists.SENT_VERIFYING: + self.status = ImageExists.SENT_FAILED + else: + self.status = ImageExists.FAILED if reason: self.fail_reason = reason self.save() diff --git a/tests/unit/test_glance_verifier.py b/tests/unit/test_glance_verifier.py index e783038..9fc7a79 100644 --- a/tests/unit/test_glance_verifier.py +++ b/tests/unit/test_glance_verifier.py @@ -462,25 +462,41 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): self.mox.VerifyAll() self.assertFalse(verified) - def test_verify_for_range_without_callback(self): + + def test_verify_for_range_without_callback_for_sent_unverified(self): mock_logger = self._setup_mock_logger() self.mox.StubOutWithMock(mock_logger, 'info') + stacklog.get_logger('verifier', is_parent=False).AndReturn(mock_logger) + mock_logger.info('glance: Adding 2 per-owner exists to queue.') mock_logger.info('glance: Adding 2 per-owner exists to queue.') - when_max = datetime.utcnow() models.ImageExists.VERIFYING = 'verifying' models.ImageExists.PENDING = 'pending' + models.ImageExists.SENT_VERIFYING = 
'sent_verifying' + models.ImageExists.SENT_UNVERIFIED = 'sent_unverified' self.mox.StubOutWithMock(models.ImageExists, 'find') exist1 = self.mox.CreateMockAnything() exist2 = self.mox.CreateMockAnything() exist3 = self.mox.CreateMockAnything() + exist4 = self.mox.CreateMockAnything() + exist5 = self.mox.CreateMockAnything() results = {'owner1': [exist1, exist2], 'owner2': [exist3]} + sent_results = {'owner1': [exist4], 'owner2': [exist5]} + models.ImageExists.find_and_group_by_owner_and_raw_id( + ending_max=when_max, + status=models.ImageExists.SENT_UNVERIFIED).AndReturn(sent_results) models.ImageExists.find_and_group_by_owner_and_raw_id( ending_max=when_max, status=models.ImageExists.PENDING).AndReturn(results) exist1.save() exist2.save() exist3.save() + exist4.save() + exist5.save() + self.pool.apply_async(glance_verifier._verify, + args=([exist4],), callback=None) + self.pool.apply_async(glance_verifier._verify, args=([exist5],), + callback=None) self.pool.apply_async(glance_verifier._verify, args=([exist1, exist2],), callback=None) self.pool.apply_async(glance_verifier._verify, args=([exist3],), @@ -491,21 +507,29 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): self.assertEqual(exist1.status, 'verifying') self.assertEqual(exist2.status, 'verifying') self.assertEqual(exist3.status, 'verifying') + self.assertEqual(exist4.status, 'sent_verifying') + self.assertEqual(exist5.status, 'sent_verifying') self.mox.VerifyAll() def test_verify_for_range_with_callback(self): mock_logger = self._setup_mock_logger() self.mox.StubOutWithMock(mock_logger, 'info') + stacklog.get_logger('verifier', is_parent=False).AndReturn(mock_logger) + mock_logger.info('glance: Adding 0 per-owner exists to queue.') mock_logger.info('glance: Adding 2 per-owner exists to queue.') - callback = self.mox.CreateMockAnything() when_max = datetime.utcnow() + models.ImageExists.SENT_VERIFYING = 'sent_verifying' + models.ImageExists.SENT_UNVERIFIED = 'sent_unverified' models.ImageExists.PENDING = 'pending' models.ImageExists.VERIFYING = 'verifying' exist1 = self.mox.CreateMockAnything() exist2 = self.mox.CreateMockAnything() exist3 = self.mox.CreateMockAnything() results = {'owner1': [exist1, exist2], 'owner2': [exist3]} + models.ImageExists.find_and_group_by_owner_and_raw_id( + ending_max=when_max, + status=models.ImageExists.SENT_UNVERIFIED).AndReturn([]) models.ImageExists.find_and_group_by_owner_and_raw_id( ending_max=when_max, status=models.ImageExists.PENDING).AndReturn(results) diff --git a/tests/unit/test_nova_verifier.py b/tests/unit/test_nova_verifier.py index eb3490d..e56959e 100644 --- a/tests/unit/test_nova_verifier.py +++ b/tests/unit/test_nova_verifier.py @@ -798,18 +798,23 @@ class NovaVerifierVerifyTestCase(StacktachBaseTestCase): self.assertFalse(result) self.mox.VerifyAll() - def test_verify_for_range_without_callback(self): mock_logger = self._create_mock_logger() stacklog.get_logger('verifier', is_parent=False).AndReturn(mock_logger) + stacklog.get_logger('verifier', is_parent=False).AndReturn(mock_logger) + mock_logger.info('nova: Adding 0 exists to queue.') mock_logger.info('nova: Adding 2 exists to queue.') - when_max = datetime.datetime.utcnow() results = self.mox.CreateMockAnything() + sent_results = self.mox.CreateMockAnything() models.InstanceExists.PENDING = 'pending' models.InstanceExists.VERIFYING = 'verifying' + models.InstanceExists.SENT_UNVERIFIED = 'sent_unverified' + models.InstanceExists.find( + ending_max=when_max, status='sent_unverified').AndReturn(sent_results) 
models.InstanceExists.find( ending_max=when_max, status='pending').AndReturn(results) + sent_results.count().AndReturn(0) results.count().AndReturn(2) exist1 = self.mox.CreateMockAnything() exist2 = self.mox.CreateMockAnything() @@ -827,18 +832,25 @@ class NovaVerifierVerifyTestCase(StacktachBaseTestCase): self.verifier.verify_for_range(when_max) self.mox.VerifyAll() + def test_verify_for_range_with_callback(self): + callback = self.mox.CreateMockAnything() mock_logger = self._create_mock_logger() stacklog.get_logger('verifier', is_parent=False).AndReturn(mock_logger) - mock_logger.info("nova: Adding 2 exists to queue.") - - callback = self.mox.CreateMockAnything() + stacklog.get_logger('verifier', is_parent=False).AndReturn(mock_logger) + mock_logger.info('nova: Adding 0 exists to queue.') + mock_logger.info('nova: Adding 2 exists to queue.') when_max = datetime.datetime.utcnow() results = self.mox.CreateMockAnything() + sent_results = self.mox.CreateMockAnything() models.InstanceExists.PENDING = 'pending' models.InstanceExists.VERIFYING = 'verifying' + models.InstanceExists.SENT_UNVERIFIED = 'sent_unverified' + models.InstanceExists.find( + ending_max=when_max, status='sent_unverified').AndReturn(sent_results) models.InstanceExists.find( ending_max=when_max, status='pending').AndReturn(results) + sent_results.count().AndReturn(0) results.count().AndReturn(2) exist1 = self.mox.CreateMockAnything() exist2 = self.mox.CreateMockAnything() @@ -857,6 +869,37 @@ class NovaVerifierVerifyTestCase(StacktachBaseTestCase): self.mox.VerifyAll() + def test_verify_for_range_when_found_sent_unverified_messages(self): + callback = self.mox.CreateMockAnything() + when_max = datetime.datetime.utcnow() + results = self.mox.CreateMockAnything() + sent_results = self.mox.CreateMockAnything() + models.InstanceExists.PENDING = 'pending' + models.InstanceExists.VERIFYING = 'verifying' + models.InstanceExists.SENT_VERIFYING = 'sent_verifying' + models.InstanceExists.SENT_UNVERIFIED = 'sent_unverified' + models.InstanceExists.find( + ending_max=when_max, status='sent_unverified').AndReturn(sent_results) + models.InstanceExists.find( + ending_max=when_max, status='pending').AndReturn(results) + sent_results.count().AndReturn(2) + results.count().AndReturn(0) + exist1 = self.mox.CreateMockAnything() + exist2 = self.mox.CreateMockAnything() + sent_results.__getslice__(0, 1000).AndReturn(sent_results) + sent_results.__iter__().AndReturn([exist1, exist2].__iter__()) + exist1.update_status('sent_verifying') + exist2.update_status('sent_verifying') + exist1.save() + exist2.save() + self.pool.apply_async(nova_verifier._verify, args=(exist1, 'all'), + callback=None) + self.pool.apply_async(nova_verifier._verify, args=(exist2, 'all'), + callback=None) + self.mox.ReplayAll() + self.verifier.verify_for_range(when_max, callback=callback) + self.mox.VerifyAll() + class NovaVerifierSendVerifiedNotificationTestCase(StacktachBaseTestCase): def setUp(self): self.mox = mox.Mox() diff --git a/verifier/glance_verifier.py b/verifier/glance_verifier.py index 459b0a3..8a8977b 100644 --- a/verifier/glance_verifier.py +++ b/verifier/glance_verifier.py @@ -148,20 +148,16 @@ class GlanceVerifier(Verifier): def __init__(self, config, pool=None): super(GlanceVerifier, self).__init__(config, pool=pool) - def verify_for_range(self, ending_max, callback=None): - exists_grouped_by_owner_and_rawid = \ - models.ImageExists.find_and_group_by_owner_and_raw_id( - ending_max=ending_max, - status=models.ImageExists.PENDING) - count = 
len(exists_grouped_by_owner_and_rawid) + def verify_exists(self, grouped_exists, callback, verifying_status): + count = len(grouped_exists) added = 0 update_interval = datetime.timedelta(seconds=30) next_update = datetime.datetime.utcnow() + update_interval _get_child_logger().info("glance: Adding %s per-owner exists to queue." % count) while added < count: - for exists in exists_grouped_by_owner_and_rawid.values(): + for exists in grouped_exists.values(): for exist in exists: - exist.status = models.ImageExists.VERIFYING + exist.status = verifying_status exist.save() result = self.pool.apply_async(_verify, args=(exists,), callback=callback) @@ -174,6 +170,22 @@ class GlanceVerifier(Verifier): next_update = datetime.datetime.utcnow() + update_interval return count + def verify_for_range(self, ending_max, callback=None): + unsent_exists_grouped_by_owner_and_rawid = \ + models.ImageExists.find_and_group_by_owner_and_raw_id( + ending_max=ending_max, + status=models.ImageExists.SENT_UNVERIFIED) + unsent_count = self.verify_exists(unsent_exists_grouped_by_owner_and_rawid, + None, models.ImageExists.SENT_VERIFYING) + exists_grouped_by_owner_and_rawid = \ + models.ImageExists.find_and_group_by_owner_and_raw_id( + ending_max=ending_max, + status=models.ImageExists.PENDING) + count = self.verify_exists(exists_grouped_by_owner_and_rawid, callback, + models.ImageExists.VERIFYING) + + return count+unsent_count + def send_verified_notification(self, exist, connection, exchange, routing_keys=None): # NOTE (apmelton) diff --git a/verifier/nova_verifier.py b/verifier/nova_verifier.py index 09165cd..9c91a97 100644 --- a/verifier/nova_verifier.py +++ b/verifier/nova_verifier.py @@ -290,9 +290,7 @@ class NovaVerifier(base_verifier.Verifier): message_service.send_notification( json_body[1], key, connection, exchange) - def verify_for_range(self, ending_max, callback=None): - exists = models.InstanceExists.find( - ending_max=ending_max, status=models.InstanceExists.PENDING) + def verify_exists(self, callback, exists, verifying_status): count = exists.count() added = 0 update_interval = datetime.timedelta(seconds=30) @@ -300,7 +298,7 @@ class NovaVerifier(base_verifier.Verifier): _get_child_logger().info("nova: Adding %s exists to queue." % count) while added < count: for exist in exists[0:1000]: - exist.update_status(models.InstanceExists.VERIFYING) + exist.update_status(verifying_status) exist.save() validation_level = self.config.validation_level() result = self.pool.apply_async( @@ -315,6 +313,20 @@ class NovaVerifier(base_verifier.Verifier): next_update = datetime.datetime.utcnow() + update_interval return count + def verify_for_range(self, ending_max, callback=None): + sent_unverified_exists = models.InstanceExists.find( + ending_max=ending_max, status= + models.InstanceExists.SENT_UNVERIFIED) + sent_unverified_count = self.verify_exists(None, + sent_unverified_exists, + models.InstanceExists. 
+ SENT_VERIFYING) + exists = models.InstanceExists.find( + ending_max=ending_max, status=models.InstanceExists.PENDING) + count = self.verify_exists(callback, exists, + models.InstanceExists.VERIFYING) + return count+sent_unverified_count + def reconcile_failed(self): for failed_exist in self.failed: self.reconciler.failed_validation(failed_exist) From a997a6a1024c256ec851f0e28e08fc8f6c2c8e63 Mon Sep 17 00:00:00 2001 From: Manali Latkar Date: Wed, 13 Nov 2013 10:22:10 +0530 Subject: [PATCH 02/53] Added db APIs for Glance --- .gitignore | 1 + stacktach/dbapi.py | 52 ++++++++--- stacktach/models.py | 4 + tests/unit/test_dbapi.py | 195 ++++++++++++++++++++++++++++++++++++++- 4 files changed, 235 insertions(+), 17 deletions(-) diff --git a/.gitignore b/.gitignore index 40780a1..6abacdf 100644 --- a/.gitignore +++ b/.gitignore @@ -11,3 +11,4 @@ etc/stacktach_worker_config.json etc/stacktach_verifier_config.json verifier.log verifier.log.* +.gitattributes diff --git a/stacktach/dbapi.py b/stacktach/dbapi.py index abed81f..60d07cf 100644 --- a/stacktach/dbapi.py +++ b/stacktach/dbapi.py @@ -79,13 +79,6 @@ def _log_api_exception(cls, ex, request): stacklog.error(msg) -def _exists_model_factory(service): - if service == 'glance': - return models.ImageExists - elif service == 'nova': - return models.InstanceExists - - def api_call(func): @functools.wraps(func) @@ -108,28 +101,59 @@ def api_call(func): return handled +def _usage_model_factory(service): + if service == 'nova': + return {'klass': models.InstanceUsage, 'order_by': 'launched_at'} + if service == 'glance': + return {'klass': models.ImageUsage, 'order_by': 'created_at'} + + +def _exists_model_factory(service): + if service == 'nova': + return {'klass': models.InstanceExists, 'order_by': 'id'} + if service == 'glance': + return {'klass': models.ImageExists, 'order_by': 'id'} + + +def _deletes_model_factory(service): + if service == 'nova': + return {'klass': models.InstanceDeletes, 'order_by': 'launched_at'} + if service == 'glance': + return {'klass': models.ImageDeletes, 'order_by': 'deleted_at'} + + @api_call def list_usage_launches(request): - objects = get_db_objects(models.InstanceUsage, request, 'launched_at') + service = request.GET.get('service', 'nova') + model = _usage_model_factory(service) + objects = get_db_objects(model['klass'], request, + model['order_by']) dicts = _convert_model_list(objects) return {'launches': dicts} @api_call def get_usage_launch(request, launch_id): - return {'launch': _get_model_by_id(models.InstanceUsage, launch_id)} + service = request.GET.get('service', 'nova') + model = _usage_model_factory(service) + return {'launch': _get_model_by_id(model['klass'], launch_id)} @api_call def list_usage_deletes(request): - objects = get_db_objects(models.InstanceDeletes, request, 'launched_at') + service = request.GET.get('service', 'nova') + model = _deletes_model_factory(service) + objects = get_db_objects(model['klass'], request, + model['order_by']) dicts = _convert_model_list(objects) return {'deletes': dicts} @api_call def get_usage_delete(request, delete_id): - return {'delete': _get_model_by_id(models.InstanceDeletes, delete_id)} + service = request.GET.get('service', 'nova') + model = _deletes_model_factory(service) + return {'delete': _get_model_by_id(model['klass'], delete_id)} def _exists_extra_values(exist): @@ -139,6 +163,8 @@ def _exists_extra_values(exist): @api_call def list_usage_exists(request): + service = request.GET.get('service', 'nova') + model = _exists_model_factory(service) try: 
custom_filters = {} if 'received_min' in request.GET: @@ -155,7 +181,7 @@ def list_usage_exists(request): msg = "Range filters must be dates." raise BadRequestException(message=msg) - objects = get_db_objects(models.InstanceExists, request, 'id', + objects = get_db_objects(model['klass'], request, 'id', custom_filters=custom_filters) dicts = _convert_model_list(objects, _exists_extra_values) return {'exists': dicts} @@ -210,7 +236,7 @@ def _find_exists_with_message_id(msg_id, exists_model, service): def _ping_processing_with_service(pings, service): - exists_model = _exists_model_factory(service) + exists_model = _exists_model_factory(service)['klass'] with transaction.commit_on_success(): for msg_id, status_code in pings.items(): try: diff --git a/stacktach/models.py b/stacktach/models.py index 6cb930a..2666e14 100644 --- a/stacktach/models.py +++ b/stacktach/models.py @@ -437,6 +437,10 @@ class ImageUsage(models.Model): size = models.BigIntegerField(max_length=20) last_raw = models.ForeignKey(GlanceRawData, null=True) + @property + def launched_at(self): + return self.created_at + class ImageDeletes(models.Model): uuid = models.CharField(max_length=50, db_index=True) diff --git a/tests/unit/test_dbapi.py b/tests/unit/test_dbapi.py index 1265d29..8c539c6 100644 --- a/tests/unit/test_dbapi.py +++ b/tests/unit/test_dbapi.py @@ -326,12 +326,26 @@ class DBAPITestCase(StacktachBaseTestCase): self.mox.VerifyAll() - def test_list_usage_exists_no_custom_filters(self): + def test_list_usage_exists_no_custom_filters_for_nova(self): fake_request = self.mox.CreateMockAnything() - fake_request.GET = {} + fake_request.GET = {'service': 'glance'} self.mox.StubOutWithMock(dbapi, 'get_db_objects') objects = self.mox.CreateMockAnything() - dbapi.get_db_objects(models.InstanceExists, fake_request, 'id', + dbapi.get_db_objects(models.ImageExists, fake_request, 'id', + custom_filters={}).AndReturn(objects) + self.mox.StubOutWithMock(dbapi, '_convert_model_list') + dbapi._convert_model_list(objects, dbapi._exists_extra_values) + self.mox.ReplayAll() + resp = dbapi.list_usage_exists(fake_request) + self.assertEqual(resp.status_code, 200) + self.mox.VerifyAll() + + def test_list_usage_exists_no_custom_filters_for_glance(self): + fake_request = self.mox.CreateMockAnything() + fake_request.GET = {'service': 'glance'} + self.mox.StubOutWithMock(dbapi, 'get_db_objects') + objects = self.mox.CreateMockAnything() + dbapi.get_db_objects(models.ImageExists, fake_request, 'id', custom_filters={}).AndReturn(objects) self.mox.StubOutWithMock(dbapi, '_convert_model_list') dbapi._convert_model_list(objects, dbapi._exists_extra_values) @@ -361,15 +375,16 @@ class DBAPITestCase(StacktachBaseTestCase): fake_request = self.mox.CreateMockAnything() date = str(datetime.datetime.utcnow()) fake_request.GET = {'received_max': date} - self.mox.StubOutWithMock(dbapi, 'get_db_objects') unix_date = stacktach_utils.str_time_to_unix(date) custom_filters = {'received_max': {'raw__when__lte': unix_date}} objects = self.mox.CreateMockAnything() + self.mox.StubOutWithMock(dbapi, 'get_db_objects') dbapi.get_db_objects(models.InstanceExists, fake_request, 'id', custom_filters=custom_filters).AndReturn(objects) self.mox.StubOutWithMock(dbapi, '_convert_model_list') dbapi._convert_model_list(objects, dbapi._exists_extra_values) self.mox.ReplayAll() + resp = dbapi.list_usage_exists(fake_request) self.assertEqual(resp.status_code, 200) self.mox.VerifyAll() @@ -734,3 +749,175 @@ class DBAPITestCase(StacktachBaseTestCase): msg = "'messages' missing 
from request body" self.assertEqual(body.get('message'), msg) self.mox.VerifyAll() + + def test_list_usage_launches_without_service(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {} + self.mox.StubOutWithMock(dbapi, 'get_db_objects') + mock_objects = self.mox.CreateMockAnything() + launches = {'a': 1} + self.mox.StubOutWithMock(dbapi, '_convert_model_list') + dbapi._convert_model_list(mock_objects).AndReturn(launches) + dbapi.get_db_objects(models.InstanceUsage, fake_request, 'launched_at').AndReturn(mock_objects) + self.mox.ReplayAll() + + resp = dbapi.list_usage_launches(fake_request) + self.assertEqual(resp.status_code, 200) + self.assertEqual(json.loads(resp.content), {'launches': launches}) + self.mox.VerifyAll() + + def test_list_usage_launches_for_glance(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {'service': 'glance'} + self.mox.StubOutWithMock(dbapi, 'get_db_objects') + mock_objects = self.mox.CreateMockAnything() + launches = {'a': 1} + self.mox.StubOutWithMock(dbapi, '_convert_model_list') + dbapi._convert_model_list(mock_objects).AndReturn(launches) + dbapi.get_db_objects(models.ImageUsage, fake_request, 'created_at').AndReturn(mock_objects) + self.mox.ReplayAll() + + resp = dbapi.list_usage_launches(fake_request) + self.assertEqual(resp.status_code, 200) + self.assertEqual(json.loads(resp.content), {'launches': launches}) + self.mox.VerifyAll() + + def test_list_usage_launches_for_nova(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {'service': 'nova'} + self.mox.StubOutWithMock(dbapi, 'get_db_objects') + mock_objects = self.mox.CreateMockAnything() + launches = {'a': 1} + self.mox.StubOutWithMock(dbapi, '_convert_model_list') + dbapi._convert_model_list(mock_objects).AndReturn(launches) + dbapi.get_db_objects(models.InstanceUsage, fake_request, 'launched_at').AndReturn(mock_objects) + self.mox.ReplayAll() + + resp = dbapi.list_usage_launches(fake_request) + self.assertEqual(resp.status_code, 200) + self.assertEqual(json.loads(resp.content), {'launches': launches}) + self.mox.VerifyAll() + + def test_get_usage_launch_with_no_service(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {} + launch = {'a': 1} + self.mox.StubOutWithMock(dbapi, '_get_model_by_id') + dbapi._get_model_by_id(models.InstanceUsage, 1).AndReturn(launch) + self.mox.ReplayAll() + + resp = dbapi.get_usage_launch(fake_request, 1) + self.assertEqual(resp.status_code, 200) + self.assertEqual(json.loads(resp.content), {'launch': {'a': 1}}) + self.mox.VerifyAll() + + def test_get_usage_launch_for_nova(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {'service': 'nova'} + launch = {'a': 1} + self.mox.StubOutWithMock(dbapi, '_get_model_by_id') + dbapi._get_model_by_id(models.InstanceUsage, 1).AndReturn(launch) + self.mox.ReplayAll() + + resp = dbapi.get_usage_launch(fake_request, 1) + self.assertEqual(resp.status_code, 200) + self.assertEqual(json.loads(resp.content), {'launch': {'a': 1}}) + self.mox.VerifyAll() + + def test_get_usage_launch_for_glance(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {'service': 'glance'} + launch = {'a': 1} + self.mox.StubOutWithMock(dbapi, '_get_model_by_id') + dbapi._get_model_by_id(models.ImageUsage, 1).AndReturn(launch) + 
self.mox.ReplayAll() + + resp = dbapi.get_usage_launch(fake_request, 1) + self.assertEqual(resp.status_code, 200) + self.assertEqual(json.loads(resp.content), {'launch': {'a': 1}}) + self.mox.VerifyAll() + + def test_get_usage_delete_for_nova(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {'service': 'nova'} + delete = {'a': 1} + self.mox.StubOutWithMock(dbapi, '_get_model_by_id') + dbapi._get_model_by_id(models.InstanceDeletes, 1).AndReturn(delete) + self.mox.ReplayAll() + + resp = dbapi.get_usage_delete(fake_request, 1) + self.assertEqual(resp.status_code, 200) + self.assertEqual(json.loads(resp.content), {'delete': {'a': 1}}) + self.mox.VerifyAll() + + def test_get_usage_delete_for_glance(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {'service': 'glance'} + delete = {'a': 1} + self.mox.StubOutWithMock(dbapi, '_get_model_by_id') + dbapi._get_model_by_id(models.ImageDeletes, 1).AndReturn(delete) + self.mox.ReplayAll() + + resp = dbapi.get_usage_delete(fake_request, 1) + self.assertEqual(resp.status_code, 200) + self.assertEqual(json.loads(resp.content), {'delete': {'a': 1}}) + self.mox.VerifyAll() + + def test_list_usage_deletes_with_no_service(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {} + self.mox.StubOutWithMock(dbapi, 'get_db_objects') + mock_objects = self.mox.CreateMockAnything() + deletes = {'a': 1} + self.mox.StubOutWithMock(dbapi, '_convert_model_list') + dbapi._convert_model_list(mock_objects).AndReturn(deletes) + dbapi.get_db_objects(models.InstanceDeletes, fake_request, 'launched_at').AndReturn(mock_objects) + self.mox.ReplayAll() + + resp = dbapi.list_usage_deletes(fake_request) + self.assertEqual(resp.status_code, 200) + self.assertEqual(json.loads(resp.content), {'deletes': deletes}) + self.mox.VerifyAll() + + def test_list_usage_deletes_for_nova(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {'service': 'nova'} + self.mox.StubOutWithMock(dbapi, 'get_db_objects') + mock_objects = self.mox.CreateMockAnything() + deletes = {'a': 1} + self.mox.StubOutWithMock(dbapi, '_convert_model_list') + dbapi._convert_model_list(mock_objects).AndReturn(deletes) + dbapi.get_db_objects(models.InstanceDeletes, fake_request, 'launched_at').AndReturn(mock_objects) + self.mox.ReplayAll() + + resp = dbapi.list_usage_deletes(fake_request) + self.assertEqual(resp.status_code, 200) + self.assertEqual(json.loads(resp.content), {'deletes': deletes}) + self.mox.VerifyAll() + + def test_list_usage_deletes_for_glance(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {'service': 'glance'} + self.mox.StubOutWithMock(dbapi, 'get_db_objects') + mock_objects = self.mox.CreateMockAnything() + deletes = {'a': 1} + self.mox.StubOutWithMock(dbapi, '_convert_model_list') + dbapi._convert_model_list(mock_objects).AndReturn(deletes) + dbapi.get_db_objects(models.ImageDeletes, fake_request, 'deleted_at').AndReturn(mock_objects) + self.mox.ReplayAll() + + resp = dbapi.list_usage_deletes(fake_request) + self.assertEqual(resp.status_code, 200) + self.assertEqual(json.loads(resp.content), {'deletes': deletes}) + self.mox.VerifyAll() From 6bab377a372bee0728a8b17b7664033e75443d36 Mon Sep 17 00:00:00 2001 From: Manali Latkar Date: Mon, 25 Nov 2013 15:11:01 +0530 Subject: [PATCH 03/53] changed urls and response format according to the 
review comments --- stacktach/dbapi.py | 59 +++++++++++++++++++++++++++++++++------- stacktach/models.py | 4 --- stacktach/urls.py | 22 +++++++++++++++ tests/unit/test_dbapi.py | 34 +++++++++++------------ 4 files changed, 88 insertions(+), 31 deletions(-) diff --git a/stacktach/dbapi.py b/stacktach/dbapi.py index 60d07cf..1d0cc4d 100644 --- a/stacktach/dbapi.py +++ b/stacktach/dbapi.py @@ -121,27 +121,48 @@ def _deletes_model_factory(service): if service == 'glance': return {'klass': models.ImageDeletes, 'order_by': 'deleted_at'} - @api_call def list_usage_launches(request): - service = request.GET.get('service', 'nova') + return {'launches': list_usage_launches_with_service(request, 'nova')} + +@api_call +def list_usage_images(request): + return { 'images': list_usage_launches_with_service(request, 'glance')} + + +def list_usage_launches_with_service(request, service): model = _usage_model_factory(service) objects = get_db_objects(model['klass'], request, model['order_by']) dicts = _convert_model_list(objects) - return {'launches': dicts} + return dicts +def get_usage_launch_with_service(launch_id, service): + model = _usage_model_factory(service) + return {'launch': _get_model_by_id(model['klass'], launch_id)} + @api_call def get_usage_launch(request, launch_id): - service = request.GET.get('service', 'nova') - model = _usage_model_factory(service) - return {'launch': _get_model_by_id(model['klass'], launch_id)} + return get_usage_launch_with_service(launch_id, 'nova') + + +@api_call +def get_usage_image(request, image_id): + return get_usage_launch_with_service(image_id, 'glance') @api_call def list_usage_deletes(request): - service = request.GET.get('service', 'nova') + return list_usage_deletes_with_service(request, 'nova') + + +@api_call +def list_usage_deletes_glance(request): + return list_usage_deletes_with_service(request, 'glance') + + +def list_usage_deletes_with_service(request, service): model = _deletes_model_factory(service) objects = get_db_objects(model['klass'], request, model['order_by']) @@ -151,8 +172,13 @@ def list_usage_deletes(request): @api_call def get_usage_delete(request, delete_id): - service = request.GET.get('service', 'nova') - model = _deletes_model_factory(service) + model = _deletes_model_factory('nova') + return {'delete': _get_model_by_id(model['klass'], delete_id)} + + +@api_call +def get_usage_delete_glance(request, delete_id): + model = _deletes_model_factory('glance') return {'delete': _get_model_by_id(model['klass'], delete_id)} @@ -163,7 +189,15 @@ def _exists_extra_values(exist): @api_call def list_usage_exists(request): - service = request.GET.get('service', 'nova') + return list_usage_exists_with_service(request, 'nova') + + +@api_call +def list_usage_exists_glance(request): + return list_usage_exists_with_service(request, 'glance') + + +def list_usage_exists_with_service(request, service): model = _exists_model_factory(service) try: custom_filters = {} @@ -192,6 +226,11 @@ def get_usage_exist(request, exist_id): return {'exist': _get_model_by_id(models.InstanceExists, exist_id, _exists_extra_values)} +@api_call +def get_usage_exist_glance(request, exist_id): + return {'exist': _get_model_by_id(models.ImageExists, exist_id, + _exists_extra_values)} + @api_call def exists_send_status(request, message_id): diff --git a/stacktach/models.py b/stacktach/models.py index 2666e14..6cb930a 100644 --- a/stacktach/models.py +++ b/stacktach/models.py @@ -437,10 +437,6 @@ class ImageUsage(models.Model): size = models.BigIntegerField(max_length=20) 
    last_raw = models.ForeignKey(GlanceRawData, null=True)

-    @property
-    def launched_at(self):
-        return self.created_at
-

class ImageDeletes(models.Model):
    uuid = models.CharField(max_length=50, db_index=True)
diff --git a/stacktach/urls.py b/stacktach/urls.py
index 8503787..5579957 100644
--- a/stacktach/urls.py
+++ b/stacktach/urls.py
@@ -36,15 +36,37 @@ urlpatterns = patterns('',
    url(r'db/usage/launches/$', 'stacktach.dbapi.list_usage_launches'),
+    url(r'db/usage/nova/launches/$',
+        'stacktach.dbapi.list_usage_launches'),
+    url(r'db/usage/glance/images/$',
+        'stacktach.dbapi.list_usage_images'),
    url(r'db/usage/launches/(?P<launch_id>\d+)/$',
        'stacktach.dbapi.get_usage_launch'),
+    url(r'db/usage/nova/launches/(?P<launch_id>\d+)/$',
+        'stacktach.dbapi.get_usage_launch'),
+    url(r'db/usage/glance/images/(?P<image_id>\d+)/$',
+        'stacktach.dbapi.get_usage_image'),
    url(r'db/usage/deletes/$', 'stacktach.dbapi.list_usage_deletes'),
+    url(r'db/usage/nova/deletes/$',
+        'stacktach.dbapi.list_usage_deletes'),
+    url(r'db/usage/glance/deletes/$',
+        'stacktach.dbapi.list_usage_deletes_glance'),
    url(r'db/usage/deletes/(?P<delete_id>\d+)/$',
        'stacktach.dbapi.get_usage_delete'),
+    url(r'db/usage/nova/deletes/(?P<delete_id>\d+)/$',
+        'stacktach.dbapi.get_usage_delete'),
+    url(r'db/usage/glance/deletes/(?P<delete_id>\d+)/$',
+        'stacktach.dbapi.get_usage_delete_glance'),
    url(r'db/usage/exists/$', 'stacktach.dbapi.list_usage_exists'),
+    url(r'db/usage/nova/exists/$', 'stacktach.dbapi.list_usage_exists'),
+    url(r'db/usage/glance/exists/$', 'stacktach.dbapi.list_usage_exists_glance'),
    url(r'db/usage/exists/(?P<exist_id>\d+)/$',
        'stacktach.dbapi.get_usage_exist'),
+    url(r'db/usage/nova/exists/(?P<exist_id>\d+)/$',
+        'stacktach.dbapi.get_usage_exist'),
+    url(r'db/usage/glance/exists/(?P<exist_id>\d+)/$',
+        'stacktach.dbapi.get_usage_exist_glance'),
    url(r'db/confirm/usage/exists/(?P<message_id>[\w\-]+)/$',
        'stacktach.dbapi.exists_send_status'),
diff --git a/tests/unit/test_dbapi.py b/tests/unit/test_dbapi.py
index 8c539c6..4b335a5 100644
--- a/tests/unit/test_dbapi.py
+++ b/tests/unit/test_dbapi.py
@@ -328,10 +328,10 @@ class DBAPITestCase(StacktachBaseTestCase):
    def test_list_usage_exists_no_custom_filters_for_nova(self):
        fake_request = self.mox.CreateMockAnything()
-        fake_request.GET = {'service': 'glance'}
+        fake_request.GET = {}
        self.mox.StubOutWithMock(dbapi, 'get_db_objects')
        objects = self.mox.CreateMockAnything()
-        dbapi.get_db_objects(models.ImageExists, fake_request, 'id',
+        dbapi.get_db_objects(models.InstanceExists, fake_request, 'id',
                             custom_filters={}).AndReturn(objects)
        self.mox.StubOutWithMock(dbapi, '_convert_model_list')
        dbapi._convert_model_list(objects, dbapi._exists_extra_values)
@@ -342,7 +342,7 @@ class DBAPITestCase(StacktachBaseTestCase):
    def test_list_usage_exists_no_custom_filters_for_glance(self):
        fake_request = self.mox.CreateMockAnything()
-        fake_request.GET = {'service': 'glance'}
+        fake_request.GET = {}
        self.mox.StubOutWithMock(dbapi, 'get_db_objects')
        objects = self.mox.CreateMockAnything()
        dbapi.get_db_objects(models.ImageExists, fake_request, 'id',
@@ -350,7 +350,7 @@ class DBAPITestCase(StacktachBaseTestCase):
        self.mox.StubOutWithMock(dbapi, '_convert_model_list')
        dbapi._convert_model_list(objects, dbapi._exists_extra_values)
        self.mox.ReplayAll()
-        resp = dbapi.list_usage_exists(fake_request)
+        resp = dbapi.list_usage_exists_glance(fake_request)
        self.assertEqual(resp.status_code, 200)
        self.mox.VerifyAll()
@@ -770,7 +770,7 @@ class DBAPITestCase(StacktachBaseTestCase):
    def test_list_usage_launches_for_glance(self):
        fake_request = self.mox.CreateMockAnything()
        fake_request.method = 'GET'
- fake_request.GET = {'service': 'glance'} + fake_request.GET = {} self.mox.StubOutWithMock(dbapi, 'get_db_objects') mock_objects = self.mox.CreateMockAnything() launches = {'a': 1} @@ -779,15 +779,15 @@ class DBAPITestCase(StacktachBaseTestCase): dbapi.get_db_objects(models.ImageUsage, fake_request, 'created_at').AndReturn(mock_objects) self.mox.ReplayAll() - resp = dbapi.list_usage_launches(fake_request) + resp = dbapi.list_usage_images(fake_request) self.assertEqual(resp.status_code, 200) - self.assertEqual(json.loads(resp.content), {'launches': launches}) + self.assertEqual(json.loads(resp.content), {'images': launches}) self.mox.VerifyAll() def test_list_usage_launches_for_nova(self): fake_request = self.mox.CreateMockAnything() fake_request.method = 'GET' - fake_request.GET = {'service': 'nova'} + fake_request.GET = {} self.mox.StubOutWithMock(dbapi, 'get_db_objects') mock_objects = self.mox.CreateMockAnything() launches = {'a': 1} @@ -818,7 +818,7 @@ class DBAPITestCase(StacktachBaseTestCase): def test_get_usage_launch_for_nova(self): fake_request = self.mox.CreateMockAnything() fake_request.method = 'GET' - fake_request.GET = {'service': 'nova'} + fake_request.GET = {} launch = {'a': 1} self.mox.StubOutWithMock(dbapi, '_get_model_by_id') dbapi._get_model_by_id(models.InstanceUsage, 1).AndReturn(launch) @@ -832,13 +832,13 @@ class DBAPITestCase(StacktachBaseTestCase): def test_get_usage_launch_for_glance(self): fake_request = self.mox.CreateMockAnything() fake_request.method = 'GET' - fake_request.GET = {'service': 'glance'} + fake_request.GET = {} launch = {'a': 1} self.mox.StubOutWithMock(dbapi, '_get_model_by_id') dbapi._get_model_by_id(models.ImageUsage, 1).AndReturn(launch) self.mox.ReplayAll() - resp = dbapi.get_usage_launch(fake_request, 1) + resp = dbapi.get_usage_image(fake_request, 1) self.assertEqual(resp.status_code, 200) self.assertEqual(json.loads(resp.content), {'launch': {'a': 1}}) self.mox.VerifyAll() @@ -846,7 +846,7 @@ class DBAPITestCase(StacktachBaseTestCase): def test_get_usage_delete_for_nova(self): fake_request = self.mox.CreateMockAnything() fake_request.method = 'GET' - fake_request.GET = {'service': 'nova'} + fake_request.GET = {} delete = {'a': 1} self.mox.StubOutWithMock(dbapi, '_get_model_by_id') dbapi._get_model_by_id(models.InstanceDeletes, 1).AndReturn(delete) @@ -860,13 +860,13 @@ class DBAPITestCase(StacktachBaseTestCase): def test_get_usage_delete_for_glance(self): fake_request = self.mox.CreateMockAnything() fake_request.method = 'GET' - fake_request.GET = {'service': 'glance'} + fake_request.GET = {} delete = {'a': 1} self.mox.StubOutWithMock(dbapi, '_get_model_by_id') dbapi._get_model_by_id(models.ImageDeletes, 1).AndReturn(delete) self.mox.ReplayAll() - resp = dbapi.get_usage_delete(fake_request, 1) + resp = dbapi.get_usage_delete_glance(fake_request, 1) self.assertEqual(resp.status_code, 200) self.assertEqual(json.loads(resp.content), {'delete': {'a': 1}}) self.mox.VerifyAll() @@ -891,7 +891,7 @@ class DBAPITestCase(StacktachBaseTestCase): def test_list_usage_deletes_for_nova(self): fake_request = self.mox.CreateMockAnything() fake_request.method = 'GET' - fake_request.GET = {'service': 'nova'} + fake_request.GET = {} self.mox.StubOutWithMock(dbapi, 'get_db_objects') mock_objects = self.mox.CreateMockAnything() deletes = {'a': 1} @@ -908,7 +908,7 @@ class DBAPITestCase(StacktachBaseTestCase): def test_list_usage_deletes_for_glance(self): fake_request = self.mox.CreateMockAnything() fake_request.method = 'GET' - fake_request.GET = {'service': 
'glance'}
+        fake_request.GET = {}
        self.mox.StubOutWithMock(dbapi, 'get_db_objects')
        mock_objects = self.mox.CreateMockAnything()
        deletes = {'a': 1}
        self.mox.StubOutWithMock(dbapi, '_convert_model_list')
        dbapi._convert_model_list(mock_objects).AndReturn(deletes)
        dbapi.get_db_objects(models.ImageDeletes, fake_request, 'deleted_at').AndReturn(mock_objects)
        self.mox.ReplayAll()
-        resp = dbapi.list_usage_deletes(fake_request)
+        resp = dbapi.list_usage_deletes_glance(fake_request)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(json.loads(resp.content), {'deletes': deletes})
        self.mox.VerifyAll()

From cdff368be0fad404a0eb495e401fa507bd069b48 Mon Sep 17 00:00:00 2001
From: Manali Latkar
Date: Thu, 19 Dec 2013 18:31:43 +0530
Subject: [PATCH 04/53] adding the new status counts to usage audit

---
 reports/glance_usage_audit.py |  2 +-
 reports/nova_usage_audit.py   |  2 +-
 reports/usage_audit.py        | 13 ++++++++++---
 3 files changed, 12 insertions(+), 5 deletions(-)

diff --git a/reports/glance_usage_audit.py b/reports/glance_usage_audit.py
index 7588d7f..5e50167 100644
--- a/reports/glance_usage_audit.py
+++ b/reports/glance_usage_audit.py
@@ -176,7 +176,7 @@ def store_results(start, end, summary, details):
        'created': dt.dt_to_decimal(datetime.datetime.utcnow()),
        'period_start': start,
        'period_end': end,
-        'version': 4,
+        'version': 6,
        'name': 'glance usage audit'
    }

diff --git a/reports/nova_usage_audit.py b/reports/nova_usage_audit.py
index 5afebbd..b4ed71c 100644
--- a/reports/nova_usage_audit.py
+++ b/reports/nova_usage_audit.py
@@ -224,7 +224,7 @@ def store_results(start, end, summary, details):
        'created': dt.dt_to_decimal(datetime.datetime.utcnow()),
        'period_start': start,
        'period_end': end,
-        'version': 5,
+        'version': 6,
        'name': 'nova usage audit'
    }

diff --git a/reports/usage_audit.py b/reports/usage_audit.py
index 284c57d..4d62ac4 100644
--- a/reports/usage_audit.py
+++ b/reports/usage_audit.py
@@ -9,8 +9,11 @@ def _status_queries(exists_query):
    fail = exists_query.filter(status=models.InstanceExists.FAILED)
    pending = exists_query.filter(status=models.InstanceExists.PENDING)
    verifying = exists_query.filter(status=models.InstanceExists.VERIFYING)
-
-    return verified, reconciled, fail, pending, verifying
+    sent_unverified = exists_query.filter(status=models.InstanceExists.SENT_UNVERIFIED)
+    sent_failed = exists_query.filter(status=models.InstanceExists.SENT_FAILED)
+    sent_verifying = exists_query.filter(status=models.InstanceExists.SENT_VERIFYING)
+    return verified, reconciled, fail, pending, verifying, sent_unverified, \
+        sent_failed, sent_verifying


def _send_status_queries(exists_query):
@@ -28,7 +31,8 @@ def _audit_for_exists(exists_query):
    (verified, reconciled,
-     fail, pending, verifying) = _status_queries(exists_query)
+     fail, pending, verifying, sent_unverified,
+     sent_failed, sent_verifying) = _status_queries(exists_query)
    (success, unsent, redirect,
     client_error, server_error) = _send_status_queries(verified)

@@ -43,6 +47,9 @@
        'failed': fail.count(),
        'pending': pending.count(),
        'verifying': verifying.count(),
+        'sent_unverified': sent_unverified.count(),
+        'sent_failed': sent_failed.count(),
+        'sent_verifying': sent_verifying.count(),
        'send_status': {
            'success': success.count(),
            'unsent': unsent.count(),

From 9b6422021f65732a813be1507ad82bf564ce1854 Mon Sep 17 00:00:00 2001
From: Andrew Melton
Date: Fri, 20 Dec 2013 18:47:22 -0500
Subject: [PATCH 05/53] Adding notification scrubber script

---
 scripts/example_rabbit_config.json | 10 ++++
scripts/notification_scrubber.py | 76 ++++++++++++++++++++++++++++++ scripts/scrubbers.py | 70 +++++++++++++++++++++++++++ 3 files changed, 156 insertions(+) create mode 100644 scripts/example_rabbit_config.json create mode 100644 scripts/notification_scrubber.py create mode 100644 scripts/scrubbers.py diff --git a/scripts/example_rabbit_config.json b/scripts/example_rabbit_config.json new file mode 100644 index 0000000..05a1160 --- /dev/null +++ b/scripts/example_rabbit_config.json @@ -0,0 +1,10 @@ +{ + "host": "devstack.example.com", + "port": 5672, + "userid": "guest", + "password": "password", + "durable_queue": false, + "exchange": "nova", + "virtual_host": "/", + "routing_key": "monitor.info" +} diff --git a/scripts/notification_scrubber.py b/scripts/notification_scrubber.py new file mode 100644 index 0000000..4ad9f62 --- /dev/null +++ b/scripts/notification_scrubber.py @@ -0,0 +1,76 @@ +import argparse +import csv +import json +import os +import sys +import time + +sys.path.append(os.environ.get('STACKTACH_INSTALL_DIR', '/stacktach')) + +from stacktach import message_service as msg +from stacktach import utils + +import scrubbers + + +def scrub(args, send_notif=lambda x: None): + print "Starting scrub." + start = utils.str_time_to_unix(args.start) + end = utils.str_time_to_unix(args.end) + + if hasattr(scrubbers, args.scrubber): + Scrubber = getattr(scrubbers, args.scrubber) + scrubber = Scrubber(start, end) + + count = 0 + for raw in scrubber.raws(): + matches, body = scrubber.filter(raw) + if matches and not body: + body = json.loads(raw['json'])[1] + if matches and body: + scrubbed = scrubber.scrub(body) + count += 1 + send_notif(scrubbed) + return count + else: + print "No scrubber class %s." % args.scrubber + return 0 + + +def scrub_with_notifications(args): + print "!!!!!! WARNING: SENDING TO RABBIT !!!!!!" + print "!!!!!! Sleeping for 30 seconds !!!!!!" + print "!!!!!! before proceeding !!!!!!" 
+ time.sleep(30) + with open(args.rabbit_config) as fp: + rabbit_config = json.load(fp) + exchange = msg.create_exchange(rabbit_config['exchange'], + 'topic', + durable=rabbit_config['durable_queue']) + conn_conf = (rabbit_config['host'], rabbit_config['port'], + rabbit_config['userid'], rabbit_config['password'], + 'librabbitmq', rabbit_config['virtual_host']) + + with msg.create_connection(*conn_conf) as conn: + def send_notif(notif): + msg.send_notification(notif, rabbit_config['routing_key'], + conn, exchange) + count = scrub(args, send_notif=send_notif) + return count + + +if __name__ == '__main__': + parser = argparse.ArgumentParser('Stacktach Notification Scrubber') + parser.add_argument('--rabbit', action='store_true') + parser.add_argument('--rabbit_config', default='rabbit_config.json') + parser.add_argument('--scrubber', required=True) + parser.add_argument('--start', required=True) + parser.add_argument('--end', required=True) + args = parser.parse_args() + + if args.rabbit: + print "%s Events Scrubbed" % scrub_with_notifications(args) + else: + print "%s Events Scrubbed" % scrub(args) + + diff --git a/scripts/scrubbers.py b/scripts/scrubbers.py new file mode 100644 index 0000000..a7364d6 --- /dev/null +++ b/scripts/scrubbers.py @@ -0,0 +1,70 @@ +import json +import uuid + +from django.db.models import F + +from stacktach import models + + +class ScrubberBase(object): + def __init__(self, start, end): + self.start = start + self.end = end + + def raws(self): + """ Returns an iterable of Raws to scrub + """ + return [].__iter__() + + def filter(self, raw_data): + """ Returns whether or not the provided RawData needs to be scrubbed. + If the implementing function parses the json body to determine + if it needs to be scrubbed, it should be returned as the second + return value. This is done so that it will not need to be parsed + a second time for scrubbing. Negative matches need not return + parsed json bodies + + @raw_data: a RawData dictionary + """ + return True, None + + def scrub(self, body): + """ Returns the scrubbed json body of the RawData. + + @body: Dictionary version of the RawData's json. 
+ """ + return body + + +class ExistsCreatedAt(ScrubberBase): + + def raws(self): + filters = { + 'raw__when__gte': self.start, + 'raw__when__lte': self.end, + 'audit_period_ending__lt': F('audit_period_beginning') + (60*60*24) + } + exists = models.InstanceExists.objects.filter(**filters) + exists = exists.select_related('raw') + for exist in exists.iterator(): + rawdata = exist.raw + yield {'json': rawdata.json} + + def filter(self, raw_data): + if '+00:00' in raw_data['json']: + body = json.loads(raw_data['json'])[1] + created_at = body.get('payload', {}).get('created_at') + if created_at and '+00:00' in created_at: + return True, body + else: + return False, None + else: + return False, None + + def scrub(self, body): + created_at = body['payload']['created_at'] + scrubbed_created_at = created_at.replace('+00:00', '') + body['payload']['created_at'] = scrubbed_created_at + body['message_id'] = str(uuid.uuid4()) + return body + From bb6e7a6c2e6a32a3309ae99c2e4dbba58af765fb Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Fri, 3 Jan 2014 16:00:36 -0500 Subject: [PATCH 06/53] Correctly handling glance verification exception --- tests/unit/test_glance_verifier.py | 6 +----- verifier/glance_verifier.py | 3 +++ 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/tests/unit/test_glance_verifier.py b/tests/unit/test_glance_verifier.py index e783038..2983b42 100644 --- a/tests/unit/test_glance_verifier.py +++ b/tests/unit/test_glance_verifier.py @@ -435,10 +435,6 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): self.assertTrue(verified) def test_verify_exist_marks_exist_failed_if_field_mismatch_exception(self): - mock_logger = self._setup_mock_logger() - self.mox.StubOutWithMock(mock_logger, 'info') - mock_logger.exception("glance: Expected field to be 'expected' " - "got 'actual'") exist1 = self.mox.CreateMockAnything() exist2 = self.mox.CreateMockAnything() @@ -450,7 +446,7 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): field_mismatch_exc = FieldMismatch('field', 'expected', 'actual') glance_verifier._verify_for_usage(exist1).AndRaise( exception=field_mismatch_exc) - exist1.mark_failed(reason='FieldMismatch') + exist1.mark_failed(reason="Expected field to be 'expected' got 'actual'") glance_verifier._verify_for_usage(exist2) glance_verifier._verify_for_delete(exist2) diff --git a/verifier/glance_verifier.py b/verifier/glance_verifier.py index 459b0a3..7e80973 100644 --- a/verifier/glance_verifier.py +++ b/verifier/glance_verifier.py @@ -136,6 +136,9 @@ def _verify(exists): _verify_validity(exist) exist.mark_verified() + except VerificationException, e: + verified = False + exist.mark_failed(reason=str(e)) except Exception, e: verified = False exist.mark_failed(reason=e.__class__.__name__) From f67f09af739d5a6a7064a17c54a7aeab2c9524b4 Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Mon, 6 Jan 2014 11:14:03 -0500 Subject: [PATCH 07/53] Removing csv import from scrubber --- scripts/notification_scrubber.py | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/notification_scrubber.py b/scripts/notification_scrubber.py index 4ad9f62..9d2f8d2 100644 --- a/scripts/notification_scrubber.py +++ b/scripts/notification_scrubber.py @@ -1,5 +1,4 @@ import argparse -import csv import json import os import sys From 6142358db23813177cd1bbf8f0886e4c56d9af89 Mon Sep 17 00:00:00 2001 From: Priyanka Agrawal Date: Wed, 8 Jan 2014 17:16:52 +0530 Subject: [PATCH 08/53] Fixed default value of notification in the verifier config file --- verifier/config.py | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/verifier/config.py b/verifier/config.py index 2ef7882..8bd0684 100644 --- a/verifier/config.py +++ b/verifier/config.py @@ -98,11 +98,11 @@ def validation_level(): def nova_event_type(): - return config.get('nova_event_type', 'compute.instance.exists.verified.old') + return config.get('nova_event_type', 'compute.instance.exists.verified') def glance_event_type(): - return config.get('glance_event_type', 'image.exists.verified.old') + return config.get('glance_event_type', 'image.exists.verified') def flavor_field_name(): From 3a522b8f2d8bfc71d520f0666c2441103145f042 Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Thu, 9 Jan 2014 14:32:17 -0500 Subject: [PATCH 09/53] Updating glance usage seed to work with latest code --- util/glance_usage_seed.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/util/glance_usage_seed.py b/util/glance_usage_seed.py index ff87032..eb31a15 100644 --- a/util/glance_usage_seed.py +++ b/util/glance_usage_seed.py @@ -114,8 +114,8 @@ def _get_deletes(start, session): def seed(period_length): start = get_period_start(datetime.datetime.utcnow(), period_length) - db_api.configure_db() - session = db_api.get_session() + db_api.setup_db_env() + session = db_api._get_session() print "Populating active image usages" usages = _get_usages(start, session) From 43c0b9e65493d7f581b9f1ecccc2abebcfd91e7d Mon Sep 17 00:00:00 2001 From: Sandy Walsh Date: Tue, 14 Jan 2014 18:05:33 +0000 Subject: [PATCH 10/53] Sphinx documentation --- docs/Makefile | 177 +++++++++++++++++++++++++++++++++ docs/conf.py | 258 +++++++++++++++++++++++++++++++++++++++++++++++++ docs/index.rst | 23 +++++ docs/intro.rst | 36 +++++++ 4 files changed, 494 insertions(+) create mode 100644 docs/Makefile create mode 100644 docs/conf.py create mode 100644 docs/index.rst create mode 100644 docs/intro.rst diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..9345374 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,177 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) +endif + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
+ +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/StackTach.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/StackTach.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/StackTach" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/StackTach" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." 
+ +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..21cae29 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,258 @@ +# -*- coding: utf-8 -*- +# +# StackTach documentation build configuration file, created by +# sphinx-quickstart on Tue Jan 14 14:34:29 2014. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +#sys.path.insert(0, os.path.abspath('.')) + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix of source filenames. +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'StackTach' +copyright = u'2014, Sandy Walsh' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '1.0' +# The full version, including alpha/beta/rc tags. +release = '1.0' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +#language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'default' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. 
+#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = 'StackTachdoc' + + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). +#'papersize': 'letterpaper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ('index', 'StackTach.tex', u'StackTach Documentation', + u'Sandy Walsh', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ('index', 'stacktach', u'StackTach Documentation', + [u'Sandy Walsh'], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ('index', 'StackTach', u'StackTach Documentation', + u'Sandy Walsh', 'StackTach', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. 
+#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..4f67888 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,23 @@ +.. StackTach documentation master file, created by + sphinx-quickstart on Tue Jan 14 14:34:29 2014. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to StackTach's documentation! +===================================== + +Contents: + +.. toctree:: + :maxdepth: 2 + + intro + setup + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`search` + diff --git a/docs/intro.rst b/docs/intro.rst new file mode 100644 index 0000000..5370fd0 --- /dev/null +++ b/docs/intro.rst @@ -0,0 +1,36 @@ + +An Introduction to StackTach +============================ + +StackTach was initially created as a browser-based debugging tool +for OpenStack Nova. Since that time, StackTach has evolved into a +tool that can do debugging, performance monitoring and perform +audit, validation and reconcilation of Nova and Glance usage in a +manner suitable for billing. + + +How it works +************ + +Nearly all OpenStack components are capable of generating +*notifications* when significant events occur. Notifications +are messages placed on the OpenStack queue (generally RabbitMQ) +for consumption by downstream systems. + +The OpenStack wiki has info on the `notification format`_. + +.. _notification format: http://wiki.openstack.org/SystemUsageData + +StackTach has a *worker* that is configured to read these notifications +and store them in a database (ideally a database separate from the +OpenStack production database). From there, StackTach reviews the stream +of notifications to glean usage information and assemble it in an +easy-to-query fashion. + +Users can inquire on instances, requests, servers, etc using the +browser interface or command line tool (`Stacky`_). + +.. 
_Stacky: https://github.com/rackerlabs/stacky + + + From 26d2f6fedc4740b3fec08d9be33f3973f05d950c Mon Sep 17 00:00:00 2001 From: Sandy Walsh Date: Tue, 14 Jan 2014 21:07:35 +0000 Subject: [PATCH 11/53] setup and arch diagram --- docs/images/diagram.gif | Bin 0 -> 33868 bytes docs/index.rst | 6 +-- docs/intro.rst | 14 +++---- docs/setup.rst | 90 ++++++++++++++++++++++++++++++++++++++++ 4 files changed, 100 insertions(+), 10 deletions(-) create mode 100644 docs/images/diagram.gif create mode 100644 docs/setup.rst diff --git a/docs/images/diagram.gif b/docs/images/diagram.gif new file mode 100644 index 0000000000000000000000000000000000000000..3d7afcb95b141cd3779042220c95a52de0b4b3b7 GIT binary patch literal 33868 zcmV)7K*zsFNk%w1VITuo0`~v_0000A3=$6z4+{|*5DpFy4iFF!4;L2~1r8ww5+w*0 zH5?ow5gs-UC_f$`ClVz&ARQeb9w8wh9w{g&EGsZCFfA@IHZd(MF)lJPFfKSZHwGb1 z1SC`wE=CAAWF|gIJv%`^Ks^dUUlc}rBSl$1Ku0c1Z9zReK|Vu7K|XwH9%^|aNJmIc zOHfWxOH)ltQB6})QcYf6T})bVUSL~aU}I!jS7cmgXJcG)adHe&lLK6(8(^FPW2yja zvH*3u7-OOwe6uQ8g(zy5DRh}FZlnNw!a;bMTWW$ucB6cDf?S0rW{Wd-m`ZW( z3WL2Jhq@z#xi65sJBhdfg~1xrq}DgF+asdjDYN7;sNgrV;!2ONO_R1%g|%Ff zxqpCyX_vcuq_jkx#b1`eMxe_@tJzAl+flCEQnAiewA)Cn;YhaPR=DG8p22XQ#%HC& zXS>>dqsDW**l4}wSHR_Y#NBbi=CwN!l~x#pWE`nQ6Rl7^lT%fPd3CW)O{{cm$|?;1 z84Uh38vZpX+!uA^5?bICX3J42)MGpQQ6BkFDE($L(`Z1#b#V4tLi~75^L$~xEZej* z{e^gpmV2hVO8l^L)Vy!$)*Fh@AfL)3vB4?P!93!@SNq3o{DPHEkAr)IqFaopXQGFH zqO^V6k9PfpTlm70#Go2iJ3q_L5rwyc$or;)Oz zxQnNzr>?EAtF@)FtgErCv$V6Vwzjs8u+X)Q&Ao-;v9rpt!k4_vsE)_pipc4U%=3-Y z?2OR&lFICz)8?M!_oc($uhHhZ==9Q>ht;K#$+fNSorCA4iRr49>av*mrHJ{hlJm2V z__Ukpwx;>JrNWom(vJGWwaVM2`r^6!#?Q3*$+*?i)ZE+Q+u_yQ;^y+!%<0hg=;Gb= z^z;A#00000A^8LW00930EC2ui03ZWc0ssi_0ENjINU)&6g9sBUT*$DY!-o(fN}Ncs zqQ#3CGiuz(v7^V2AVZ2ANwTELlPD?X0Kl@P%a<@?%A85Frp=o;bL!m5v!~CWK!XY$ zO0=laqezn~UCOkn)2C3Q_LL#Ds@1DlvufSSwX4^!V8eUmMUd_6-Yne4(%brcUwr#>@Z|mO8ySMMGm46E#PQ18rnV*jc!5ZOP@|{J#p;n*t2Wj-WEFd@8H9Wcba>=`Sa-0+gz!>z5Dmzy~CeRzkYG~_VerC z@A^9b{{RNqS$P5$XyAc}1&82*3^r(2at=l);e=x0N8yDSX4upX8g}U6heHtv;)o=c z_>zAnrl?|v7Ov>xi(Ckb2;+=2{>NU8IObSii#qn`V|+gbDdcx34r%0(%*lx4l1%od zV3SZrNgISxR%vBwJYK2gmX|%r<(FVWMrD{}mRT5TW~Qm8Uq!Cz=9_OR3Fn-2isfaU zc;-*3)tGwr>E}~f{we67LbXZgp@>eE=%S43#AlZ4R#u6qm6aiY^&{d z+i=5uwc2Z^kq6p$tDOfPZkPC9CLor4;LSvB7gd-fp6vTi9E)W5CHwoPQjB|hqXg~uj z$e|8*$ip7`@P|MQq7aAwNW>xz@rDzaz*RA}j za3dRF^ddLDh>bFS5fEC)0wBDIjWN3MjbqHlAFdFEWwZkw>QDze(vinJt^*zHXh%B+ z5k)IRAptoV*$jCz0U6{#7{2fYF^Ex(N?P)gn9QUmH_6FPdh(N?45cS084O?WA_p26 zKvX2TIU_8k7n3v;{GQp*yB{APcgLs6;19(TZC1q86=! z9~7a8GG2q3AXNk*TnPwl^cW50us;9XPE=O6zfsVYJc; zWMJn_h4tDI=)DPch2Xu6nhiRnS96IeLh) zl9i)mg=QB1poo)(;~wA$M_lg_4!0`84^@!DIChZ^ouYJ)>`19Qc3}=WWTBPo%t_RW zN}B`Zz!px3s$?fi*~%KU3APAE2^^|bBWU%fMv!P|tuWfrTC}uZ1?y+?pop?!W16qU zh9VN-+C%LBp$bL-L|gkITeKpAqu^kvB3@dAX6#}YPLR7|@ z<^a5~1wJjy-R^oXe%e{cY532Cto3vQOwUK}?o}l73 zyIIBmQ$4d#JTPYc#`nH>&hwr1%;!F}MFm*Ut4VgB}b`>|z@`4P#D&9_mU6K)B%#&xSUoyU_Od(!#4&Xx1OetjD7R-sUK5z7k}@=ulw+We|MJyp6-6<0wPYj(r|zw3}E;|7|c+4?RBFc;RwI@#orHY zm>~;4Ac7=D;a7dEe(Kw{sVGj-1Ha22Co8YLX-b^?{-4|$5MUH=_jkkxc*IA52UvJ? 
HcmV?d00001 diff --git a/docs/index.rst b/docs/index.rst index 4f67888..58d8010 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -9,10 +9,10 @@ Welcome to StackTach's documentation! Contents: .. toctree:: - :maxdepth: 2 + :maxdepth: 3 - intro - setup + intro + setup Indices and tables diff --git a/docs/intro.rst b/docs/intro.rst index 5370fd0..9c47136 100644 --- a/docs/intro.rst +++ b/docs/intro.rst @@ -2,11 +2,11 @@ An Introduction to StackTach ============================ -StackTach was initially created as a browser-based debugging tool -for OpenStack Nova. Since that time, StackTach has evolved into a +StackTach was initially created as a browser-based debugging tool +for OpenStack Nova. Since that time, StackTach has evolved into a tool that can do debugging, performance monitoring and perform audit, validation and reconcilation of Nova and Glance usage in a -manner suitable for billing. +manner suitable for billing. How it works @@ -15,7 +15,7 @@ How it works Nearly all OpenStack components are capable of generating *notifications* when significant events occur. Notifications are messages placed on the OpenStack queue (generally RabbitMQ) -for consumption by downstream systems. +for consumption by downstream systems. The OpenStack wiki has info on the `notification format`_. @@ -25,12 +25,12 @@ StackTach has a *worker* that is configured to read these notifications and store them in a database (ideally a database separate from the OpenStack production database). From there, StackTach reviews the stream of notifications to glean usage information and assemble it in an -easy-to-query fashion. +easy-to-query fashion. Users can inquire on instances, requests, servers, etc using the -browser interface or command line tool (`Stacky`_). +browser interface or command line tool (`Stacky`_). .. _Stacky: https://github.com/rackerlabs/stacky - +.. image:: images/diagram.gif diff --git a/docs/setup.rst b/docs/setup.rst new file mode 100644 index 0000000..2ed5b44 --- /dev/null +++ b/docs/setup.rst @@ -0,0 +1,90 @@ + +Installing StackTach +#################### + +The "Hurry Up" Install Guide +**************************** +#. Create a database for StackTach to use. By default, StackTach assumes MySql, but you can modify the settings.py file to others. +#. Install django and the other required libraries listed in ``./etc/pip-requires.txt`` (please let us know if any are missing) +#. Clone this repo +#. Copy and configure the config files in ``./etc`` (see below for details) +#. Create the necessary database tables (python manage.py syncdb) You don't need an administrator account since there are no user profiles used. +#. Configure OpenStack to publish Notifications back into RabbitMQ (see below) +#. Restart the OpenStack services. +#. Run the Worker to start consuming messages. (see below) +#. Run the web server (``python manage.py runserver``) +#. Point your browser to ``http://127.0.0.1:8000`` (the default server location) +#. Click on stuff, see what happens. You can't hurt anything, it's all read-only. + +Of course, this is only suitable for playing around. If you want to get serious about deploying StackTach you should set up a proper webserver and database on standalone servers. There is a lot of data that gets collected by StackTach (depending on your deployment size) ... be warned. Keep an eye on DB size. + +The Config Files +**************** +There are two config files for StackTach. The first one tells us where the second one is. A sample of these two files is in ``./etc/sample_*``. 
Create a local copy of these files and populate them with the appropriate config values as described below.
+
+The ``sample_stacktach_config.sh`` shell script defines the necessary environment variables StackTach needs. Most of these are just information about the database (assuming MySql) but some are a little different. Copy this file and modify it for your environment. ``source`` this
+``stacktach_config.sh`` shell script to set up the necessary environment variables.
+
+``STACKTACH_INSTALL_DIR`` should point to the directory StackTach is running from. In most cases this will be your repo directory, but it could be elsewhere if you're going for a proper deployment.
+The StackTach worker needs to know which RabbitMQ servers to listen to. This information is stored in the deployment file. ``STACKTACH_DEPLOYMENTS_FILE`` should point to this json file. To learn more about the deployments file, see further down.
+
+Finally, ``DJANGO_SETTINGS_MODULE`` tells Django where to get its configuration from. This should point to the ``settings.py`` file. You shouldn't have to do much with the ``settings.py`` file and most of what it needs is in these environment variables.
+
+The ``sample_stacktach_worker_config.json`` file tells StackTach where each of the RabbitMQ servers are that it needs to get events from. In most cases you'll only have one entry in this file, but for large multi-cell deployments, this file can get pretty large. It's also handy for setting up one StackTach for each developer environment.
+
+The file is in json format and the main configuration is under the ``deployments`` key, which should contain a list of deployment dictionaries.
+
+A blank worker config file would look like this: ::
+
+    {"deployments": [] }
+
+But that's not much fun. A deployment entry would look like this: ::
+
+    {"deployments": [
+        {
+            "name": "east_coast.prod.cell1",
+            "durable_queue": false,
+            "rabbit_host": "10.0.1.1",
+            "rabbit_port": 5672,
+            "rabbit_userid": "rabbit",
+            "rabbit_password": "rabbit",
+            "rabbit_virtual_host": "/"
+        }
+    ]}
+
+where, *name* is whatever you want to call your deployment, and *rabbit_\** are the connectivity details for your rabbit server. It should be the same information in your `nova.conf` file that OpenStack is using. Note, json has no concept of comments, so using ``#``, ``//`` or ``/* */`` as a comment won't work.
+
+By default, Nova uses ephemeral queues. If you are using durable queues, be sure to change the necessary flag here.
+
+You can add as many deployments as you like.
+
+Starting the Worker
+===================
+
+Note: the worker now uses librabbitmq; be sure to install that first.
+
+``./worker/start_workers.py`` will spawn a worker.py process for each deployment defined. Each worker will consume from a single Rabbit queue.
+
+
+Configuring Nova to Generate Notifications
+==========================================
+
+In the OpenStack service you wish to have generate notifications, add the
+following to its ``.conf`` file: ::
+
+    --notification_driver=nova.openstack.common.notifier.rabbit_notifier
+    --notification_topics=monitor
+
+**Note:** *This will likely change once the various project switch to ``oslo.messaging``
+which uses endpoints to define the notification drivers.*
+
+This will tell OpenStack to publish notifications to a Rabbit exchange starting with
+``monitor.*`` ... this may result in ``monitor.info``, ``monitor.error``, etc.
+
+You'll need to restart Nova once these changes are made.
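+
+Before starting the worker it can be useful to confirm that notifications are
+actually reaching RabbitMQ. The snippet below is only a sketch: it assumes
+``kombu`` is installed and reuses the illustrative connection values from the
+deployments example above; the exchange and queue names and the payload fields
+may differ in your environment: ::
+
+    # Pull a single notification off the queue to confirm the plumbing works.
+    import socket
+
+    from kombu import Connection, Exchange, Queue
+
+    nova_exchange = Exchange("nova", type="topic", durable=False)
+    info_queue = Queue("monitor.info", exchange=nova_exchange,
+                       routing_key="monitor.info", durable=False)
+
+    def on_notification(body, message):
+        # body is normally the notification dict published by Nova
+        print("Got event: %s" % body.get("event_type"))
+        message.ack()
+
+    with Connection("amqp://rabbit:rabbit@10.0.1.1:5672//") as conn:
+        with conn.Consumer(info_queue, callbacks=[on_notification]):
+            try:
+                conn.drain_events(timeout=10)
+            except socket.timeout:
+                print("No notifications seen -- check the flags above.")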
+ +Next Steps +========== + +Once you have this working well, you should download and install ``Stacky`` and play with the command line tool. + From 3fabcecd9f635d917c914f1c8955eba6b1077d2e Mon Sep 17 00:00:00 2001 From: Sandy Walsh Date: Tue, 14 Jan 2014 21:34:58 +0000 Subject: [PATCH 12/53] started working on rest api docs --- docs/api.rst | 103 +++++++++++++++++++++++++++++++++++++++++++ docs/conf.py | 2 +- docs/index.rst | 1 + etc/pip-requires.txt | 3 +- 4 files changed, 107 insertions(+), 2 deletions(-) create mode 100644 docs/api.rst diff --git a/docs/api.rst b/docs/api.rst new file mode 100644 index 0000000..df521d3 --- /dev/null +++ b/docs/api.rst @@ -0,0 +1,103 @@ +The StackTach REST Interface +############################ + +stacky/deployments +================== + +.. http:get:: /stacky/deployments/ + + The list of all available deployments + + **Example request**: + + .. sourcecode:: http + + GET /stacky/deployments HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/json + + [ + { + "post_id": 12345, + "author_id": 123, + "tags": ["server", "web"], + "subject": "I tried Nginx" + }, + { + "post_id": 12346, + "author_id": 123, + "tags": ["html5", "standards", "web"], + "subject": "We go to HTML 5" + } + ] + + :query sort: one of ``hit``, ``created-at`` + :query offset: offset number. default is 0 + :query limit: limit number. default is 30 + :reqheader Accept: the response content type depends on + :mailheader:`Accept` header + :reqheader Authorization: optional OAuth token to authenticate + :resheader Content-Type: this depends on :mailheader:`Accept` + header of request + :statuscode 200: no error + :statuscode 404: there's no user + + +stacky/events +============= + +stacky/hosts +============ + +stacky/uuid +=========== + +stacky/timings +============== + +stacky/timings/uuid +=================== + +stacky/summary +============== + +stacky/request +============== + +stacky/reports +============== + +stacky/report/ +========================= + +stacky/show/ +====================== + +stacky/watch/ +============================ + +stacky/search +============= + +stacky/kpi +========== + +stacky/kpi/ +====================== + +stacky/usage/launches +===================== + +stacky/usage/deletes +==================== + +stacky/usage/exists +=================== diff --git a/docs/conf.py b/docs/conf.py index 21cae29..583f35c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -28,7 +28,7 @@ import os # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = [] +extensions = ['sphinxcontrib.httpdomain'] # Add any paths that contain templates here, relative to this directory. 
templates_path = ['_templates'] diff --git a/docs/index.rst b/docs/index.rst index 58d8010..7b4b5ca 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -13,6 +13,7 @@ Contents: intro setup + api Indices and tables diff --git a/etc/pip-requires.txt b/etc/pip-requires.txt index c02219a..c40dd61 100644 --- a/etc/pip-requires.txt +++ b/etc/pip-requires.txt @@ -7,4 +7,5 @@ prettytable>=0.7.2 argparse Pympler requests -south \ No newline at end of file +south +sphinxcontrib-httpdomain From 8e07497927591e8cb09ee5b4e7dc4d2a139ce492 Mon Sep 17 00:00:00 2001 From: Sandy Walsh Date: Wed, 15 Jan 2014 16:08:27 +0000 Subject: [PATCH 13/53] fleshing out REST API --- docs/api.rst | 166 +++++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 140 insertions(+), 26 deletions(-) diff --git a/docs/api.rst b/docs/api.rst index df521d3..e256c2c 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -1,10 +1,26 @@ The StackTach REST Interface ############################ +JSON Response Format +******************** + +StackTach uses an tabular JSON response format to make it easier for +Stacky to display generic results. + +The JSON response format is as follows: :: + + [ + ['column header', 'column header', 'column header', ...], + ['row 1, col 1', 'row 1, col 2', 'row 1, col 3', ...], + ['row 2, col 1', 'row 2, col 2', 'row 2, col 3', ...], + ['row 3, col 1', 'row 3, col 2', 'row 3, col 3', ...], + ... + ] + stacky/deployments ================== -.. http:get:: /stacky/deployments/ +.. http:get:: http://example.com/stacky/deployments/ The list of all available deployments @@ -12,7 +28,7 @@ stacky/deployments .. sourcecode:: http - GET /stacky/deployments HTTP/1.1 + GET /stacky/deployments/ HTTP/1.1 Host: example.com Accept: application/json @@ -25,41 +41,139 @@ stacky/deployments Content-Type: text/json [ - { - "post_id": 12345, - "author_id": 123, - "tags": ["server", "web"], - "subject": "I tried Nginx" - }, - { - "post_id": 12346, - "author_id": 123, - "tags": ["html5", "standards", "web"], - "subject": "We go to HTML 5" - } + ['#', 'Name'], + [1, 'deployment name'], + [2, 'deployment name'], + ... ] - :query sort: one of ``hit``, ``created-at`` - :query offset: offset number. default is 0 - :query limit: limit number. default is 30 - :reqheader Accept: the response content type depends on - :mailheader:`Accept` header - :reqheader Authorization: optional OAuth token to authenticate - :resheader Content-Type: this depends on :mailheader:`Accept` - header of request - :statuscode 200: no error - :statuscode 404: there's no user - - stacky/events ============= +.. http:get:: http://example.com/stacky/events/ + + The distinct list of all event names + + **Example request**: + + .. sourcecode:: http + + GET /stacky/events/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/json + + [ + ['Event Name'], + ["add_fixed_ip_to_instance"], + ["attach_volume"], + ["change_instance_metadata"], + ["compute.instance.create.end"], + ["compute.instance.create.error"], + ["compute.instance.create.start"], + ["compute.instance.create_ip.end"], + ... + ] + + :query service: ``nova`` or ``glance``. default="nova" + stacky/hosts ============ +.. http:get:: http://example.com/stacky/hosts/ + + The distinct list of all hosts sending notifications. + + **Example request**: + + .. sourcecode:: http + + GET /stacky/hosts/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. 
sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/json + + [ + ['Host Name'], + ["compute-1"], + ["compute-2"], + ["scheduler-x"], + ["api-88"], + ... + ] + + :query service: ``nova`` or ``glance``. default="nova" + + stacky/uuid =========== +.. http:get:: http://example.com/stacky/uuid/ + + Retrieve all notifications for instances with a given UUID. + + **Example request**: + + .. sourcecode:: http + + GET /stacky/uuid/?uuid=77e0f192-00a2-4f14-ad56-7467897828ea HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/json + + [ + ["#", "?", "When", "Deployment", "Event", "Host", "State", + "State'", "Task"], + [ + 40065869, + " ", + "2014-01-14 15:39:22.574829", + "region-1", + "compute.instance.snapshot.start", + "compute-99", + "active", + "", + "" + ], + [ + 40065879, + " ", + "2014-01-14 15:39:23.599298", + "region-1", + "compute.instance.update", + "compute-99", + "active", + "active", + "image_snapshot" + ], + ... + ] + + :query uuid: UUID of desired instance. + :query service: ``nova`` or ``glance``. default="nova" + + stacky/timings ============== From d58bab1a840794f1195072604c8cac906624c8c2 Mon Sep 17 00:00:00 2001 From: Sandy Walsh Date: Wed, 15 Jan 2014 20:56:10 +0000 Subject: [PATCH 14/53] 3/4 of the stacky cmds documented --- docs/api.rst | 360 ++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 354 insertions(+), 6 deletions(-) diff --git a/docs/api.rst b/docs/api.rst index e256c2c..481bfeb 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -114,10 +114,9 @@ stacky/hosts ["scheduler-x"], ["api-88"], ... - ] :query service: ``nova`` or ``glance``. default="nova" - + ] stacky/uuid =========== @@ -174,27 +173,376 @@ stacky/uuid :query service: ``nova`` or ``glance``. default="nova" -stacky/timings -============== +stacky/timings/uuid/ +==================== -stacky/timings/uuid -=================== +.. http:get:: http://example.com/stacky/timings/uuid/ + + Retrieve all timings for a given instance. Timings are the time + deltas between related .start and .end notifications. For example, + the time difference between ``compute.instance.run_instance.start`` + and ``compute.instance.run_instance.end``. + + The first column of the response will be + + * ``S`` if there is a ``.start`` event and no ``.end`` + * ``E`` if there is a ``.end`` event and no ``.start`` + * ``.`` if there was a ``.start`` and ``.end`` event + + No time difference will be returned in the ``S`` or ``E`` cases. + + **Example request**: + + .. sourcecode:: http + + GET /stacky/timings/uuid/?uuid=77e0f192-00a2-4f14-ad56-7467897828ea HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/json + + [ + ["?", "Event", "Time (secs)"], + [".", "compute.instance.create", "0d 00:00:55.50"], + [".", "compute.instance.snapshot", "0d 00:14:11.71"], + [".", "compute.instance.snapshot", "0d 00:17:31.33"], + [".", "compute.instance.snapshot", "0d 00:16:48.88"] + ... + ] + + :query uuid: UUID of desired instance. + :query service: ``nova`` or ``glance``. default="nova" stacky/summary ============== +.. http:get:: http://example.com/stacky/summary/ + + Returns timing summary information for each event type + collected. Only notifications with ``.start``/``.end`` pairs + are considered. 
+ + This includes: :: + + * the number of events seen of each type (N) + * the Minimum time seen + * the Maximum time seen + * the Average time seen + + **Example request**: + + .. sourcecode:: http + + GET /stacky/summary/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/json + + [ + ["Event", "N", "Min", "Max", "Avg"], + ["compute.instance.create", 50, + "0d 00:00:52.88", "0d 01:41:14.27", "0d 00:08:26"], + ["compute.instance.create_ip", 50, + "0d 00:00:06.80", "5d 20:16:47.08", "0d 03:47:17"], + ... + ] + + :query uuid: UUID of desired instance. + :query service: ``nova`` or ``glance``. default="nova" + :query limit: the number of timings to return. + :query offset: offset into query result set to start from. + + stacky/request ============== +.. http:get:: http://example.com/stacky/request/ + + Returns all notifications related to a particular Request ID. + + The ``?`` column will be ``E`` if the event came from the ``.error`` + queue. ``State`` and ``State'`` are the current state and the previous + state, respectively. + + **Example request**: + + .. sourcecode:: http + + GET /stacky/request/?request_id=req-a7517402-6192-4d0a-85a1-e14051790d5a HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/json + + [ + ["#", "?", "When", "Deployment", "Event", "Host", "State", + "State'", "Task'" + ], + [ + 40368306, + " ", + "2014-01-15 15:39:34.130286", + "region-1", + "compute.instance.update", + "api-1", + "active", + "active", + null + ], + [ + 40368308, + " ", + "2014-01-15 15:39:34.552434", + "region-1", + "compute.instance.update", + "api-1", + "active", + null, + null + ], + + ... + ] + + :query request_id: desired request ID + :query when_min: unixtime to start search + :query when_max: unixtime to end search + :query limit: the number of timings to return. + :query offset: offset into query result set to start from. + + stacky/reports ============== +.. http:get:: http://example.com/stacky/reports/ + + Returns a list of all available reports. + + The ``Start`` and ``End`` columns refer to the time span + the report covers (in unixtime). + + **Example request**: + + .. sourcecode:: http + + GET /stacky/reports/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/json + + [ + ["Id", "Start", "End", "Created", "Name", "Version"], + [ + 5971, + 1389726000.0, + 1389729599.0, + 1389730212.9474499, + "summary for region: all", + 4 + ], + [ + 5972, + 1389729600.0, + 1389733199.0, + 1389733809.979934, + "summary for region: all", + 4 + ], + + ... + ] + + :query created_from: unixtime to start search + :query created_to: unixtime to end search + :query limit: the number of timings to return. + :query offset: offset into query result set to start from. + stacky/report/ ========================= +.. http:get:: http://example.com/stacky/report/ + + Returns a specific report. + + The contents of the report varies by the specific report, but + all are in row/column format with Row 0 being a special *metadata* row. + + Row 0 of each report is a dictionary of metadata about the report. The + actual row/columns of the report start at Row 1 onwards (where Row 1 + is the Column headers and Rows 2+ are the details, as with other result + sets) + + **Example request**: + + .. 
sourcecode:: http + + GET /stacky/report/1/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/json + + [ + { + "4xx failure count": 0, + "4xx failure percentage": 0.0, + "5xx failure count": 1, + "5xx failure percentage": 0.018284904, + "> 30 failure count": 13, + "> 30 failure percentage": 1.13479794, + "cells": [ + "c0001", + "global", + "c0003", + "c0004", + "c0011", + "c0010", + "a0001", + "c0012", + "b0002", + "a0002" + ], + "end": 1389729599.0, + "failure_grand_rate": 0.2445074415308293, + "failure_grand_total": 14, + "hours": 1, + "pct": 0.014999999999999999, + "percentile": 97, + "region": null, + "start": 1389726000.0, + "state failure count": 0, + "state failure percentage": 0.0, + "total": 411 + }, + ["Operation", "Image", "OS Type", "Min", "Max", "Med", "97%", "Requests", + "4xx", "% 4xx", "5xx", "% 5xx", "> 30", "% > 30", "state", "% state"], + [ + "aux", + "snap", + "windows", + "0s", + "5s", + "0s", + "5s", + 6, + 0, + 0.0, + 0, + 0.0, + 0, + 0.0, + 0, + 0.0 + ], + [ + "resize", + "base", + "linux", + "1s", + "5:44s", + "1:05s", + "3:44s", + 9, + 0, + 0.0, + 0, + 0.0, + 0, + 0.0, + 0, + 0.0 + ], + + ... + ] + stacky/show/ ====================== +.. http:get:: http://example.com/stacky/show// + + Show the details on a specific notification. + + The response of this operation is non-standard. It returns 3 rows: + + * The first row is the traditional row-column result set used by most + commands. + * The second row is a prettied, stringified version of the full JSON payload + of the raw notification. + * The third row is the UUID of the related instance, if any. + + **Example request**: + + .. sourcecode:: http + + GET /stacky/show/1234/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/json + + [ + [ + ["Key", "Value"], + ["#", 1234 ], + ["When", "2014-01-15 20:39:44.277745"], + ["Deployment", "region-1"], + ["Category", "monitor.info"], + ["Publisher", "compute-1"], + ["State", "active"], + ["Event", "compute.instance.update"], + ["Service", "compute"], + ["Host", "compute-1"], + ["UUID", "8eba1a6d-43eb-1343-8d1a-5e596f5233b5"], + ["Req ID", "req-1368539d-f645-4d96-842e-03b5c5c9dc8c"], + ... + ], + "[\n \"monitor.info\", \n {\n \"_context_request_id\": \"req-13685e9d-f645-4d96-842e-03b5c5c9dc8c\", \n \"_context_quota_class\": null, \n \"event_type\": \"compute.instance.update\", \n \"_context_service_catalog\": [], \n \"_context_auth_token\": \"d81a25d03bb340bb82b4b67d105cc42d\", \n \"_context_user_id\": \"b83e2fac644c4215bc449fb4b5c9bbfa\", \n \"payload\": {\n \"state_description\": \"\", \n \"availability_zone\": null, \n \"terminated_at\": \"\", \n \"ephemeral_gb\": 300, \n ...", + "8eba1a6d-43eb-1343-8d1a-5e596f5233b5" + ] + + :query service: ``nova`` or ``glance``. 
default="nova" + :query event_id: desired Event ID + + stacky/watch/ ============================ From ca070c246d496f806b095286e4a446398f6e1f01 Mon Sep 17 00:00:00 2001 From: Sandy Walsh Date: Thu, 16 Jan 2014 17:01:03 +0000 Subject: [PATCH 15/53] fix config --- README.md | 4 ++-- docs/setup.rst | 25 +++++++++++++++++++++++-- 2 files changed, 25 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 3cfe97f..d246137 100644 --- a/README.md +++ b/README.md @@ -47,7 +47,7 @@ Finally, `DJANGO_SETTINGS_MODULE` tells Django where to get its configuration fr The `sample_stacktach_worker_config.json` file tells StackTach where each of the RabbitMQ servers are that it needs to get events from. In most cases you'll only have one entry in this file, but for large multi-cell deployments, this file can get pretty large. It's also handy for setting up one StackTach for each developer environment. -The file is in json format and the main configuration is under the `"deployments"` key, which should contain a list of deployment dictionaries. +The file is in json format and the main configuration is under the `"deployments"` key, which should contain a list of deployment dictionaries. A blank worker config file would look like this: ``` @@ -74,7 +74,7 @@ where, *name* is whatever you want to call your deployment, and *rabbit_<>* are By default, Nova uses ephemeral queues. If you are using durable queues, be sure to change the necessary flag here. -You can add as many deployments as you like. +You can add as many deployments as you like. #### Starting the Worker diff --git a/docs/setup.rst b/docs/setup.rst index 2ed5b44..0c53f2f 100644 --- a/docs/setup.rst +++ b/docs/setup.rst @@ -48,7 +48,13 @@ But that's not much fun. A deployment entry would look like this: :: "rabbit_port": 5672, "rabbit_userid": "rabbit", "rabbit_password": "rabbit", - "rabbit_virtual_host": "/" + "rabbit_virtual_host": "/", + "topics": { + "nova": [ + {"queue": "notifications.info", "routing_key": "notifications.info"}, + {"queue": "notifications.error", "routing_key": "notifications.error"}, + ] + } } ]} @@ -56,8 +62,12 @@ where, *name* is whatever you want to call your deployment, and *rabbit_\** are By default, Nova uses ephemeral queues. If you are using durable queues, be sure to change the necessary flag here. +The topics section defines which queues to pull notifications from. You should +pull notifications from all related queues (``.error``, ``.info``, ``.warn``, etc) + You can add as many deployments as you like. + Starting the Worker =================== @@ -72,7 +82,7 @@ Configuring Nova to Generate Notifications In the OpenStack service you wish to have generate notifications, add the following to its ``.conf`` file: :: - --notification_driver=nova.openstack.common.notifier.rabbit_notifier + --notification_driver=nova.openstack.common.notifier.rpc_notifier --notification_topics=monitor **Note:** *This will likely change once the various project switch to ``oslo.messaging`` @@ -83,6 +93,17 @@ This will tell OpenStack to publish notifications to a Rabbit exchange starting You'll need to restart Nova once these changes are made. 
+If you're using `DevStack`_ you may want to set up your ``local.conf`` to include the following: :: + + [[post-config|$NOVA_CONF]] + [DEFAULT] + notification_driver=nova.openstack.common.notifier.rpc_notifier + notification_topics=notifications,monitor + notify_on_state_change=vm_and_task_state + notify_on_any_change=True + instance_usage_audit=True + instance_usage_audit_period=hour + Next Steps ========== From ee017cc6e6d7dc19bffb4488d1f090842b058efa Mon Sep 17 00:00:00 2001 From: Sandy Walsh Date: Thu, 16 Jan 2014 17:39:08 +0000 Subject: [PATCH 16/53] Finished REST API docs --- docs/api.rst | 180 +++++++++++++++++++++++++++++++++++++++++++++++-- docs/intro.rst | 3 + 2 files changed, 179 insertions(+), 4 deletions(-) diff --git a/docs/api.rst b/docs/api.rst index 481bfeb..340c263 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -546,20 +546,192 @@ stacky/show/ stacky/watch/ ============================ +.. http:get:: http://example.com/stacky/watch// + + Get a real-time feed of notifications. + + Once again, this is a non-standard response (not the typical row-column format). + This call returns a tuple of information: + + * A list of column widths, to be used as a hint for formatting. + * A list of events that meet the query criteria. + * the db id of the event + * the type of event (``E`` for errors, ``.`` otherwise) + * stringified date of the event + * stringified time of the event + * deployment name + * the event name + * the instance UUID, if available + * The ending unixtime timestamp. The last time covered by this query + (utcnow, essentially) + + **Example request**: + + .. sourcecode:: http + + GET /stacky/watch/14/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/json + + [ + [10, 1, 15, 20, 50, 36], + [ + ... events ... + ] + "1389892207" + ] + + :query service: ``nova`` or ``glance``. default="nova" + :query since: get all events since ``unixtime``. Defaults to 2 seconds ago. + :query event_name: only watch for ``event_name`` notifications. Defaults to all events. + + stacky/search ============= -stacky/kpi -========== +.. http:get:: http://example.com/stacky/search/ -stacky/kpi/ -====================== + Search for notifications. + + Returns:: + + * Event ID + * ``E`` for errors, ``.`` otherwise + * unixtime for when the event was generated + * the deployment name + * the event name + * the host name + * the instance UUID + * the request ID + + **Example request**: + + .. sourcecode:: http + + GET /stacky/search/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/json + + [ + [...event info as listed above...] + ] + + :query service: ``nova`` or ``glance``. default="nova" + :query field: notification field to search on. + :query value: notification values to find. + :query when_min: unixtime to start search + :query when_max: unixtime to end search stacky/usage/launches ===================== +.. http:get:: http://example.com/stacky/launches/ + + Return a list of all instance launches. + + **Example request**: + + .. sourcecode:: http + + GET /stacky/usages/launches/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/json + + [ + ["UUID", "Launched At", "Instance Type Id", "Instance Flavor Id"], + [ + ... usage launch records ... 
+ ] + ] + + :query instance: desired instance UUID (optional) + stacky/usage/deletes ==================== +.. http:get:: http://example.com/stacky/deletes/ + + Return a list of all instance deletes. + + **Example request**: + + .. sourcecode:: http + + GET /stacky/usages/deletes/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/json + + [ + ["UUID", "Launched At", "Deleted At"] + [ + ... usage deleted records ... + ] + ] + + :query instance: desired instance UUID (optional) + + stacky/usage/exists =================== + +.. http:get:: http://example.com/stacky/exists/ + + Return a list of all instance exists notifications. + + **Example request**: + + .. sourcecode:: http + + GET /stacky/usages/exists/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/json + + [ + ["UUID", "Launched At", "Deleted At", "Instance Type Id", + "Instance Flavor Id", "Message ID", "Status"] + [ + ... usage exists records ... + ] + ] + + :query instance: desired instance UUID (optional) diff --git a/docs/intro.rst b/docs/intro.rst index 9c47136..e40973c 100644 --- a/docs/intro.rst +++ b/docs/intro.rst @@ -34,3 +34,6 @@ browser interface or command line tool (`Stacky`_). .. image:: images/diagram.gif +To get a general sense of notification adoption across OpenStack projects `read this blog post`_ + +.. _read this blog post: http://www.sandywalsh.com/2013/09/notification-usage-in-openstack-report.html \ No newline at end of file From 3c124e758f34ed2a748b6b2fc3fbac1375df4997 Mon Sep 17 00:00:00 2001 From: Sandy Walsh Date: Thu, 16 Jan 2014 17:41:31 +0000 Subject: [PATCH 17/53] missing ref --- docs/setup.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/setup.rst b/docs/setup.rst index 0c53f2f..0061b68 100644 --- a/docs/setup.rst +++ b/docs/setup.rst @@ -104,6 +104,9 @@ If you're using `DevStack`_ you may want to set up your ``local.conf`` to includ instance_usage_audit=True instance_usage_audit_period=hour +.. _DevStack: http://devstack.org/ + + Next Steps ========== From d07301215d5f962ae4eb17f84e543f3011db6645 Mon Sep 17 00:00:00 2001 From: Sandy Walsh Date: Thu, 16 Jan 2014 18:02:15 +0000 Subject: [PATCH 18/53] tidy up loose ends --- README.md | 88 ++-------------------------------------------------- docs/api.rst | 2 +- 2 files changed, 3 insertions(+), 87 deletions(-) diff --git a/README.md b/README.md index d246137..c18e203 100644 --- a/README.md +++ b/README.md @@ -11,89 +11,5 @@ OpenStack has the ability to publish notifications to a RabbitMQ exchange as the A detailed description of the notifications published by OpenStack [is available here](http://wiki.openstack.org/SystemUsageData) -StackTach has three primary components: -1. The Worker daemon. Consumes the notifications from the Rabbit queue and writes it to a SQL database. -1. The Web UI, which is a Django application. Provides a real-time display of notifications as they are consumed by the worker. Also provides for point-and-click analysis of the events for following related events. -1. Stacky, the command line tool. Operator and Admins aren't big fans of web interfaces. StackTach also exposes a REST interface which Stacky can use to provide output suitable for tail/grep post-processing. - -## Installing StackTach - -### The "Hurry Up" Install Guide -1. Create a database for StackTach to use. 
By default, StackTach assumes MySql, but you can modify the settings.py file to others. -1. Install django and the other required libraries listed in `./etc/pip-requires.txt` (I hope I got 'em all) -1. Clone this repo -1. Copy and configure the config files in `./etc` (see below for details) -1. Create the necessary database tables (python manage.py syncdb) You don't need an administrator account since there are no user profiles used. -1. Configure OpenStack to publish Notifications back into RabbitMQ (see below) -1. Restart the OpenStack services. -1. Run the Worker to start consuming messages. (see below) -1. Run the web server (python manage.py runserver) -1. Point your browser to `http://127.0.0.1:8000` (the default server location) -1. Click on stuff, see what happens. You can't hurt anything, it's all read-only. - -Of course, this is only suitable for playing around. If you want to get serious about deploying StackTach you should set up a proper webserver and database on standalone servers. There is a lot of data that gets collected by StackTach (depending on your deployment size) ... be warned. Keep an eye on DB size. - -#### The Config Files -There are two config files for StackTach. The first one tells us where the second one is. A sample of these two files is in `./etc/sample_*`. Create a local copy of these files and populate them with the appropriate config values as described below. - -The `sample_stacktach_config.sh` shell script defines the necessary environment variables StackTach needs. Most of these are just information about the database (assuming MySql) but some are a little different. **Remember to source the local copy of the `sample_stacktach_config.sh` shell script to set up the necessary environment variables.** - -If your db host is not on the same machine, you'll need to set this flag. Otherwise the empty string is fine. - -`STACKTACH_INSTALL_DIR` should point to where StackTach is running out of. In most cases this will be your repo directory, but it could be elsewhere if your going for a proper deployment. -The StackTach worker needs to know which RabbitMQ servers to listen to. This information is stored in the deployment file. `STACKTACH_DEPLOYMENTS_FILE` should point to this json file. To learn more about the deployments file, see further down. - -Finally, `DJANGO_SETTINGS_MODULE` tells Django where to get its configuration from. This should point to the `setting.py` file. You shouldn't have to do much with the `settings.py` file and most of what it needs is in these environment variables. - -The `sample_stacktach_worker_config.json` file tells StackTach where each of the RabbitMQ servers are that it needs to get events from. In most cases you'll only have one entry in this file, but for large multi-cell deployments, this file can get pretty large. It's also handy for setting up one StackTach for each developer environment. - -The file is in json format and the main configuration is under the `"deployments"` key, which should contain a list of deployment dictionaries. - -A blank worker config file would look like this: -``` -{"deployments": [] } -``` - -But that's not much fun. 
A deployment entry would look like this: - -``` -{"deployments": [ - { - "name": "east_coast.prod.cell1", - "durable_queue": false, - "rabbit_host": "10.0.1.1", - "rabbit_port": 5672, - "rabbit_userid": "rabbit", - "rabbit_password": "rabbit", - "rabbit_virtual_host": "/" - } -]} -``` - -where, *name* is whatever you want to call your deployment, and *rabbit_<>* are the connectivity details for your rabbit server. It should be the same information in your `nova.conf` file that OpenStack is using. Note, json has no concept of comments, so using `#`, `//` or `/* */` as a comment won't work. - -By default, Nova uses ephemeral queues. If you are using durable queues, be sure to change the necessary flag here. - -You can add as many deployments as you like. - -#### Starting the Worker - -Note: the worker now uses librabbitmq, be sure to install that first. - -`./worker/start_workers.py` will spawn a worker.py process for each deployment defined. Each worker will consume from a single Rabbit queue. - - -#### Configuring Nova to generate Notifications - -`--notification_driver=nova.openstack.common.notifier.rabbit_notifier` -`--notification_topics=monitor` - -This will tell OpenStack to publish notifications to a Rabbit exchange starting with `monitor.*` ... this may result in `monitor.info`, `monitor.error`, etc. - -You'll need to restart Nova once these changes are made. - -### Next Steps - -Once you have this working well, you should download and install Stacky and play with the command line tool. - - +## Documentation +`cd` into the `docs` directory and run `make html` to get the installation and API docs. \ No newline at end of file diff --git a/docs/api.rst b/docs/api.rst index 340c263..c7b5166 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -601,7 +601,7 @@ stacky/search Search for notifications. - Returns:: + Returns: * Event ID * ``E`` for errors, ``.`` otherwise From c996a845071fb57c032624ddff9fe76c111cb9b5 Mon Sep 17 00:00:00 2001 From: Sandy Walsh Date: Thu, 16 Jan 2014 20:03:01 +0000 Subject: [PATCH 19/53] messing with readthedocs.org --- README.md | 2 +- etc/doc-requires.txt | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 etc/doc-requires.txt diff --git a/README.md b/README.md index c18e203..482a6ae 100644 --- a/README.md +++ b/README.md @@ -12,4 +12,4 @@ OpenStack has the ability to publish notifications to a RabbitMQ exchange as the A detailed description of the notifications published by OpenStack [is available here](http://wiki.openstack.org/SystemUsageData) ## Documentation -`cd` into the `docs` directory and run `make html` to get the installation and API docs. \ No newline at end of file +`cd` into the `docs` directory and run `make html` to get the installation and API docs. 
Or you can view the `rst` files [directly here](https://github.com/rackerlabs/stacktach/blob/master/docs/setup.rst) diff --git a/etc/doc-requires.txt b/etc/doc-requires.txt new file mode 100644 index 0000000..4df3b68 --- /dev/null +++ b/etc/doc-requires.txt @@ -0,0 +1 @@ +sphinxcontrib-httpdomain From ef2ef7a4b804365e7b99137d72f08aa19dc0f90f Mon Sep 17 00:00:00 2001 From: Sandy Walsh Date: Thu, 16 Jan 2014 20:09:22 +0000 Subject: [PATCH 20/53] remove etc/doc_requires --- etc/doc-requires.txt | 1 - 1 file changed, 1 deletion(-) delete mode 100644 etc/doc-requires.txt diff --git a/etc/doc-requires.txt b/etc/doc-requires.txt deleted file mode 100644 index 4df3b68..0000000 --- a/etc/doc-requires.txt +++ /dev/null @@ -1 +0,0 @@ -sphinxcontrib-httpdomain From cce229b709906cdfbbb55043edbcedfed12a78db Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Thu, 16 Jan 2014 15:19:03 -0500 Subject: [PATCH 21/53] Adding usage verification docs --- docs/index.rst | 1 + docs/setup.rst | 2 + docs/usage.rst | 89 +++++++++++++++++++++++ etc/sample_stacktach_verifier_config.json | 3 +- 4 files changed, 94 insertions(+), 1 deletion(-) create mode 100644 docs/usage.rst diff --git a/docs/index.rst b/docs/index.rst index 7b4b5ca..3a7c222 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -14,6 +14,7 @@ Contents: intro setup api + usage Indices and tables diff --git a/docs/setup.rst b/docs/setup.rst index 0061b68..79f8a78 100644 --- a/docs/setup.rst +++ b/docs/setup.rst @@ -18,6 +18,8 @@ The "Hurry Up" Install Guide Of course, this is only suitable for playing around. If you want to get serious about deploying StackTach you should set up a proper webserver and database on standalone servers. There is a lot of data that gets collected by StackTach (depending on your deployment size) ... be warned. Keep an eye on DB size. +.. _stacktach-config-files: + The Config Files **************** There are two config files for StackTach. The first one tells us where the second one is. A sample of these two files is in ``./etc/sample_*``. Create a local copy of these files and populate them with the appropriate config values as described below. diff --git a/docs/usage.rst b/docs/usage.rst new file mode 100644 index 0000000..5e2e9ef --- /dev/null +++ b/docs/usage.rst @@ -0,0 +1,89 @@ + +StackTach Usage Verification +############################ + +Usage Basics +************ +In OpenStack, usage is tracked through notifications. The notifications are emitted by each service as users request changes and each service performs those changes. Services like Nova can also be configured to emit periodic audit notifications exposing the state of the database at the time of the audit. The periodic audit notifications are useful for billing as it is not necessary to store past states. + +But we want to be sure what we're billing for is correct and that we've received audit notifications for every instance that should be billable. Thus, it is a good idea to track instance state so that periodic audit notifications can be validated against that state. The notifications each service sends as changes are requested and performed are extremely useful for tracking instance state through different billable states. + +The idea behind StackTach's Usage Verification is to track changes through instantaneous notifications, then compare them to the periodic audit notifications for correctness. After being validated, StackTach itself will emit a copy of the notification with a new event_type indicating that it has been verified.
StackTach also provides a set of scripts which can be used to confirm that exists were sent for all instances in a billable state. + +Configuring Usage Verification +****************************** +Usage Verification in StackTach is done by a separate verifier process. A sample configuration file can be found at ``./etc/sample_stacktach_verifier_config.json``. + +The default config provides most of the settings that are required for the verifier. :: + + { + "tick_time": 30, + "settle_time": 5, + "settle_units": "minutes", + "pool_size": 2, + "enable_notifications": true, + "validation_level": "all", + "flavor_field_name": "instance_type_id", + "rabbit": { + "durable_queue": false, + "host": "10.0.0.1", + "port": 5672, + "userid": "rabbit", + "password": "rabbit", + "virtual_host": "/", + "topics": { + "nova": ["notifications.info"], + "glance": ["notifications.info"] + } + } + } + +* tick_time: Time in seconds to sleep before attempting to retrieve pending usage entries for verification +* settle_time: Amount of time between when a usage notification was emitted and when it should be picked up for verification. +* settle_units: Units for the settle_time value +* pool_size: Number of verifier processes to create for the verifier pool. +* enable_notifications: Whether or not to emit verified notifications. +* validation_level: Determines how strict datatype validation will be on usage notifications. Values are ``none``, ``basic``, and ``all``. +* flavor_field_name: Field to use for flavor verification. Values are ``instance_type_id`` and ``instance_flavor_id``. +* rabbit: Rabbit config, please see :ref:`StackTach install guide <stacktach-config-files>` for rabbit config details. + + * The topics here are how the verifier determines which services to verify. For example, Nova and Glance services will be verified and verified notifications will be emitted with a routing_key of notifications.info with our sample config. + * An alternate config that would only verify Nova and emit verified notifications on notifications.info and monitor.info: :: + + "topics": { + "nova": ["notifications.info", "monitor.info"] + } + +* Other Config Options: + + * nova_event_type: Event type to emit for Nova events + + * Default: compute.instance.exists.verified.old + + * glance_event_type: Event type to emit for Glance events + + * Default: image.exists.verified.old + +Starting the Verifier +********************* + +``./verifier/start_verifier.py`` will spawn a verifier.py process for each service being verified along with a pool of processes to verify each usage entry. + +Audit Reports +************* + +StackTach also provides a few reports for auditing the audit notifications, which can be useful for confirming all usage was sent for a deployment.
+ +* ``./reports/nova_usage_audit.py`` + + * Suggested Arguments: + * --period_length: ``day`` or ``year``, default: ``day`` + * --utcdatetime: Overrides datetime used to audit, default: current utc datetime + * --store: ``True`` or ``False``, whether or not to store report in StackTach database + +* ``./reports/glance_usage_audit`` + + * Suggested Arguments: + * --period_length: ``day`` or ``year``, default: ``day`` + * --utcdatetime: Overrides datetime used to audit, default: current utc datetime + * --store: ``True`` or ``False``, whether or not to store report in StackTach database \ No newline at end of file diff --git a/etc/sample_stacktach_verifier_config.json b/etc/sample_stacktach_verifier_config.json index f18e901..da997ec 100644 --- a/etc/sample_stacktach_verifier_config.json +++ b/etc/sample_stacktach_verifier_config.json @@ -4,7 +4,8 @@ "settle_units": "minutes", "pool_size": 2, "enable_notifications": true, - "validation_level": "all", + "validation_level": "all", + "flavor_field_name": "instance_type_id", "rabbit": { "durable_queue": false, "host": "10.0.0.1", From 589de1852d2d74677d50311cca8371533d304ea2 Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Fri, 17 Jan 2014 12:01:04 -0500 Subject: [PATCH 22/53] Adding database api to docs --- docs/dbapi.rst | 776 +++++++++++++++++++++++++++++++++++++++++++++++++ docs/index.rst | 1 + 2 files changed, 777 insertions(+) create mode 100644 docs/dbapi.rst diff --git a/docs/dbapi.rst b/docs/dbapi.rst new file mode 100644 index 0000000..8426cc8 --- /dev/null +++ b/docs/dbapi.rst @@ -0,0 +1,776 @@ +The StackTach Database REST Interface +############################ + +JSON Response Format +******************** + +The StackTach Database API uses a more standard data model for access of to database objects + +The JSON response format uses an envelope with a single key to indicate the type of object returned. This object can be either a dictionary in the case of queries that return single objects, or a list when multiple objects are turned. + +Sample JSON response, single object: :: + + { + "enitity": + { + "id": 1 + "key1": "value1", + "key2": "value2" + } + } + +Sample JSON response, multiple objects: :: + + { + "enitities": + [ + { + "id": 1, + "key1": "value1", + "key2": "value2" + }, + { + "id": 2, + "key1": "value1", + "key2": "value2" + } + ] + } + +db/usage/launches/ +================== + +.. http:get:: http://example.com/db/usage/launches/ + +Deprecated, see: :ref:`dbapi-nova-launches` + +.. _dbapi-nova-launches: + +db/usage/nova/launches/ +======================= + +.. http:get:: http://example.com/db/usage/nova/launches/ + + Returns a list of instance launches matching provided query criteria. + + **Query Parameters** + + * ``launched_at_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``launched_at_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``instance``: uuid + * ``limit``: int, default: 50, max: 1000 + * ``offset``: int, default: 0 + + **Example request**: + + .. sourcecode:: http + + GET /db/usage/nova/launches/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. 
sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "launches": + [ + { + "os_distro": "org.centos", + "os_version": "5.8", + "instance_flavor_id": "2", + "instance_type_id": "2", + "launched_at": "2014-01-17 15:35:44", + "instance": "72e4d8e8-9f63-47cb-a904-0193e5edac6e", + "os_architecture": "x64", + "request_id": "req-7a86ed49-e1f4-4403-b3ef-22636f7acb7d", + "rax_options": "0", + "id": 91899, + "tenant": "5853600" + }, + { + "os_distro": "org.centos", + "os_version": "5.8", + "instance_flavor_id": "performance1-4", + "instance_type_id": "11", + "launched_at": "2014-01-17 15:35:20", + "instance": "932bcfd9-af68-4261-805e-6e43156c3b40", + "os_architecture": "x64", + "request_id": "req-6bfe911f-40f2-4fd8-946a-070c10bed014", + "rax_options": "0", + "id": 91898, + "tenant": "5853595" + } + ] + } + +db/usage/glance/images/ +======================= + +.. http:get:: http://example.com/db/usage/glance/images/ + + Returns a list of images matching provided query criteria. + + **Query Parameters** + + * ``created_at_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``created_at_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``limit``: int, default: 50, max: 1000 + * ``offset``: int, default: 0 + + **Example request**: + + .. sourcecode:: http + + GET /db/usage/glance/images/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "images": + [ + { + "uuid": "2048efd8-fdce-4123-bdbc-add3bfe64b83", + "created_at": "2014-01-17 02:28:08", + "owner": null, + "last_raw": 299977, + "id": 4837, + "size": 9192352 + }, + { + "uuid": "aa2c07dd-fd1c-4ad3-9f73-6a6d7d8a0dbd", + "created_at": "2014-01-17 02:24:18", + "owner": "5937488", + "last_raw": 299967, + "id": 4836, + "size": 9 + } + ] + } + +db/usage/launches// +============================== + +.. http:get:: http://example.com/db/usage/launches// + +Deprecated, see: :ref:`dbapi-nova-launch` + +.. _dbapi-nova-launch: + +db/usage/nova/launches// +=================================== + +.. http:get:: http://example.com/db/usage/nova/launches// + +Returns the single launch with id matching the provided id. + + **Example request**: + + .. sourcecode:: http + + GET /db/usage/nova/launches/91898/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "launch": + { + "os_distro": "org.centos", + "os_version": "5.8", + "instance_flavor_id": "performance1-4", + "instance_type_id": "11", + "launched_at": "2014-01-17 15:35:20", + "instance": "932bcfd9-af68-4261-805e-6e43156c3b40", + "os_architecture": "x64", + "request_id": "req-6bfe911f-40f2-4fd8-946a-070c10bed014", + "rax_options": "0", + "id": 91898, + "tenant": "5853595" + } + } + +db/usage/glance/images// +================================== + +.. http:get:: http://example.com/db/usage/glance/images// + +Returns the single image with id matching the provided id. + + **Example request**: + + .. sourcecode:: http + + GET /db/usage/glance/images/4836/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. 
sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "launch": + { + "uuid": "aa2c07dd-fd1c-4ad3-9f73-6a6d7d8a0dbd", + "created_at": "2014-01-17 02:24:18", + "owner": "5937488", + "last_raw": 299967, + "id": 4836, + "size": 9 + } + } + +db/usage/deletes/ +================= + +.. http:get:: http://example.com/db/usage/deletes/ + +Deprecated, see: :ref:`dbapi-nova-deletes` + +.. _dbapi-nova-deletes: + +db/usage/nova/deletes/ +====================== + +.. http:get:: http://example.com/db/usage/nova/deletes/ + + Returns a list of instance deletes matching provided query criteria. + + **Query Parameters** + + * ``launched_at_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``launched_at_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``deleted_at_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``deleted_at_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``instance``: uuid + * ``limit``: int, default: 50, max: 1000 + * ``offset``: int, default: 0 + + **Example request**: + + .. sourcecode:: http + + GET /db/usage/nova/deletes/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "deletes": + [ + { + "raw": 14615347, + "instance": "b36a8c2d-af88-4371-b14c-14dadf7073e5", + "deleted_at": "2014-01-17 16:07:30", + "id": 65110, + "launched_at": "2014-01-17 16:06:54" + }, + { + "raw": 14615248, + "instance": "3fd6797d-bc35-42d9-ad85-157a2ea93023", + "deleted_at": "2014-01-17 16:05:23", + "id": 65108, + "launched_at": "2014-01-17 16:05:00" + } + ] + } + +db/usage/glance/deletes/ +======================== + +.. http:get:: http://example.com/db/usage/glance/deletes/ + + Returns a list of image deletes matching provided query criteria. + + **Query Parameters** + + * ``deleted_at_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``deleted_at_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``limit``: int, default: 50, max: 1000 + * ``offset``: int, default: 0 + + **Example request**: + + .. sourcecode:: http + + GET /db/usage/glance/deletes/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "deletes": + [ + { + "raw": 300523, + "deleted_at": "2014-01-17 15:28:18.154927", + "id": 3169, + "uuid": "f8b02f0e-b392-40f5-9d39-0458ae6ebfb3" + }, + { + "raw": 300512, + "deleted_at": "2014-01-17 14:28:20.544617", + "id": 3168, + "uuid": "4c9dc0be-856b-4e98-81a5-1b63df108e7d" + } + ] + } + +db/usage/deletes// +============================= + +.. http:get:: http://example.com/db/usage/deletes/ + +Deprecated, see: :ref:`dbapi-nova-delete` + +.. _dbapi-nova-delete: + +db/usage/nova/deletes// +================================== + +.. http:get:: http://example.com/db/usage/nova/deletes/ + +Returns the single instance delete with id matching the provided id. + + **Example request**: + + .. sourcecode:: http + + GET /db/usage/nova/deletes/65110/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "delete": + { + "raw": 14615347, + "instance": "b36a8c2d-af88-4371-b14c-14dadf7073e5", + "deleted_at": "2014-01-17 16:07:30", + "id": 65110, + "launched_at": "2014-01-17 16:06:54" + } + } + +db/usage/glance/deletes// +==================================== + +.. 
http:get:: http://example.com/db/usage/glance/deletes/ + +Returns the single image delete with id matching the provided id. + + **Example request**: + + .. sourcecode:: http + + GET /db/usage/glance/deletes/3168/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "delete": + { + "raw": 300512, + "deleted_at": "2014-01-17 14:28:20.544617", + "id": 3168, + "uuid": "4c9dc0be-856b-4e98-81a5-1b63df108e7d" + } + } + +db/usage/exists/ +================ + +.. http:get:: http://example.com/db/usage/exists/ + +Deprecated, see: :ref:`dbapi-nova-exists` + +.. _dbapi-nova-exists: + +db/usage/nova/exists/ +===================== + +.. http:get:: http://example.com/db/usage/nova/exists/ + + Returns a list of instance exists matching provided query criteria. + + **Query Parameters** + + * ``audit_period_beginning_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``audit_period_beginning_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``audit_period_ending_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``audit_period_ending_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``launched_at_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``launched_at_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``deleted_at_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``deleted_at_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``received_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``received_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``instance``: uuid + * ``limit``: int, default: 50, max: 1000 + * ``offset``: int, default: 0 + + **Example request**: + + .. sourcecode:: http + + GET /db/usage/nova/exists/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "exists": + [ + { + "status": "verified", + "os_distro": "org.centos", + "bandwidth_public_out": 0, + "received": "2014-01-17 16:16:43.695474", + "instance_type_id": "2", + "raw": 14615544, + "os_architecture": "x64", + "rax_options": "0", + "audit_period_ending": "2014-01-17 16:16:43", + "deleted_at": null, + "id": 135106, + "tenant": "5889124", + "audit_period_beginning": "2014-01-17 00:00:00", + "fail_reason": null, + "instance": "978b32ea-374b-48c6-814b-bb6151e2fb5c", + "instance_flavor_id": "2", + "launched_at": "2014-01-17 16:16:09", + "os_version": "6.0", + "usage": 91932, + "send_status": 201, + "message_id": "9d28fa15-d163-40c7-8195-2853ad13179b", + "delete": null + }, + { + "status": "verified", + "os_distro": "org.centos", + "bandwidth_public_out": 0, + "received": "2014-01-17 16:10:42.112505", + "instance_type_id": "2", + "raw": 14615459, + "os_architecture": "x64", + "rax_options": "0", + "audit_period_ending": "2014-01-17 16:10:42", + "deleted_at": null, + "id": 135105, + "tenant": "5824940", + "audit_period_beginning": "2014-01-17 00:00:00", + "fail_reason": null, + "instance": "860b5df0-d58b-498d-8838-7156d701732c", + "instance_flavor_id": "2", + "launched_at": "2014-01-17 16:10:08", + "os_version": "5.9", + "usage": 91937, + "send_status": 201, + "message_id": "0a6b1c58-8443-4788-ac08-05cd03e6be53", + "delete": null + } + ] + } + +db/usage/glance/exists/ +======================= + +.. http:get:: http://example.com/db/usage/glance/exists/ + + Returns a list of instance exists matching provided query criteria. 
+ + **Query Parameters** + + * ``audit_period_beginning_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``audit_period_beginning_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``audit_period_ending_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``audit_period_ending_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``created_at_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``created_at_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``deleted_at_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``deleted_at_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``received_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``received_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``limit``: int, default: 50, max: 1000 + * ``offset``: int, default: 0 + + **Example request**: + + .. sourcecode:: http + + GET /db/usage/glance/exists/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "exists": + [ + { + "status": "verified", + "audit_period_beginning": "2014-01-13 00:00:00", + "fail_reason": null, + "uuid": "d39a04bd-6ba0-4d20-8591-937ab43897dc", + "usage": 2553, + "created_at": "2013-05-11 15:37:34", + "size": 11213393920, + "owner": "389886", + "message_id": "9c5fd5af-60b4-45ad-b524-c4a9964f31e4", + "raw": 283303, + "audit_period_ending": "2014-01-13 23:59:59", + "received": "2014-01-13 09:20:02.777965", + "deleted_at": null, + "send_status": 0, + "id": 5301, + "delete": null + }, + { + "status": "verified", + "audit_period_beginning": "2014-01-13 00:00:00", + "fail_reason": null, + "uuid": "6713c136-0555-4a93-b726-edb181d4b69e", + "usage": 1254, + "created_at": "2013-05-11 15:37:56", + "size": 11254732800, + "owner": "389886", + "message_id": "9c5fd5af-60b4-45ad-b524-c4a9964f31e4", + "raw": 283303, + "audit_period_ending": "2014-01-13 23:59:59", + "received": "2014-01-13 09:20:02.777965", + "deleted_at": null, + "send_status": 0, + "id": 5300, + "delete": null + } + ] + } + +db/usage/exists// +=========================== + +.. http:get:: http://example.com/db/usage/exists/ + +Deprecated, see: :ref:`dbapi-nova-exist` + +.. _dbapi-nova-exist: + +db/usage/nova/exists// +================================ + +.. http:get:: http://example.com/db/usage/nova/exists/ + + Returns a single instance exists matching provided id + + **Example request**: + + .. sourcecode:: http + + GET /db/usage/nova/exists/135105/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "exist": + { + "status": "verified", + "os_distro": "org.centos", + "bandwidth_public_out": 0, + "received": "2014-01-17 16:10:42.112505", + "instance_type_id": "2", + "raw": 14615459, + "os_architecture": "x64", + "rax_options": "0", + "audit_period_ending": "2014-01-17 16:10:42", + "deleted_at": null, + "id": 135105, + "tenant": "5824940", + "audit_period_beginning": "2014-01-17 00:00:00", + "fail_reason": null, + "instance": "860b5df0-d58b-498d-8838-7156d701732c", + "instance_flavor_id": "2", + "launched_at": "2014-01-17 16:10:08", + "os_version": "5.9", + "usage": 91937, + "send_status": 201, + "message_id": "0a6b1c58-8443-4788-ac08-05cd03e6be53", + "delete": null + } + } + +db/usage/glance/exists// +================================== + +.. http:get:: http://example.com/db/usage/glance/exists// + + Returns a single instance exists matching provided id + + **Example request**: + + .. 
sourcecode:: http + + GET /db/usage/glance/exists/5300/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "exist": + { + "status": "verified", + "audit_period_beginning": "2014-01-13 00:00:00", + "fail_reason": null, + "uuid": "6713c136-0555-4a93-b726-edb181d4b69e", + "usage": 1254, + "created_at": "2013-05-11 15:37:56", + "size": 11254732800, + "owner": "389886", + "message_id": "9c5fd5af-60b4-45ad-b524-c4a9964f31e4", + "raw": 283303, + "audit_period_ending": "2014-01-13 23:59:59", + "received": "2014-01-13 09:20:02.777965", + "deleted_at": null, + "send_status": 0, + "id": 5300, + "delete": null + } + } + +db/confirm/usage/exists/batch/ +===================================== + +.. http:put:: http://example.com/db/confirm/usage/exists/batch/ + + Uses the provided message_id's and http status codes to update image and instance exists send_status values. + + **Example V0 request**: + + .. sourcecode:: http + + PUT db/confirm/usage/exists/batch/ HTTP/1.1 + Host: example.com + Accept: application/json + + { + "messages": + [ + {"nova_message_id": 200}, + {"nova_message_id": 400} + ] + } + + **Example V1 request**: + + .. sourcecode:: http + + PUT db/confirm/usage/exists/batch/ HTTP/1.1 + Host: example.com + Accept: application/json + + { + "messages": + [ + { + "nova": + [ + {"nova_message_id1": 200}, + {"nova_message_id2": 400} + ], + "glance": + [ + {"glance_message_id1": 200}, + {"glance_message_id2": 400} + ] + } + ] + "version": 1 + } + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json \ No newline at end of file diff --git a/docs/index.rst b/docs/index.rst index 3a7c222..286ecad 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -15,6 +15,7 @@ Contents: setup api usage + dbapi Indices and tables From e2a78cff3f3646ff1b5a2bfe01819d40824baad2 Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Fri, 17 Jan 2014 12:15:28 -0500 Subject: [PATCH 23/53] Moving batch ping back call to top of docs --- docs/dbapi.rst | 117 +++++++++++++++++++++++++------------------------ 1 file changed, 59 insertions(+), 58 deletions(-) diff --git a/docs/dbapi.rst b/docs/dbapi.rst index 8426cc8..6494d71 100644 --- a/docs/dbapi.rst +++ b/docs/dbapi.rst @@ -37,6 +37,64 @@ Sample JSON response, multiple objects: :: ] } + +db/confirm/usage/exists/batch/ +===================================== + +.. http:put:: http://example.com/db/confirm/usage/exists/batch/ + + Uses the provided message_id's and http status codes to update image and instance exists send_status values. + + **Example V0 request**: + + .. sourcecode:: http + + PUT db/confirm/usage/exists/batch/ HTTP/1.1 + Host: example.com + Accept: application/json + + { + "messages": + [ + {"nova_message_id": 200}, + {"nova_message_id": 400} + ] + } + + **Example V1 request**: + + .. sourcecode:: http + + PUT db/confirm/usage/exists/batch/ HTTP/1.1 + Host: example.com + Accept: application/json + + { + "messages": + [ + { + "nova": + [ + {"nova_message_id1": 200}, + {"nova_message_id2": 400} + ], + "glance": + [ + {"glance_message_id1": 200}, + {"glance_message_id2": 400} + ] + } + ] + "version": 1 + } + **Example response**: + + .. 
sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + db/usage/launches/ ================== @@ -716,61 +774,4 @@ db/usage/glance/exists// "id": 5300, "delete": null } - } - -db/confirm/usage/exists/batch/ -===================================== - -.. http:put:: http://example.com/db/confirm/usage/exists/batch/ - - Uses the provided message_id's and http status codes to update image and instance exists send_status values. - - **Example V0 request**: - - .. sourcecode:: http - - PUT db/confirm/usage/exists/batch/ HTTP/1.1 - Host: example.com - Accept: application/json - - { - "messages": - [ - {"nova_message_id": 200}, - {"nova_message_id": 400} - ] - } - - **Example V1 request**: - - .. sourcecode:: http - - PUT db/confirm/usage/exists/batch/ HTTP/1.1 - Host: example.com - Accept: application/json - - { - "messages": - [ - { - "nova": - [ - {"nova_message_id1": 200}, - {"nova_message_id2": 400} - ], - "glance": - [ - {"glance_message_id1": 200}, - {"glance_message_id2": 400} - ] - } - ] - "version": 1 - } - **Example response**: - - .. sourcecode:: http - - HTTP/1.1 200 OK - Vary: Accept - Content-Type: application/json \ No newline at end of file + } \ No newline at end of file From d161cb22fc6531d95dd1b17ad5806aed4e7f5d38 Mon Sep 17 00:00:00 2001 From: Sandy Walsh Date: Fri, 17 Jan 2014 17:50:13 +0000 Subject: [PATCH 24/53] setup using pbr --- etc/pip-requires.txt | 1 + setup.cfg | 22 ++++++++++++++++++++++ setup.py | 8 ++++++++ 3 files changed, 31 insertions(+) create mode 100644 setup.cfg create mode 100644 setup.py diff --git a/etc/pip-requires.txt b/etc/pip-requires.txt index c40dd61..f14c1f6 100644 --- a/etc/pip-requires.txt +++ b/etc/pip-requires.txt @@ -9,3 +9,4 @@ Pympler requests south sphinxcontrib-httpdomain +pbr \ No newline at end of file diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..01a1ff0 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,22 @@ +[metadata] +name = stacktach +author = Dark Secret Software Inc., Rackspace Hosting +author-email = admin@darksecretsoftware.com +summary = OpenStack Monitoring and Billing +description-file = README.md +license = Apache-2 +classifier = + Development Status :: 2 - Pre-Alpha + Environment :: Console + Intended Audience :: Developers + Intended Audience :: Information Technology + License :: OSI Approved :: Apache Software License + Operating System :: OS Independent + Programming Language :: Python + Topic :: Software Development :: Libraries :: Python Modules +keywords = + setup + distutils +[files] +packages = + stacktach \ No newline at end of file diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..e7f83a9 --- /dev/null +++ b/setup.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python + +from setuptools import setup + +setup( + setup_requires=['pbr'], + pbr=True, +) \ No newline at end of file From 5a8a95a90d00d6b9358e670071f35c27827b2abe Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Fri, 17 Jan 2014 13:20:33 -0500 Subject: [PATCH 25/53] A little more details in database api description --- docs/dbapi.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/dbapi.rst b/docs/dbapi.rst index 6494d71..b205eb1 100644 --- a/docs/dbapi.rst +++ b/docs/dbapi.rst @@ -4,7 +4,7 @@ The StackTach Database REST Interface JSON Response Format ******************** -The StackTach Database API uses a more standard data model for access of to database objects +The StackTach Database API uses a more standard data model for access to database objects. 
The Database API is read only, with the exception of usage confirmation, which is used to indicate that usage has been sent downstream. The JSON response format uses an envelope with a single key to indicate the type of object returned. This object can be either a dictionary in the case of queries that return single objects, or a list when multiple objects are turned. From e39ca9907c85c6913e63552e05a3214430b94545 Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Fri, 17 Jan 2014 13:22:30 -0500 Subject: [PATCH 26/53] Separating read and write db apis --- docs/dbapi.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/dbapi.rst b/docs/dbapi.rst index b205eb1..86bc54a 100644 --- a/docs/dbapi.rst +++ b/docs/dbapi.rst @@ -37,6 +37,8 @@ Sample JSON response, multiple objects: :: ] } +Write APIs +********** db/confirm/usage/exists/batch/ ===================================== @@ -95,6 +97,9 @@ db/confirm/usage/exists/batch/ Vary: Accept Content-Type: application/json +Read APIs +********* + db/usage/launches/ ================== From 30b78e76fe34c52fc6854a8767cf9509735d7df9 Mon Sep 17 00:00:00 2001 From: Sandy Walsh Date: Fri, 17 Jan 2014 18:40:19 +0000 Subject: [PATCH 27/53] points to readthedocs --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 482a6ae..8501790 100644 --- a/README.md +++ b/README.md @@ -12,4 +12,4 @@ OpenStack has the ability to publish notifications to a RabbitMQ exchange as the A detailed description of the notifications published by OpenStack [is available here](http://wiki.openstack.org/SystemUsageData) ## Documentation -`cd` into the `docs` directory and run `make html` to get the installation and API docs. Or you can view the `rst` files [directly here](https://github.com/rackerlabs/stacktach/blob/master/docs/setup.rst) +http://stacktach.readthedocs.org/ From 2d2583824fa2061c86bef02ca302ddc5c0060100 Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Fri, 17 Jan 2014 13:43:47 -0500 Subject: [PATCH 28/53] Fixing some doc formatting --- docs/dbapi.rst | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/dbapi.rst b/docs/dbapi.rst index 86bc54a..e6d596f 100644 --- a/docs/dbapi.rst +++ b/docs/dbapi.rst @@ -45,7 +45,7 @@ db/confirm/usage/exists/batch/ .. http:put:: http://example.com/db/confirm/usage/exists/batch/ - Uses the provided message_id's and http status codes to update image and instance exists send_status values. +Uses the provided message_id's and http status codes to update image and instance exists send_status values. **Example V0 request**: @@ -114,7 +114,7 @@ db/usage/nova/launches/ .. http:get:: http://example.com/db/usage/nova/launches/ - Returns a list of instance launches matching provided query criteria. +Returns a list of instance launches matching provided query criteria. **Query Parameters** @@ -177,7 +177,7 @@ db/usage/glance/images/ .. http:get:: http://example.com/db/usage/glance/images/ - Returns a list of images matching provided query criteria. +Returns a list of images matching provided query criteria. **Query Parameters** @@ -322,7 +322,7 @@ db/usage/nova/deletes/ .. http:get:: http://example.com/db/usage/nova/deletes/ - Returns a list of instance deletes matching provided query criteria. +Returns a list of instance deletes matching provided query criteria. **Query Parameters** @@ -375,7 +375,7 @@ db/usage/glance/deletes/ .. 
http:get:: http://example.com/db/usage/glance/deletes/ - Returns a list of image deletes matching provided query criteria. +Returns a list of image deletes matching provided query criteria. **Query Parameters** @@ -508,7 +508,7 @@ db/usage/nova/exists/ .. http:get:: http://example.com/db/usage/nova/exists/ - Returns a list of instance exists matching provided query criteria. +Returns a list of instance exists matching provided query criteria. **Query Parameters** @@ -601,7 +601,7 @@ db/usage/glance/exists/ .. http:get:: http://example.com/db/usage/glance/exists/ - Returns a list of instance exists matching provided query criteria. +Returns a list of instance exists matching provided query criteria. **Query Parameters** @@ -690,7 +690,7 @@ db/usage/nova/exists// .. http:get:: http://example.com/db/usage/nova/exists/ - Returns a single instance exists matching provided id +Returns a single instance exists matching provided id **Example request**: @@ -741,7 +741,7 @@ db/usage/glance/exists// .. http:get:: http://example.com/db/usage/glance/exists// - Returns a single instance exists matching provided id +Returns a single instance exists matching provided id **Example request**: From 38e45eb2f1137fd1e9bae8ccfbe24d8d9ce149d1 Mon Sep 17 00:00:00 2001 From: Anuj Mathur Date: Fri, 17 Jan 2014 16:54:32 +0530 Subject: [PATCH 29/53] Added search API for jsonreports --- stacktach/stacky_server.py | 34 +++++++++++++++++++ stacktach/urls.py | 2 ++ tests/unit/test_stacky_server.py | 58 +++++++++++++++++++++++++++++++- 3 files changed, 93 insertions(+), 1 deletion(-) diff --git a/stacktach/stacky_server.py b/stacktach/stacky_server.py index 2269ac1..987dc79 100644 --- a/stacktach/stacky_server.py +++ b/stacktach/stacky_server.py @@ -17,6 +17,8 @@ SECS_PER_DAY = SECS_PER_HOUR * 24 DEFAULT_LIMIT = 50 HARD_LIMIT = 1000 +UTC_FORMAT = '%Y-%m-%d %H:%M:%S' + def _get_limit(request): limit = request.GET.get('limit', DEFAULT_LIMIT) @@ -619,3 +621,35 @@ def search(request): except FieldError: return error_response(400, 'Bad Request', "The requested field '%s' does not exist for the corresponding object.\n" "Note: The field names of database are case-sensitive." % field) + + +def do_jsonreports_search(request): + model = models.JsonReport.objects + filters = {} + for filter, value in request.GET.iteritems(): + filters[filter + '__exact'] = value + try: + reports = model_search(request, model, filters) + except FieldError: + args = request.GET.keys() + args.sort() + return error_response( + 400, 'Bad Request', "The requested fields do not exist for " + "the corresponding object: %s. Note: The field names of database " + "are case-sensitive." 
% ', '.join(args)) + + results = [['Id', 'Start', 'End', 'Created', 'Name', 'Version']] + for report in reports: + results.append([report.id, + datetime.datetime.strftime( + report.period_start, UTC_FORMAT), + datetime.datetime.strftime( + report.period_end, UTC_FORMAT), + datetime.datetime.strftime( + dt.dt_from_decimal(report.created), + UTC_FORMAT), + report.name, + report.version]) + + return rsp(json.dumps(results)) + diff --git a/stacktach/urls.py b/stacktach/urls.py index 5579957..e49c217 100644 --- a/stacktach/urls.py +++ b/stacktach/urls.py @@ -17,6 +17,8 @@ urlpatterns = patterns('', url(r'stacky/timings/uuid/$', 'stacktach.stacky_server.do_timings_uuid'), url(r'stacky/summary/$', 'stacktach.stacky_server.do_summary'), url(r'stacky/request/$', 'stacktach.stacky_server.do_request'), + url(r'stacky/reports/search/$', + 'stacktach.stacky_server.do_jsonreports_search'), url(r'stacky/reports/$', 'stacktach.stacky_server.do_jsonreports'), url(r'stacky/report/(?P\d+)/$', 'stacktach.stacky_server.do_jsonreport'), diff --git a/tests/unit/test_stacky_server.py b/tests/unit/test_stacky_server.py index 659dd90..aea84e6 100644 --- a/tests/unit/test_stacky_server.py +++ b/tests/unit/test_stacky_server.py @@ -17,6 +17,7 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. +import ast import datetime import decimal @@ -1509,4 +1510,59 @@ class StackyServerTestCase(StacktachBaseTestCase): actual_results = stacky_server.model_search(fake_request, fake_model, filters, order_by='when') self.assertEqual(actual_results, results) - self.mox.VerifyAll() \ No newline at end of file + self.mox.VerifyAll() + + def test_jsonreports_search(self): + model = models.JsonReport.objects + model_search_result = self.mox.CreateMockAnything() + model_search_result.id = '5975' + model_search_result.period_start = datetime.datetime(2014, 1, 18,) + model_search_result.period_end = datetime.datetime(2014, 1, 19) + model_search_result.created = 1388569200 + model_search_result.name = 'nova usage audit' + model_search_result.version = 4 + request = self.mox.CreateMockAnything() + request.GET = { + 'audit_period_beginning': 1234, + 'name': 'nova_usage_audit' + } + filters = { + 'audit_period_beginning__exact': 1234, + 'name__exact': 'nova_usage_audit' + } + self.mox.StubOutWithMock(stacky_server, 'model_search') + stacky_server.model_search(request, model, filters).AndReturn( + [model_search_result]) + self.mox.ReplayAll() + + actual_result = stacky_server.do_jsonreports_search(request).content + expected_result = \ + [['Id', 'Start', 'End', 'Created', 'Name', 'Version'], + ['5975', '2014-01-18 00:00:00', '2014-01-19 00:00:00', + '2014-01-01 09:40:00', 'nova usage audit', 4]] + + self.assertEquals(ast.literal_eval(actual_result), expected_result) + self.mox.VerifyAll() + + def test_jsonreports_search_400(self): + model = models.JsonReport.objects + request = self.mox.CreateMockAnything() + request.GET = {'invalid_column_1': 'value_1', + 'invalid_column_2': 'value_2' } + filters = {'invalid_column_1__exact': 'value_1', + 'invalid_column_2__exact': 'value_2'} + self.mox.StubOutWithMock(stacky_server, 'model_search') + stacky_server.model_search(request, model, filters).AndRaise(FieldError) + + self.mox.ReplayAll() + + actual_result = stacky_server.do_jsonreports_search(request).content + expected_result = \ + [ + ["Error", "Message"], + ["Bad Request", "The requested fields do not exist for the " + 
"corresponding object: invalid_column_1, invalid_column_2. Note: " + "The field names of database are case-sensitive."] + ] + self.assertEqual(ast.literal_eval(actual_result), expected_result) + self.mox.VerifyAll() From e4eb2c0339dde9f698a95cb61b36aea704d63dce Mon Sep 17 00:00:00 2001 From: Anuj Mathur Date: Fri, 24 Jan 2014 16:53:02 +0530 Subject: [PATCH 30/53] Modified Stacky search API - ordered results by id - created now accepts datetime - returns error response instead of 500 field value has the wrong format --- stacktach/stacky_server.py | 98 +++++++++++++++------- tests/unit/test_stacky_server.py | 139 ++++++++++++++++++++++++++----- 2 files changed, 186 insertions(+), 51 deletions(-) diff --git a/stacktach/stacky_server.py b/stacktach/stacky_server.py index 987dc79..d87bf3c 100644 --- a/stacktach/stacky_server.py +++ b/stacktach/stacky_server.py @@ -1,3 +1,4 @@ +from copy import deepcopy import decimal import datetime import json @@ -9,7 +10,7 @@ from django.shortcuts import get_object_or_404 import datetime_to_decimal as dt import models import utils -from django.core.exceptions import ObjectDoesNotExist, FieldError +from django.core.exceptions import ObjectDoesNotExist, FieldError, ValidationError SECS_PER_HOUR = 60 * 60 SECS_PER_DAY = SECS_PER_HOUR * 24 @@ -623,33 +624,74 @@ def search(request): "Note: The field names of database are case-sensitive." % field) -def do_jsonreports_search(request): - model = models.JsonReport.objects - filters = {} - for filter, value in request.GET.iteritems(): - filters[filter + '__exact'] = value - try: - reports = model_search(request, model, filters) - except FieldError: - args = request.GET.keys() - args.sort() - return error_response( - 400, 'Bad Request', "The requested fields do not exist for " - "the corresponding object: %s. Note: The field names of database " - "are case-sensitive." % ', '.join(args)) +class BadRequestException(Exception): + pass - results = [['Id', 'Start', 'End', 'Created', 'Name', 'Version']] - for report in reports: - results.append([report.id, - datetime.datetime.strftime( - report.period_start, UTC_FORMAT), - datetime.datetime.strftime( - report.period_end, UTC_FORMAT), - datetime.datetime.strftime( - dt.dt_from_decimal(report.created), - UTC_FORMAT), - report.name, - report.version]) + +def _parse_created(request_filters): + try: + created_datetime = datetime.datetime.strptime( + request_filters['created'], '%Y-%m-%d %H:%M:%S') + return dt.dt_to_decimal(created_datetime) + except ValueError: + raise BadRequestException( + "'%s' value has an invalid format. It must be in " + "YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format." % + request_filters['created']) + + +def _create_query_filters_from_request(request_filters, model): + allowed_fields = [field.name for field in models.get_model_fields(model)] + invalid_fields = [] + query_filters = {} + + for field, value in request_filters.iteritems(): + if field in allowed_fields: + query_filters[field + '__exact'] = value + else: + invalid_fields.append(field) + + if invalid_fields: + raise BadRequestException( + "The requested fields do not exist for the corresponding " + "object: %s. Note: The field names of database " + "are case-sensitive." 
% + ', '.join(sorted(invalid_fields))) + + return query_filters + + +def _get_query_filters(request, model): + request_filters = deepcopy(request.GET) + if 'created' in request_filters: + request_filters['created'] = _parse_created(request_filters) + request_filters.pop('limit', None) + request_filters.pop('offset', None) + + return _create_query_filters_from_request(request_filters, model) + + +def do_jsonreports_search(request): + try: + model = models.JsonReport + filters = _get_query_filters(request, model) + reports = model_search(request, model.objects, filters, + order_by='-id') + results = [['Id', 'Start', 'End', 'Created', 'Name', 'Version']] + for report in reports: + results.append([report.id, + datetime.datetime.strftime( + report.period_start, UTC_FORMAT), + datetime.datetime.strftime( + report.period_end, UTC_FORMAT), + datetime.datetime.strftime( + dt.dt_from_decimal(report.created), + UTC_FORMAT), + report.name, + report.version]) + except BadRequestException as be: + return error_response(400, 'Bad Request', be.message) + except ValidationError as ve: + return error_response(400, 'Bad Request', ve.messages[0]) return rsp(json.dumps(results)) - diff --git a/tests/unit/test_stacky_server.py b/tests/unit/test_stacky_server.py index aea84e6..23aea92 100644 --- a/tests/unit/test_stacky_server.py +++ b/tests/unit/test_stacky_server.py @@ -1512,27 +1512,46 @@ class StackyServerTestCase(StacktachBaseTestCase): self.assertEqual(actual_results, results) self.mox.VerifyAll() - def test_jsonreports_search(self): - model = models.JsonReport.objects - model_search_result = self.mox.CreateMockAnything() - model_search_result.id = '5975' - model_search_result.period_start = datetime.datetime(2014, 1, 18,) - model_search_result.period_end = datetime.datetime(2014, 1, 19) - model_search_result.created = 1388569200 - model_search_result.name = 'nova usage audit' - model_search_result.version = 4 + +class JsonReportsSearchAPI(StacktachBaseTestCase): + def setUp(self): + self.mox = mox.Mox() + self.model = models.JsonReport.objects + self.model_search_result = self.mox.CreateMockAnything() + self.model_search_result.id = '5975' + self.model_search_result.period_start = datetime.datetime(2014, 1, 18,) + self.model_search_result.period_end = datetime.datetime(2014, 1, 19) + self.model_search_result.created = 1388569200 + self.model_search_result.name = 'nova usage audit' + self.model_search_result.version = 4 + + def tearDown(self): + self.mox.UnsetStubs() + + def test_jsonreports_search_order_by_period_start(self): request = self.mox.CreateMockAnything() request.GET = { - 'audit_period_beginning': 1234, - 'name': 'nova_usage_audit' + 'id': 1, + 'name': 'nova_usage_audit', + 'period_start': '2014-01-01 00:00:00', + 'period_end': '2014-01-02 00:00:00', + 'created': '2014-01-01 09:40:00', + 'version': 4, + 'json': 'json' } filters = { - 'audit_period_beginning__exact': 1234, - 'name__exact': 'nova_usage_audit' + 'id__exact': 1, + 'period_start__exact': '2014-01-01 00:00:00', + 'name__exact': 'nova_usage_audit', + 'period_end__exact': '2014-01-02 00:00:00', + 'created__exact': decimal.Decimal('1388569200'), + 'version__exact': 4, + 'json__exact': 'json' } self.mox.StubOutWithMock(stacky_server, 'model_search') - stacky_server.model_search(request, model, filters).AndReturn( - [model_search_result]) + stacky_server.model_search(request, self.model, filters, + order_by='-id').AndReturn( + [self.model_search_result]) self.mox.ReplayAll() actual_result = 
stacky_server.do_jsonreports_search(request).content @@ -1544,16 +1563,38 @@ class StackyServerTestCase(StacktachBaseTestCase): self.assertEquals(ast.literal_eval(actual_result), expected_result) self.mox.VerifyAll() - def test_jsonreports_search_400(self): - model = models.JsonReport.objects + def test_jsonreports_search_with_limit_offset(self): + request = self.mox.CreateMockAnything() + request.GET = { + 'period_start': '2014-01-01 09:40:00', + 'name': 'nova_usage_audit', + 'limit': 10, + 'offset': 5 + } + filters = { + 'period_start__exact': '2014-01-01 09:40:00', + 'name__exact': 'nova_usage_audit', + } + self.mox.StubOutWithMock(stacky_server, 'model_search') + stacky_server.model_search(request, self.model, filters, + order_by='-id').AndReturn( + [self.model_search_result]) + self.mox.ReplayAll() + + actual_result = stacky_server.do_jsonreports_search(request).content + expected_result = \ + [['Id', 'Start', 'End', 'Created', 'Name', 'Version'], + ['5975', '2014-01-18 00:00:00', '2014-01-19 00:00:00', + '2014-01-01 09:40:00', 'nova usage audit', 4]] + + self.assertEquals(ast.literal_eval(actual_result), expected_result) + self.mox.VerifyAll() + + def test_jsonreports_search_with_invalid_field_names_400(self): request = self.mox.CreateMockAnything() request.GET = {'invalid_column_1': 'value_1', - 'invalid_column_2': 'value_2' } - filters = {'invalid_column_1__exact': 'value_1', - 'invalid_column_2__exact': 'value_2'} - self.mox.StubOutWithMock(stacky_server, 'model_search') - stacky_server.model_search(request, model, filters).AndRaise(FieldError) - + 'invalid_column_2': 'value_2', + 'period_start': '2014-01-01 00:00:00'} self.mox.ReplayAll() actual_result = stacky_server.do_jsonreports_search(request).content @@ -1566,3 +1607,55 @@ class StackyServerTestCase(StacktachBaseTestCase): ] self.assertEqual(ast.literal_eval(actual_result), expected_result) self.mox.VerifyAll() + + def test_jsonreports_search_with_invalid_format_of_field_values_400(self): + request = self.mox.CreateMockAnything() + request.GET = {'period_start': '1234'} + self.mox.ReplayAll() + + actual_result = stacky_server.do_jsonreports_search(request).content + expected_result = \ + [ + ["Error", "Message"], + ["Bad Request", "'1234' value has an invalid format. It must be in " + "YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format."] + ] + self.assertEqual(ast.literal_eval(actual_result), expected_result) + self.mox.VerifyAll() + + def test_jsonreports_search_by_created(self): + request = self.mox.CreateMockAnything() + request.GET = { + 'created': '2014-01-01 09:40:20'} + filters = { + 'created__exact': 1388569220} + self.mox.StubOutWithMock(stacky_server, 'model_search') + stacky_server.model_search(request, self.model, filters, + order_by='-id').AndReturn( + [self.model_search_result]) + self.mox.ReplayAll() + + actual_result = stacky_server.do_jsonreports_search(request).content + expected_result = \ + [['Id', 'Start', 'End', 'Created', 'Name', 'Version'], + ['5975', '2014-01-18 00:00:00', '2014-01-19 00:00:00', + '2014-01-01 09:40:00', 'nova usage audit', 4]] + + self.assertEquals(ast.literal_eval(actual_result), expected_result) + self.mox.VerifyAll() + + def test_jsonreports_search_by_invalid_created_400(self): + request = self.mox.CreateMockAnything() + request.GET = { + 'created': '1234'} + self.mox.ReplayAll() + + actual_result = stacky_server.do_jsonreports_search(request).content + expected_result = \ + [ + ["Error", "Message"], + ["Bad Request", "'1234' value has an invalid format. 
It must be in " + "YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format."] + ] + self.assertEquals(ast.literal_eval(actual_result), expected_result) + self.mox.VerifyAll() From 7c47c360c3f2f54dec8e1df29b90eee680d9288e Mon Sep 17 00:00:00 2001 From: "Bernhard K. Weisshuhn" Date: Thu, 6 Feb 2014 00:47:30 +0100 Subject: [PATCH 31/53] force message buffers to strings before decoding See Section "Buffer Objects:" in http://kombu.readthedocs.org/en/latest/userguide/serialization.html Conflicts: worker/worker.py --- worker/worker.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/worker/worker.py b/worker/worker.py index e5d9b1b..10abd19 100644 --- a/worker/worker.py +++ b/worker/worker.py @@ -19,6 +19,7 @@ import datetime import sys import time +import anyjson import kombu import kombu.mixins @@ -38,6 +39,7 @@ from stacktach import db from stacktach import message_service from stacktach import stacklog from stacktach import views +from kombu.serialization import BytesIO, register stacklog.set_default_logger_name('worker') @@ -61,6 +63,13 @@ class Consumer(kombu.mixins.ConsumerMixin): self.topics = topics self.exchange = exchange + register('bufferjson', self.loads, anyjson.dumps, + content_type='application/json', + content_encoding='binary') + + def loads(s): + return anyjson.loads(BytesIO(s)) + def _create_exchange(self, name, type, exclusive=False, auto_delete=False): return message_service.create_exchange(name, exchange_type=type, exclusive=exclusive, From 4f7b363748ede3c684edd45b1815b73a4e895df1 Mon Sep 17 00:00:00 2001 From: "Bernhard K. Weisshuhn" Date: Thu, 6 Feb 2014 00:49:58 +0100 Subject: [PATCH 32/53] add --insecure to runserver docs --- docs/setup.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/setup.rst b/docs/setup.rst index 79f8a78..2a002a6 100644 --- a/docs/setup.rst +++ b/docs/setup.rst @@ -12,7 +12,7 @@ The "Hurry Up" Install Guide #. Configure OpenStack to publish Notifications back into RabbitMQ (see below) #. Restart the OpenStack services. #. Run the Worker to start consuming messages. (see below) -#. Run the web server (``python manage.py runserver``) +#. Run the web server (``python manage.py runserver --insecure``) #. Point your browser to ``http://127.0.0.1:8000`` (the default server location) #. Click on stuff, see what happens. You can't hurt anything, it's all read-only. From 469cd43d8513a6a47c9fe965c5ce790a6d37d9f3 Mon Sep 17 00:00:00 2001 From: "Bernhard K. Weisshuhn" Date: Thu, 6 Feb 2014 12:26:57 +0100 Subject: [PATCH 33/53] force pickle session serializer (workaround for django 1.6+) --- settings.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/settings.py b/settings.py index c9106be..b6d27e5 100644 --- a/settings.py +++ b/settings.py @@ -167,3 +167,7 @@ LOGGING = { }, } } + +# Force use of the pickle serializer as a workaound for django-1.6. See: +# https://docs.djangoproject.com/en/dev/releases/1.6/#default-session-serialization-switched-to-json +SESSION_SERIALIZER='django.contrib.sessions.serializers.PickleSerializer' From caa2732225f918781c55b761870e188505a2bc71 Mon Sep 17 00:00:00 2001 From: "Bernhard K. 
Weisshuhn" Date: Thu, 6 Feb 2014 15:04:36 +0100 Subject: [PATCH 34/53] let workers exit gracefully Conflicts: worker/worker.py --- worker/start_workers.py | 2 +- worker/worker.py | 14 +++++++++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/worker/start_workers.py b/worker/start_workers.py index cbc3edd..3654cbc 100644 --- a/worker/start_workers.py +++ b/worker/start_workers.py @@ -25,13 +25,13 @@ def _get_parent_logger(): def kill_time(signal, frame): - log_listener.end() print "dying ..." for process in processes: process.terminate() print "rose" for process in processes: process.join() + log_listener.end() print "bud" sys.exit(0) diff --git a/worker/worker.py b/worker/worker.py index 10abd19..3c3f89f 100644 --- a/worker/worker.py +++ b/worker/worker.py @@ -20,6 +20,7 @@ import datetime import sys import time import anyjson +import signal import kombu import kombu.mixins @@ -42,6 +43,7 @@ from stacktach import views from kombu.serialization import BytesIO, register stacklog.set_default_logger_name('worker') +shutdown_soon = False def _get_child_logger(): @@ -62,6 +64,7 @@ class Consumer(kombu.mixins.ConsumerMixin): self.total_processed = 0 self.topics = topics self.exchange = exchange + signal.signal(signal.SIGTERM, self._shutdown) register('bufferjson', self.loads, anyjson.dumps, content_type='application/json', @@ -144,9 +147,14 @@ class Consumer(kombu.mixins.ConsumerMixin): (e, json.loads(str(message.body)))) raise + def _shutdown(self, signal, stackframe = False): + global shutdown_soon + self.should_stop = True + shutdown_soon = True + def continue_running(): - return True + return not shutdown_soon def exit_or_sleep(exit=False): @@ -206,6 +214,10 @@ def run(deployment_config, deployment_id, exchange): "exception=%s. Retrying in 5s" logger.exception(msg % (name, exchange, e)) exit_or_sleep(exit_on_exception) + logger.info("Worker exiting.") + +signal.signal(signal.SIGINT, signal.SIG_IGN) +signal.signal(signal.SIGTERM, signal.SIG_IGN) POST_PROCESS_METHODS = { 'RawData': views.post_process_rawdata, From 974c6e65f26a16d3260ef378b561a1bd4f5b988b Mon Sep 17 00:00:00 2001 From: "Bernhard K. Weisshuhn" Date: Thu, 6 Feb 2014 12:46:44 +0100 Subject: [PATCH 35/53] add "Worker exiting." message to test mocks --- tests/unit/test_worker.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/unit/test_worker.py b/tests/unit/test_worker.py index 01d70cc..13bb50f 100644 --- a/tests/unit/test_worker.py +++ b/tests/unit/test_worker.py @@ -169,6 +169,7 @@ class ConsumerTestCase(StacktachBaseTestCase): mock_logger.debug("Processing on 'east_coast.prod.global nova'") mock_logger.debug("Completed processing on " "'east_coast.prod.global nova'") + mock_logger.info("Worker exiting.") config = { 'name': 'east_coast.prod.global', @@ -217,6 +218,7 @@ class ConsumerTestCase(StacktachBaseTestCase): mock_logger.debug("Processing on 'east_coast.prod.global nova'") mock_logger.debug("Completed processing on " "'east_coast.prod.global nova'") + mock_logger.info("Worker exiting.") config = { 'name': 'east_coast.prod.global', From e7195f5404423bada58859e7ab39f778c4f2973f Mon Sep 17 00:00:00 2001 From: Anuj Mathur Date: Wed, 12 Feb 2014 17:39:50 +0530 Subject: [PATCH 36/53] Revert "force message buffers to strings before decoding" This reverts commit 7c47c360c3f2f54dec8e1df29b90eee680d9288e. 
Conflicts: worker/worker.py --- worker/worker.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/worker/worker.py b/worker/worker.py index 3c3f89f..4fe2dd5 100644 --- a/worker/worker.py +++ b/worker/worker.py @@ -19,7 +19,6 @@ import datetime import sys import time -import anyjson import signal import kombu @@ -40,7 +39,6 @@ from stacktach import db from stacktach import message_service from stacktach import stacklog from stacktach import views -from kombu.serialization import BytesIO, register stacklog.set_default_logger_name('worker') shutdown_soon = False @@ -66,13 +64,6 @@ class Consumer(kombu.mixins.ConsumerMixin): self.exchange = exchange signal.signal(signal.SIGTERM, self._shutdown) - register('bufferjson', self.loads, anyjson.dumps, - content_type='application/json', - content_encoding='binary') - - def loads(s): - return anyjson.loads(BytesIO(s)) - def _create_exchange(self, name, type, exclusive=False, auto_delete=False): return message_service.create_exchange(name, exchange_type=type, exclusive=exclusive, From 98d6d857f00bb972cceee42f8479d5659cc8e94d Mon Sep 17 00:00:00 2001 From: Anuj Mathur Date: Wed, 19 Feb 2014 11:50:09 +0530 Subject: [PATCH 37/53] Stacky search API Added documentation Modified search by created to return all reports created on the day Added error message for invalid id search value --- docs/api.rst | 71 ++++++++++++++++++++++++ stacktach/stacky_server.py | 62 ++++++++++++--------- tests/unit/test_stacky_server.py | 93 ++++++++++++++++++++------------ 3 files changed, 167 insertions(+), 59 deletions(-) diff --git a/docs/api.rst b/docs/api.rst index c7b5166..8b559dc 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -488,6 +488,77 @@ stacky/report/ ... ] +stacky/reports/search/ +========================= + +.. http:get:: http://example.com/stacky/reports/search + + Returns reports that match the search criteria in descending order of id. + + The contents of the report varies by the specific report, but + all are in row/column format with Row 0 being a special *metadata* row. + The actual row/columns of the report start at Row 1 onwards. + + **Example request**: + + .. sourcecode:: http + + GET /stacky/reports/search/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/json + + [ + [ + "Id", + "Start", + "End", + "Created", + "Name", + "Version" + ], + + [ + 4253, + "2013-11-21 00:00:00", + "2013-11-22 00:00:00", + "2013-11-22 01:44:55", + "public outbound bandwidth", + 1 + ], + [ + 4252, + "2014-01-18 00:00:00", + "2013-11-22 00:00:00", + "2013-11-22 01:44:55", + "image events audit", + 1 + ], + [ + 4248, + "2013-11-21 00:00:00", + "2013-11-22 00:00:00", + "2013-11-22 01:44:55", + "Error detail report", + 1 + ], + + ... 
+ ] + + :query id: integer report id + :query name: string report name(can include spaces) + :query period_start: start of period, which the report pertains to, in the following format: YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] + :query period_end: end of period, which the report pertains to, in the following format: YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] + :query created: the day, when the report was created, in the following format: YYYY-MM-DD + stacky/show/ ====================== diff --git a/stacktach/stacky_server.py b/stacktach/stacky_server.py index d87bf3c..f5690d0 100644 --- a/stacktach/stacky_server.py +++ b/stacktach/stacky_server.py @@ -628,53 +628,67 @@ class BadRequestException(Exception): pass -def _parse_created(request_filters): +def _parse_created(created): try: - created_datetime = datetime.datetime.strptime( - request_filters['created'], '%Y-%m-%d %H:%M:%S') + created_datetime = datetime.datetime.strptime(created, '%Y-%m-%d') return dt.dt_to_decimal(created_datetime) except ValueError: raise BadRequestException( - "'%s' value has an invalid format. It must be in " - "YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format." % - request_filters['created']) + "'%s' value has an invalid format. It must be in YYYY-MM-DD format." + % created) -def _create_query_filters_from_request(request_filters, model): - allowed_fields = [field.name for field in models.get_model_fields(model)] - invalid_fields = [] +def _parse_id(id): + try: + return int(id) + except ValueError: + raise BadRequestException( + "'%s' value has an invalid format. It must be in integer " + "format." % id) + + +def _parse_fields_and_create_query_filters(request_filters): query_filters = {} for field, value in request_filters.iteritems(): - if field in allowed_fields: - query_filters[field + '__exact'] = value + if field == 'created': + decimal_created = _parse_created(value) + query_filters['created__gt'] = decimal_created + query_filters['created__lt'] = decimal_created + SECS_PER_DAY + elif field == 'id': + id = _parse_id(value) + query_filters['id__exact'] = id else: - invalid_fields.append(field) - - if invalid_fields: - raise BadRequestException( - "The requested fields do not exist for the corresponding " - "object: %s. Note: The field names of database " - "are case-sensitive." % - ', '.join(sorted(invalid_fields))) + query_filters[field + '__exact'] = value return query_filters -def _get_query_filters(request, model): +def _check_if_fields_searchable(request_filters): + allowed_fields = ['id', 'name', 'created', 'period_start', 'period_end'] + invalid_fields = [field for field in request_filters.keys() + if field not in allowed_fields] + if invalid_fields: + raise BadRequestException( + "The requested fields either do not exist for the corresponding " + "object or are not searchable: %s. Note: The field names of " + "database are case-sensitive." 
% + ', '.join(sorted(invalid_fields))) + + +def _create_query_filters(request): request_filters = deepcopy(request.GET) - if 'created' in request_filters: - request_filters['created'] = _parse_created(request_filters) request_filters.pop('limit', None) request_filters.pop('offset', None) - return _create_query_filters_from_request(request_filters, model) + _check_if_fields_searchable(request_filters) + return _parse_fields_and_create_query_filters(request_filters) def do_jsonreports_search(request): try: model = models.JsonReport - filters = _get_query_filters(request, model) + filters = _create_query_filters(request) reports = model_search(request, model.objects, filters, order_by='-id') results = [['Id', 'Start', 'End', 'Created', 'Name', 'Version']] diff --git a/tests/unit/test_stacky_server.py b/tests/unit/test_stacky_server.py index 23aea92..8f5d781 100644 --- a/tests/unit/test_stacky_server.py +++ b/tests/unit/test_stacky_server.py @@ -1528,25 +1528,22 @@ class JsonReportsSearchAPI(StacktachBaseTestCase): def tearDown(self): self.mox.UnsetStubs() - def test_jsonreports_search_order_by_period_start(self): + def test_jsonreports_search_order_by_id(self): request = self.mox.CreateMockAnything() request.GET = { 'id': 1, 'name': 'nova_usage_audit', 'period_start': '2014-01-01 00:00:00', 'period_end': '2014-01-02 00:00:00', - 'created': '2014-01-01 09:40:00', - 'version': 4, - 'json': 'json' + 'created': '2014-01-01', } filters = { 'id__exact': 1, 'period_start__exact': '2014-01-01 00:00:00', 'name__exact': 'nova_usage_audit', 'period_end__exact': '2014-01-02 00:00:00', - 'created__exact': decimal.Decimal('1388569200'), - 'version__exact': 4, - 'json__exact': 'json' + 'created__lt': decimal.Decimal('1388620800'), + 'created__gt': decimal.Decimal('1388534400'), } self.mox.StubOutWithMock(stacky_server, 'model_search') stacky_server.model_search(request, self.model, filters, @@ -1555,10 +1552,11 @@ class JsonReportsSearchAPI(StacktachBaseTestCase): self.mox.ReplayAll() actual_result = stacky_server.do_jsonreports_search(request).content - expected_result = \ - [['Id', 'Start', 'End', 'Created', 'Name', 'Version'], - ['5975', '2014-01-18 00:00:00', '2014-01-19 00:00:00', - '2014-01-01 09:40:00', 'nova usage audit', 4]] + expected_result = [ + ['Id', 'Start', 'End', 'Created', 'Name', 'Version'], + ['5975', '2014-01-18 00:00:00', '2014-01-19 00:00:00', + '2014-01-01 09:40:00', 'nova usage audit', 4] + ] self.assertEquals(ast.literal_eval(actual_result), expected_result) self.mox.VerifyAll() @@ -1590,32 +1588,33 @@ class JsonReportsSearchAPI(StacktachBaseTestCase): self.assertEquals(ast.literal_eval(actual_result), expected_result) self.mox.VerifyAll() - def test_jsonreports_search_with_invalid_field_names_400(self): + def test_jsonreports_search_with_invalid_fields(self): request = self.mox.CreateMockAnything() request.GET = {'invalid_column_1': 'value_1', 'invalid_column_2': 'value_2', + 'version': 4, + 'json': 'json', 'period_start': '2014-01-01 00:00:00'} self.mox.ReplayAll() actual_result = stacky_server.do_jsonreports_search(request).content - expected_result = \ - [ + expected_result = [ ["Error", "Message"], - ["Bad Request", "The requested fields do not exist for the " - "corresponding object: invalid_column_1, invalid_column_2. Note: " - "The field names of database are case-sensitive."] + ["Bad Request", "The requested fields either do not exist for the " + "corresponding object or are not searchable: invalid_column_1, " + "invalid_column_2, json, version. 
Note: The field names of " + "database are case-sensitive."] ] self.assertEqual(ast.literal_eval(actual_result), expected_result) self.mox.VerifyAll() - def test_jsonreports_search_with_invalid_format_of_field_values_400(self): + def test_jsonreports_search_with_invalid_period_start(self): request = self.mox.CreateMockAnything() request.GET = {'period_start': '1234'} self.mox.ReplayAll() actual_result = stacky_server.do_jsonreports_search(request).content - expected_result = \ - [ + expected_result = [ ["Error", "Message"], ["Bad Request", "'1234' value has an invalid format. It must be in " "YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format."] @@ -1623,25 +1622,49 @@ class JsonReportsSearchAPI(StacktachBaseTestCase): self.assertEqual(ast.literal_eval(actual_result), expected_result) self.mox.VerifyAll() - def test_jsonreports_search_by_created(self): + def test_jsonreports_search_with_invalid_period_end(self): request = self.mox.CreateMockAnything() - request.GET = { - 'created': '2014-01-01 09:40:20'} - filters = { - 'created__exact': 1388569220} - self.mox.StubOutWithMock(stacky_server, 'model_search') - stacky_server.model_search(request, self.model, filters, - order_by='-id').AndReturn( - [self.model_search_result]) + request.GET = {'period_end': '1234'} self.mox.ReplayAll() actual_result = stacky_server.do_jsonreports_search(request).content - expected_result = \ - [['Id', 'Start', 'End', 'Created', 'Name', 'Version'], - ['5975', '2014-01-18 00:00:00', '2014-01-19 00:00:00', - '2014-01-01 09:40:00', 'nova usage audit', 4]] + expected_result = [ + ["Error", "Message"], + ["Bad Request", "'1234' value has an invalid format. It must be in " + "YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format."] + ] + self.assertEqual(ast.literal_eval(actual_result), expected_result) + self.mox.VerifyAll() - self.assertEquals(ast.literal_eval(actual_result), expected_result) + def test_jsonreports_search_with_invalid_id(self): + request = self.mox.CreateMockAnything() + request.GET = {'id': 'abcd'} + self.mox.ReplayAll() + + actual_result = stacky_server.do_jsonreports_search(request).content + expected_result = [ + ["Error", "Message"], + ["Bad Request", "'abcd' value has an invalid format. It must be in " + "integer format."] + ] + self.assertEqual(ast.literal_eval(actual_result), expected_result) + self.mox.VerifyAll() + + def test_jsonreports_search_with_invalid_created_format(self): + request = self.mox.CreateMockAnything() + request.GET = { + 'created': '2014-01-01 00:00:00' + } + self.mox.ReplayAll() + + actual_result = stacky_server.do_jsonreports_search(request).content + expected_result = [ + ["Error", "Message"], + ["Bad Request", "'2014-01-01 00:00:00' value has an invalid format." + " It must be in YYYY-MM-DD format."] + ] + + self.assertEqual(ast.literal_eval(actual_result), expected_result) self.mox.VerifyAll() def test_jsonreports_search_by_invalid_created_400(self): @@ -1655,7 +1678,7 @@ class JsonReportsSearchAPI(StacktachBaseTestCase): [ ["Error", "Message"], ["Bad Request", "'1234' value has an invalid format. 
It must be in " - "YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format."] + "YYYY-MM-DD format."] ] self.assertEquals(ast.literal_eval(actual_result), expected_result) self.mox.VerifyAll() From 259fe6edd059e2812f6b026f203cf2f135494c58 Mon Sep 17 00:00:00 2001 From: Anuj Mathur Date: Mon, 24 Feb 2014 15:13:52 +0530 Subject: [PATCH 38/53] Added instance/image id and verified_at to error messages --- tests/unit/test_glance_verifier.py | 151 ++++++++----- tests/unit/test_nova_verifier.py | 339 ++++++++++++++++------------- tests/unit/utils.py | 15 +- verifier/__init__.py | 33 ++- verifier/base_verifier.py | 22 +- verifier/glance_verifier.py | 21 +- verifier/nova_verifier.py | 48 ++-- 7 files changed, 374 insertions(+), 255 deletions(-) diff --git a/tests/unit/test_glance_verifier.py b/tests/unit/test_glance_verifier.py index 9b75d6b..6c834ea 100644 --- a/tests/unit/test_glance_verifier.py +++ b/tests/unit/test_glance_verifier.py @@ -17,11 +17,9 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. -from datetime import datetime - +import datetime import decimal import json -import logging import uuid import kombu @@ -31,7 +29,7 @@ from stacktach import datetime_to_decimal as dt from stacktach import stacklog from stacktach import models from tests.unit import StacktachBaseTestCase -from utils import IMAGE_UUID_1 +from utils import IMAGE_UUID_1, SIZE_1, SIZE_2, CREATED_AT_1, CREATED_AT_2 from utils import GLANCE_VERIFIER_EVENT_TYPE from utils import make_verifier_config from verifier import glance_verifier @@ -87,8 +85,8 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): def test_verify_usage_created_at_mismatch(self): exist = self.mox.CreateMockAnything() exist.usage = self.mox.CreateMockAnything() - exist.created_at = decimal.Decimal('1.1') - exist.usage.created_at = decimal.Decimal('2.1') + exist.created_at = CREATED_AT_1 + exist.usage.created_at = CREATED_AT_2 self.mox.ReplayAll() with self.assertRaises(FieldMismatch) as cm: @@ -96,8 +94,8 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): exception = cm.exception self.assertEqual(exception.field_name, 'created_at') - self.assertEqual(exception.expected, decimal.Decimal('1.1')) - self.assertEqual(exception.actual, decimal.Decimal('2.1')) + self.assertEqual(exception.expected, CREATED_AT_1) + self.assertEqual(exception.actual, CREATED_AT_2) self.mox.VerifyAll() @@ -119,10 +117,10 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): def test_verify_usage_size_mismatch(self): exist = self.mox.CreateMockAnything() - exist.size = 1234 + exist.size = SIZE_1 exist.usage = self.mox.CreateMockAnything() - exist.usage.size = 5678 + exist.usage.size = SIZE_2 self.mox.ReplayAll() with self.assertRaises(FieldMismatch) as cm: @@ -130,8 +128,8 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): exception = cm.exception self.assertEqual(exception.field_name, 'size') - self.assertEqual(exception.expected, 1234) - self.assertEqual(exception.actual, 5678) + self.assertEqual(exception.expected, SIZE_1) + self.assertEqual(exception.actual, SIZE_2) self.mox.VerifyAll() @@ -255,30 +253,16 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): self.assertEqual(exception.actual, decimal.Decimal('4.1')) self.mox.VerifyAll() - def test_verify_for_delete_size_mismatch(self): - exist = self.mox.CreateMockAnything() - exist.delete = self.mox.CreateMockAnything() - exist.launched_at = decimal.Decimal('1.1') - exist.deleted_at = 
decimal.Decimal('5.1') - exist.delete.launched_at = decimal.Decimal('1.1') - exist.delete.deleted_at = decimal.Decimal('6.1') + def test_should_verify_that_image_size_in_exist_is_not_null(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') self.mox.ReplayAll() - try: - glance_verifier._verify_for_delete(exist) - self.fail() - except FieldMismatch, fm: - self.assertEqual(fm.field_name, 'deleted_at') - self.assertEqual(fm.expected, decimal.Decimal('5.1')) - self.assertEqual(fm.actual, decimal.Decimal('6.1')) - self.mox.VerifyAll() - - def test_should_verify_that_image_size_in_exist_is_not_null(self): exist = self.mox.CreateMockAnything() exist.id = 23 exist.size = None exist.created_at = decimal.Decimal('5.1') - exist.uuid = 'abcd1234' + exist.uuid = '1234-5678-9012-3456' self.mox.ReplayAll() try: @@ -286,26 +270,40 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): self.fail() except NullFieldException as nf: self.assertEqual(nf.field_name, 'image_size') - self.assertEqual(nf.reason, "image_size field was null for exist id 23") + self.assertEqual( + nf.reason, "Failed at 2014-01-02 03:04:05 UTC for " + "1234-5678-9012-3456: image_size field was null for " + "exist id 23") self.mox.VerifyAll() def test_should_verify_that_created_at_in_exist_is_not_null(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-01 01:02:03') + self.mox.ReplayAll() + exist = self.mox.CreateMockAnything() exist.id = 23 exist.size = 'size' exist.created_at = None - exist.uuid = 'abcd1234' + exist.uuid = '1234-5678-9012-3456' self.mox.ReplayAll() - try: + with self.assertRaises(NullFieldException) as nfe: glance_verifier._verify_validity(exist) - self.fail() - except NullFieldException as nf: - self.assertEqual(nf.field_name, 'created_at') - self.assertEqual(nf.reason, "created_at field was null for exist id 23") + exception = nfe.exception + + self.assertEqual(exception.field_name, 'created_at') + self.assertEqual(exception.reason, + "Failed at 2014-01-01 01:02:03 UTC for " + "1234-5678-9012-3456: created_at field was " + "null for exist id 23") self.mox.VerifyAll() def test_should_verify_that_uuid_in_exist_is_not_null(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-01 01:02:03') + self.mox.ReplayAll() + exist = self.mox.CreateMockAnything() exist.id = 23 exist.size = 'size' @@ -318,15 +316,21 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): self.fail() except NullFieldException as nf: self.assertEqual(nf.field_name, 'uuid') - self.assertEqual(nf.reason, "uuid field was null for exist id 23") + self.assertEqual( + nf.reason, "Failed at 2014-01-01 01:02:03 UTC for None: " + "uuid field was null for exist id 23") self.mox.VerifyAll() def test_should_verify_that_owner_in_exist_is_not_null(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + exist = self.mox.CreateMockAnything() exist.id = 23 exist.size = 1234 exist.created_at = decimal.Decimal('5.1') - exist.uuid = 'abcd1234' + exist.uuid = '1234-5678-9012-3456' exist.owner = None self.mox.ReplayAll() @@ -335,10 +339,16 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): self.fail() except NullFieldException as nf: self.assertEqual(nf.field_name, 'owner') - self.assertEqual(nf.reason, "owner field was null for exist id 23") + self.assertEqual( + nf.reason, "Failed at 2014-01-02 03:04:05 UTC for " 
+ "1234-5678-9012-3456: owner field was null for exist id 23") self.mox.VerifyAll() def test_should_verify_that_uuid_value_is_uuid_like(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + exist = self.mox.CreateMockAnything() exist.id = 23 exist.size = 'size' @@ -351,10 +361,17 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): self.fail() except WrongTypeException as wt: self.assertEqual(wt.field_name, 'uuid') - self.assertEqual(wt.reason, "{ uuid : asdfe-fgh } of incorrect type for exist id 23") + self.assertEqual( + wt.reason, + "Failed at 2014-01-02 03:04:05 UTC for None: " + "{uuid: asdfe-fgh} was of incorrect type for exist id 23") self.mox.VerifyAll() def test_should_verify_created_at_is_decimal(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + exist = self.mox.CreateMockAnything() exist.id = 23 exist.size = 'size' @@ -367,13 +384,21 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): self.fail() except WrongTypeException as wt: self.assertEqual(wt.field_name, 'created_at') - self.assertEqual(wt.reason, "{ created_at : 123.a } of incorrect type for exist id 23") + self.assertEqual( + wt.reason, + "Failed at 2014-01-02 03:04:05 UTC for " + "58fb036d-5ef8-47a8-b503-7571276c400a: {created_at: 123.a} was " + "of incorrect type for exist id 23") self.mox.VerifyAll() def test_should_verify_image_size_is_of_type_decimal(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + exist = self.mox.CreateMockAnything() exist.id = 23 - exist.size = 'size' + exist.size = 'random' exist.created_at = decimal.Decimal('5.1') exist.uuid = "58fb036d-5ef8-47a8-b503-7571276c400a" self.mox.ReplayAll() @@ -383,10 +408,18 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): self.fail() except WrongTypeException as wt: self.assertEqual(wt.field_name, 'size') - self.assertEqual(wt.reason, "{ size : size } of incorrect type for exist id 23") + self.assertEqual( + wt.reason, + "Failed at 2014-01-02 03:04:05 UTC for " + "58fb036d-5ef8-47a8-b503-7571276c400a: {size: random} was " + "of incorrect type for exist id 23") self.mox.VerifyAll() def test_should_verify_owner_is_of_type_hex(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + exist = self.mox.CreateMockAnything() exist.id = 23 exist.size = 1234L @@ -400,7 +433,12 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): self.fail() except WrongTypeException as wt: self.assertEqual(wt.field_name, 'owner') - self.assertEqual(wt.reason, "{ owner : 3762854cd6f6435998188d5120e4c271,kl } of incorrect type for exist id 23") + self.assertEqual( + wt.reason, + "Failed at 2014-01-02 03:04:05 UTC for " + "58fb036d-5ef8-47a8-b503-7571276c400a: " + "{owner: 3762854cd6f6435998188d5120e4c271,kl} was of " + "incorrect type for exist id 23") self.mox.VerifyAll() def test_should_verify_correctly_for_all_non_null_and_valid_types(self): @@ -435,6 +473,9 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): self.assertTrue(verified) def test_verify_exist_marks_exist_failed_if_field_mismatch_exception(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-01 01:01:01') + self.mox.ReplayAll() exist1 = self.mox.CreateMockAnything() exist2 = 
self.mox.CreateMockAnything() @@ -442,11 +483,13 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): self.mox.StubOutWithMock(glance_verifier, '_verify_for_usage') self.mox.StubOutWithMock(glance_verifier, '_verify_for_delete') self.mox.StubOutWithMock(glance_verifier, '_verify_validity') - - field_mismatch_exc = FieldMismatch('field', 'expected', 'actual') + field_mismatch_exc = FieldMismatch('field', 'expected', + 'actual', 'uuid') glance_verifier._verify_for_usage(exist1).AndRaise( exception=field_mismatch_exc) - exist1.mark_failed(reason="Expected field to be 'expected' got 'actual'") + exist1.mark_failed( + reason="Failed at 2014-01-01 01:01:01 UTC for uuid: Expected " + "field to be 'expected' got 'actual'") glance_verifier._verify_for_usage(exist2) glance_verifier._verify_for_delete(exist2) @@ -465,7 +508,7 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): stacklog.get_logger('verifier', is_parent=False).AndReturn(mock_logger) mock_logger.info('glance: Adding 2 per-owner exists to queue.') mock_logger.info('glance: Adding 2 per-owner exists to queue.') - when_max = datetime.utcnow() + when_max = datetime.datetime.utcnow() models.ImageExists.VERIFYING = 'verifying' models.ImageExists.PENDING = 'pending' models.ImageExists.SENT_VERIFYING = 'sent_verifying' @@ -514,7 +557,7 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): mock_logger.info('glance: Adding 0 per-owner exists to queue.') mock_logger.info('glance: Adding 2 per-owner exists to queue.') callback = self.mox.CreateMockAnything() - when_max = datetime.utcnow() + when_max = datetime.datetime.utcnow() models.ImageExists.SENT_VERIFYING = 'sent_verifying' models.ImageExists.SENT_UNVERIFIED = 'sent_unverified' models.ImageExists.PENDING = 'pending' @@ -559,8 +602,8 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): ] exist_str = json.dumps(exist_dict) exist.raw.json = exist_str - exist.audit_period_beginning = datetime(2013, 10, 10) - exist.audit_period_ending = datetime(2013, 10, 10, 23, 59, 59) + exist.audit_period_beginning = datetime.datetime(2013, 10, 10) + exist.audit_period_ending = datetime.datetime(2013, 10, 10, 23, 59, 59) exist.owner = "1" self.mox.StubOutWithMock(uuid, 'uuid4') uuid.uuid4().AndReturn('some_other_uuid') @@ -601,8 +644,8 @@ class GlanceVerifierTestCase(StacktachBaseTestCase): ] exist_str = json.dumps(exist_dict) exist.raw.json = exist_str - exist.audit_period_beginning = datetime(2013, 10, 10) - exist.audit_period_ending = datetime(2013, 10, 10, 23, 59, 59) + exist.audit_period_beginning = datetime.datetime(2013, 10, 10) + exist.audit_period_ending = datetime.datetime(2013, 10, 10, 23, 59, 59) exist.owner = "1" self.mox.StubOutWithMock(kombu.pools, 'producers') self.mox.StubOutWithMock(kombu.common, 'maybe_declare') diff --git a/tests/unit/test_nova_verifier.py b/tests/unit/test_nova_verifier.py index e56959e..0f189f7 100644 --- a/tests/unit/test_nova_verifier.py +++ b/tests/unit/test_nova_verifier.py @@ -32,7 +32,7 @@ from stacktach import datetime_to_decimal as dt from stacktach import stacklog from stacktach import models from tests.unit import StacktachBaseTestCase -from utils import make_verifier_config +from utils import make_verifier_config, LAUNCHED_AT_1, INSTANCE_FLAVOR_ID_1, INSTANCE_FLAVOR_ID_2, FLAVOR_FIELD_NAME, DELETED_AT_1, LAUNCHED_AT_2, DELETED_AT_2 from utils import INSTANCE_ID_1 from utils import RAX_OPTIONS_1 from utils import RAX_OPTIONS_2 @@ -54,12 +54,14 @@ from verifier import FieldMismatch from verifier import NotFound from verifier import 
VerificationException + class NovaVerifierVerifyForLaunchTestCase(StacktachBaseTestCase): def setUp(self): self.mox = mox.Mox() self.mox.StubOutWithMock(models, 'InstanceUsage', use_mock_anything=True) models.InstanceUsage.objects = self.mox.CreateMockAnything() + self._setup_verifier() def _setup_verifier(self): @@ -132,28 +134,36 @@ class NovaVerifierVerifyForLaunchTestCase(StacktachBaseTestCase): self.mox.VerifyAll() def test_verify_for_launch_flavor_id_missmatch(self): - self.mox.StubOutWithMock(config, 'flavor_field_name') - config.flavor_field_name().AndReturn('dummy_flavor_field_name') - exist = self.mox.CreateMockAnything() - exist.usage = self.mox.CreateMockAnything() - exist.launched_at = decimal.Decimal('1.1') - exist.dummy_flavor_field_name = 'dummy_flavor_1' - exist.usage.launched_at = decimal.Decimal('1.1') - exist.usage.dummy_flavor_field_name = 'dummy_flavor_2' + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') self.mox.ReplayAll() + self.mox.StubOutWithMock(config, 'flavor_field_name') + config.flavor_field_name().AndReturn(FLAVOR_FIELD_NAME) + exist = self.mox.CreateMockAnything() + exist.instance = INSTANCE_ID_1 + exist.usage = self.mox.CreateMockAnything() + exist.launched_at = decimal.Decimal(LAUNCHED_AT_1) + exist.flavor_field_name = INSTANCE_FLAVOR_ID_1 + exist.usage.launched_at = decimal.Decimal(LAUNCHED_AT_1) + exist.usage.flavor_field_name = INSTANCE_FLAVOR_ID_2 + self.mox.ReplayAll() with self.assertRaises(FieldMismatch) as fm: nova_verifier._verify_for_launch(exist) exception = fm.exception - self.assertEqual(exception.field_name, 'dummy_flavor_field_name') - self.assertEqual(exception.expected, 'dummy_flavor_1') - self.assertEqual(exception.actual, 'dummy_flavor_2') - + self.assertEqual(exception.field_name, FLAVOR_FIELD_NAME) + self.assertEqual(exception.expected, INSTANCE_FLAVOR_ID_1) + self.assertEqual(exception.actual, INSTANCE_FLAVOR_ID_2) + self.assertEqual( + exception.reason, + "Failed at 2014-01-02 03:04:05 UTC for " + "08f685d9-6352-4dbc-8271-96cc54bf14cd: Expected flavor_field_name " + "to be '1' got 'performance2-120'") self.mox.VerifyAll() def test_verify_for_launch_tenant_id_mismatch(self): self.mox.StubOutWithMock(config, 'flavor_field_name') - config.flavor_field_name().AndReturn("flavor_field_name") + config.flavor_field_name().AndReturn(FLAVOR_FIELD_NAME) exist = self.mox.CreateMockAnything() exist.tenant = TENANT_ID_1 @@ -425,35 +435,35 @@ class NovaVerifierVerifyForDeleteTestCase(StacktachBaseTestCase): def test_verify_for_delete_launched_at_mismatch(self): exist = self.mox.CreateMockAnything() exist.delete = self.mox.CreateMockAnything() - exist.launched_at = decimal.Decimal('1.1') - exist.deleted_at = decimal.Decimal('5.1') - exist.delete.launched_at = decimal.Decimal('2.1') - exist.delete.deleted_at = decimal.Decimal('5.1') + exist.launched_at = LAUNCHED_AT_1 + exist.deleted_at = DELETED_AT_1 + exist.delete.launched_at = LAUNCHED_AT_2 + exist.delete.deleted_at = DELETED_AT_1 self.mox.ReplayAll() with self.assertRaises(FieldMismatch) as fm: nova_verifier._verify_for_delete(exist) exception = fm.exception self.assertEqual(exception.field_name, 'launched_at') - self.assertEqual(exception.expected, decimal.Decimal('1.1')) - self.assertEqual(exception.actual, decimal.Decimal('2.1')) + self.assertEqual(exception.expected, LAUNCHED_AT_1) + self.assertEqual(exception.actual, LAUNCHED_AT_2) self.mox.VerifyAll() def test_verify_for_delete_deleted_at_mismatch(self): exist = 
self.mox.CreateMockAnything() exist.delete = self.mox.CreateMockAnything() - exist.launched_at = decimal.Decimal('1.1') - exist.deleted_at = decimal.Decimal('5.1') - exist.delete.launched_at = decimal.Decimal('1.1') - exist.delete.deleted_at = decimal.Decimal('6.1') + exist.launched_at = LAUNCHED_AT_1 + exist.deleted_at = DELETED_AT_1 + exist.delete.launched_at = LAUNCHED_AT_1 + exist.delete.deleted_at = DELETED_AT_2 self.mox.ReplayAll() with self.assertRaises(FieldMismatch) as fm: nova_verifier._verify_for_delete(exist) exception = fm.exception self.assertEqual(exception.field_name, 'deleted_at') - self.assertEqual(exception.expected, decimal.Decimal('5.1')) - self.assertEqual(exception.actual, decimal.Decimal('6.1')) + self.assertEqual(exception.expected, DELETED_AT_1) + self.assertEqual(exception.actual, DELETED_AT_2) self.mox.VerifyAll() @@ -1012,28 +1022,52 @@ class NovaVerifierValidityTestCase(StacktachBaseTestCase): def tearDown(self): self.mox.UnsetStubs() + def _create_mock_exist(self): + exist = self.mox.CreateMockAnything() + exist.instance = '58fb036d-5ef8-47a8-b503-7571276c400a' + exist.tenant = '3762854cd6f6435998188d5120e4c271' + exist.id = 23 + exist.launched_at = decimal.Decimal('1.1') + exist.deleted_at = decimal.Decimal('5.1') + exist.dummy_flavor_field_name = 'dummy_flavor' + exist.rax_options = '1' + exist.os_architecture = 'x64' + exist.os_distro = 'com.microsoft.server' + exist.os_version = '2008.2' + + return exist + def test_should_verify_that_tenant_in_exist_is_not_null(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + self.mox.StubOutWithMock(config, 'flavor_field_name') config.flavor_field_name().AndReturn('dummy_flavor_field_name') - exist = self.mox.CreateMockAnything() + + exist = self._create_mock_exist() exist.tenant = None - exist.id = 23 self.mox.ReplayAll() with self.assertRaises(NullFieldException) as nf: nova_verifier._verify_validity(exist, 'all') exception = nf.exception self.assertEqual(exception.field_name, 'tenant') - self.assertEqual(exception.reason, - "tenant field was null for exist id 23") + self.assertEqual( + exception.reason, "Failed at 2014-01-02 03:04:05 UTC for " + "58fb036d-5ef8-47a8-b503-7571276c400a: tenant field was null for " + "exist id 23") self.mox.VerifyAll() def test_should_verify_that_launched_at_in_exist_is_not_null(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + self.mox.StubOutWithMock(config, 'flavor_field_name') config.flavor_field_name().AndReturn('dummy_flavor_field_name') - exist = self.mox.CreateMockAnything() - exist.tenant = 'tenant' - exist.id = 23 + + exist = self._create_mock_exist() exist.launched_at = None self.mox.ReplayAll() @@ -1041,17 +1075,21 @@ class NovaVerifierValidityTestCase(StacktachBaseTestCase): nova_verifier._verify_validity(exist, 'all') exception = nf.exception self.assertEqual(exception.field_name, 'launched_at') - self.assertEqual(exception.reason, - "launched_at field was null for exist id 23") + self.assertEqual( + exception.reason, "Failed at 2014-01-02 03:04:05 UTC for " + "58fb036d-5ef8-47a8-b503-7571276c400a: launched_at field was null " + "for exist id 23") self.mox.VerifyAll() def test_should_verify_that_instance_flavor_id_in_exist_is_not_null(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + 
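Because the new failure messages embed the time at which verification failed, these tests repeatedly pin datetime.datetime.utcnow() with mox before entering replay mode, so the "Failed at ... UTC" prefix can be asserted verbatim. A condensed sketch of that stubbing pattern follows; null_field_reason() is an illustrative stand-in for the real verifier exceptions, not StackTach code.

    import datetime

    import mox


    def null_field_reason(field_name, exist_id, uuid):
        # Same shape as the NullFieldException message introduced in this patch.
        return ("Failed at %s UTC for %s: %s field was null for exist id %s"
                % (datetime.datetime.utcnow(), uuid, field_name, exist_id))


    def demo():
        m = mox.Mox()
        # Replace the module's datetime.datetime attribute with a mock and
        # record the single utcnow() call we expect during the test.
        m.StubOutWithMock(datetime, 'datetime')
        datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05')
        m.ReplayAll()

        reason = null_field_reason(
            'tenant', 23, '58fb036d-5ef8-47a8-b503-7571276c400a')
        assert reason == ("Failed at 2014-01-02 03:04:05 UTC for "
                          "58fb036d-5ef8-47a8-b503-7571276c400a: tenant field "
                          "was null for exist id 23")

        m.VerifyAll()
        m.UnsetStubs()

Stubbing the module attribute keeps the code under test unchanged, but while the stub is in place every other use of datetime.datetime inside the replay window must also be recorded, which is why the tests above re-stub it per test method.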
self.mox.StubOutWithMock(config, 'flavor_field_name') config.flavor_field_name().AndReturn('dummy_flavor_field_name') - exist = self.mox.CreateMockAnything() - exist.tenant = '3762854cd6f6435998188d5120e4c271' - exist.id = 23 - exist.launched_at = decimal.Decimal('1.1') + + exist = self._create_mock_exist() exist.dummy_flavor_field_name = None self.mox.ReplayAll() @@ -1061,17 +1099,21 @@ class NovaVerifierValidityTestCase(StacktachBaseTestCase): self.assertEqual(exception.field_name, 'dummy_flavor_field_name') self.assertEqual( exception.reason, - "dummy_flavor_field_name field was null for exist id 23") + "Failed at 2014-01-02 03:04:05 UTC for " + "58fb036d-5ef8-47a8-b503-7571276c400a: dummy_flavor_field_name " + "field was null for exist id 23") self.mox.VerifyAll() def test_should_verify_tenant_id_is_of_type_hex(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + self.mox.StubOutWithMock(config, 'flavor_field_name') config.flavor_field_name().AndReturn('dummy_flavor_field_name') - exist = self.mox.CreateMockAnything() - exist.tenant = 'tenant' - exist.id = 23 - exist.launched_at = decimal.Decimal('1.1') - exist.dummy_flavor_field_name = 'dummy_flavor' + + exist = self._create_mock_exist() + exist.tenant = 'invalid_tenant' self.mox.ReplayAll() with self.assertRaises(WrongTypeException) as wt: @@ -1080,17 +1122,21 @@ class NovaVerifierValidityTestCase(StacktachBaseTestCase): self.assertEqual(exception.field_name, 'tenant') self.assertEqual( exception.reason, - "{ tenant : tenant } of incorrect type for exist id 23") + "Failed at 2014-01-02 03:04:05 UTC for " + "58fb036d-5ef8-47a8-b503-7571276c400a: {tenant: invalid_tenant} " + "was of incorrect type for exist id 23") self.mox.VerifyAll() def test_should_verify_launched_at_is_of_type_decimal(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + self.mox.StubOutWithMock(config, 'flavor_field_name') config.flavor_field_name().AndReturn('dummy_flavor_field_name') - exist = self.mox.CreateMockAnything() - exist.tenant = '3762854cd6f6435998188d5120e4c271' - exist.id = 23 + + exist = self._create_mock_exist() exist.launched_at = 111 - exist.dummy_flavor_field_name = 'dummy_flavor' self.mox.ReplayAll() @@ -1100,17 +1146,20 @@ class NovaVerifierValidityTestCase(StacktachBaseTestCase): self.assertEqual(exception.field_name, 'launched_at') self.assertEqual( exception.reason, - "{ launched_at : 111 } of incorrect type for exist id 23") + 'Failed at 2014-01-02 03:04:05 UTC for ' + '58fb036d-5ef8-47a8-b503-7571276c400a: {launched_at: 111} was of ' + 'incorrect type for exist id 23') self.mox.VerifyAll() def test_should_verify_deleted_at_is_of_decimal_type_if_present(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + self.mox.StubOutWithMock(config, 'flavor_field_name') config.flavor_field_name().AndReturn('dummy_flavor_field_name') - exist = self.mox.CreateMockAnything() - exist.tenant = '3762854cd6f6435998188d5120e4c271' - exist.id = 23 - exist.launched_at = decimal.Decimal('1.1') - exist.dummy_flavor_field_name = 'dummy_flavor' + + exist = self._create_mock_exist() exist.deleted_at = 20 self.mox.ReplayAll() @@ -1120,19 +1169,20 @@ class NovaVerifierValidityTestCase(StacktachBaseTestCase): self.assertEqual(exception.field_name, 'deleted_at') self.assertEqual( exception.reason, - "{ 
deleted_at : 20 } of incorrect type for exist id 23") + 'Failed at 2014-01-02 03:04:05 UTC for ' + '58fb036d-5ef8-47a8-b503-7571276c400a: {deleted_at: 20} was of ' + 'incorrect type for exist id 23') self.mox.VerifyAll() - def test_should_verify_rax_options_should_be_of_integer_type(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + self.mox.StubOutWithMock(config, 'flavor_field_name') config.flavor_field_name().AndReturn('dummy_flavor_field_name') - exist = self.mox.CreateMockAnything() - exist.tenant = '3762854cd6f6435998188d5120e4c271' - exist.id = 23 - exist.launched_at = decimal.Decimal('1.1') - exist.deleted_at = decimal.Decimal('5.1') - exist.dummy_flavor_field_name = 'dummy_flavor' + + exist = self._create_mock_exist() exist.rax_options = 'a' self.mox.ReplayAll() @@ -1142,18 +1192,20 @@ class NovaVerifierValidityTestCase(StacktachBaseTestCase): self.assertEqual(exception.field_name, 'rax_options') self.assertEqual( exception.reason, - "{ rax_options : a } of incorrect type for exist id 23") + 'Failed at 2014-01-02 03:04:05 UTC for ' + '58fb036d-5ef8-47a8-b503-7571276c400a: {rax_options: a} was of ' + 'incorrect type for exist id 23') self.mox.VerifyAll() def test_should_verify_rax_options_should_not_be_empty(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + self.mox.StubOutWithMock(config, 'flavor_field_name') config.flavor_field_name().AndReturn('dummy_flavor_field_name') - exist = self.mox.CreateMockAnything() - exist.tenant = '3762854cd6f6435998188d5120e4c271' - exist.id = 23 - exist.launched_at = decimal.Decimal('1.1') - exist.deleted_at = decimal.Decimal('5.1') - exist.dummy_flavor_field_name = 'dummy_flavor' + + exist = self._create_mock_exist() exist.rax_options = '' self.mox.ReplayAll() @@ -1161,20 +1213,22 @@ class NovaVerifierValidityTestCase(StacktachBaseTestCase): nova_verifier._verify_validity(exist, 'all') exception = nf.exception self.assertEqual(exception.field_name, 'rax_options') - self.assertEqual(exception.reason, - "rax_options field was null for exist id 23") + self.assertEqual( + exception.reason, + "Failed at 2014-01-02 03:04:05 UTC for " + "58fb036d-5ef8-47a8-b503-7571276c400a: rax_options field was null " + "for exist id 23") self.mox.VerifyAll() def test_should_verify_os_arch_should_be_alphanumeric(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + self.mox.StubOutWithMock(config, 'flavor_field_name') config.flavor_field_name().AndReturn('dummy_flavor_field_name') - exist = self.mox.CreateMockAnything() - exist.tenant = '3762854cd6f6435998188d5120e4c271' - exist.id = 23 - exist.launched_at = decimal.Decimal('1.1') - exist.deleted_at = decimal.Decimal('5.1') - exist.dummy_flavor_field_name = 'dummy_flavor' - exist.rax_options = 12 + + exist = self._create_mock_exist() exist.os_architecture = 'x64,' self.mox.ReplayAll() @@ -1184,19 +1238,19 @@ class NovaVerifierValidityTestCase(StacktachBaseTestCase): self.assertEqual(exception.field_name, 'os_architecture') self.assertEqual( exception.reason, - "{ os_architecture : x64, } of incorrect type for exist id 23") + 'Failed at 2014-01-02 03:04:05 UTC for ' + '58fb036d-5ef8-47a8-b503-7571276c400a: {os_architecture: x64,} ' + 'was of incorrect type for exist id 23') self.mox.VerifyAll() def test_should_verify_os_arch_should_not_be_empty(self): 
+ self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() self.mox.StubOutWithMock(config, 'flavor_field_name') config.flavor_field_name().AndReturn('dummy_flavor_field_name') - exist = self.mox.CreateMockAnything() - exist.tenant = '3762854cd6f6435998188d5120e4c271' - exist.id = 23 - exist.launched_at = decimal.Decimal('1.1') - exist.deleted_at = decimal.Decimal('5.1') - exist.dummy_flavor_field_name = 'dummy_flavor' - exist.rax_options = 12 + + exist = self._create_mock_exist() exist.os_architecture = '' self.mox.ReplayAll() @@ -1206,20 +1260,20 @@ class NovaVerifierValidityTestCase(StacktachBaseTestCase): self.assertEqual(exception.field_name, 'os_architecture') self.assertEqual( exception.reason, - "os_architecture field was null for exist id 23") + "Failed at 2014-01-02 03:04:05 UTC for " + "58fb036d-5ef8-47a8-b503-7571276c400a: os_architecture field was " + "null for exist id 23") self.mox.VerifyAll() def test_should_verify_os_distro_should_be_alphanumeric(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + self.mox.StubOutWithMock(config, 'flavor_field_name') config.flavor_field_name().AndReturn('dummy_flavor_field_name') - exist = self.mox.CreateMockAnything() - exist.tenant = '3762854cd6f6435998188d5120e4c271' - exist.id = 23 - exist.launched_at = decimal.Decimal('1.1') - exist.deleted_at = decimal.Decimal('5.1') - exist.dummy_flavor_field_name = 'dummy_flavor' - exist.rax_options = 12 - exist.os_architecture = 'x64' + + exist = self._create_mock_exist() exist.os_distro = 'com.microsoft.server,' self.mox.ReplayAll() @@ -1229,21 +1283,21 @@ class NovaVerifierValidityTestCase(StacktachBaseTestCase): self.assertEqual(exception.field_name, 'os_distro') self.assertEqual( exception.reason, - "{ os_distro : com.microsoft.server, } " - "of incorrect type for exist id 23") + "Failed at 2014-01-02 03:04:05 UTC for " + "58fb036d-5ef8-47a8-b503-7571276c400a: " + "{os_distro: com.microsoft.server,} was of incorrect type for " + "exist id 23") self.mox.VerifyAll() def test_should_verify_os_distro_should_not_be_empty(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + self.mox.StubOutWithMock(config, 'flavor_field_name') config.flavor_field_name().AndReturn('dummy_flavor_field_name') - exist = self.mox.CreateMockAnything() - exist.tenant = '3762854cd6f6435998188d5120e4c271' - exist.id = 23 - exist.launched_at = decimal.Decimal('1.1') - exist.deleted_at = decimal.Decimal('5.1') - exist.dummy_flavor_field_name = 'dummy_flavor' - exist.rax_options = 12 - exist.os_architecture = 'x64' + + exist = self._create_mock_exist() exist.os_distro = '' self.mox.ReplayAll() @@ -1253,21 +1307,20 @@ class NovaVerifierValidityTestCase(StacktachBaseTestCase): self.assertEqual(exception.field_name, 'os_distro') self.assertEqual( exception.reason, - "os_distro field was null for exist id 23") + "Failed at 2014-01-02 03:04:05 UTC for " + "58fb036d-5ef8-47a8-b503-7571276c400a: os_distro field was null " + "for exist id 23") self.mox.VerifyAll() def test_should_verify_os_version_should_be_alphanumeric(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + self.mox.StubOutWithMock(config, 'flavor_field_name') config.flavor_field_name().AndReturn('dummy_flavor_field_name') - exist 
= self.mox.CreateMockAnything() - exist.tenant = '3762854cd6f6435998188d5120e4c271' - exist.id = 23 - exist.launched_at = decimal.Decimal('1.1') - exist.deleted_at = decimal.Decimal('5.1') - exist.dummy_flavor_field_name = 'dummy_flavor' - exist.rax_options = 12 - exist.os_architecture = 'x64' - exist.os_distro = 'com.microsoft.server' + + exist = self._create_mock_exist() exist.os_version = '2008.2,' self.mox.ReplayAll() @@ -1277,21 +1330,20 @@ class NovaVerifierValidityTestCase(StacktachBaseTestCase): self.assertEqual(exception.field_name, 'os_version') self.assertEqual( exception.reason, - "{ os_version : 2008.2, } of incorrect type for exist id 23") + 'Failed at 2014-01-02 03:04:05 UTC for ' + '58fb036d-5ef8-47a8-b503-7571276c400a: {os_version: 2008.2,} was ' + 'of incorrect type for exist id 23') self.mox.VerifyAll() def test_should_verify_os_version_should_not_be_empty(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + self.mox.StubOutWithMock(config, 'flavor_field_name') config.flavor_field_name().AndReturn('dummy_flavor_field_name') - exist = self.mox.CreateMockAnything() - exist.tenant = '3762854cd6f6435998188d5120e4c271' - exist.id = 23 - exist.launched_at = decimal.Decimal('1.1') - exist.deleted_at = decimal.Decimal('5.1') - exist.dummy_flavor_field_name = 'dummy_flavor' - exist.rax_options = 12 - exist.os_architecture = 'x64' - exist.os_distro = 'com.microsoft.server' + + exist = self._create_mock_exist() exist.os_version = '' self.mox.ReplayAll() @@ -1300,30 +1352,26 @@ class NovaVerifierValidityTestCase(StacktachBaseTestCase): exception = nf.exception self.assertEqual(exception.field_name, 'os_version') self.assertEqual( - exception.reason, "os_version field was null for exist id 23") + exception.reason, + "Failed at 2014-01-02 03:04:05 UTC for " + "58fb036d-5ef8-47a8-b503-7571276c400a: os_version field was null " + "for exist id 23") self.mox.VerifyAll() - def test_should_verify_all_exist_fields_when_validity_check_value_is_all(self): + def test_should_verify_all_exist_fields_when_validity_check_value_all(self): self.mox.StubOutWithMock(config, 'flavor_field_name') config.flavor_field_name().AndReturn('dummy_flavor_field_name') - exist = self.mox.CreateMockAnything() - exist.tenant = '3762854cd6f6435998188d5120e4c271' - exist.id = 23 - exist.launched_at = decimal.Decimal('1.1') - exist.deleted_at = decimal.Decimal('5.1') - exist.dummy_flavor_field_name = 'dummy_flavor' - exist.rax_options = '12' - exist.os_architecture = 'x64' - exist.os_distro = 'com.microsoft.server' - exist.os_version = '2008.2' + + exist = self._create_mock_exist() self.mox.ReplayAll() nova_verifier._verify_validity(exist, 'all') self.mox.VerifyAll() - def test_should_verify_only_basic_exist_fields_when_validity_check_value_is_basic(self): + def test_should_verify_only_basic_fields_when_validity_check_basic(self): self.mox.StubOutWithMock(config, 'flavor_field_name') config.flavor_field_name().AndReturn('dummy_flavor_field_name') + exist = self.mox.CreateMockAnything() exist.tenant = '3762854cd6f6435998188d5120e4c271' exist.id = 23 @@ -1346,16 +1394,9 @@ class NovaVerifierValidityTestCase(StacktachBaseTestCase): def test_should_verify_exist_fields_even_if_deleted_at_is_none(self): self.mox.StubOutWithMock(config, 'flavor_field_name') config.flavor_field_name().AndReturn('dummy_flavor_field_name') - exist = self.mox.CreateMockAnything() - exist.tenant = '3762854cd6f6435998188d5120e4c271' - exist.id = 23 - 
exist.launched_at = decimal.Decimal('1.1') + + exist = self._create_mock_exist() exist.deleted_at = None - exist.dummy_flavor_field_name = 'dummy_flavor' - exist.rax_options = 12 - exist.os_architecture = 'x64' - exist.os_distro = 'com.microsoft.server' - exist.os_version = '2008.2' self.mox.ReplayAll() nova_verifier._verify_validity(exist, 'all') diff --git a/tests/unit/utils.py b/tests/unit/utils.py index 2d4f402..581c7e4 100644 --- a/tests/unit/utils.py +++ b/tests/unit/utils.py @@ -19,6 +19,7 @@ # IN THE SOFTWARE. import datetime +import decimal TENANT_ID_1 = 'testtenantid1' TENANT_ID_2 = 'testtenantid2' @@ -30,7 +31,7 @@ IMAGE_UUID_1 = "12345678-6352-4dbc-8271-96cc54bf14cd" INSTANCE_ID_1 = "08f685d9-6352-4dbc-8271-96cc54bf14cd" INSTANCE_ID_2 = "515adf96-41d3-b86d-5467-e584edc61dab" -INSTANCE_FLAVOR_ID_1 = "performance1-120" +INSTANCE_FLAVOR_ID_1 = "1" INSTANCE_FLAVOR_ID_2 = "performance2-120" INSTANCE_TYPE_ID_1 = "12345" @@ -61,6 +62,18 @@ OS_ARCH_2 = "x64" OS_VERSION_1 = "1" OS_VERSION_2 = "2" +LAUNCHED_AT_1 = decimal.Decimal("1.1") +LAUNCHED_AT_2 = decimal.Decimal("2.1") + +DELETED_AT_1 = decimal.Decimal("3.1") +DELETED_AT_2 = decimal.Decimal("4.1") + +SIZE_1 = 1234 +SIZE_2 = 4567 + +CREATED_AT_1 = decimal.Decimal("10.1") +CREATED_AT_2 = decimal.Decimal("11.1") + TIMESTAMP_1 = "2013-06-20 17:31:57.939614" SETTLE_TIME = 5 SETTLE_UNITS = "minutes" diff --git a/verifier/__init__.py b/verifier/__init__.py index 0c542f6..b9dda73 100644 --- a/verifier/__init__.py +++ b/verifier/__init__.py @@ -17,6 +17,8 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. +import datetime + class VerificationException(Exception): def __init__(self, reason): @@ -44,22 +46,35 @@ class AmbiguousResults(VerificationException): class FieldMismatch(VerificationException): - def __init__(self, field_name, expected, actual): + def __init__(self, field_name, expected, actual, uuid): self.field_name = field_name self.expected = expected self.actual = actual - self.reason = "Expected %s to be '%s' got '%s'" % (self.field_name, - self.expected, - self.actual) + self.reason = \ + "Failed at {failed_at} UTC for {uuid}: Expected {field_name} " \ + "to be '{expected}' got '{actual}'".\ + format(failed_at=datetime.datetime.utcnow(), uuid=uuid, + field_name=field_name, expected=expected, + actual=actual) + class NullFieldException(VerificationException): - def __init__(self, field_name, exist_id): + def __init__(self, field_name, exist_id, uuid): self.field_name = field_name - self.reason = "%s field was null for exist id %s" %(field_name, exist_id) + self.reason = \ + "Failed at {failed_at} UTC for {uuid}: {field_name} field " \ + "was null for exist id {exist_id}".format( + failed_at=datetime.datetime.utcnow(), uuid=uuid, + field_name=field_name, exist_id=exist_id) + class WrongTypeException(VerificationException): - def __init__(self, field_name, value, exist_id): + def __init__(self, field_name, value, exist_id, uuid): self.field_name = field_name - self.reason = "{ %s : %s } of incorrect type for exist id %s"\ - %(field_name, value, exist_id) + self.reason = \ + "Failed at {failed_at} UTC for {uuid}: " \ + "{{{field_name}: {value}}} was of incorrect type for " \ + "exist id {exist_id}".format( + failed_at=datetime.datetime.utcnow(), uuid=uuid, + field_name=field_name, value=value, exist_id=exist_id) diff --git a/verifier/base_verifier.py b/verifier/base_verifier.py index 7033ebc..9affa04 100644 
--- a/verifier/base_verifier.py +++ b/verifier/base_verifier.py @@ -81,34 +81,34 @@ def _verify_date_field(d1, d2, same_second=False): def _is_like_uuid(attr_name, attr_value, exist_id): if not re.match("[0-9A-Fa-f]{8}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{12}$", attr_value): - raise WrongTypeException(attr_name, attr_value, exist_id) + raise WrongTypeException(attr_name, attr_value, exist_id, None) -def _is_like_date(attr_name, attr_value, exist_id): +def _is_like_date(attr_name, attr_value, exist_id, instance_uuid): if not isinstance(attr_value, decimal.Decimal): - raise WrongTypeException(attr_name, attr_value, exist_id) + raise WrongTypeException(attr_name, attr_value, exist_id, instance_uuid) -def _is_long(attr_name, attr_value, exist_id): +def _is_long(attr_name, attr_value, exist_id, instance_uuid): if not isinstance(attr_value, long): - raise WrongTypeException(attr_name, attr_value, exist_id) + raise WrongTypeException(attr_name, attr_value, exist_id, instance_uuid) -def _is_int_in_char(attr_name, attr_value, exist_id): +def _is_int_in_char(attr_name, attr_value, exist_id, instance_uuid): try: int(attr_value) except ValueError: - raise WrongTypeException(attr_name, attr_value, exist_id) + raise WrongTypeException(attr_name, attr_value, exist_id, instance_uuid) -def _is_hex_owner_id(attr_name, attr_value, exist_id): +def _is_hex_owner_id(attr_name, attr_value, exist_id, instance_uuid): if not re.match("^[0-9a-fA-F]+$", attr_value): - raise WrongTypeException(attr_name, attr_value, exist_id) + raise WrongTypeException(attr_name, attr_value, exist_id, instance_uuid) -def _is_alphanumeric(attr_name, attr_value, exist_id): +def _is_alphanumeric(attr_name, attr_value, exist_id, instance_uuid): if not re.match("[a-zA-Z0-9.]+$", attr_value): - raise WrongTypeException(attr_name, attr_value, exist_id) + raise WrongTypeException(attr_name, attr_value, exist_id, instance_uuid) class Verifier(object): diff --git a/verifier/glance_verifier.py b/verifier/glance_verifier.py index 1392f5d..c0227be 100644 --- a/verifier/glance_verifier.py +++ b/verifier/glance_verifier.py @@ -51,16 +51,14 @@ def _get_child_logger(): def _verify_field_mismatch(exists, usage): if not base_verifier._verify_date_field( usage.created_at, exists.created_at, same_second=True): - raise FieldMismatch('created_at', exists.created_at, - usage.created_at) + raise FieldMismatch('created_at', exists.created_at, usage.created_at, + exists.uuid) if usage.owner != exists.owner: - raise FieldMismatch('owner', exists.owner, - usage.owner) + raise FieldMismatch('owner', exists.owner, usage.owner, exists.uuid) if usage.size != exists.size: - raise FieldMismatch('size', exists.size, - usage.size) + raise FieldMismatch('size', exists.size, usage.size, exists.uuid) def _verify_validity(exist): @@ -68,11 +66,12 @@ def _verify_validity(exist): exist.uuid: 'uuid', exist.owner: 'owner'} for (field_value, field_name) in fields.items(): if field_value is None: - raise NullFieldException(field_name, exist.id) + raise NullFieldException(field_name, exist.id, exist.uuid) base_verifier._is_like_uuid('uuid', exist.uuid, exist.id) - base_verifier._is_like_date('created_at', exist.created_at, exist.id) - base_verifier._is_long('size', exist.size, exist.id) - base_verifier._is_hex_owner_id('owner', exist.owner, exist.id) + base_verifier._is_like_date('created_at', exist.created_at, exist.id, + exist.uuid) + base_verifier._is_long('size', exist.size, exist.id, exist.uuid) + base_verifier._is_hex_owner_id('owner', exist.owner, 
exist.id, exist.uuid) def _verify_for_usage(exist, usage=None): @@ -124,7 +123,7 @@ def _verify_for_delete(exist, delete=None): if not base_verifier._verify_date_field( delete.deleted_at, exist.deleted_at, same_second=True): raise FieldMismatch('deleted_at', exist.deleted_at, - delete.deleted_at) + delete.deleted_at, exist.uuid) def _verify(exists): diff --git a/verifier/nova_verifier.py b/verifier/nova_verifier.py index 9c91a97..f88afe8 100644 --- a/verifier/nova_verifier.py +++ b/verifier/nova_verifier.py @@ -54,33 +54,34 @@ def _verify_field_mismatch(exists, launch): if not base_verifier._verify_date_field( launch.launched_at, exists.launched_at, same_second=True): raise FieldMismatch('launched_at', exists.launched_at, - launch.launched_at) + launch.launched_at, exists.instance) if getattr(launch, flavor_field_name) != \ getattr(exists, flavor_field_name): raise FieldMismatch(flavor_field_name, getattr(exists, flavor_field_name), - getattr(launch, flavor_field_name)) + getattr(launch, flavor_field_name), + exists.instance) if launch.tenant != exists.tenant: - raise FieldMismatch('tenant', exists.tenant, - launch.tenant) + raise FieldMismatch('tenant', exists.tenant, launch.tenant, + exists.instance) if launch.rax_options != exists.rax_options: raise FieldMismatch('rax_options', exists.rax_options, - launch.rax_options) + launch.rax_options, exists.instance) if launch.os_architecture != exists.os_architecture: raise FieldMismatch('os_architecture', exists.os_architecture, - launch.os_architecture) + launch.os_architecture, exists.instance) if launch.os_version != exists.os_version: raise FieldMismatch('os_version', exists.os_version, - launch.os_version) + launch.os_version, exists.instance) if launch.os_distro != exists.os_distro: raise FieldMismatch('os_distro', exists.os_distro, - launch.os_distro) + launch.os_distro, exists.instance) def _verify_for_launch(exist, launch=None, @@ -147,12 +148,12 @@ def _verify_for_delete(exist, delete=None, if not base_verifier._verify_date_field( delete.launched_at, exist.launched_at, same_second=True): raise FieldMismatch('launched_at', exist.launched_at, - delete.launched_at) + delete.launched_at, exist.instance) if not base_verifier._verify_date_field( delete.deleted_at, exist.deleted_at, same_second=True): - raise FieldMismatch( - 'deleted_at', exist.deleted_at, delete.deleted_at) + raise FieldMismatch('deleted_at', exist.deleted_at, + delete.deleted_at, exist.instance) def _verify_basic_validity(exist): @@ -164,11 +165,14 @@ def _verify_basic_validity(exist): } for (field_name, field_value) in fields.items(): if field_value is None: - raise NullFieldException(field_name, exist.id) - base_verifier._is_hex_owner_id('tenant', exist.tenant, exist.id) - base_verifier._is_like_date('launched_at', exist.launched_at, exist.id) + raise NullFieldException(field_name, exist.id, exist.instance) + base_verifier._is_hex_owner_id( + 'tenant', exist.tenant, exist.id, exist.instance) + base_verifier._is_like_date( + 'launched_at', exist.launched_at, exist.id, exist.instance) if exist.deleted_at is not None: - base_verifier._is_like_date('deleted_at', exist.deleted_at, exist.id) + base_verifier._is_like_date( + 'deleted_at', exist.deleted_at, exist.id, exist.instance) def _verify_optional_validity(exist): @@ -178,11 +182,15 @@ def _verify_optional_validity(exist): exist.os_distro: 'os_distro'} for (field_value, field_name) in fields.items(): if field_value == '': - raise NullFieldException(field_name, exist.id) - base_verifier._is_int_in_char('rax_options', 
exist.rax_options, exist.id) - base_verifier._is_alphanumeric('os_architecture', exist.os_architecture, exist.id) - base_verifier._is_alphanumeric('os_distro', exist.os_distro, exist.id) - base_verifier._is_alphanumeric('os_version', exist.os_version, exist.id) + raise NullFieldException(field_name, exist.id, exist.instance) + base_verifier._is_int_in_char( + 'rax_options', exist.rax_options, exist.id, exist.instance) + base_verifier._is_alphanumeric( + 'os_architecture', exist.os_architecture, exist.id, exist.instance) + base_verifier._is_alphanumeric( + 'os_distro', exist.os_distro, exist.id, exist.instance) + base_verifier._is_alphanumeric( + 'os_version', exist.os_version, exist.id, exist.instance) def _verify_validity(exist, validation_level): From 3c7dd7bff59d8594aed075c38bc3b6dfbe3491f4 Mon Sep 17 00:00:00 2001 From: Anuj Mathur Date: Mon, 24 Feb 2014 15:15:52 +0530 Subject: [PATCH 39/53] Added tests for verification exceptions --- tests/unit/test_verification_exception.py | 60 +++++++++++++++++++++++ 1 file changed, 60 insertions(+) create mode 100644 tests/unit/test_verification_exception.py diff --git a/tests/unit/test_verification_exception.py b/tests/unit/test_verification_exception.py new file mode 100644 index 0000000..06078ae --- /dev/null +++ b/tests/unit/test_verification_exception.py @@ -0,0 +1,60 @@ +import datetime +import mox +from tests.unit import StacktachBaseTestCase +from verifier import NotFound, AmbiguousResults, FieldMismatch, NullFieldException, WrongTypeException + + +class VerificationExceptionTestCase(StacktachBaseTestCase): + def setUp(self): + self.mox = mox.Mox() + + def tearDown(self): + self.mox.UnsetStubs() + + def test_not_found_exception(self): + exception = NotFound('object_type', 'search_params') + + self.assertEqual(exception.reason, + "Couldn't find object_type using search_params") + + def test_ambiguous_results_exception(self): + exception = AmbiguousResults('object_type', 'search_params') + + self.assertEqual( + exception.reason, + "Ambiguous results for object_type using search_params") + + def test_field_mismatch_exception(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + + exception = FieldMismatch('field_name', 'expected', 'actual', 'uuid') + + self.assertEqual(exception.reason, + "Failed at 2014-01-02 03:04:05 UTC for uuid: Expected" + " field_name to be 'expected' got 'actual'") + + def test_null_field_exception(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + + exception = NullFieldException('field_name', '1234', 'uuid') + + self.assertEqual(exception.reason, + "Failed at 2014-01-02 03:04:05 UTC for uuid: " + "field_name field was null for exist id 1234") + + def test_wrong_type_exception(self): + self.mox.StubOutWithMock(datetime, 'datetime') + datetime.datetime.utcnow().AndReturn('2014-01-02 03:04:05') + self.mox.ReplayAll() + + exception = WrongTypeException('field_name', 'value', '1234', 'uuid') + + self.assertEqual(exception.reason, + "Failed at 2014-01-02 03:04:05 UTC for uuid: " + "{field_name: value} was of incorrect type for" + " exist id 1234") + From 06e3e4a8d4a9ec27237169c788653f02e70cbb3a Mon Sep 17 00:00:00 2001 From: Manali Latkar Date: Thu, 27 Feb 2014 15:11:31 +0530 Subject: [PATCH 40/53] adding an api to get count of .verified in rawdata --- docs/dbapi.rst | 35 ++++++++++++++ stacktach/dbapi.py | 33 +++++++++++++ stacktach/urls.py | 1 
+ tests/unit/test_dbapi.py | 100 ++++++++++++++++++++++++++++++++++----- 4 files changed, 156 insertions(+), 13 deletions(-) diff --git a/docs/dbapi.rst b/docs/dbapi.rst index e6d596f..23d1731 100644 --- a/docs/dbapi.rst +++ b/docs/dbapi.rst @@ -779,4 +779,39 @@ Returns a single instance exists matching provided id "id": 5300, "delete": null } + } + +db/count/verified/ +================== + +.. http:get:: http://example.com/count/verified/ + +Returns a count of .verified events stored in Stacktach's Rawdata table from +``audit_period_beginning`` to ``audit_period_ending`` + + **Query Parameters** + + * ``audit_period_beginning``: datetime (yyyy-mm-dd) + * ``audit_period_ending``: datetime (yyyy-mm-dd) + * ``service``: ``nova`` or ``glance``. default="nova" + + **Example request**: + + .. sourcecode:: http + + GET db/count/verified/ HTTP/1.1 + Host: example.com + Accept: application/json + + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + count: 10 } \ No newline at end of file diff --git a/stacktach/dbapi.py b/stacktach/dbapi.py index 1d0cc4d..4b9be0f 100644 --- a/stacktach/dbapi.py +++ b/stacktach/dbapi.py @@ -21,6 +21,7 @@ import decimal import functools import json +from datetime import datetime from django.db import transaction from django.db.models import FieldDoesNotExist @@ -418,3 +419,35 @@ def _convert_model_list(model_list, extra_values_func=None): converted.append(_convert_model(item, extra_values_func)) return converted + + +def _rawdata_factory(service): + if service == "nova": + rawdata = models.RawData.objects + elif service == "glance": + rawdata = models.GlanceRawData.objects + else: + raise BadRequestException(message="Invalid service") + return rawdata + + +@api_call +def get_verified_count(request): + try: + audit_period_beginning = datetime.strptime( + request.GET.get("audit_period_beginning"), "%Y-%m-%d") + audit_period_ending = datetime.strptime( + request.GET.get("audit_period_ending"), "%Y-%m-%d") + service = request.GET.get("service", "nova") + rawdata = _rawdata_factory(service) + filters = { + 'when__gte': dt.dt_to_decimal(audit_period_beginning), + 'when__lte': dt.dt_to_decimal(audit_period_ending), + 'event': "compute.instance.exists.verified" + } + return {'count': rawdata.filter(**filters).count()} + except KeyError and TypeError: + raise BadRequestException(message="Invalid/absent query parameter") + except ValueError: + raise BadRequestException(message="Invalid format for date (Correct " + "format should be %YYYY-%mm-%dd)") diff --git a/stacktach/urls.py b/stacktach/urls.py index e49c217..4cde55d 100644 --- a/stacktach/urls.py +++ b/stacktach/urls.py @@ -71,6 +71,7 @@ urlpatterns = patterns('', 'stacktach.dbapi.get_usage_exist_glance'), url(r'db/confirm/usage/exists/(?P[\w\-]+)/$', 'stacktach.dbapi.exists_send_status'), + url(r'db/count/verified', 'stacktach.dbapi.get_verified_count'), url(r'^(?P\d+)/$', 'stacktach.views.home', name='home'), url(r'^(?P\d+)/details/(?P\w+)/(?P\d+)/$', diff --git a/tests/unit/test_dbapi.py b/tests/unit/test_dbapi.py index 4b335a5..fab764f 100644 --- a/tests/unit/test_dbapi.py +++ b/tests/unit/test_dbapi.py @@ -19,6 +19,7 @@ # IN THE SOFTWARE. 
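
The db/count/verified documentation above only shows a bare GET; a minimal client-side sketch of how its query parameters fit together follows. Only the path, the parameter names, and the response shape come from the patch above; the host, the dates, and the use of the requests library (already listed in etc/pip-requires.txt) are assumptions for illustration.

import requests

# Hypothetical values: the route and parameter names are the ones added in
# stacktach/urls.py and dbapi.get_verified_count above; everything else is
# made up for the example.
params = {
    'audit_period_beginning': '2014-02-26',  # yyyy-mm-dd, parsed with strptime
    'audit_period_ending': '2014-02-27',
    'service': 'nova',                       # or 'glance'
}
resp = requests.get('http://example.com/db/count/verified', params=params)
print(resp.json())  # e.g. {"count": 10}
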
import datetime +from decimal import Decimal import json from django.db.models import FieldDoesNotExist @@ -124,8 +125,8 @@ class DBAPITestCase(StacktachBaseTestCase): fake_request = self.mox.CreateMockAnything() fake_request.GET = {'somebadfield_max': str(start_time)} fake_model = self.make_fake_model() - fake_model._meta.get_field_by_name('somebadfield')\ - .AndRaise(FieldDoesNotExist()) + fake_model._meta.get_field_by_name('somebadfield') \ + .AndRaise(FieldDoesNotExist()) self.mox.ReplayAll() self.assertRaises(dbapi.BadRequestException, dbapi._get_filter_args, @@ -307,7 +308,8 @@ class DBAPITestCase(StacktachBaseTestCase): fake_request.GET = filters self.mox.StubOutWithMock(dbapi, '_get_filter_args') dbapi._get_filter_args(fake_model, fake_request, - custom_filters=custom_filters).AndReturn(filters) + custom_filters=custom_filters).AndReturn( + filters) self.mox.StubOutWithMock(dbapi, '_check_has_field') dbapi._check_has_field(fake_model, 'id') result = self.mox.CreateMockAnything() @@ -558,7 +560,8 @@ class DBAPITestCase(StacktachBaseTestCase): exists1.send_status = 200 self.mox.VerifyAll() - def test_send_status_batch_accepts_post_for_nova_and_glance_when_version_is_1(self): + def test_send_status_batch_accepts_post_for_nova_and_glance_when_version_is_1( + self): fake_request = self.mox.CreateMockAnything() fake_request.method = 'POST' fake_request.GET = {'service': 'glance'} @@ -586,14 +589,16 @@ class DBAPITestCase(StacktachBaseTestCase): models.ImageExists.objects.select_for_update().AndReturn(results1) exists1A = self.mox.CreateMockAnything() exists1B = self.mox.CreateMockAnything() - results1.filter(message_id=MESSAGE_ID_2).AndReturn([exists1A, exists1B]) + results1.filter(message_id=MESSAGE_ID_2).AndReturn( + [exists1A, exists1B]) exists1A.save() exists1B.save() results2 = self.mox.CreateMockAnything() models.ImageExists.objects.select_for_update().AndReturn(results2) exists2A = self.mox.CreateMockAnything() exists2B = self.mox.CreateMockAnything() - results2.filter(message_id=MESSAGE_ID_1).AndReturn([exists2A, exists2B]) + results2.filter(message_id=MESSAGE_ID_1).AndReturn( + [exists2A, exists2B]) exists2A.save() exists2B.save() trans_obj.__exit__(None, None, None) @@ -604,7 +609,6 @@ class DBAPITestCase(StacktachBaseTestCase): self.mox.VerifyAll() - def test_send_status_batch_accepts_post_when_version_is_0(self): fake_request = self.mox.CreateMockAnything() fake_request.method = 'POST' @@ -759,7 +763,8 @@ class DBAPITestCase(StacktachBaseTestCase): launches = {'a': 1} self.mox.StubOutWithMock(dbapi, '_convert_model_list') dbapi._convert_model_list(mock_objects).AndReturn(launches) - dbapi.get_db_objects(models.InstanceUsage, fake_request, 'launched_at').AndReturn(mock_objects) + dbapi.get_db_objects(models.InstanceUsage, fake_request, + 'launched_at').AndReturn(mock_objects) self.mox.ReplayAll() resp = dbapi.list_usage_launches(fake_request) @@ -776,7 +781,8 @@ class DBAPITestCase(StacktachBaseTestCase): launches = {'a': 1} self.mox.StubOutWithMock(dbapi, '_convert_model_list') dbapi._convert_model_list(mock_objects).AndReturn(launches) - dbapi.get_db_objects(models.ImageUsage, fake_request, 'created_at').AndReturn(mock_objects) + dbapi.get_db_objects(models.ImageUsage, fake_request, + 'created_at').AndReturn(mock_objects) self.mox.ReplayAll() resp = dbapi.list_usage_images(fake_request) @@ -793,7 +799,8 @@ class DBAPITestCase(StacktachBaseTestCase): launches = {'a': 1} self.mox.StubOutWithMock(dbapi, '_convert_model_list') 
dbapi._convert_model_list(mock_objects).AndReturn(launches) - dbapi.get_db_objects(models.InstanceUsage, fake_request, 'launched_at').AndReturn(mock_objects) + dbapi.get_db_objects(models.InstanceUsage, fake_request, + 'launched_at').AndReturn(mock_objects) self.mox.ReplayAll() resp = dbapi.list_usage_launches(fake_request) @@ -880,7 +887,8 @@ class DBAPITestCase(StacktachBaseTestCase): deletes = {'a': 1} self.mox.StubOutWithMock(dbapi, '_convert_model_list') dbapi._convert_model_list(mock_objects).AndReturn(deletes) - dbapi.get_db_objects(models.InstanceDeletes, fake_request, 'launched_at').AndReturn(mock_objects) + dbapi.get_db_objects(models.InstanceDeletes, fake_request, + 'launched_at').AndReturn(mock_objects) self.mox.ReplayAll() resp = dbapi.list_usage_deletes(fake_request) @@ -897,7 +905,8 @@ class DBAPITestCase(StacktachBaseTestCase): deletes = {'a': 1} self.mox.StubOutWithMock(dbapi, '_convert_model_list') dbapi._convert_model_list(mock_objects).AndReturn(deletes) - dbapi.get_db_objects(models.InstanceDeletes, fake_request, 'launched_at').AndReturn(mock_objects) + dbapi.get_db_objects(models.InstanceDeletes, fake_request, + 'launched_at').AndReturn(mock_objects) self.mox.ReplayAll() resp = dbapi.list_usage_deletes(fake_request) @@ -914,10 +923,75 @@ class DBAPITestCase(StacktachBaseTestCase): deletes = {'a': 1} self.mox.StubOutWithMock(dbapi, '_convert_model_list') dbapi._convert_model_list(mock_objects).AndReturn(deletes) - dbapi.get_db_objects(models.ImageDeletes, fake_request, 'deleted_at').AndReturn(mock_objects) + dbapi.get_db_objects(models.ImageDeletes, fake_request, + 'deleted_at').AndReturn(mock_objects) self.mox.ReplayAll() resp = dbapi.list_usage_deletes_glance(fake_request) self.assertEqual(resp.status_code, 200) self.assertEqual(json.loads(resp.content), {'deletes': deletes}) self.mox.VerifyAll() + + def test_get_verified_count(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {'audit_period_beginning': "2014-02-26", + 'audit_period_ending': "2014-02-27", + 'service': "nova"} + mock_query = self.mox.CreateMockAnything() + self.mox.StubOutWithMock(models.RawData.objects, "filter") + models.RawData.objects.filter(event='compute.instance.exists.verified', + when__gte=Decimal('1393372800'), + when__lte=Decimal('1393459200')).\ + AndReturn(mock_query) + mock_query.count().AndReturn(100) + self.mox.ReplayAll() + + response = dbapi.get_verified_count(fake_request) + self.assertEqual(response.status_code, 200) + self.assertEqual(json.loads(response.content), {'count': 100}) + self.mox.VerifyAll() + + def test_get_verified_count_wrong_date_format_returns_400(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {'audit_period_beginning': "2014-020-26", + + 'service': "nova"} + + self.mox.ReplayAll() + + response = dbapi.get_verified_count(fake_request) + self.assertEqual(response.status_code, 400) + self.assertEqual(json.loads(response.content)['message'], + "Invalid format for date" + " (Correct format should be %YYYY-%mm-%dd)") + self.mox.VerifyAll() + + def test_get_verified_count_wrong_service_returns_400(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {'audit_period_beginning': "2014-02-26", + "audit_period_ending": "2014-02-27", + 'service': "qonos"} + + self.mox.ReplayAll() + + response = dbapi.get_verified_count(fake_request) + self.assertEqual(response.status_code, 400) + 
self.assertEqual(json.loads(response.content)['message'], + "Invalid service") + self.mox.VerifyAll() + + def test_get_verified_count_invalid_query_parameter_returns_400(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {'audit_period': "2014-02-26",} + + self.mox.ReplayAll() + + response = dbapi.get_verified_count(fake_request) + self.assertEqual(response.status_code, 400) + self.assertEqual(json.loads(response.content)['message'], + "Invalid/absent query parameter") + self.mox.VerifyAll() \ No newline at end of file From 97666bcb6c4f865a424cd86d307fbafd99374855 Mon Sep 17 00:00:00 2001 From: Manali Latkar Date: Mon, 17 Feb 2014 18:36:24 +0530 Subject: [PATCH 41/53] Stacktach down scenario: added an api to repair exists status after a period of verifier inactivity added documentation --- docs/api.rst | 2 +- docs/dbapi.rst | 38 +++++++++- stacktach/dbapi.py | 22 ++++++ stacktach/models.py | 36 +++++++++ stacktach/urls.py | 3 +- tests/unit/test_dbapi.py | 51 +++++++++++++ tests/unit/test_models.py | 138 +++++++++++++++++++++++++++++++++++ tests/unit/test_stacktach.py | 3 +- 8 files changed, 288 insertions(+), 5 deletions(-) diff --git a/docs/api.rst b/docs/api.rst index 8b559dc..0644414 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -805,4 +805,4 @@ stacky/usage/exists ] ] - :query instance: desired instance UUID (optional) + :query instance: desired instance UUID (optional) \ No newline at end of file diff --git a/docs/dbapi.rst b/docs/dbapi.rst index 23d1731..9a4daef 100644 --- a/docs/dbapi.rst +++ b/docs/dbapi.rst @@ -814,4 +814,40 @@ Returns a count of .verified events stored in Stacktach's Rawdata table from { count: 10 - } \ No newline at end of file + } + +repair +====== + +.. http:post:: http://example.com/repair/ + + Changes the status of all the exists of message-ids sent with the request + from 'pending' to 'sent_unverified' so that the verifier does not end up + sending .verified for all those exists(since the .exists have already been + modified as .verified and sent to AH by Yagi). It sends back the message-ids + of exists which could not be updated in the json response. + + **Example request**: + + .. sourcecode::http + + POST /repair/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/json + + { + u'exists_not_pending': [u'494ebfce-0219-4b62-b810-79039a279620'], + u'absent_exists': [u'7609f3b2-3694-4b6f-869e-2f13ae504cb2', + u'0c64032e-4a60-44c0-a99d-5a4f2e46afb0'] + } + + :query message_ids: list of message_ids of exists messages + :query service: ``nova`` or ``glance``. 
default="nova" \ No newline at end of file diff --git a/stacktach/dbapi.py b/stacktach/dbapi.py index 4b9be0f..6450007 100644 --- a/stacktach/dbapi.py +++ b/stacktach/dbapi.py @@ -36,6 +36,7 @@ from stacktach import datetime_to_decimal as dt from stacktach import models from stacktach import stacklog from stacktach import utils +from stacktach.models import InstanceExists, ImageExists DEFAULT_LIMIT = 50 HARD_LIMIT = 1000 @@ -451,3 +452,24 @@ def get_verified_count(request): except ValueError: raise BadRequestException(message="Invalid format for date (Correct " "format should be %YYYY-%mm-%dd)") + + +def exists_factory(service): + model = InstanceExists + if service == 'glance': + model = ImageExists + return model + + +def repair_stacktach_down(request): + post_dict = dict((request.POST._iterlists())) + message_ids = post_dict.get('message_ids') + service = post_dict.get('service', ['nova']) + absent_exists, exists_not_pending = \ + exists_factory(service[0]).mark_exists_as_sent_unverified(message_ids) + response_data = {'absent_exists': absent_exists, + 'exists_not_pending': exists_not_pending} + response = HttpResponse(json.dumps(response_data), + content_type="application/json") + return response + diff --git a/stacktach/models.py b/stacktach/models.py index 9d1e0fb..d9afe35 100644 --- a/stacktach/models.py +++ b/stacktach/models.py @@ -338,6 +338,24 @@ class InstanceExists(models.Model): def update_status(self, new_status): self.status = new_status + @staticmethod + def mark_exists_as_sent_unverified(message_ids): + absent_exists = [] + exists_not_pending = [] + for message_id in message_ids: + try: + exists = InstanceExists.objects.get(message_id=message_id) + if exists.status == InstanceExists.PENDING: + exists.status = InstanceExists.SENT_UNVERIFIED + exists.save() + else: + exists_not_pending.append(message_id) + except Exception: + absent_exists.append(message_id) + return absent_exists, exists_not_pending + + + class Timing(models.Model): """Each Timing record corresponds to a .start/.end event pair @@ -536,6 +554,24 @@ class ImageExists(models.Model): self.fail_reason = reason self.save() + @staticmethod + def mark_exists_as_sent_unverified(message_ids): + absent_exists = [] + exists_not_pending = [] + for message_id in message_ids: + exists_list = ImageExists.objects.filter(message_id=message_id) + if exists_list: + for exists in exists_list: + if exists.status == ImageExists.PENDING: + exists.status = ImageExists.SENT_UNVERIFIED + exists.save() + else: + exists_not_pending.append(message_id) + else : + absent_exists.append(message_id) + return absent_exists, exists_not_pending + + def get_model_fields(model): return model._meta.fields diff --git a/stacktach/urls.py b/stacktach/urls.py index 4cde55d..a965594 100644 --- a/stacktach/urls.py +++ b/stacktach/urls.py @@ -72,6 +72,7 @@ urlpatterns = patterns('', url(r'db/confirm/usage/exists/(?P[\w\-]+)/$', 'stacktach.dbapi.exists_send_status'), url(r'db/count/verified', 'stacktach.dbapi.get_verified_count'), + url(r'repair/', 'stacktach.dbapi.repair_stacktach_down'), url(r'^(?P\d+)/$', 'stacktach.views.home', name='home'), url(r'^(?P\d+)/details/(?P\w+)/(?P\d+)/$', @@ -83,5 +84,5 @@ urlpatterns = patterns('', url(r'^(?P\d+)/latest_raw/$', 'stacktach.views.latest_raw', name='latest_raw'), url(r'^(?P\d+)/instance_status/$', - 'stacktach.views.instance_status', name='instance_status'), + 'stacktach.views.instance_status', name='instance_status') ) diff --git a/tests/unit/test_dbapi.py b/tests/unit/test_dbapi.py index fab764f..7739c9e 
100644 --- a/tests/unit/test_dbapi.py +++ b/tests/unit/test_dbapi.py @@ -994,4 +994,55 @@ class DBAPITestCase(StacktachBaseTestCase): self.assertEqual(response.status_code, 400) self.assertEqual(json.loads(response.content)['message'], "Invalid/absent query parameter") + self.mox.VerifyAll() + +class StacktachRepairScenarioApi(StacktachBaseTestCase): + def setUp(self): + self.mox = mox.Mox() + + def tearDown(self): + self.mox.UnsetStubs() + + def test_change_nova_exists_status_for_all_exists(self): + request = self.mox.CreateMockAnything() + request.POST = self.mox.CreateMockAnything() + message_ids = ["04fd94b5-64dd-4559-83b7-981d9d4f7a5a", + "14fd94b5-64dd-4559-83b7-981d9d4f7a5a", + "24fd94b5-64dd-4559-83b7-981d9d4f7a5a"] + request.POST._iterlists().AndReturn([('service', 'nova'), + ('message_ids', message_ids)]) + self.mox.StubOutWithMock(models.InstanceExists, + 'mark_exists_as_sent_unverified') + models.InstanceExists.mark_exists_as_sent_unverified(message_ids).\ + AndReturn([[], []]) + self.mox.ReplayAll() + + response = dbapi.repair_stacktach_down(request) + self.assertEqual(response.status_code, 200) + response_data = json.loads(response.content) + self.assertEqual(response_data['exists_not_pending'], []) + self.assertEqual(response_data['absent_exists'], []) + + self.mox.VerifyAll() + + def test_change_glance_exists_status_for_all_exists(self): + request = self.mox.CreateMockAnything() + request.POST = self.mox.CreateMockAnything() + message_ids = ['04fd94b5-64dd-4559-83b7-981d9d4f7a5a', + '14fd94b5-64dd-4559-83b7-981d9d4f7a5a', + '24fd94b5-64dd-4559-83b7-981d9d4f7a5a'] + request.POST._iterlists().AndReturn([('service', ['glance']), + ('message_ids', message_ids)]) + self.mox.StubOutWithMock(models.ImageExists, + 'mark_exists_as_sent_unverified') + models.ImageExists.mark_exists_as_sent_unverified(message_ids).\ + AndReturn([[], []]) + self.mox.ReplayAll() + + response = dbapi.repair_stacktach_down(request) + self.assertEqual(response.status_code, 200) + response_data = json.loads(response.content) + self.assertEqual(response_data['exists_not_pending'], []) + self.assertEqual(response_data['absent_exists'], []) + self.mox.VerifyAll() \ No newline at end of file diff --git a/tests/unit/test_models.py b/tests/unit/test_models.py index ea32261..f2c7a03 100644 --- a/tests/unit/test_models.py +++ b/tests/unit/test_models.py @@ -112,6 +112,81 @@ class ImageExistsTestCase(unittest.TestCase): 'owner1-3': [exist4], 'owner2-2': [exist2]}) + def test_mark_exists_as_sent_unverified(self): + message_ids = ["0708cb0b-6169-4d7c-9f58-3cf3d5bf694b", + "9156b83e-f684-4ec3-8f94-7e41902f27aa"] + + exist1 = self.mox.CreateMockAnything() + exist1.status = "pending" + exist1.save() + exist2 = self.mox.CreateMockAnything() + exist2.status = "pending" + exist2.save() + exist3 = self.mox.CreateMockAnything() + exist3.status = "pending" + exist3.save() + self.mox.StubOutWithMock(ImageExists.objects, 'filter') + ImageExists.objects.filter(message_id=message_ids[0]).AndReturn( + [exist1, exist2]) + ImageExists.objects.filter(message_id=message_ids[1]).AndReturn( + [exist3]) + self.mox.ReplayAll() + + results = ImageExists.mark_exists_as_sent_unverified(message_ids) + + self.assertEqual(results, ([], [])) + + self.mox.VerifyAll() + + def test_mark_exists_as_sent_unverified_return_absent_exists(self): + message_ids = ["0708cb0b-6169-4d7c-9f58-3cf3d5bf694b", + "9156b83e-f684-4ec3-8f94-7e41902f27aa"] + + exist1 = self.mox.CreateMockAnything() + exist1.status = "pending" + exist1.save() + exist2 = 
self.mox.CreateMockAnything() + exist2.status = "pending" + exist2.save() + self.mox.StubOutWithMock(ImageExists.objects, 'filter') + ImageExists.objects.filter(message_id=message_ids[0]).AndReturn( + [exist1, exist2]) + ImageExists.objects.filter(message_id=message_ids[1]).AndReturn([]) + self.mox.ReplayAll() + + results = ImageExists.mark_exists_as_sent_unverified(message_ids) + + self.assertEqual(results, (['9156b83e-f684-4ec3-8f94-7e41902f27aa'], + [])) + + self.mox.VerifyAll() + + def test_mark_exists_as_sent_unverified_and_return_exist_not_pending(self): + message_ids = ["0708cb0b-6169-4d7c-9f58-3cf3d5bf694b", + "9156b83e-f684-4ec3-8f94-7e41902f27aa"] + + exist1 = self.mox.CreateMockAnything() + exist1.status = "pending" + exist1.save() + exist2 = self.mox.CreateMockAnything() + exist2.status = "verified" + exist3 = self.mox.CreateMockAnything() + exist3.status = "pending" + exist3.save() + self.mox.StubOutWithMock(ImageExists.objects, 'filter') + ImageExists.objects.filter(message_id=message_ids[0]).AndReturn( + [exist1, exist2]) + ImageExists.objects.filter(message_id=message_ids[1]).AndReturn( + [exist3]) + self.mox.ReplayAll() + + results = ImageExists.mark_exists_as_sent_unverified(message_ids) + + self.assertEqual(results, ([], + ["0708cb0b-6169-4d7c-9f58-3cf3d5bf694b"])) + + self.mox.VerifyAll() + class InstanceExistsTestCase(unittest.TestCase): def setUp(self): @@ -137,3 +212,66 @@ class InstanceExistsTestCase(unittest.TestCase): self.mox.VerifyAll() self.assertEqual(results, [1, 2]) + + def test_mark_exists_as_sent_unverified(self): + message_ids = ["0708cb0b-6169-4d7c-9f58-3cf3d5bf694b", + "9156b83e-f684-4ec3-8f94-7e41902f27aa"] + + exist1 = self.mox.CreateMockAnything() + exist1.status = "pending" + exist1.save() + exist2 = self.mox.CreateMockAnything() + exist2.status = "pending" + exist2.save() + self.mox.StubOutWithMock(InstanceExists.objects, 'get') + InstanceExists.objects.get(message_id=message_ids[0]).AndReturn(exist1) + InstanceExists.objects.get(message_id=message_ids[1]).AndReturn(exist2) + self.mox.ReplayAll() + + results = InstanceExists.mark_exists_as_sent_unverified(message_ids) + + self.assertEqual(results, ([], [])) + + self.mox.VerifyAll() + + def test_mark_exists_as_sent_unverified_return_absent_exists(self): + message_ids = ["0708cb0b-6169-4d7c-9f58-3cf3d5bf694b", + "9156b83e-f684-4ec3-8f94-7e41902f27aa"] + + exist1 = self.mox.CreateMockAnything() + exist1.status = "pending" + exist1.save() + self.mox.StubOutWithMock(InstanceExists.objects, 'get') + InstanceExists.objects.get(message_id=message_ids[0]).AndReturn(exist1) + InstanceExists.objects.get(message_id=message_ids[1]).AndRaise( + Exception) + self.mox.ReplayAll() + + results = InstanceExists.mark_exists_as_sent_unverified(message_ids) + + self.assertEqual(results, (['9156b83e-f684-4ec3-8f94-7e41902f27aa'], + [])) + + self.mox.VerifyAll() + + def test_mark_exists_as_sent_unverified_and_return_exist_not_pending(self): + message_ids = ["0708cb0b-6169-4d7c-9f58-3cf3d5bf694b", + "9156b83e-f684-4ec3-8f94-7e41902f27aa"] + + exist1 = self.mox.CreateMockAnything() + exist1.status = "pending" + exist1.save() + exist2 = self.mox.CreateMockAnything() + exist2.status = "verified" + self.mox.StubOutWithMock(InstanceExists.objects, 'get') + InstanceExists.objects.get(message_id=message_ids[0]).AndReturn(exist1) + InstanceExists.objects.get(message_id=message_ids[1]).AndReturn(exist2) + self.mox.ReplayAll() + + results = InstanceExists.mark_exists_as_sent_unverified(message_ids) + + self.assertEqual(results, ([], 
+ ["9156b83e-f684-4ec3-8f94-7e41902f27aa"])) + + self.mox.VerifyAll() + diff --git a/tests/unit/test_stacktach.py b/tests/unit/test_stacktach.py index 4ca4311..a668f13 100644 --- a/tests/unit/test_stacktach.py +++ b/tests/unit/test_stacktach.py @@ -38,8 +38,7 @@ from utils import TENANT_ID_1 from utils import INSTANCE_TYPE_ID_1 from utils import DUMMY_TIME from utils import INSTANCE_TYPE_ID_2 -from utils import IMAGE_UUID_1 -from stacktach import stacklog +from stacktach import stacklog, models from stacktach import notification from stacktach import views from tests.unit import StacktachBaseTestCase From cb29fb70220f6668aa2ca245dc31bfcd60684284 Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Thu, 27 Feb 2014 11:15:41 -0500 Subject: [PATCH 42/53] Exists stats api --- docs/dbapi.rst | 97 +++++++++++++++++++++- etc/pip-requires.txt | 4 +- stacktach/dbapi.py | 59 ++++++++++---- stacktach/urls.py | 36 ++++++--- tests/unit/test_dbapi.py | 171 +++++++++++++++++++++++++++++++++++++++ 5 files changed, 335 insertions(+), 32 deletions(-) diff --git a/docs/dbapi.rst b/docs/dbapi.rst index e6d596f..d23cac4 100644 --- a/docs/dbapi.rst +++ b/docs/dbapi.rst @@ -41,7 +41,7 @@ Write APIs ********** db/confirm/usage/exists/batch/ -===================================== +============================== .. http:put:: http://example.com/db/confirm/usage/exists/batch/ @@ -100,6 +100,101 @@ Uses the provided message_id's and http status codes to update image and instanc Read APIs ********* +db/stats/nova/exists/ +===================== + +.. http:get:: http://example.com/db/stats/nova/exists + +Returns a list of status combinations and count of events with those status combinations. + +Note: Only status combinations with >0 count will show up. + + **Query Parameters** + + * ``audit_period_beginning_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``audit_period_beginning_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``audit_period_ending_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``audit_period_ending_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``launched_at_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``launched_at_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``deleted_at_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``deleted_at_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``received_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``received_max``: datetime (yyyy-mm-dd hh:mm:ss) + + **Example request**: + + .. sourcecode:: http + + GET /db/stats/nova/exists/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "stats": + [ + {"status": "pending", "send_status": 0, "event_count": 1}, + {"status": "verified", "send_status": 200, "event_count": 100}, + {"status": "reconciled", "send_status": 200, "event_count": 2}, + {"status": "failed", "send_status": 0, "event_count": 1}, + ] + } + +db/stats/glance/exists/ +======================= + +.. http:get:: http://example.com/db/status/usage/glance/exists + +Returns a list of status combinations and count of events with those status combinations. + +Note: Only status combinations with >0 count will show up. 
+ + **Query Parameters** + + * ``audit_period_beginning_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``audit_period_beginning_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``audit_period_ending_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``audit_period_ending_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``created_at_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``created_at_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``deleted_at_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``deleted_at_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``received_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``received_max``: datetime (yyyy-mm-dd hh:mm:ss) + + **Example request**: + + .. sourcecode:: http + + GET /db/stats/nova/exists/ HTTP/1.1 + Host: example.com + Accept: application/json + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "stats": + [ + {"status": "verified", "send_status": 200, "event_count": 200}, + {"status": "failed", "send_status": 0, "event_count": 2}, + ] + } + + db/usage/launches/ ================== diff --git a/etc/pip-requires.txt b/etc/pip-requires.txt index f14c1f6..72c109e 100644 --- a/etc/pip-requires.txt +++ b/etc/pip-requires.txt @@ -1,4 +1,4 @@ -Django>=1.4.2 +Django>=1.4.2, <1.6.0 MySQL-python>=1.2.3 eventlet>=0.9.17 kombu>=2.4.7 @@ -9,4 +9,4 @@ Pympler requests south sphinxcontrib-httpdomain -pbr \ No newline at end of file +pbr diff --git a/stacktach/dbapi.py b/stacktach/dbapi.py index 1d0cc4d..174c50b 100644 --- a/stacktach/dbapi.py +++ b/stacktach/dbapi.py @@ -23,6 +23,7 @@ import functools import json from django.db import transaction +from django.db.models import Count from django.db.models import FieldDoesNotExist from django.forms.models import model_to_dict from django.http import HttpResponse @@ -199,22 +200,7 @@ def list_usage_exists_glance(request): def list_usage_exists_with_service(request, service): model = _exists_model_factory(service) - try: - custom_filters = {} - if 'received_min' in request.GET: - received_min = request.GET['received_min'] - custom_filters['received_min'] = {} - custom_filters['received_min']['raw__when__gte'] = \ - utils.str_time_to_unix(received_min) - if 'received_max' in request.GET: - received_max = request.GET['received_max'] - custom_filters['received_max'] = {} - custom_filters['received_max']['raw__when__lte'] = \ - utils.str_time_to_unix(received_max) - except AttributeError: - msg = "Range filters must be dates." 
- raise BadRequestException(message=msg) - + custom_filters = _get_exists_filter_args(request) objects = get_db_objects(model['klass'], request, 'id', custom_filters=custom_filters) dicts = _convert_model_list(objects, _exists_extra_values) @@ -232,6 +218,28 @@ def get_usage_exist_glance(request, exist_id): _exists_extra_values)} +@api_call +def get_usage_exist_stats(request): + return {'stats': _get_exist_stats(request, 'nova')} + + +@api_call +def get_usage_exist_stats_glance(request): + return {'stats': _get_exist_stats(request, 'glance')} + + +def _get_exist_stats(request, service): + klass = _exists_model_factory(service)['klass'] + exists_filters = _get_exists_filter_args(request) + filters = _get_filter_args(klass, request, + custom_filters=exists_filters) + for value in exists_filters.values(): + filters.update(value) + query = klass.objects.filter(**filters) + values = query.values('status', 'send_status') + stats = values.annotate(event_count=Count('send_status')) + return stats + @api_call def exists_send_status(request, message_id): if request.method not in ['PUT', 'POST']: @@ -330,6 +338,25 @@ def _check_has_field(klass, field_name): raise BadRequestException(msg) +def _get_exists_filter_args(request): + try: + custom_filters = {} + if 'received_min' in request.GET: + received_min = request.GET['received_min'] + custom_filters['received_min'] = {} + custom_filters['received_min']['raw__when__gte'] = \ + utils.str_time_to_unix(received_min) + if 'received_max' in request.GET: + received_max = request.GET['received_max'] + custom_filters['received_max'] = {} + custom_filters['received_max']['raw__when__lte'] = \ + utils.str_time_to_unix(received_max) + except AttributeError: + msg = "Range filters must be dates." + raise BadRequestException(message=msg) + return custom_filters + + def _get_filter_args(klass, request, custom_filters=None): filter_args = {} if 'instance' in request.GET: diff --git a/stacktach/urls.py b/stacktach/urls.py index e49c217..b5d471c 100644 --- a/stacktach/urls.py +++ b/stacktach/urls.py @@ -7,8 +7,22 @@ web_logger = stacklog.get_logger('stacktach-web') web_logger_listener = stacklog.LogListener(web_logger) web_logger_listener.start() -urlpatterns = patterns('', +web_urls = ( url(r'^$', 'stacktach.views.welcome', name='welcome'), + url(r'^(?P\d+)/$', 'stacktach.views.home', name='home'), + url(r'^(?P\d+)/details/(?P\w+)/(?P\d+)/$', + 'stacktach.views.details', name='details'), + url(r'^(?P\d+)/search/$', + 'stacktach.views.search', name='search'), + url(r'^(?P\d+)/expand/(?P\d+)/$', + 'stacktach.views.expand', name='expand'), + url(r'^(?P\d+)/latest_raw/$', + 'stacktach.views.latest_raw', name='latest_raw'), + url(r'^(?P\d+)/instance_status/$', + 'stacktach.views.instance_status', name='instance_status'), +) + +stacky_urls = ( url(r'stacky/deployments/$', 'stacktach.stacky_server.do_deployments'), url(r'stacky/events/$', 'stacktach.stacky_server.do_events'), url(r'stacky/hosts/$', 'stacktach.stacky_server.do_hosts'), @@ -35,7 +49,9 @@ urlpatterns = patterns('', 'stacktach.stacky_server.do_list_usage_deletes'), url(r'stacky/usage/exists/$', 'stacktach.stacky_server.do_list_usage_exists'), +) +dbapi_urls = ( url(r'db/usage/launches/$', 'stacktach.dbapi.list_usage_launches'), url(r'db/usage/nova/launches/$', @@ -71,16 +87,10 @@ urlpatterns = patterns('', 'stacktach.dbapi.get_usage_exist_glance'), url(r'db/confirm/usage/exists/(?P[\w\-]+)/$', 'stacktach.dbapi.exists_send_status'), - - url(r'^(?P\d+)/$', 'stacktach.views.home', name='home'), - 
url(r'^(?P\d+)/details/(?P\w+)/(?P\d+)/$', - 'stacktach.views.details', name='details'), - url(r'^(?P\d+)/search/$', - 'stacktach.views.search', name='search'), - url(r'^(?P\d+)/expand/(?P\d+)/$', - 'stacktach.views.expand', name='expand'), - url(r'^(?P\d+)/latest_raw/$', - 'stacktach.views.latest_raw', name='latest_raw'), - url(r'^(?P\d+)/instance_status/$', - 'stacktach.views.instance_status', name='instance_status'), + url(r'db/stats/nova/exists$', + 'stacktach.dbapi.get_usage_exist_stats'), + url(r'db/stats/glance/exists$', + 'stacktach.dbapi.get_usage_exist_stats_glance'), ) + +urlpatterns = patterns('', *(web_urls + stacky_urls + dbapi_urls)) diff --git a/tests/unit/test_dbapi.py b/tests/unit/test_dbapi.py index 4b335a5..3de9b11 100644 --- a/tests/unit/test_dbapi.py +++ b/tests/unit/test_dbapi.py @@ -21,6 +21,7 @@ import datetime import json +from django.db.models import Count from django.db.models import FieldDoesNotExist from django.db import transaction import mox @@ -43,6 +44,10 @@ class DBAPITestCase(StacktachBaseTestCase): mor_exception = models.InstanceExists.MultipleObjectsReturned self.mox.StubOutWithMock(models, 'InstanceExists', use_mock_anything=True) + self.mox.StubOutWithMock(models, 'ImageExists', + use_mock_anything=True) + models.InstanceExists._meta = self.mox.CreateMockAnything() + models.ImageExists._meta = self.mox.CreateMockAnything() models.InstanceExists.objects = self.mox.CreateMockAnything() models.ImageExists.objects = self.mox.CreateMockAnything() models.InstanceExists.DoesNotExist = dne_exception @@ -921,3 +926,169 @@ class DBAPITestCase(StacktachBaseTestCase): self.assertEqual(resp.status_code, 200) self.assertEqual(json.loads(resp.content), {'deletes': deletes}) self.mox.VerifyAll() + + def test_get_usage_exist_stats_nova(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {} + query = self.mox.CreateMockAnything() + models.InstanceExists.objects.filter().AndReturn(query) + query.values('status', 'send_status').AndReturn(query) + result = [ + {'status': 'verified', 'send_status': 201L, 'event_count': 2}, + {'status': 'failed', 'send_status': 0L, 'event_count': 1} + ] + query.annotate(event_count=mox.IsA(Count)).AndReturn(result) + self.mox.ReplayAll() + response = dbapi.get_usage_exist_stats(fake_request) + self.assertEqual(response.status_code, 200) + expected_response = json.dumps({'stats': result}) + self.assertEqual(expected_response, response.content) + self.mox.VerifyAll() + + def test_get_usage_exist_stats_nova_received_min(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + now = datetime.datetime.utcnow() + fake_request.GET = {'received_min': str(now)} + query = self.mox.CreateMockAnything() + filters = {'raw__when__gte': utils.decimal_utc(now)} + models.InstanceExists.objects.filter(**filters).AndReturn(query) + query.values('status', 'send_status').AndReturn(query) + result = [ + {'status': 'verified', 'send_status': 201L, 'event_count': 2}, + {'status': 'failed', 'send_status': 0L, 'event_count': 1} + ] + query.annotate(event_count=mox.IsA(Count)).AndReturn(result) + self.mox.ReplayAll() + response = dbapi.get_usage_exist_stats(fake_request) + self.assertEqual(response.status_code, 200) + expected_response = json.dumps({'stats': result}) + self.assertEqual(expected_response, response.content) + self.mox.VerifyAll() + + def test_get_usage_exist_stats_nova_received_max(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + now 
= datetime.datetime.utcnow() + fake_request.GET = {'received_max': str(now)} + query = self.mox.CreateMockAnything() + filters = {'raw__when__lte': utils.decimal_utc(now)} + models.InstanceExists.objects.filter(**filters).AndReturn(query) + query.values('status', 'send_status').AndReturn(query) + result = [ + {'status': 'verified', 'send_status': 201L, 'event_count': 2}, + {'status': 'failed', 'send_status': 0L, 'event_count': 1} + ] + query.annotate(event_count=mox.IsA(Count)).AndReturn(result) + self.mox.ReplayAll() + response = dbapi.get_usage_exist_stats(fake_request) + self.assertEqual(response.status_code, 200) + expected_response = json.dumps({'stats': result}) + self.assertEqual(expected_response, response.content) + self.mox.VerifyAll() + + def test_get_usage_exist_stats_nova_class_field_filter(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + now = datetime.datetime.utcnow() + fake_request.GET = {'audit_period_ending_min': str(now)} + query = self.mox.CreateMockAnything() + models.InstanceExists._meta.get_field_by_name('audit_period_ending') + filters = {'audit_period_ending__gte': utils.decimal_utc(now)} + models.InstanceExists.objects.filter(**filters).AndReturn(query) + query.values('status', 'send_status').AndReturn(query) + result = [ + {'status': 'verified', 'send_status': 201L, 'event_count': 2}, + {'status': 'failed', 'send_status': 0L, 'event_count': 1} + ] + query.annotate(event_count=mox.IsA(Count)).AndReturn(result) + self.mox.ReplayAll() + response = dbapi.get_usage_exist_stats(fake_request) + self.assertEqual(response.status_code, 200) + expected_response = json.dumps({'stats': result}) + self.assertEqual(expected_response, response.content) + self.mox.VerifyAll() + + def test_get_usage_exist_stats_glance(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {} + query = self.mox.CreateMockAnything() + models.ImageExists.objects.filter().AndReturn(query) + query.values('status', 'send_status').AndReturn(query) + result = [ + {'status': 'verified', 'send_status': 201L, 'event_count': 2}, + {'status': 'failed', 'send_status': 0L, 'event_count': 1} + ] + query.annotate(event_count=mox.IsA(Count)).AndReturn(result) + self.mox.ReplayAll() + response = dbapi.get_usage_exist_stats_glance(fake_request) + self.assertEqual(response.status_code, 200) + expected_response = json.dumps({'stats': result}) + self.assertEqual(expected_response, response.content) + self.mox.VerifyAll() + + def test_get_usage_exist_stats_glance_received_min(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + now = datetime.datetime.utcnow() + fake_request.GET = {'received_min': str(now)} + query = self.mox.CreateMockAnything() + filters = {'raw__when__gte': utils.decimal_utc(now)} + models.ImageExists.objects.filter(**filters).AndReturn(query) + query.values('status', 'send_status').AndReturn(query) + result = [ + {'status': 'verified', 'send_status': 201L, 'event_count': 2}, + {'status': 'failed', 'send_status': 0L, 'event_count': 1} + ] + query.annotate(event_count=mox.IsA(Count)).AndReturn(result) + self.mox.ReplayAll() + response = dbapi.get_usage_exist_stats_glance(fake_request) + self.assertEqual(response.status_code, 200) + expected_response = json.dumps({'stats': result}) + self.assertEqual(expected_response, response.content) + self.mox.VerifyAll() + + def test_get_usage_exist_stats_glance_received_max(self): + fake_request = self.mox.CreateMockAnything() + 
fake_request.method = 'GET' + now = datetime.datetime.utcnow() + fake_request.GET = {'received_max': str(now)} + query = self.mox.CreateMockAnything() + filters = {'raw__when__lte': utils.decimal_utc(now)} + models.ImageExists.objects.filter(**filters).AndReturn(query) + query.values('status', 'send_status').AndReturn(query) + result = [ + {'status': 'verified', 'send_status': 201L, 'event_count': 2}, + {'status': 'failed', 'send_status': 0L, 'event_count': 1} + ] + query.annotate(event_count=mox.IsA(Count)).AndReturn(result) + self.mox.ReplayAll() + response = dbapi.get_usage_exist_stats_glance(fake_request) + self.assertEqual(response.status_code, 200) + expected_response = json.dumps({'stats': result}) + self.assertEqual(expected_response, response.content) + self.mox.VerifyAll() + + def test_get_usage_exist_stats_glance_class_field_filter(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + now = datetime.datetime.utcnow() + fake_request.GET = {'audit_period_ending_min': str(now)} + query = self.mox.CreateMockAnything() + models.ImageExists._meta.get_field_by_name('audit_period_ending') + filters = {'audit_period_ending__gte': utils.decimal_utc(now)} + models.ImageExists.objects.filter(**filters).AndReturn(query) + query.values('status', 'send_status').AndReturn(query) + result = [ + {'status': 'verified', 'send_status': 201L, 'event_count': 2}, + {'status': 'failed', 'send_status': 0L, 'event_count': 1} + ] + query.annotate(event_count=mox.IsA(Count)).AndReturn(result) + self.mox.ReplayAll() + response = dbapi.get_usage_exist_stats_glance(fake_request) + self.assertEqual(response.status_code, 200) + expected_response = json.dumps({'stats': result}) + self.assertEqual(expected_response, response.content) + self.mox.VerifyAll() From a45a11e2f75be7d1fe5fb36be6ed79970d1014bb Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Thu, 27 Feb 2014 14:59:03 -0500 Subject: [PATCH 43/53] Refactoring repair api --- stacktach/dbapi.py | 11 ++--------- stacktach/urls.py | 2 +- tests/unit/test_dbapi.py | 2 +- 3 files changed, 4 insertions(+), 11 deletions(-) diff --git a/stacktach/dbapi.py b/stacktach/dbapi.py index 6450007..ad1a692 100644 --- a/stacktach/dbapi.py +++ b/stacktach/dbapi.py @@ -36,7 +36,6 @@ from stacktach import datetime_to_decimal as dt from stacktach import models from stacktach import stacklog from stacktach import utils -from stacktach.models import InstanceExists, ImageExists DEFAULT_LIMIT = 50 HARD_LIMIT = 1000 @@ -454,19 +453,13 @@ def get_verified_count(request): "format should be %YYYY-%mm-%dd)") -def exists_factory(service): - model = InstanceExists - if service == 'glance': - model = ImageExists - return model - - def repair_stacktach_down(request): post_dict = dict((request.POST._iterlists())) message_ids = post_dict.get('message_ids') service = post_dict.get('service', ['nova']) + klass = _exists_model_factory(service[0])['klass'] absent_exists, exists_not_pending = \ - exists_factory(service[0]).mark_exists_as_sent_unverified(message_ids) + klass.mark_exists_as_sent_unverified(message_ids) response_data = {'absent_exists': absent_exists, 'exists_not_pending': exists_not_pending} response = HttpResponse(json.dumps(response_data), diff --git a/stacktach/urls.py b/stacktach/urls.py index a965594..50d8273 100644 --- a/stacktach/urls.py +++ b/stacktach/urls.py @@ -72,7 +72,7 @@ urlpatterns = patterns('', url(r'db/confirm/usage/exists/(?P[\w\-]+)/$', 'stacktach.dbapi.exists_send_status'), url(r'db/count/verified', 
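
The refactored ``repair_stacktach_down`` above leans on ``_exists_model_factory``, which lives elsewhere in ``dbapi.py`` and is not shown in this hunk. Reconstructed from how it is called (a dict keyed by ``'klass'``), it presumably looks roughly like the sketch below; treat the details as an assumption, the unknown-service branch is modeled on the "Invalid service" errors seen elsewhere in this series, and ``BadRequestException`` is the module's own exception class.

.. sourcecode:: python

    from stacktach import models

    def _exists_model_factory(service):
        # Presumed mapping from service name to its exists model; the real
        # helper may return additional keys besides 'klass'.
        exists_models = {'nova': models.InstanceExists,
                         'glance': models.ImageExists}
        if service not in exists_models:
            raise BadRequestException(message="Invalid service")
        return {'klass': exists_models[service]}
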
'stacktach.dbapi.get_verified_count'), - url(r'repair/', 'stacktach.dbapi.repair_stacktach_down'), + url(r'db/repair/', 'stacktach.dbapi.repair_stacktach_down'), url(r'^(?P\d+)/$', 'stacktach.views.home', name='home'), url(r'^(?P\d+)/details/(?P\w+)/(?P\d+)/$', diff --git a/tests/unit/test_dbapi.py b/tests/unit/test_dbapi.py index 7739c9e..3b4164f 100644 --- a/tests/unit/test_dbapi.py +++ b/tests/unit/test_dbapi.py @@ -1009,7 +1009,7 @@ class StacktachRepairScenarioApi(StacktachBaseTestCase): message_ids = ["04fd94b5-64dd-4559-83b7-981d9d4f7a5a", "14fd94b5-64dd-4559-83b7-981d9d4f7a5a", "24fd94b5-64dd-4559-83b7-981d9d4f7a5a"] - request.POST._iterlists().AndReturn([('service', 'nova'), + request.POST._iterlists().AndReturn([('service', ['nova']), ('message_ids', message_ids)]) self.mox.StubOutWithMock(models.InstanceExists, 'mark_exists_as_sent_unverified') From 4f27f3e393d272b933bbfa8dfc124879bff8d680 Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Thu, 27 Feb 2014 15:35:31 -0500 Subject: [PATCH 44/53] Refactoring event count api --- docs/dbapi.rst | 81 +++++++++++++++++++++------------------- stacktach/dbapi.py | 29 +++++++------- stacktach/urls.py | 2 +- tests/unit/test_dbapi.py | 34 ++++++----------- 4 files changed, 71 insertions(+), 75 deletions(-) diff --git a/docs/dbapi.rst b/docs/dbapi.rst index 954caaa..53e4195 100644 --- a/docs/dbapi.rst +++ b/docs/dbapi.rst @@ -100,6 +100,44 @@ Uses the provided message_id's and http status codes to update image and instanc Read APIs ********* + + +db/stats/events +=============== + +.. http:get:: http://example.com/db/stats/events/ + +Returns a count of events stored in Stacktach's Rawdata tables from +``when_min`` to ``when_max`` + + **Query Parameters** + + * ``event``: event type to filter by + * ``when_min``: datetime (yyyy-mm-dd hh:mm:ss) + * ``when_max``: datetime (yyyy-mm-dd hh:mm:ss) + * ``service``: ``nova`` or ``glance``. default="nova" + + **Example request**: + + .. sourcecode:: http + + GET db/stats/events/ HTTP/1.1 + Host: example.com + Accept: application/json + + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + count: 10 + } + db/stats/nova/exists/ ===================== @@ -876,45 +914,10 @@ Returns a single instance exists matching provided id } } -db/count/verified/ -================== +/db/repair +========== -.. http:get:: http://example.com/count/verified/ - -Returns a count of .verified events stored in Stacktach's Rawdata table from -``audit_period_beginning`` to ``audit_period_ending`` - - **Query Parameters** - - * ``audit_period_beginning``: datetime (yyyy-mm-dd) - * ``audit_period_ending``: datetime (yyyy-mm-dd) - * ``service``: ``nova`` or ``glance``. default="nova" - - **Example request**: - - .. sourcecode:: http - - GET db/count/verified/ HTTP/1.1 - Host: example.com - Accept: application/json - - - **Example response**: - - .. sourcecode:: http - - HTTP/1.1 200 OK - Vary: Accept - Content-Type: application/json - - { - count: 10 - } - -repair -====== - -.. http:post:: http://example.com/repair/ +.. http:post:: http://example.com/db/repair/ Changes the status of all the exists of message-ids sent with the request from 'pending' to 'sent_unverified' so that the verifier does not end up @@ -926,7 +929,7 @@ repair .. 
sourcecode::http - POST /repair/ HTTP/1.1 + POST /db/repair/ HTTP/1.1 Host: example.com Accept: application/json diff --git a/stacktach/dbapi.py b/stacktach/dbapi.py index f170f34..6bb26b5 100644 --- a/stacktach/dbapi.py +++ b/stacktach/dbapi.py @@ -459,23 +459,26 @@ def _rawdata_factory(service): @api_call -def get_verified_count(request): +def get_event_stats(request): try: - audit_period_beginning = datetime.strptime( - request.GET.get("audit_period_beginning"), "%Y-%m-%d") - audit_period_ending = datetime.strptime( - request.GET.get("audit_period_ending"), "%Y-%m-%d") + filters = {} + if 'when_min' in request.GET: + when_min = utils.str_time_to_unix(request.GET['when_min']) + filters['when__gte'] = when_min + + if 'when_max' in request.GET: + when_max = utils.str_time_to_unix(request.GET['when_max']) + filters['when__lte'] = when_max + + if 'event' in request.GET: + filters['event'] = request.GET['event'] + service = request.GET.get("service", "nova") rawdata = _rawdata_factory(service) - filters = { - 'when__gte': dt.dt_to_decimal(audit_period_beginning), - 'when__lte': dt.dt_to_decimal(audit_period_ending), - 'event': "compute.instance.exists.verified" - } - return {'count': rawdata.filter(**filters).count()} - except KeyError and TypeError: + return {'stats': {'count': rawdata.filter(**filters).count()}} + except (KeyError, TypeError): raise BadRequestException(message="Invalid/absent query parameter") - except ValueError: + except (ValueError, AttributeError): raise BadRequestException(message="Invalid format for date (Correct " "format should be %YYYY-%mm-%dd)") diff --git a/stacktach/urls.py b/stacktach/urls.py index 2bd64de..701783d 100644 --- a/stacktach/urls.py +++ b/stacktach/urls.py @@ -91,7 +91,7 @@ dbapi_urls = ( 'stacktach.dbapi.get_usage_exist_stats'), url(r'db/stats/glance/exists$', 'stacktach.dbapi.get_usage_exist_stats_glance'), - url(r'db/count/verified', 'stacktach.dbapi.get_verified_count'), + url(r'db/stats/events', 'stacktach.dbapi.get_event_stats'), url(r'db/repair/', 'stacktach.dbapi.repair_stacktach_down'), ) diff --git a/tests/unit/test_dbapi.py b/tests/unit/test_dbapi.py index 07d1b84..b7e66f7 100644 --- a/tests/unit/test_dbapi.py +++ b/tests/unit/test_dbapi.py @@ -1106,9 +1106,10 @@ class DBAPITestCase(StacktachBaseTestCase): def test_get_verified_count(self): fake_request = self.mox.CreateMockAnything() fake_request.method = 'GET' - fake_request.GET = {'audit_period_beginning': "2014-02-26", - 'audit_period_ending': "2014-02-27", - 'service': "nova"} + fake_request.GET = {'when_min': "2014-02-26 00:00:00", + 'when_max': "2014-02-27 00:00:00", + 'service': "nova", + 'event': 'compute.instance.exists.verified'} mock_query = self.mox.CreateMockAnything() self.mox.StubOutWithMock(models.RawData.objects, "filter") models.RawData.objects.filter(event='compute.instance.exists.verified', @@ -1118,21 +1119,22 @@ class DBAPITestCase(StacktachBaseTestCase): mock_query.count().AndReturn(100) self.mox.ReplayAll() - response = dbapi.get_verified_count(fake_request) + response = dbapi.get_event_stats(fake_request) self.assertEqual(response.status_code, 200) - self.assertEqual(json.loads(response.content), {'count': 100}) + self.assertEqual(json.loads(response.content), + {'stats': {'count': 100}}) self.mox.VerifyAll() def test_get_verified_count_wrong_date_format_returns_400(self): fake_request = self.mox.CreateMockAnything() fake_request.method = 'GET' - fake_request.GET = {'audit_period_beginning': "2014-020-26", + fake_request.GET = {'when_min': "2014-020-26", 
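
As documented earlier in this patch, the renamed endpoint takes ``event``, ``when_min``, ``when_max`` and ``service`` query parameters. A sketch of a client call follows; the ``example.com`` host comes from the docs, the third-party ``requests`` library is an assumption of the sketch, and the exact shape of the JSON body changes again in later patches of this series.

.. sourcecode:: python

    import requests

    # Illustrative call against the db/stats/events endpoint added above.
    resp = requests.get('http://example.com/db/stats/events/',
                        params={'event': 'compute.instance.exists.verified',
                                'when_min': '2014-02-26 00:00:00',
                                'when_max': '2014-02-27 00:00:00',
                                'service': 'nova'})
    print(resp.status_code, resp.json())
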
'service': "nova"} self.mox.ReplayAll() - response = dbapi.get_verified_count(fake_request) + response = dbapi.get_event_stats(fake_request) self.assertEqual(response.status_code, 400) self.assertEqual(json.loads(response.content)['message'], "Invalid format for date" @@ -1142,30 +1144,18 @@ class DBAPITestCase(StacktachBaseTestCase): def test_get_verified_count_wrong_service_returns_400(self): fake_request = self.mox.CreateMockAnything() fake_request.method = 'GET' - fake_request.GET = {'audit_period_beginning': "2014-02-26", - "audit_period_ending": "2014-02-27", + fake_request.GET = {'when_min': "2014-02-26 00:00:00", + "when_min": "2014-02-27 00:00:00", 'service': "qonos"} self.mox.ReplayAll() - response = dbapi.get_verified_count(fake_request) + response = dbapi.get_event_stats(fake_request) self.assertEqual(response.status_code, 400) self.assertEqual(json.loads(response.content)['message'], "Invalid service") self.mox.VerifyAll() - def test_get_verified_count_invalid_query_parameter_returns_400(self): - fake_request = self.mox.CreateMockAnything() - fake_request.method = 'GET' - fake_request.GET = {'audit_period': "2014-02-26",} - - self.mox.ReplayAll() - - response = dbapi.get_verified_count(fake_request) - self.assertEqual(response.status_code, 400) - self.assertEqual(json.loads(response.content)['message'], - "Invalid/absent query parameter") - self.mox.VerifyAll() class StacktachRepairScenarioApi(StacktachBaseTestCase): def setUp(self): From 15666258d5ff873e58cc9661a4e4d950352a8df6 Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Thu, 27 Feb 2014 16:04:36 -0500 Subject: [PATCH 45/53] Convert annotations to list --- stacktach/dbapi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stacktach/dbapi.py b/stacktach/dbapi.py index 6bb26b5..a593d78 100644 --- a/stacktach/dbapi.py +++ b/stacktach/dbapi.py @@ -239,7 +239,7 @@ def _get_exist_stats(request, service): query = klass.objects.filter(**filters) values = query.values('status', 'send_status') stats = values.annotate(event_count=Count('send_status')) - return stats + return list(stats) @api_call def exists_send_status(request, message_id): From f57bc6a95fb8b80a2f4a7f48d55c3856996bf005 Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Fri, 28 Feb 2014 10:27:07 -0500 Subject: [PATCH 46/53] Properly anchor API urls --- stacktach/urls.py | 84 +++++++++++++++++++++++------------------------ 1 file changed, 42 insertions(+), 42 deletions(-) diff --git a/stacktach/urls.py b/stacktach/urls.py index 701783d..45274c3 100644 --- a/stacktach/urls.py +++ b/stacktach/urls.py @@ -23,76 +23,76 @@ web_urls = ( ) stacky_urls = ( - url(r'stacky/deployments/$', 'stacktach.stacky_server.do_deployments'), - url(r'stacky/events/$', 'stacktach.stacky_server.do_events'), - url(r'stacky/hosts/$', 'stacktach.stacky_server.do_hosts'), - url(r'stacky/uuid/$', 'stacktach.stacky_server.do_uuid'), - url(r'stacky/timings/$', 'stacktach.stacky_server.do_timings'), - url(r'stacky/timings/uuid/$', 'stacktach.stacky_server.do_timings_uuid'), - url(r'stacky/summary/$', 'stacktach.stacky_server.do_summary'), - url(r'stacky/request/$', 'stacktach.stacky_server.do_request'), - url(r'stacky/reports/search/$', + url(r'^stacky/deployments/$', 'stacktach.stacky_server.do_deployments'), + url(r'^stacky/events/$', 'stacktach.stacky_server.do_events'), + url(r'^stacky/hosts/$', 'stacktach.stacky_server.do_hosts'), + url(r'^stacky/uuid/$', 'stacktach.stacky_server.do_uuid'), + url(r'^stacky/timings/$', 'stacktach.stacky_server.do_timings'), + 
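
Stepping back to patch 45 for a moment: wrapping the annotated queryset in ``list()`` matters because the lazy queryset returned by ``annotate`` is not directly JSON-serializable, while a plain list of dicts is. A trivial illustration, with literal rows standing in for the queryset results:

.. sourcecode:: python

    import json

    # Rows shaped the way the annotated values() queryset yields them.
    stats = [{'status': 'verified', 'send_status': 201, 'event_count': 2},
             {'status': 'failed', 'send_status': 0, 'event_count': 1}]
    print(json.dumps({'stats': stats}))
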
url(r'^stacky/timings/uuid/$', 'stacktach.stacky_server.do_timings_uuid'), + url(r'^stacky/summary/$', 'stacktach.stacky_server.do_summary'), + url(r'^stacky/request/$', 'stacktach.stacky_server.do_request'), + url(r'^stacky/reports/search/$', 'stacktach.stacky_server.do_jsonreports_search'), - url(r'stacky/reports/$', 'stacktach.stacky_server.do_jsonreports'), - url(r'stacky/report/(?P\d+)/$', + url(r'^stacky/reports/$', 'stacktach.stacky_server.do_jsonreports'), + url(r'^stacky/report/(?P\d+)/$', 'stacktach.stacky_server.do_jsonreport'), - url(r'stacky/show/(?P\d+)/$', + url(r'^stacky/show/(?P\d+)/$', 'stacktach.stacky_server.do_show'), - url(r'stacky/watch/(?P\d+)/$', + url(r'^stacky/watch/(?P\d+)/$', 'stacktach.stacky_server.do_watch'), - url(r'stacky/search/$', 'stacktach.stacky_server.search'), - url(r'stacky/kpi/$', 'stacktach.stacky_server.do_kpi'), - url(r'stacky/kpi/(?P\w+)/$', 'stacktach.stacky_server.do_kpi'), - url(r'stacky/usage/launches/$', + url(r'^stacky/search/$', 'stacktach.stacky_server.search'), + url(r'^stacky/kpi/$', 'stacktach.stacky_server.do_kpi'), + url(r'^stacky/kpi/(?P\w+)/$', 'stacktach.stacky_server.do_kpi'), + url(r'^stacky/usage/launches/$', 'stacktach.stacky_server.do_list_usage_launches'), - url(r'stacky/usage/deletes/$', + url(r'^stacky/usage/deletes/$', 'stacktach.stacky_server.do_list_usage_deletes'), - url(r'stacky/usage/exists/$', + url(r'^stacky/usage/exists/$', 'stacktach.stacky_server.do_list_usage_exists'), ) dbapi_urls = ( - url(r'db/usage/launches/$', + url(r'^db/usage/launches/$', 'stacktach.dbapi.list_usage_launches'), - url(r'db/usage/nova/launches/$', + url(r'^db/usage/nova/launches/$', 'stacktach.dbapi.list_usage_launches'), - url(r'db/usage/glance/images/$', + url(r'^db/usage/glance/images/$', 'stacktach.dbapi.list_usage_images'), - url(r'db/usage/launches/(?P\d+)/$', + url(r'^db/usage/launches/(?P\d+)/$', 'stacktach.dbapi.get_usage_launch'), - url(r'db/usage/nova/launches/(?P\d+)/$', + url(r'^db/usage/nova/launches/(?P\d+)/$', 'stacktach.dbapi.get_usage_launch'), - url(r'db/usage/glance/images/(?P\d+)/$', + url(r'^db/usage/glance/images/(?P\d+)/$', 'stacktach.dbapi.get_usage_image'), - url(r'db/usage/deletes/$', + url(r'^db/usage/deletes/$', 'stacktach.dbapi.list_usage_deletes'), - url(r'db/usage/nova/deletes/$', + url(r'^db/usage/nova/deletes/$', 'stacktach.dbapi.list_usage_deletes'), - url(r'db/usage/glance/deletes/$', + url(r'^db/usage/glance/deletes/$', 'stacktach.dbapi.list_usage_deletes_glance'), - url(r'db/usage/deletes/(?P\d+)/$', + url(r'^db/usage/deletes/(?P\d+)/$', 'stacktach.dbapi.get_usage_delete'), - url(r'db/usage/nova/deletes/(?P\d+)/$', + url(r'^db/usage/nova/deletes/(?P\d+)/$', 'stacktach.dbapi.get_usage_delete'), - url(r'db/usage/glance/deletes/(?P\d+)/$', + url(r'^db/usage/glance/deletes/(?P\d+)/$', 'stacktach.dbapi.get_usage_delete_glance'), - url(r'db/usage/exists/$', 'stacktach.dbapi.list_usage_exists'), - url(r'db/usage/nova/exists/$', 'stacktach.dbapi.list_usage_exists'), - url(r'db/usage/glance/exists/$', 'stacktach.dbapi.list_usage_exists_glance'), - url(r'db/usage/exists/(?P\d+)/$', + url(r'^db/usage/exists/$', 'stacktach.dbapi.list_usage_exists'), + url(r'^db/usage/nova/exists/$', 'stacktach.dbapi.list_usage_exists'), + url(r'^db/usage/glance/exists/$', 'stacktach.dbapi.list_usage_exists_glance'), + url(r'^db/usage/exists/(?P\d+)/$', 'stacktach.dbapi.get_usage_exist'), - url(r'db/usage/nova/exists/(?P\d+)/$', + url(r'^db/usage/nova/exists/(?P\d+)/$', 'stacktach.dbapi.get_usage_exist'), - 
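
The point of prefixing every pattern in this patch with ``^`` is that Django matches URL patterns with search semantics against the request path, so an unanchored pattern can also fire when it happens to appear in the middle of a longer path. A small illustration with the plain ``re`` module (an analogy for the resolver, not the resolver itself); the longer path is hypothetical.

.. sourcecode:: python

    import re

    path = 'db/stats/stacky/events/'  # hypothetical longer path
    # True: the unanchored pattern matches mid-path.
    print(bool(re.search(r'stacky/events/$', path)))
    # False: the anchored pattern requires a match at the start of the path.
    print(bool(re.search(r'^stacky/events/$', path)))
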
url(r'db/usage/glance/exists/(?P\d+)/$', + url(r'^db/usage/glance/exists/(?P\d+)/$', 'stacktach.dbapi.get_usage_exist_glance'), - url(r'db/confirm/usage/exists/(?P[\w\-]+)/$', + url(r'^db/confirm/usage/exists/(?P[\w\-]+)/$', 'stacktach.dbapi.exists_send_status'), - url(r'db/stats/nova/exists$', + url(r'^db/stats/nova/exists/$', 'stacktach.dbapi.get_usage_exist_stats'), - url(r'db/stats/glance/exists$', + url(r'^db/stats/glance/exists/$', 'stacktach.dbapi.get_usage_exist_stats_glance'), - url(r'db/stats/events', 'stacktach.dbapi.get_event_stats'), - url(r'db/repair/', 'stacktach.dbapi.repair_stacktach_down'), + url(r'^db/stats/events/', 'stacktach.dbapi.get_event_stats'), + url(r'^db/repair/', 'stacktach.dbapi.repair_stacktach_down'), ) urlpatterns = patterns('', *(web_urls + stacky_urls + dbapi_urls)) From 475ef4777d5ae35812ac049dfa4e6fb5a9bb3ecf Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Fri, 28 Feb 2014 11:12:48 -0500 Subject: [PATCH 47/53] Correcting date format error --- __init__.py | 0 stacktach/dbapi.py | 2 +- tests/unit/test_dbapi.py | 2 +- 3 files changed, 2 insertions(+), 2 deletions(-) delete mode 100644 __init__.py diff --git a/__init__.py b/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/stacktach/dbapi.py b/stacktach/dbapi.py index a593d78..01e5242 100644 --- a/stacktach/dbapi.py +++ b/stacktach/dbapi.py @@ -480,7 +480,7 @@ def get_event_stats(request): raise BadRequestException(message="Invalid/absent query parameter") except (ValueError, AttributeError): raise BadRequestException(message="Invalid format for date (Correct " - "format should be %YYYY-%mm-%dd)") + "format should be %Y-%m-%d %H:%M:%S)") def repair_stacktach_down(request): diff --git a/tests/unit/test_dbapi.py b/tests/unit/test_dbapi.py index b7e66f7..fe8c7b3 100644 --- a/tests/unit/test_dbapi.py +++ b/tests/unit/test_dbapi.py @@ -1138,7 +1138,7 @@ class DBAPITestCase(StacktachBaseTestCase): self.assertEqual(response.status_code, 400) self.assertEqual(json.loads(response.content)['message'], "Invalid format for date" - " (Correct format should be %YYYY-%mm-%dd)") + " (Correct format should be %Y-%m-%d %H:%M:%S)") self.mox.VerifyAll() def test_get_verified_count_wrong_service_returns_400(self): From fef0458472d6e1e00931fd8c2df831a96eb38f5c Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Mon, 3 Mar 2014 12:27:49 -0500 Subject: [PATCH 48/53] More efficient event stats query --- stacktach/dbapi.py | 18 +++++++--- tests/unit/test_dbapi.py | 74 +++++++++++++++++++++++++++++++++------- 2 files changed, 75 insertions(+), 17 deletions(-) diff --git a/stacktach/dbapi.py b/stacktach/dbapi.py index a593d78..0f98ebc 100644 --- a/stacktach/dbapi.py +++ b/stacktach/dbapi.py @@ -470,12 +470,22 @@ def get_event_stats(request): when_max = utils.str_time_to_unix(request.GET['when_max']) filters['when__lte'] = when_max - if 'event' in request.GET: - filters['event'] = request.GET['event'] - service = request.GET.get("service", "nova") rawdata = _rawdata_factory(service) - return {'stats': {'count': rawdata.filter(**filters).count()}} + if filters: + rawdata = rawdata.filter(**filters) + events = rawdata.values('event').annotate(event_count=Count('event')) + events = list(events) + + if 'event' in request.GET: + event_filter = request.GET['event'] + event_count = {'event': event_filter, 'event_count': 0} + for event in events: + if event['event'] == event_filter: + event_count['event_count'] = event['event_count'] + events = [event_count, ] + + return {'stats': events} except (KeyError, TypeError): 
raise BadRequestException(message="Invalid/absent query parameter") except (ValueError, AttributeError): diff --git a/tests/unit/test_dbapi.py b/tests/unit/test_dbapi.py index b7e66f7..ba93418 100644 --- a/tests/unit/test_dbapi.py +++ b/tests/unit/test_dbapi.py @@ -43,10 +43,13 @@ class DBAPITestCase(StacktachBaseTestCase): self.mox = mox.Mox() dne_exception = models.InstanceExists.DoesNotExist mor_exception = models.InstanceExists.MultipleObjectsReturned + self.mox.StubOutWithMock(models, 'RawData', + use_mock_anything=True) self.mox.StubOutWithMock(models, 'InstanceExists', use_mock_anything=True) self.mox.StubOutWithMock(models, 'ImageExists', use_mock_anything=True) + models.RawData.objects = self.mox.CreateMockAnything() models.InstanceExists._meta = self.mox.CreateMockAnything() models.ImageExists._meta = self.mox.CreateMockAnything() models.InstanceExists.objects = self.mox.CreateMockAnything() @@ -1103,26 +1106,71 @@ class DBAPITestCase(StacktachBaseTestCase): self.assertEqual(expected_response, response.content) self.mox.VerifyAll() - def test_get_verified_count(self): + def test_get_event_stats(self): fake_request = self.mox.CreateMockAnything() fake_request.method = 'GET' - fake_request.GET = {'when_min': "2014-02-26 00:00:00", - 'when_max': "2014-02-27 00:00:00", - 'service': "nova", - 'event': 'compute.instance.exists.verified'} + fake_request.GET = {'service': "nova"} mock_query = self.mox.CreateMockAnything() - self.mox.StubOutWithMock(models.RawData.objects, "filter") - models.RawData.objects.filter(event='compute.instance.exists.verified', - when__gte=Decimal('1393372800'), - when__lte=Decimal('1393459200')).\ - AndReturn(mock_query) - mock_query.count().AndReturn(100) + models.RawData.objects.values('event').AndReturn(mock_query) + events = [ + {'event': 'compute.instance.exists.verified', 'event_count': 100}, + {'event': 'compute.instance.exists', 'event_count': 100} + ] + mock_query.annotate(event_count=mox.IsA(Count)).AndReturn(events) self.mox.ReplayAll() response = dbapi.get_event_stats(fake_request) self.assertEqual(response.status_code, 200) - self.assertEqual(json.loads(response.content), - {'stats': {'count': 100}}) + self.assertEqual(response.content, + json.dumps({'stats': events})) + self.mox.VerifyAll() + + def test_get_event_stats_date_range(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + start = "2014-02-26 00:00:00" + end = "2014-02-27 00:00:00" + fake_request.GET = {'when_min': start, + 'when_max': end, + 'service': "nova"} + mock_query = self.mox.CreateMockAnything() + filters = { + 'when__gte': stacktach_utils.str_time_to_unix(start), + 'when__lte': stacktach_utils.str_time_to_unix(end) + } + models.RawData.objects.filter(**filters).AndReturn(mock_query) + mock_query.values('event').AndReturn(mock_query) + events = [ + {'event': 'compute.instance.exists.verified', 'event_count': 100}, + {'event': 'compute.instance.exists', 'event_count': 100} + ] + mock_query.annotate(event_count=mox.IsA(Count)).AndReturn(events) + self.mox.ReplayAll() + + response = dbapi.get_event_stats(fake_request) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, + json.dumps({'stats': events})) + self.mox.VerifyAll() + + def test_get_verified_count(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {'service': "nova", + 'event': 'compute.instance.exists.verified'} + mock_query = self.mox.CreateMockAnything() + 
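
The event-stats tests in this hunk pin down the behaviour the rewrite introduces: the view now groups raw rows by ``event`` and counts them, and an ``event`` query parameter is applied to the aggregated list in Python rather than being pushed into the SQL filter. Here is a minimal sketch of that post-aggregation filtering, with plain dicts standing in for queryset rows; the next two patches tighten the same logic into a single list comprehension with a zero-count default.

.. sourcecode:: python

    events = [
        {'event': 'compute.instance.exists.verified', 'event_count': 100},
        {'event': 'compute.instance.exists', 'event_count': 100},
    ]
    wanted = 'compute.instance.exists.verified'
    default = {'event': wanted, 'event_count': 0}
    # Keep only the requested event; fall back to a zero count if it is absent.
    filtered = [e for e in events if e['event'] == wanted] or [default, ]
    print(filtered)
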
models.RawData.objects.values('event').AndReturn(mock_query) + events = [ + {'event': 'compute.instance.exists.verified', 'event_count': 100}, + {'event': 'compute.instance.exists', 'event_count': 100} + ] + mock_query.annotate(event_count=mox.IsA(Count)).AndReturn(events) + self.mox.ReplayAll() + + response = dbapi.get_event_stats(fake_request) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, + json.dumps({'stats': [events[0]]})) self.mox.VerifyAll() def test_get_verified_count_wrong_date_format_returns_400(self): From 18c3309d7baaa2f23bfa24b20032b58a76480d6b Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Mon, 3 Mar 2014 12:52:42 -0500 Subject: [PATCH 49/53] Short circuit event stats filter when event found --- stacktach/dbapi.py | 1 + 1 file changed, 1 insertion(+) diff --git a/stacktach/dbapi.py b/stacktach/dbapi.py index 0f98ebc..b9f7528 100644 --- a/stacktach/dbapi.py +++ b/stacktach/dbapi.py @@ -483,6 +483,7 @@ def get_event_stats(request): for event in events: if event['event'] == event_filter: event_count['event_count'] = event['event_count'] + break events = [event_count, ] return {'stats': events} From e21afc84a76c62231b3a770496c408de055ea186 Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Mon, 3 Mar 2014 13:06:02 -0500 Subject: [PATCH 50/53] Clearer event stats filtering --- stacktach/dbapi.py | 10 +++------- tests/unit/test_dbapi.py | 20 ++++++++++++++++++++ 2 files changed, 23 insertions(+), 7 deletions(-) diff --git a/stacktach/dbapi.py b/stacktach/dbapi.py index b9f7528..55b795b 100644 --- a/stacktach/dbapi.py +++ b/stacktach/dbapi.py @@ -478,13 +478,9 @@ def get_event_stats(request): events = list(events) if 'event' in request.GET: - event_filter = request.GET['event'] - event_count = {'event': event_filter, 'event_count': 0} - for event in events: - if event['event'] == event_filter: - event_count['event_count'] = event['event_count'] - break - events = [event_count, ] + event = request.GET['event'] + default = {'event': event, 'event_count': 0} + events = [x for x in events if x['event'] == event] or [default, ] return {'stats': events} except (KeyError, TypeError): diff --git a/tests/unit/test_dbapi.py b/tests/unit/test_dbapi.py index ba93418..3ebfaeb 100644 --- a/tests/unit/test_dbapi.py +++ b/tests/unit/test_dbapi.py @@ -1173,6 +1173,26 @@ class DBAPITestCase(StacktachBaseTestCase): json.dumps({'stats': [events[0]]})) self.mox.VerifyAll() + def test_get_verified_count_default(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {'service': "nova", + 'event': 'compute.instance.exists.verified'} + mock_query = self.mox.CreateMockAnything() + models.RawData.objects.values('event').AndReturn(mock_query) + events = [ + {'event': 'compute.instance.create.start', 'event_count': 100}, + {'event': 'compute.instance.exists', 'event_count': 100} + ] + mock_query.annotate(event_count=mox.IsA(Count)).AndReturn(events) + self.mox.ReplayAll() + + response = dbapi.get_event_stats(fake_request) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.content, + json.dumps({'stats': [{'event': 'compute.instance.exists.verified', 'event_count': 0}]})) + self.mox.VerifyAll() + def test_get_verified_count_wrong_date_format_returns_400(self): fake_request = self.mox.CreateMockAnything() fake_request.method = 'GET' From 913310e4f5e4339248360e85f912cb3393b4a81a Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Mon, 3 Mar 2014 14:50:30 -0500 Subject: [PATCH 51/53] Adding date range 
limit to event stats --- stacktach/dbapi.py | 18 ++++++++++++++---- tests/unit/test_dbapi.py | 34 ++++++++++++++++++++++++++++++++-- 2 files changed, 46 insertions(+), 6 deletions(-) diff --git a/stacktach/dbapi.py b/stacktach/dbapi.py index 01e5242..91902fa 100644 --- a/stacktach/dbapi.py +++ b/stacktach/dbapi.py @@ -40,6 +40,7 @@ from stacktach import utils DEFAULT_LIMIT = 50 HARD_LIMIT = 1000 +HARD_WHEN_RANGE_LIMIT = 5 * 24 * 60 * 60 # 5 Days class APIException(Exception): @@ -462,13 +463,22 @@ def _rawdata_factory(service): def get_event_stats(request): try: filters = {} - if 'when_min' in request.GET: - when_min = utils.str_time_to_unix(request.GET['when_min']) - filters['when__gte'] = when_min - if 'when_max' in request.GET: + if 'when_min' in request.GET or 'when_max' in request.GET: + if not ('when_min' in request.GET and 'when_max' in request.GET): + msg = "When providing date range filters, " \ + "a min and max are required." + raise BadRequestException(message=msg) + + when_min = utils.str_time_to_unix(request.GET['when_min']) when_max = utils.str_time_to_unix(request.GET['when_max']) + + if when_max - when_min > HARD_WHEN_RANGE_LIMIT: + msg = "Date ranges may be no larger than %s seconds" + raise BadRequestException(message=msg % HARD_WHEN_RANGE_LIMIT) + filters['when__lte'] = when_max + filters['when__gte'] = when_min if 'event' in request.GET: filters['event'] = request.GET['event'] diff --git a/tests/unit/test_dbapi.py b/tests/unit/test_dbapi.py index fe8c7b3..0c40c58 100644 --- a/tests/unit/test_dbapi.py +++ b/tests/unit/test_dbapi.py @@ -1125,11 +1125,41 @@ class DBAPITestCase(StacktachBaseTestCase): {'stats': {'count': 100}}) self.mox.VerifyAll() + def test_get_verified_count_only_one_range_param_returns_400(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {'when_min': "2014-020-26", + 'service': "nova"} + + self.mox.ReplayAll() + + response = dbapi.get_event_stats(fake_request) + self.assertEqual(response.status_code, 400) + self.assertEqual(json.loads(response.content)['message'], + "When providing date range filters, " + "a min and max are required.") + self.mox.VerifyAll() + + def test_get_verified_count_only_large_date_range_returns_400(self): + fake_request = self.mox.CreateMockAnything() + fake_request.method = 'GET' + fake_request.GET = {'when_min': "2014-2-26 00:00:00", + 'when_max': "2014-3-3 00:00:01", # > 5 days later + 'service': "nova"} + + self.mox.ReplayAll() + + response = dbapi.get_event_stats(fake_request) + self.assertEqual(response.status_code, 400) + self.assertEqual(json.loads(response.content)['message'], + "Date ranges may be no larger than 432000 seconds") + self.mox.VerifyAll() + def test_get_verified_count_wrong_date_format_returns_400(self): fake_request = self.mox.CreateMockAnything() fake_request.method = 'GET' fake_request.GET = {'when_min': "2014-020-26", - + 'when_max': "2014-020-26", 'service': "nova"} self.mox.ReplayAll() @@ -1145,7 +1175,7 @@ class DBAPITestCase(StacktachBaseTestCase): fake_request = self.mox.CreateMockAnything() fake_request.method = 'GET' fake_request.GET = {'when_min': "2014-02-26 00:00:00", - "when_min": "2014-02-27 00:00:00", + "when_max": "2014-02-27 00:00:00", 'service': "qonos"} self.mox.ReplayAll() From 93d13262dbab444b2608bec9a6af87e76ba97ea1 Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Mon, 3 Mar 2014 15:37:34 -0500 Subject: [PATCH 52/53] Increasing event stats max range --- stacktach/dbapi.py | 2 +- tests/unit/test_dbapi.py | 4 ++-- 2 files 
changed, 3 insertions(+), 3 deletions(-) diff --git a/stacktach/dbapi.py b/stacktach/dbapi.py index 91902fa..a263425 100644 --- a/stacktach/dbapi.py +++ b/stacktach/dbapi.py @@ -40,7 +40,7 @@ from stacktach import utils DEFAULT_LIMIT = 50 HARD_LIMIT = 1000 -HARD_WHEN_RANGE_LIMIT = 5 * 24 * 60 * 60 # 5 Days +HARD_WHEN_RANGE_LIMIT = 7 * 24 * 60 * 60 # 5 Days class APIException(Exception): diff --git a/tests/unit/test_dbapi.py b/tests/unit/test_dbapi.py index 0c40c58..3635458 100644 --- a/tests/unit/test_dbapi.py +++ b/tests/unit/test_dbapi.py @@ -1144,7 +1144,7 @@ class DBAPITestCase(StacktachBaseTestCase): fake_request = self.mox.CreateMockAnything() fake_request.method = 'GET' fake_request.GET = {'when_min': "2014-2-26 00:00:00", - 'when_max': "2014-3-3 00:00:01", # > 5 days later + 'when_max': "2014-3-5 00:00:01", # > 7 days later 'service': "nova"} self.mox.ReplayAll() @@ -1152,7 +1152,7 @@ class DBAPITestCase(StacktachBaseTestCase): response = dbapi.get_event_stats(fake_request) self.assertEqual(response.status_code, 400) self.assertEqual(json.loads(response.content)['message'], - "Date ranges may be no larger than 432000 seconds") + "Date ranges may be no larger than 604800 seconds") self.mox.VerifyAll() def test_get_verified_count_wrong_date_format_returns_400(self): From d3867ccdb59c3bd46932701691da48c67263cb5e Mon Sep 17 00:00:00 2001 From: Andrew Melton Date: Mon, 3 Mar 2014 15:53:29 -0500 Subject: [PATCH 53/53] Fixing comment --- stacktach/dbapi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/stacktach/dbapi.py b/stacktach/dbapi.py index a263425..56f8aba 100644 --- a/stacktach/dbapi.py +++ b/stacktach/dbapi.py @@ -40,7 +40,7 @@ from stacktach import utils DEFAULT_LIMIT = 50 HARD_LIMIT = 1000 -HARD_WHEN_RANGE_LIMIT = 7 * 24 * 60 * 60 # 5 Days +HARD_WHEN_RANGE_LIMIT = 7 * 24 * 60 * 60 # 7 Days class APIException(Exception):
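
Taken together, the final three patches leave the range guard in ``get_event_stats`` requiring both bounds whenever either is supplied and capping the span at ``HARD_WHEN_RANGE_LIMIT``, now seven days, with the trailing comment corrected to match. Below is a condensed, standalone restatement of just that guard; the helper name is illustrative, and plain ``ValueError`` stands in for the view's ``BadRequestException``.

.. sourcecode:: python

    HARD_WHEN_RANGE_LIMIT = 7 * 24 * 60 * 60  # 7 days (604800 seconds)

    def build_when_filters(when_min=None, when_max=None):
        # Both bounds must be supplied together; messages mirror the ones
        # asserted in the unit tests above.
        if when_min is None and when_max is None:
            return {}
        if when_min is None or when_max is None:
            raise ValueError("When providing date range filters, "
                             "a min and max are required.")
        if when_max - when_min > HARD_WHEN_RANGE_LIMIT:
            raise ValueError("Date ranges may be no larger than %s seconds"
                             % HARD_WHEN_RANGE_LIMIT)
        return {'when__gte': when_min, 'when__lte': when_max}

    print(build_when_filters(0, 3600))  # {'when__gte': 0, 'when__lte': 3600}
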