Merge "Add time_interval in live_query"

This commit is contained in:
Jenkins 2015-07-02 15:49:14 +00:00 committed by Gerrit Code Review
commit 21f9e521af
11 changed files with 98 additions and 80 deletions

View File

@ -62,7 +62,7 @@ types documentation
.. autotype:: surveil.api.datamodel.status.metrics.live_metric.LiveMetric .. autotype:: surveil.api.datamodel.status.metrics.live_metric.LiveMetric
:members: :members:
.. autotype:: surveil.api.datamodel.status.metrics.time_delta.TimeDelta .. autotype:: surveil.api.datamodel.status.metrics.time_interval.TimeInterval
:members: :members:
.. autotype:: surveil.api.datamodel.status.event.Event .. autotype:: surveil.api.datamodel.status.event.Event

View File

@ -20,7 +20,6 @@ from surveil.api.controllers.v2.status import services as v2_services
class StatusController(rest.RestController): class StatusController(rest.RestController):
# events = EventsController()
hosts = v2_hosts.HostsController() hosts = v2_hosts.HostsController()
services = v2_services.ServicesController() services = v2_services.ServicesController()
events = v2_events.EventsController() events = v2_events.EventsController()

View File

@ -23,7 +23,6 @@ from surveil.api.datamodel.status import live_host
from surveil.api.datamodel.status import live_query from surveil.api.datamodel.status import live_query
from surveil.api.datamodel.status import live_service from surveil.api.datamodel.status import live_service
from surveil.api.datamodel.status.metrics import live_metric from surveil.api.datamodel.status.metrics import live_metric
from surveil.api.datamodel.status.metrics import time_delta
from surveil.api.handlers.status import live_host_handler from surveil.api.handlers.status import live_host_handler
from surveil.api.handlers.status import live_service_handler from surveil.api.handlers.status import live_service_handler
from surveil.api.handlers.status.metrics import live_metric_handler from surveil.api.handlers.status.metrics import live_metric_handler
@ -185,18 +184,18 @@ class HostServiceMetricController(rest.RestController):
return metric return metric
@util.policy_enforce(['authenticated']) @util.policy_enforce(['authenticated'])
@wsme_pecan.wsexpose([live_metric.LiveMetric], body=time_delta.TimeDelta) @wsme_pecan.wsexpose([live_metric.LiveMetric], body=live_query.LiveQuery)
def post(self, time): def post(self, query):
"""Returns all matching metrics. """Returns all matching metrics.
:param time: a time delta within the request body. :param query: a live query within the request body.
""" """
handler = live_metric_handler.MetricHandler(pecan.request) handler = live_metric_handler.MetricHandler(pecan.request)
metrics = handler.get_all(time_delta=time, metrics = handler.get_all(metric_name=self.metric_name,
metric_name=self.metric_name,
host_name=pecan.request.context['host_name'], host_name=pecan.request.context['host_name'],
service_description=pecan.request. service_description=pecan.request
context['service_name']) .context['service_name'],
live_query=query)
return metrics return metrics
@ -221,17 +220,16 @@ class HostMetricController(rest.RestController):
return metric return metric
@util.policy_enforce(['authenticated']) @util.policy_enforce(['authenticated'])
@wsme_pecan.wsexpose([live_metric.LiveMetric], body=time_delta.TimeDelta) @wsme_pecan.wsexpose([live_metric.LiveMetric], body=live_query.LiveQuery)
def post(self, time): def post(self, query):
"""Given a time delta, returns all matching metrics. """Given a LiveQuery, returns all matching metrics.
:param time: a time delta within the request body. :param query: a live query within the request body.
""" """
handler = live_metric_handler.MetricHandler(pecan.request) handler = live_metric_handler.MetricHandler(pecan.request)
metrics = handler.get_all(time_delta=time, metrics = handler.get_all(metric_name=self.metric_name,
metric_name=self.metric_name, host_name=pecan.request.context['host_name'],
host_name=pecan.request.context['host_name'] live_query=query)
)
return metrics return metrics

View File

@ -19,7 +19,6 @@ from surveil.api.controllers.v2 import bansho as v2_bansho
from surveil.api.controllers.v2 import config as v2_config from surveil.api.controllers.v2 import config as v2_config
from surveil.api.controllers.v2 import hello as v2_hello from surveil.api.controllers.v2 import hello as v2_hello
from surveil.api.controllers.v2 import status as v2_status from surveil.api.controllers.v2 import status as v2_status
from surveil.api.controllers.v2.status import events as v2_event
class V2Controller(object): class V2Controller(object):
@ -30,5 +29,4 @@ class V2Controller(object):
status = v2_status.StatusController() status = v2_status.StatusController()
surveil = v2_admin.AdminController() surveil = v2_admin.AdminController()
auth = v2_auth.AuthController() auth = v2_auth.AuthController()
events = v2_event.EventsController()
bansho = v2_bansho.BanshoController() bansho = v2_bansho.BanshoController()

View File

@ -17,26 +17,37 @@ import json
import wsme import wsme
import wsme.types as wtypes import wsme.types as wtypes
from surveil.api.datamodel.status.metrics import time_interval
from surveil.api.datamodel import types from surveil.api.datamodel import types
class LiveQuery(types.Base): class LiveQuery(types.Base):
"""Holds a sample query encoded in json.""" """Holds a sample query encoded in json."""
filters = wsme.wsattr(wtypes.text, mandatory=True) filters = wsme.wsattr(wtypes.text, mandatory=False)
"The filter expression encoded in json." "The filter expression encoded in json."
fields = wsme.wsattr([wtypes.text], mandatory=False) fields = wsme.wsattr([wtypes.text], mandatory=False)
"List of fields to include in the response." "List of fields to include in the response."
time_interval = wsme.wsattr(time_interval.TimeInterval, mandatory=False)
"Time interval of the query."
@classmethod @classmethod
def sample(cls): def sample(cls):
return cls( return cls(
fields=['host_name', 'last_check'], fields=['host_name', 'last_check'],
time_interval=time_interval.TimeInterval(
start_time='2015-01-29T21:50:44Z',
end_time='2015-01-29T22:50:44Z'
),
filters=json.dumps({ filters=json.dumps({
"isnot": { "isnot": {
"state": ["0", "1"], "state": ["0", "1"],
"host_state": ["2"] "host_state": ["2"]
},
"is": {
"event_type": ["ALERT"]
} }
}) })
) )

View File

@ -18,18 +18,18 @@ import wsme.types as wtypes
from surveil.api.datamodel import types from surveil.api.datamodel import types
class TimeDelta(types.Base): class TimeInterval(types.Base):
"""Hold a time.""" """Hold a time."""
begin = wsme.wsattr(wtypes.text, mandatory=True) start_time = wsme.wsattr(wtypes.text, mandatory=True)
"The begin time of a measure in RFC3339." "The starting time."
end = wsme.wsattr(wtypes.text, mandatory=True) end_time = wsme.wsattr(wtypes.text, mandatory=True)
"The end time of a measure in RFC3339." "The ending time."
@classmethod @classmethod
def sample(cls): def sample(cls):
return cls( return cls(
begin='2015-01-29T21:50:44Z', start_time='2015-01-29T21:50:44Z',
end='2015-01-29T22:50:44Z' end_time='2015-01-29T22:50:44Z'
) )

View File

@ -17,18 +17,23 @@ import json
def build_influxdb_query(live_query, def build_influxdb_query(live_query,
measurement, measurement,
time_delta=None,
group_by=[], group_by=[],
order_by=[], order_by=[],
additional_filters={},
limit=None): limit=None):
query = ['SELECT * FROM', measurement] query = ['SELECT * FROM', measurement]
filters = {} filters = {}
if live_query and live_query.filters: time = None
filters = json.loads(live_query.filters) if live_query:
if live_query.filters:
filters.update(json.loads(live_query.filters))
if live_query.time_interval:
time = live_query.time_interval
query += _build_where_clause(filters, time_delta) filters.update(additional_filters)
query += _build_where_clause(filters, time)
if group_by: if group_by:
query.append('GROUP BY') query.append('GROUP BY')
@ -44,27 +49,26 @@ def build_influxdb_query(live_query,
return ' '.join(query) return ' '.join(query)
def _build_where_clause(filters, time_delta=None): def _build_where_clause(filters, time=None):
filters_conversion = { filters_conversion = {
'is': '=', 'is': '=',
'isnot': '!=' 'isnot': '!='
} }
clause = [] clause = []
first = True is_where_append = False
if time_delta: if time:
clause.append('WHERE') clause.append('WHERE')
first = False clause.append("time >= '%s' AND time <= '%s'" %
(time.start_time, time.end_time))
begin = time_delta.begin is_where_append = True
end = time_delta.end
clause.append("time >= '%s' AND time <= '%s'" % (begin, end))
for filter_name, filter_data in sorted(filters.items()): for filter_name, filter_data in sorted(filters.items()):
for field, values in sorted(filter_data.items()): for field, values in sorted(filter_data.items()):
for value in values: for value in values:
if first: if not is_where_append:
clause.append('WHERE') clause.append('WHERE')
is_where_append = True
else: else:
clause.append('AND') clause.append('AND')
@ -77,6 +81,5 @@ def _build_where_clause(filters, time_delta=None):
(field, (field,
filters_conversion[filter_name], filters_conversion[filter_name],
value)) value))
first = False
return clause return clause

View File

@ -11,7 +11,6 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import json
from surveil.api.datamodel.status import live_query from surveil.api.datamodel.status import live_query
from surveil.api.datamodel.status.metrics import live_metric from surveil.api.datamodel.status.metrics import live_metric
@ -76,9 +75,11 @@ class MetricHandler(handler.Handler):
return metrics return metrics
def get_all(self, metric_name, time_delta, host_name, def get_all(self, metric_name,
service_description=None): host_name, service_description=None,
live_query=live_query.LiveQuery()):
"""Return all metrics.""" """Return all metrics."""
filters = { filters = {
"is": { "is": {
"host_name": [host_name] "host_name": [host_name]
@ -88,19 +89,12 @@ class MetricHandler(handler.Handler):
if service_description: if service_description:
filters["is"]["service_description"] = [service_description] filters["is"]["service_description"] = [service_description]
query = live_query.LiveQuery( influx_client = self.request.influxdb_client
filters=json.dumps(filters) query = influxdb_query.build_influxdb_query(live_query,
) 'metric_' + metric_name,
order_by = ["time desc"] order_by=["time desc"],
additional_filters=filters)
cli = self.request.influxdb_client response = influx_client.query(query)
query = influxdb_query.build_influxdb_query(
query,
"metric_" + metric_name,
time_delta=time_delta,
order_by=order_by
)
response = cli.query(query)
metric_dicts = [] metric_dicts = []

View File

@ -316,14 +316,19 @@ class TestEvents(functionalTest.FunctionalTest):
"is": { "is": {
"host_name": ['Google'] "host_name": ['Google']
} }
}) }),
'time_interval': {
"start_time": "2015-06-04T18:55:02Z",
"end_time": "2015-06-04T18:55:42Z"
}
} }
response = self.post_json('/v2/status/events', params=query) response = self.post_json('/v2/status/events', params=query)
self.assertEqual( self.assertEqual(
m.last_request.qs['q'], m.last_request.qs['q'],
["select * from event where host_name='google'"] ["select * from event where time >= '2015-06-04t18:55:02z' "
"and time <= '2015-06-04t18:55:42z' and host_name='google'"]
) )
self.assert_count_equal_backport( self.assert_count_equal_backport(

View File

@ -20,6 +20,7 @@ from surveil.tests.api import functionalTest
class TestHostMetric(functionalTest.FunctionalTest): class TestHostMetric(functionalTest.FunctionalTest):
def setUp(self): def setUp(self):
super(TestHostMetric, self).setUp() super(TestHostMetric, self).setUp()
self.influxdb_response = json.dumps({ self.influxdb_response = json.dumps({
@ -144,12 +145,17 @@ class TestHostMetric(functionalTest.FunctionalTest):
"http://influxdb:8086/query", "http://influxdb:8086/query",
text=self.influxdb_response) text=self.influxdb_response)
time = {'begin': '2015-04-19T00:09:24Z', query = {
'end': '2015-04-19T02:09:25Z'} 'fields': [],
'time_interval': {
'start_time': '2015-04-19T00:09:24Z',
'end_time': '2015-04-19T02:09:25Z'
}
}
response = self.post_json("/v2/status/hosts/srv-monitoring-01/" response = self.post_json("/v2/status/hosts/srv-monitoring-01/"
"services/load/metrics/load1", "services/load/metrics/load1",
params=time) params=query)
expected = [{"metric_name": 'load1', expected = [{"metric_name": 'load1',
"min": "0", "min": "0",
@ -164,9 +170,6 @@ class TestHostMetric(functionalTest.FunctionalTest):
"value": "10" "value": "10"
}] }]
self.assert_count_equal_backport(
json.loads(response.body.decode()),
expected)
self.assertEqual( self.assertEqual(
m.last_request.qs['q'], m.last_request.qs['q'],
["select * from metric_load1 " ["select * from metric_load1 "
@ -177,6 +180,9 @@ class TestHostMetric(functionalTest.FunctionalTest):
"order by time desc" "order by time desc"
] ]
) )
self.assert_count_equal_backport(
json.loads(response.body.decode()),
expected)
def test_metric_names(self): def test_metric_names(self):
self.influxdb_response = json.dumps({ self.influxdb_response = json.dumps({
@ -250,4 +256,4 @@ class TestHostMetric(functionalTest.FunctionalTest):
m.last_request.qs['q'], m.last_request.qs['q'],
["show measurements where host_name='localhost' " ["show measurements where host_name='localhost' "
"and service_description='load'"] "and service_description='load'"]
) )

View File

@ -15,7 +15,7 @@
import json import json
from surveil.api.datamodel.status import live_query from surveil.api.datamodel.status import live_query
from surveil.api.datamodel.status.metrics import time_delta from surveil.api.datamodel.status.metrics import time_interval
from surveil.api.handlers.status import influxdb_query from surveil.api.handlers.status import influxdb_query
from surveil.tests import base from surveil.tests import base
@ -86,16 +86,18 @@ class LiveQueryFilterTest(base.BaseTestCase):
self.assertEqual(expected, result) self.assertEqual(expected, result)
def test_build_query_basic(self): def test_build_query_basic(self):
query_time = time_delta.TimeDelta(begin='2015-01-29T21:50:44Z', query = live_query.LiveQuery(
end='2015-01-29T22:50:44Z') time_interval=time_interval.TimeInterval(
start_time="2015-01-29T21:50:44Z",
end_time="2015-01-29T22:50:44Z"
)
)
query = live_query.LiveQuery()
group_by = ['host_name', 'service_description'] group_by = ['host_name', 'service_description']
order_by = ['time DESC'] order_by = ['time DESC']
result = influxdb_query.build_influxdb_query(query, result = influxdb_query.build_influxdb_query(query,
"metric_pl", "metric_pl",
time_delta=query_time,
group_by=group_by, group_by=group_by,
order_by=order_by order_by=order_by
) )
@ -109,22 +111,23 @@ class LiveQueryFilterTest(base.BaseTestCase):
self.assertEqual(result, expected) self.assertEqual(result, expected)
def test_build_query_host_name(self): def test_build_query_host_name(self):
query_time = time_delta.TimeDelta(begin='2015-01-29T21:50:44Z',
end='2015-01-29T22:50:44Z')
query = live_query.LiveQuery( query = live_query.LiveQuery(
fields=['host_name'], fields=['host_name'],
filters=json.dumps({ filters=json.dumps({
"is": { "is": {
"host_name": ["localhost"] "host_name": ["localhost"]
} }
}) }),
time_interval=time_interval.TimeInterval(
start_time='2015-01-29T21:50:44Z',
end_time='2015-01-29T22:50:44Z'
)
) )
group_by = ['service_description'] group_by = ['service_description']
order_by = ['time DESC'] order_by = ['time DESC']
result = influxdb_query.build_influxdb_query(query, result = influxdb_query.build_influxdb_query(query,
"metric_pl", "metric_pl",
time_delta=query_time,
group_by=group_by, group_by=group_by,
order_by=order_by order_by=order_by
) )
@ -139,8 +142,6 @@ class LiveQueryFilterTest(base.BaseTestCase):
self.assertEqual(result, expected) self.assertEqual(result, expected)
def test_build_query_complete(self): def test_build_query_complete(self):
query_time = time_delta.TimeDelta(begin='2015-01-29T21:50:44Z',
end='2015-01-29T22:50:44Z', )
query = live_query.LiveQuery( query = live_query.LiveQuery(
fields=['host_name'], fields=['host_name'],
filters=json.dumps({ filters=json.dumps({
@ -148,12 +149,15 @@ class LiveQueryFilterTest(base.BaseTestCase):
"host_name": ["localhost"], "host_name": ["localhost"],
"service_description": ["mySQL"] "service_description": ["mySQL"]
} }
}) }),
time_interval=time_interval.TimeInterval(
start_time='2015-01-29T21:50:44Z',
end_time='2015-01-29T22:50:44Z'
)
) )
order_by = ['time DESC'] order_by = ['time DESC']
result = influxdb_query.build_influxdb_query(query, result = influxdb_query.build_influxdb_query(query,
"metric_pl", "metric_pl",
time_delta=query_time,
order_by=order_by order_by=order_by
) )