Add time_interval to live_query
Change-Id: I7b82b78b39ac405aa82ef3d26136031169041761
parent 04889b55fd
commit b7f2a71b97
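This change switches the metric and event status endpoints from a bare TimeDelta request body to a LiveQuery body that can carry an optional time_interval. A minimal sketch of a request under the new scheme, pieced together from the datamodel sample and the updated functional tests in the diff below (the base URL and the host/service/metric path segments are illustrative assumptions, not part of this commit):

import json

import requests  # any HTTP client works; requests is assumed here for brevity

# LiveQuery body: 'filters' stays a JSON-encoded string, while the new
# 'time_interval' carries RFC3339-style start and end timestamps.
query = {
    'fields': ['host_name', 'last_check'],
    'time_interval': {
        'start_time': '2015-01-29T21:50:44Z',
        'end_time': '2015-01-29T22:50:44Z'
    },
    'filters': json.dumps({
        'is': {'host_name': ['srv-monitoring-01']}
    })
}

# Illustrative endpoint; the updated tests below post to paths like
# /v2/status/hosts/<host>/services/<service>/metrics/<metric>.
response = requests.post(
    'http://localhost:8080/v2/status/hosts/srv-monitoring-01'
    '/services/load/metrics/load1',
    json=query)
print(response.json())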
@@ -62,7 +62,7 @@ types documentation
 .. autotype:: surveil.api.datamodel.status.metrics.live_metric.LiveMetric
    :members:

-.. autotype:: surveil.api.datamodel.status.metrics.time_delta.TimeDelta
+.. autotype:: surveil.api.datamodel.status.metrics.time_interval.TimeInterval
    :members:

 .. autotype:: surveil.api.datamodel.status.event.Event
@@ -20,7 +20,6 @@ from surveil.api.controllers.v2.status import services as v2_services


 class StatusController(rest.RestController):
-    # events = EventsController()
     hosts = v2_hosts.HostsController()
     services = v2_services.ServicesController()
     events = v2_events.EventsController()
@@ -23,7 +23,6 @@ from surveil.api.datamodel.status import live_host
 from surveil.api.datamodel.status import live_query
 from surveil.api.datamodel.status import live_service
 from surveil.api.datamodel.status.metrics import live_metric
-from surveil.api.datamodel.status.metrics import time_delta
 from surveil.api.handlers.status import live_host_handler
 from surveil.api.handlers.status import live_service_handler
 from surveil.api.handlers.status.metrics import live_metric_handler
@@ -185,18 +184,18 @@ class HostServiceMetricController(rest.RestController):
         return metric

     @util.policy_enforce(['authenticated'])
-    @wsme_pecan.wsexpose([live_metric.LiveMetric], body=time_delta.TimeDelta)
-    def post(self, time):
+    @wsme_pecan.wsexpose([live_metric.LiveMetric], body=live_query.LiveQuery)
+    def post(self, query):
         """Returns all matching metrics.

         :param time: a time delta within the request body.
         """
         handler = live_metric_handler.MetricHandler(pecan.request)
-        metrics = handler.get_all(time_delta=time,
-                                  metric_name=self.metric_name,
+        metrics = handler.get_all(metric_name=self.metric_name,
                                   host_name=pecan.request.context['host_name'],
-                                  service_description=pecan.request.
-                                  context['service_name'])
+                                  service_description=pecan.request
+                                  .context['service_name'],
+                                  live_query=query)
         return metrics


@@ -221,17 +220,16 @@ class HostMetricController(rest.RestController):
         return metric

     @util.policy_enforce(['authenticated'])
-    @wsme_pecan.wsexpose([live_metric.LiveMetric], body=time_delta.TimeDelta)
-    def post(self, time):
-        """Given a time delta, returns all matching metrics.
+    @wsme_pecan.wsexpose([live_metric.LiveMetric], body=live_query.LiveQuery)
+    def post(self, query):
+        """Given a LiveQuery, returns all matching metrics.

-        :param time: a time delta within the request body.
+        :param time: a live query within the request body.
         """
         handler = live_metric_handler.MetricHandler(pecan.request)
-        metrics = handler.get_all(time_delta=time,
-                                  metric_name=self.metric_name,
-                                  host_name=pecan.request.context['host_name']
-                                  )
+        metrics = handler.get_all(metric_name=self.metric_name,
+                                  host_name=pecan.request.context['host_name'],
+                                  live_query=query)
         return metrics


@@ -19,7 +19,6 @@ from surveil.api.controllers.v2 import bansho as v2_bansho
 from surveil.api.controllers.v2 import config as v2_config
 from surveil.api.controllers.v2 import hello as v2_hello
 from surveil.api.controllers.v2 import status as v2_status
-from surveil.api.controllers.v2.status import events as v2_event


 class V2Controller(object):
@@ -30,5 +29,4 @@ class V2Controller(object):
     status = v2_status.StatusController()
     surveil = v2_admin.AdminController()
     auth = v2_auth.AuthController()
-    events = v2_event.EventsController()
     bansho = v2_bansho.BanshoController()
@@ -17,26 +17,37 @@ import json
 import wsme
 import wsme.types as wtypes

+from surveil.api.datamodel.status.metrics import time_interval
 from surveil.api.datamodel import types


 class LiveQuery(types.Base):
     """Holds a sample query encoded in json."""

-    filters = wsme.wsattr(wtypes.text, mandatory=True)
+    filters = wsme.wsattr(wtypes.text, mandatory=False)
     "The filter expression encoded in json."

+    fields = wsme.wsattr([wtypes.text], mandatory=False)
+    "List of fields to include in the response."
+
+    time_interval = wsme.wsattr(time_interval.TimeInterval, mandatory=False)
+    "Time interval of the query."
+
     @classmethod
     def sample(cls):
         return cls(
             fields=['host_name', 'last_check'],
+            time_interval=time_interval.TimeInterval(
+                start_time='2015-01-29T21:50:44Z',
+                end_time='2015-01-29T22:50:44Z'
+            ),
             filters=json.dumps({
                 "isnot": {
                     "state": ["0", "1"],
                     "host_state": ["2"]
                 },
                 "is": {
                     "event_type": ["ALERT"]
                 }
             })
         )
@@ -18,18 +18,18 @@ import wsme.types as wtypes
 from surveil.api.datamodel import types


-class TimeDelta(types.Base):
+class TimeInterval(types.Base):
     """Hold a time."""

-    begin = wsme.wsattr(wtypes.text, mandatory=True)
-    "The begin time of a measure in RFC3339."
+    start_time = wsme.wsattr(wtypes.text, mandatory=True)
+    "The starting time."

-    end = wsme.wsattr(wtypes.text, mandatory=True)
-    "The end time of a measure in RFC3339."
+    end_time = wsme.wsattr(wtypes.text, mandatory=True)
+    "The ending time."

     @classmethod
     def sample(cls):
         return cls(
-            begin='2015-01-29T21:50:44Z',
-            end='2015-01-29T22:50:44Z'
-        )
+            start_time='2015-01-29T21:50:44Z',
+            end_time='2015-01-29T22:50:44Z'
+        )
@@ -17,18 +17,23 @@ import json

 def build_influxdb_query(live_query,
                          measurement,
-                         time_delta=None,
                          group_by=[],
                          order_by=[],
+                         additional_filters={},
                          limit=None):

     query = ['SELECT * FROM', measurement]

     filters = {}
-    if live_query and live_query.filters:
-        filters = json.loads(live_query.filters)
+    time = None
+    if live_query:
+        if live_query.filters:
+            filters.update(json.loads(live_query.filters))
+        if live_query.time_interval:
+            time = live_query.time_interval

-    query += _build_where_clause(filters, time_delta)
+    filters.update(additional_filters)
+    query += _build_where_clause(filters, time)

     if group_by:
         query.append('GROUP BY')
@@ -44,27 +49,26 @@ def build_influxdb_query(live_query,
     return ' '.join(query)


-def _build_where_clause(filters, time_delta=None):
+def _build_where_clause(filters, time=None):
     filters_conversion = {
         'is': '=',
         'isnot': '!='
     }
     clause = []
-    first = True
+    is_where_append = False

-    if time_delta:
+    if time:
         clause.append('WHERE')
-        first = False
-
-        begin = time_delta.begin
-        end = time_delta.end
-        clause.append("time >= '%s' AND time <= '%s'" % (begin, end))
+        clause.append("time >= '%s' AND time <= '%s'" %
+                      (time.start_time, time.end_time))
+        is_where_append = True

     for filter_name, filter_data in sorted(filters.items()):
         for field, values in sorted(filter_data.items()):
             for value in values:
-                if first:
+                if not is_where_append:
                     clause.append('WHERE')
+                    is_where_append = True
                 else:
                     clause.append('AND')

@@ -77,6 +81,5 @@ def _build_where_clause(filters, time_delta=None):
                               (field,
                                filters_conversion[filter_name],
                                value))
-                first = False

     return clause
@@ -11,7 +11,6 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
-import json

 from surveil.api.datamodel.status import live_query
 from surveil.api.datamodel.status.metrics import live_metric
@@ -76,9 +75,11 @@ class MetricHandler(handler.Handler):

         return metrics

-    def get_all(self, metric_name, time_delta, host_name,
-                service_description=None):
+    def get_all(self, metric_name,
+                host_name, service_description=None,
+                live_query=live_query.LiveQuery()):
         """Return all metrics."""

         filters = {
             "is": {
                 "host_name": [host_name]
@@ -88,19 +89,12 @@ class MetricHandler(handler.Handler):
         if service_description:
             filters["is"]["service_description"] = [service_description]

-        query = live_query.LiveQuery(
-            filters=json.dumps(filters)
-        )
-        order_by = ["time desc"]
-
-        cli = self.request.influxdb_client
-        query = influxdb_query.build_influxdb_query(
-            query,
-            "metric_" + metric_name,
-            time_delta=time_delta,
-            order_by=order_by
-        )
-        response = cli.query(query)
+        influx_client = self.request.influxdb_client
+        query = influxdb_query.build_influxdb_query(live_query,
+                                                    'metric_' + metric_name,
+                                                    order_by=["time desc"],
+                                                    additional_filters=filters)
+        response = influx_client.query(query)

         metric_dicts = []

@@ -316,14 +316,19 @@ class TestEvents(functionalTest.FunctionalTest):
                 "is": {
                     "host_name": ['Google']
                 }
-            })
+            }),
+            'time_interval': {
+                "start_time": "2015-06-04T18:55:02Z",
+                "end_time": "2015-06-04T18:55:42Z"
+            }
         }

         response = self.post_json('/v2/status/events', params=query)

         self.assertEqual(
             m.last_request.qs['q'],
-            ["select * from event where host_name='google'"]
+            ["select * from event where time >= '2015-06-04t18:55:02z' "
+             "and time <= '2015-06-04t18:55:42z' and host_name='google'"]
         )

         self.assert_count_equal_backport(
@@ -20,6 +20,7 @@ from surveil.tests.api import functionalTest


 class TestHostMetric(functionalTest.FunctionalTest):
+
     def setUp(self):
         super(TestHostMetric, self).setUp()
         self.influxdb_response = json.dumps({
@@ -144,12 +145,17 @@ class TestHostMetric(functionalTest.FunctionalTest):
                            "http://influxdb:8086/query",
                            text=self.influxdb_response)

-            time = {'begin': '2015-04-19T00:09:24Z',
-                    'end': '2015-04-19T02:09:25Z'}
+            query = {
+                'fields': [],
+                'time_interval': {
+                    'start_time': '2015-04-19T00:09:24Z',
+                    'end_time': '2015-04-19T02:09:25Z'
+                }
+            }

             response = self.post_json("/v2/status/hosts/srv-monitoring-01/"
                                       "services/load/metrics/load1",
-                                      params=time)
+                                      params=query)

             expected = [{"metric_name": 'load1',
                          "min": "0",
@@ -164,9 +170,6 @@ class TestHostMetric(functionalTest.FunctionalTest):
                          "value": "10"
                          }]

-            self.assert_count_equal_backport(
-                json.loads(response.body.decode()),
-                expected)
             self.assertEqual(
                 m.last_request.qs['q'],
                 ["select * from metric_load1 "
@@ -177,6 +180,9 @@ class TestHostMetric(functionalTest.FunctionalTest):
                  "order by time desc"
                  ]
             )
+            self.assert_count_equal_backport(
+                json.loads(response.body.decode()),
+                expected)

     def test_metric_names(self):
         self.influxdb_response = json.dumps({
@@ -250,4 +256,4 @@ class TestHostMetric(functionalTest.FunctionalTest):
                 m.last_request.qs['q'],
                 ["show measurements where host_name='localhost' "
                  "and service_description='load'"]
-                )
+            )
@@ -15,7 +15,7 @@
 import json

 from surveil.api.datamodel.status import live_query
-from surveil.api.datamodel.status.metrics import time_delta
+from surveil.api.datamodel.status.metrics import time_interval
 from surveil.api.handlers.status import influxdb_query
 from surveil.tests import base

@@ -86,16 +86,18 @@ class LiveQueryFilterTest(base.BaseTestCase):
         self.assertEqual(expected, result)

     def test_build_query_basic(self):
-        query_time = time_delta.TimeDelta(begin='2015-01-29T21:50:44Z',
-                                          end='2015-01-29T22:50:44Z')
+        query = live_query.LiveQuery(
+            time_interval=time_interval.TimeInterval(
+                start_time="2015-01-29T21:50:44Z",
+                end_time="2015-01-29T22:50:44Z"
+            )
+        )

-        query = live_query.LiveQuery()
         group_by = ['host_name', 'service_description']
         order_by = ['time DESC']

         result = influxdb_query.build_influxdb_query(query,
                                                      "metric_pl",
-                                                     time_delta=query_time,
                                                      group_by=group_by,
                                                      order_by=order_by
                                                      )
@@ -109,22 +111,23 @@ class LiveQueryFilterTest(base.BaseTestCase):
         self.assertEqual(result, expected)

     def test_build_query_host_name(self):
-        query_time = time_delta.TimeDelta(begin='2015-01-29T21:50:44Z',
-                                          end='2015-01-29T22:50:44Z')
         query = live_query.LiveQuery(
             fields=['host_name'],
             filters=json.dumps({
                 "is": {
                     "host_name": ["localhost"]
                 }
-            })
+            }),
+            time_interval=time_interval.TimeInterval(
+                start_time='2015-01-29T21:50:44Z',
+                end_time='2015-01-29T22:50:44Z'
+            )
         )
         group_by = ['service_description']
         order_by = ['time DESC']

         result = influxdb_query.build_influxdb_query(query,
                                                      "metric_pl",
-                                                     time_delta=query_time,
                                                      group_by=group_by,
                                                      order_by=order_by
                                                      )
@@ -139,8 +142,6 @@ class LiveQueryFilterTest(base.BaseTestCase):
         self.assertEqual(result, expected)

     def test_build_query_complete(self):
-        query_time = time_delta.TimeDelta(begin='2015-01-29T21:50:44Z',
-                                          end='2015-01-29T22:50:44Z', )
         query = live_query.LiveQuery(
             fields=['host_name'],
             filters=json.dumps({
@@ -148,12 +149,15 @@ class LiveQueryFilterTest(base.BaseTestCase):
                     "host_name": ["localhost"],
                     "service_description": ["mySQL"]
                 }
-            })
+            }),
+            time_interval=time_interval.TimeInterval(
+                start_time='2015-01-29T21:50:44Z',
+                end_time='2015-01-29T22:50:44Z'
+            )
         )
         order_by = ['time DESC']
         result = influxdb_query.build_influxdb_query(query,
                                                      "metric_pl",
-                                                     time_delta=query_time,
                                                      order_by=order_by
                                                      )

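For reference, a short usage sketch of the new query-building path exercised by the tests above, assuming the surveil modules are importable exactly as in those tests; the printed string matches what the updated TestEvents assertion expects (the test compares it lower-cased because the mocked request parses the query string):

import json

from surveil.api.datamodel.status import live_query
from surveil.api.datamodel.status.metrics import time_interval
from surveil.api.handlers.status import influxdb_query

# Build a LiveQuery carrying both a filter and the new time_interval.
query = live_query.LiveQuery(
    filters=json.dumps({"is": {"host_name": ["Google"]}}),
    time_interval=time_interval.TimeInterval(
        start_time='2015-06-04T18:55:02Z',
        end_time='2015-06-04T18:55:42Z'
    )
)

# The time interval is folded into the WHERE clause ahead of the filters:
# SELECT * FROM event WHERE time >= '2015-06-04T18:55:02Z'
# AND time <= '2015-06-04T18:55:42Z' AND host_name='Google'
print(influxdb_query.build_influxdb_query(query, 'event'))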