Add events
Change-Id: Ic5374b8d99890e804da340f8e90d7d81cb549197
commit cc2df796e2
parent 3c0ac12ebb
@@ -35,23 +35,10 @@ Hosts
 .. rest-controller:: surveil.api.controllers.v2.status.hosts:HostServiceMetricsController
 :webprefix: /v2/status/hosts/(host_name)/services/(service_description)/metrics
 
-.. rest-controller:: surveil.api.controllers.v2.status.hosts:HostServiceMetricController
-:webprefix: /v2/status/hosts/(host_name)/services/(service_description)/metrics
+.. rest-controller:: surveil.api.controllers.v2.status.events:EventsController
+:webprefix: /v2/status/events/
 
-.. rest-controller:: surveil.api.controllers.v2.logs:LogsController
-:webprefix: /v2/status/hosts/(host_name)/events
-
-.. rest-controller:: surveil.api.controllers.v2.logs.acknowledgements:AcknowledgementsController
-:webprefix: /v2/status/hosts/(host_name)/events/acknowledgements
-
-.. rest-controller:: surveil.api.controllers.v2.logs.comments:CommentsController
-:webprefix: /v2/status/hosts/(host_name)/events/comments
-
-.. rest-controller:: surveil.api.controllers.v2.logs.downtimes:DowntimesController
-:webprefix: /v2/status/hosts/(host_name)/events/downtimes
-
-.. rest-controller:: surveil.api.controllers.v2.logs.notifications:NotificationsController
-:webprefix: /v2/status/hosts/(host_name)/events/notifications
-
 Services
 ========

@@ -76,4 +63,8 @@ types documentation
 :members:
 
 .. autotype:: surveil.api.datamodel.status.metrics.time_delta.TimeDelta
 :members:
+
+.. autotype:: surveil.api.datamodel.status.event.Event
+:members:
+
@@ -8,7 +8,7 @@ oslo.middleware
 oslo.policy>=0.3.0
 keystonemiddleware
 PasteDeploy
-influxdb==2.4.0
+influxdb==2.6.0
 pika
 python-surveilclient==0.6.0
 six
@@ -1,44 +0,0 @@
-# Copyright 2014 - Savoir-Faire Linux inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import pecan
-from pecan import rest
-
-
-from surveil.api.controllers.v2.logs import acknowledgements
-from surveil.api.controllers.v2.logs import comments
-from surveil.api.controllers.v2.logs import downtimes
-from surveil.api.controllers.v2.logs import notifications
-from surveil.common import util
-
-
-class LogsController(rest.RestController):
-    acknowledgements = acknowledgements.AcknowledgementsController()
-    comments = comments.CommentsController()
-    downtimes = downtimes.DowntimesController()
-    notifications = notifications.NotificationsController()
-
-    # @wsme_pecan.wsexpose([Host])
-    @util.policy_enforce(['authenticated'])
-    @pecan.expose()
-    def get_all(self):
-        """Returns all events from a specific host."""
-        host_name = pecan.request.context.get("host_name")
-        if host_name is not None:
-            return "All events for %s" % host_name
-        return "ALLL Events"
-
-    # @pecan.expose()
-    # def _lookup(self, host_name, *remainder):
-    #     return EventController(host_name), remainder
@@ -1,29 +0,0 @@
-# Copyright 2014 - Savoir-Faire Linux inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import pecan
-from pecan import rest
-
-from surveil.common import util
-
-
-class AcknowledgementsController(rest.RestController):
-
-    # curl -X GET http://127.0.0.1:8080/v2/titilambert/myproject/builds/
-    # @wsme_pecan.wsexpose([Host])
-    @util.policy_enforce(['authenticated'])
-    @pecan.expose()
-    def get_all(self):
-        """Returns all acks from a specific host."""
-        return "ALLL ACK"
@@ -1,29 +0,0 @@
-# Copyright 2014 - Savoir-Faire Linux inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import pecan
-from pecan import rest
-
-from surveil.common import util
-
-
-class CommentsController(rest.RestController):
-
-    # curl -X GET http://127.0.0.1:8080/v2/titilambert/myproject/builds/
-    # @wsme_pecan.wsexpose([Host])
-    @util.policy_enforce(['authenticated'])
-    @pecan.expose()
-    def get_all(self):
-        """Returns all comments from a specific host."""
-        return "ALLL Comments"
@@ -1,29 +0,0 @@
-# Copyright 2014 - Savoir-Faire Linux inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import pecan
-from pecan import rest
-
-from surveil.common import util
-
-
-class DowntimesController(rest.RestController):
-
-    # curl -X GET http://127.0.0.1:8080/v2/titilambert/myproject/builds/
-    # @wsme_pecan.wsexpose([Host])
-    @util.policy_enforce(['authenticated'])
-    @pecan.expose()
-    def get_all(self):
-        """Returns all downtimes from a specific host."""
-        return "ALLL DT"
@@ -1,29 +0,0 @@
-# Copyright 2014 - Savoir-Faire Linux inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import pecan
-from pecan import rest
-
-from surveil.common import util
-
-
-class NotificationsController(rest.RestController):
-
-    # curl -X GET http://127.0.0.1:8080/v2/titilambert/myproject/builds/
-    # @wsme_pecan.wsexpose([Host])
-    @util.policy_enforce(['authenticated'])
-    @pecan.expose()
-    def get_all(self):
-        """Returns all notifications from a specific host."""
-        return "ALLL notifs"
@@ -14,6 +14,7 @@
 
 from pecan import rest
 
+from surveil.api.controllers.v2.status import events as v2_events
 from surveil.api.controllers.v2.status import hosts as v2_hosts
 from surveil.api.controllers.v2.status import services as v2_services
 

@@ -22,3 +23,4 @@ class StatusController(rest.RestController):
     # events = EventsController()
     hosts = v2_hosts.HostsController()
     services = v2_services.ServicesController()
+    events = v2_events.EventsController()
surveil/api/controllers/v2/status/events.py (new file, 42 lines)
@@ -0,0 +1,42 @@
+# Copyright 2014 - Savoir-Faire Linux inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import pecan
+from pecan import rest
+import wsmeext.pecan as wsme_pecan
+
+
+from surveil.api.datamodel.status import event
+from surveil.api.datamodel.status import live_query
+from surveil.api.handlers.status import event_handler
+from surveil.common import util
+
+
+class EventsController(rest.RestController):
+
+    @util.policy_enforce(['authenticated'])
+    @wsme_pecan.wsexpose([event.Event])
+    def get_all(self):
+        """Returns all events."""
+        handler = event_handler.EventHandler(pecan.request)
+        events = handler.get_all()
+        return events
+
+    @util.policy_enforce(['authenticated'])
+    @wsme_pecan.wsexpose([event.Event], body=live_query.LiveQuery)
+    def post(self, query):
+        """Given a LiveQuery, returns all matching events."""
+        handler = event_handler.EventHandler(pecan.request)
+        events = handler.get_all(live_query=query)
+        return events
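Note (not part of the commit): a rough sketch of how the new /v2/status/events endpoints could be exercised once this change is deployed. The base URL and auth header below are assumptions for illustration; only the paths and the LiveQuery body shape come from the controller above.

    import json

    import requests

    BASE = 'http://localhost:8080/v2/status/events'   # assumed deployment address
    HEADERS = {'X-Auth-Token': 'some-token',           # assumed auth setup
               'Content-Type': 'application/json'}

    # GET returns every event the handler finds in InfluxDB.
    print(requests.get(BASE, headers=HEADERS).json())

    # POST with a LiveQuery body returns only the matching events.
    body = {'filters': json.dumps({'is': {'host_name': ['Google']}})}
    print(requests.post(BASE, headers=HEADERS, data=json.dumps(body)).json())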
@@ -17,7 +17,7 @@ from pecan import rest
 import requests
 import wsmeext.pecan as wsme_pecan
 
-from surveil.api.controllers.v2 import logs
+from surveil.api.controllers.v2.status import events
 from surveil.api.datamodel import checkresult
 from surveil.api.datamodel.status import live_host
 from surveil.api.datamodel.status import live_query

@@ -241,7 +241,7 @@ class HostController(rest.RestController):
     # See init for controller creation. We need host_name to instanciate it
     # externalcommands = ExternalCommandsController()
     # config = config.ConfigController()
-    events = logs.LogsController()
+    events = events.EventsController()
     metrics = HostMetricsController()
     results = HostCheckResultsSubController()
 
@@ -18,8 +18,8 @@ from surveil.api.controllers.v2 import auth as v2_auth
 from surveil.api.controllers.v2 import bansho as v2_bansho
 from surveil.api.controllers.v2 import config as v2_config
 from surveil.api.controllers.v2 import hello as v2_hello
-from surveil.api.controllers.v2 import logs as v2_logs
 from surveil.api.controllers.v2 import status as v2_status
+from surveil.api.controllers.v2.status import events as v2_event
 
 
 class V2Controller(object):

@@ -30,5 +30,5 @@ class V2Controller(object):
     status = v2_status.StatusController()
     surveil = v2_admin.AdminController()
     auth = v2_auth.AuthController()
-    logs = v2_logs.LogsController()
+    events = v2_event.EventsController()
     bansho = v2_bansho.BanshoController()
surveil/api/datamodel/status/event.py (new file, 80 lines)
@@ -0,0 +1,80 @@
+# Copyright 2015 - Savoir-Faire Linux inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import wsme
+import wsme.types as wtypes
+
+from surveil.api.datamodel import types
+
+
+class Event(types.Base):
+
+    time = wsme.wsattr(wtypes.text, mandatory=True)
+    """Timestamp of the alert"""
+
+    event_type = wsme.wsattr(wtypes.text, mandatory=True)
+    """Type of event. This is only ALERT"""
+
+    host_name = wsme.wsattr(wtypes.text, mandatory=False)
+    """Host which the alert is from."""
+
+    service_description = wsme.wsattr(wtypes.text, mandatory=False)
+    """Service which raised the alert"""
+
+    state = wsme.wsattr(wtypes.text, mandatory=False)
+    """State of the service or host who raised the alert"""
+
+    # Alerts
+    state_type = wsme.wsattr(wtypes.text, mandatory=False)
+    """Confirmness level of the state [SOFT|HARD]"""
+
+    attempts = wsme.wsattr(int, mandatory=False)
+    """Number of attempts to confirm state"""
+
+    # Downtime
+    downtime_type = wsme.wsattr(wtypes.text, mandatory=False)
+    """Type of alert. This is only HOST or SERVICE"""
+
+    # Notifications
+    notification_type = wsme.wsattr(wtypes.text, mandatory=False)
+
+    notification_method = wsme.wsattr(wtypes.text, mandatory=False)
+
+    contact = wsme.wsattr(wtypes.text, mandatory=False)
+
+    acknowledgement = wsme.wsattr(wtypes.text, mandatory=False)
+
+    # Alert, Flapping
+    alert_type = wsme.wsattr(wtypes.text, mandatory=False)
+    """Type of alert. This is only HOST or SERVICE"""
+
+    # Alerts, Downtime, Flapping
+    output = wsme.wsattr(wtypes.text, mandatory=False)
+    """Additional output of the alert."""
+
+    @classmethod
+    def sample(cls):
+        return cls(
+            time='2015-06-04T18:55:12Z',
+            event_type='ALERT',
+            alert_type='SERVICE',
+            host_name='CoolHost',
+            service_description='Apache Service',
+            state='CRITICAL',
+            state_type='HARD',
+            attempts=4,
+            output='WARNING - load average: 5.04, 4.67, 5.04',
+            notification_method='notify-service-by-email',
+            notification_type=''
+        )
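Note (illustration only): based on the sample() payload above, one element of the list returned by GET /v2/status/events would serialize to roughly the following Python dict; optional attributes that were never set are simply absent.

    sample_event = {
        'time': '2015-06-04T18:55:12Z',
        'event_type': 'ALERT',
        'alert_type': 'SERVICE',
        'host_name': 'CoolHost',
        'service_description': 'Apache Service',
        'state': 'CRITICAL',
        'state_type': 'HARD',
        'attempts': 4,
        'output': 'WARNING - load average: 5.04, 4.67, 5.04',
        'notification_method': 'notify-service-by-email',
        'notification_type': ''
    }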
surveil/api/handlers/status/event_handler.py (new file, 65 lines)
@@ -0,0 +1,65 @@
+# Copyright 2015 - Savoir-Faire Linux inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from surveil.api.datamodel.status import event
+from surveil.api.handlers import handler
+from surveil.api.handlers.status import influxdb_query
+
+
+class EventHandler(handler.Handler):
+    """Fulfills a request on the events resource."""
+
+    def get_all(self, live_query=None):
+        """Return all logs."""
+        influx_client = self.request.influxdb_client
+        query = influxdb_query.build_influxdb_query(live_query, "EVENT")
+        response = influx_client.query(query)
+
+        events = []
+
+        for item in response.items():
+            tags = item[0][1]
+            for point in response.get_points(tags=tags):
+                point.update(tags)
+                event_dict = self._event_dict_from_influx_item(point)
+                events.append(event.Event(**event_dict))
+
+        return events
+
+    def _event_dict_from_influx_item(self, item):
+        mappings = [
+            'time',
+            'event_type',
+            'host_name',
+            'service_description',
+            'state',
+            'state_type',
+            'attempts',
+            'downtime_type',
+            'notification_type',
+            'notification_method',
+            'contact',
+            'alert_type',
+            'output',
+            'acknowledgement'
+        ]
+
+        event_dict = {}
+
+        for field in mappings:
+            value = item.get(field, None)
+            if value is not None and value != "":
+                event_dict[field] = value
+
+        return event_dict
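Note (assumed data, not part of the commit): _event_dict_from_influx_item keeps only whitelisted fields that are present and non-empty in the merged point+tags dict. A stand-alone sketch of the same filtering rule, using a hypothetical row (all keys here are already in the handler's whitelist):

    # A merged InfluxDB point + tags row, as produced in get_all() above.
    point = {'time': '2015-06-04T18:55:12Z', 'event_type': 'ALERT',
             'host_name': 'savoirfairelinux', 'service_description': 'CPU',
             'state': 'WARNING', 'state_type': 'HARD', 'attempts': 2,
             'output': 'Warning - Connection refused', 'contact': ''}

    # Same rule as _event_dict_from_influx_item: drop missing or empty values.
    event_dict = {k: v for k, v in point.items() if v is not None and v != ''}
    # -> every key above except 'contact', which is empty and therefore dropped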
@@ -17,16 +17,18 @@ import json
 
 def build_influxdb_query(live_query,
                          measurement,
+                         time_delta=None,
                          group_by=[],
                          order_by=[],
                          limit=None):
 
     query = ['SELECT * FROM', measurement]
 
-    if live_query:
+    filters = {}
+    if live_query and live_query.filters:
         filters = json.loads(live_query.filters)
-        if filters:
-            query.append(_build_where_clause(filters))
+
+    query += _build_where_clause(filters, time_delta)
 
     if group_by:
         query.append('GROUP BY')

@@ -42,7 +44,7 @@ def build_influxdb_query(live_query,
     return ' '.join(query)
 
 
-def _build_where_clause(filters):
+def _build_where_clause(filters, time_delta=None):
     filters_conversion = {
         'is': '=',
         'isnot': '!='

@@ -50,6 +52,14 @@ def _build_where_clause(filters):
     clause = []
     first = True
+
+    if time_delta:
+        clause.append('WHERE')
+        first = False
+
+        begin = time_delta.begin
+        end = time_delta.end
+        clause.append("time >= '%s' AND time <= '%s'" % (begin, end))
 
     for filter_name, filter_data in sorted(filters.items()):
         for field, values in sorted(filter_data.items()):
             for value in values:

@@ -69,4 +79,4 @@ def _build_where_clause(filters):
                                                value))
             first = False
 
-    return ' '.join(clause)
+    return clause
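Note (illustration, not part of the commit): with the new time_delta argument, the shared builder can now emit the time-bounded queries that previously required the metrics-specific helper. A minimal sketch, reusing values from the new unit tests further down in this change:

    from surveil.api.datamodel.status import live_query
    from surveil.api.datamodel.status.metrics import time_delta
    from surveil.api.handlers.status import influxdb_query

    delta = time_delta.TimeDelta(begin='2015-01-29T21:50:44Z',
                                 end='2015-01-29T22:50:44Z')
    print(influxdb_query.build_influxdb_query(live_query.LiveQuery(), 'EVENT',
                                              time_delta=delta,
                                              order_by=['time DESC']))
    # Roughly: SELECT * FROM EVENT WHERE time >= '2015-01-29T21:50:44Z'
    #          AND time <= '2015-01-29T22:50:44Z' ORDER BY time DESC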
@@ -1,43 +0,0 @@
-# Copyright 2014 - Savoir-Faire Linux inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-def build_influxdb_query(metric_name,
-                         time_delta,
-                         host_name=None,
-                         service_description=None
-                         ):
-    group_by = []
-    query = ['SELECT * FROM metric_%s'
-             % metric_name]
-    begin = time_delta.begin
-    end = time_delta.end
-    query.append("WHERE time >= '%s' AND time <= '%s'" % (begin, end))
-
-    if host_name is None:
-        group_by.append('host_name')
-    else:
-        query.append("AND host_name ='%s'" % host_name)
-
-    if service_description is None:
-        group_by.append('service_description')
-    else:
-        query.append("AND service_description ='%s'" % service_description)
-
-    if len(group_by) != 0:
-        query.append('GROUP BY')
-        query.append(', '.join(group_by))
-
-    query.append('ORDER BY time DESC')
-    return ' '.join(query)
@@ -11,11 +11,12 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
+import json
 
+from surveil.api.datamodel.status import live_query
 from surveil.api.datamodel.status.metrics import live_metric
 from surveil.api.handlers import handler
-from surveil.api.handlers.status.metrics import influxdb_time_query
+from surveil.api.handlers.status import influxdb_query
 
 
 class MetricHandler(handler.Handler):

@@ -75,16 +76,29 @@ class MetricHandler(handler.Handler):
 
         return metrics
 
-    def get_all(self, metric_name, time_delta, host_name=None,
+    def get_all(self, metric_name, time_delta, host_name,
                 service_description=None):
         """Return all metrics."""
+        filters = {
+            "is": {
+                "host_name": [host_name]
+            }
+        }
+
+        if service_description:
+            filters["is"]["service_description"] = [service_description]
+
+        query = live_query.LiveQuery(
+            filters=json.dumps(filters)
+        )
+        order_by = ["time desc"]
+
         cli = self.request.influxdb_client
-        query = influxdb_time_query.build_influxdb_query(
-            metric_name,
-            time_delta,
-            host_name,
-            service_description
+        query = influxdb_query.build_influxdb_query(
+            query,
+            "metric_" + metric_name,
+            time_delta=time_delta,
+            order_by=order_by
         )
         response = cli.query(query)
 

@@ -127,4 +141,15 @@ class MetricHandler(handler.Handler):
             else:
                 metric_dict[field[0]] = field[1](value)
 
        return metric_dict
+
+    def _metrics_name_from_influx_item(self, item):
+
+        metric_name = {}
+        mappings = [('metric_name', 'name', str), ]
+        for field in mappings:
+            value = item.get(field[1], None)
+            if value is not None:
+                metric_name[field[0]] = field[2](value)
+
+        return metric_name
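Note (illustration, not part of the commit): MetricHandler.get_all() now goes through the shared builder instead of the removed influxdb_time_query module. For a hypothetical 'load1' metric on host 'srv-monitoring-01' and service 'load' (mirroring the updated TestHostMetric expectation below), the handler effectively does:

    import json

    from surveil.api.datamodel.status import live_query

    filters = {"is": {"host_name": ["srv-monitoring-01"],
                      "service_description": ["load"]}}
    query = live_query.LiveQuery(filters=json.dumps(filters))
    # influxdb_query.build_influxdb_query(query, "metric_load1",
    #                                     time_delta=time_delta,
    #                                     order_by=["time desc"])
    # yields, for the test's time range, roughly:
    #   SELECT * FROM metric_load1 WHERE time >= '2015-04-19T00:09:24Z'
    #   AND time <= '2015-04-19T02:09:25Z' AND host_name='srv-monitoring-01'
    #   AND service_description='load' ORDER BY time desc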
surveil/tests/api/controllers/v2/status/test_events.py (new file, 341 lines)
@@ -0,0 +1,341 @@
+# Copyright 2014 - Savoir-Faire Linux inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import json
+
+import requests_mock
+
+from surveil.tests.api import functionalTest
+
+
+class TestEvents(functionalTest.FunctionalTest):
+
+    def setUp(self):
+        super(TestEvents, self).setUp()
+        self.influxdb_response = json.dumps({
+            "results": [
+                {
+                    "series": [
+                        {
+                            "name": "ALERT",
+                            "tags": {
+                                "event_type": "ALERT",
+                                "host_name": "myServiceIsDown",
+                                "service_description": "iAmADownService"
+                            },
+                            "columns": [
+                                "time",
+                                "attempts",
+                                "output",
+                                "state",
+                                "state_type",
+                                "alert_type"
+                            ],
+                            "values": [
+                                [
+                                    "2015-06-04T18:55:12Z",
+                                    1,
+                                    "Connection refused",
+                                    "CRITICAL",
+                                    "SOFT",
+                                    "SERVICE"
+                                ],
+                                [
+                                    '2015-06-04T18:55:12Z',
+                                    2,
+                                    'Connection refused',
+                                    'CRITICAL',
+                                    'SOFT',
+                                    'SERVICE'
+                                ],
+                                [
+                                    '2015-06-04T18:55:12Z',
+                                    3,
+                                    'Connection refused',
+                                    'CRITICAL',
+                                    'SOFT',
+                                    'SERVICE'
+                                ]
+                            ]
+                        },
+                        {
+                            'name': 'ALERT',
+                            'tags': {
+                                'event_type': 'ALERT',
+                                'host_name': 'savoirfairelinux',
+                                'service_description': 'CPU'
+                            },
+                            'columns': [
+                                'time',
+                                'attempts',
+                                'output',
+                                'state',
+                                'state_type',
+                                'alert_type'
+                            ],
+                            'values': [
+                                [
+                                    '2015-06-04T18:55:12Z',
+                                    1,
+                                    'Warning - Connection refused',
+                                    'CRITICAL',
+                                    'HARD',
+                                    'SERVICE'
+                                ],
+                                [
+                                    '2015-06-04T18:55:12Z',
+                                    2,
+                                    'Warning - Connection refused',
+                                    'WARNING',
+                                    'HARD',
+                                    'HOST'
+                                ]
+                            ]
+                        },
+                        {
+                            'name': 'ALERT',
+                            'tags': {
+                                'event_type': 'NOTIFICATION',
+                                'host_name': 'savoirfairelinux',
+                                'service_description': 'CPU'
+                            },
+                            'columns': [
+                                'time',
+                                'notification_type',
+                                'contact',
+                                'state',
+                                'notification_method',
+                                'acknowledgement'
+                            ],
+                            'values': [
+                                [
+                                    '2015-06-04T18:55:12Z',
+                                    'SERVICE',
+                                    'admin',
+                                    'CRITICAL',
+                                    'notify-service-by-email',
+                                    None
+                                ],
+                                [
+                                    '2015-06-04T18:55:12Z',
+                                    'SERVICE',
+                                    'admin',
+                                    'CRITICAL',
+                                    'notify-service-by-email',
+                                    'ACKNOWLEDGEMENT'
+                                ]
+                            ]
+                        },
+                        {
+                            'name': 'ALERT',
+                            'tags': {
+                                'event_type': 'NOTIFICATION',
+                                'host_name': 'Google',
+                                'service_description': 'Load'
+                            },
+                            'columns': [
+                                'time',
+                                'notification_type',
+                                'contact',
+                                'state',
+                                'notification_method',
+                                'acknowledgement'
+                            ],
+                            'values': [
+                                [
+                                    '2015-06-04T18:55:12Z',
+                                    'SERVICE',
+                                    'admin',
+                                    'CRITICAL',
+                                    'notify-service-by-email',
+                                    None
+                                ]
+                            ]
+                        }
+                    ]
+                }
+            ]
+        })
+
+    def test_get_all(self):
+        expected_values = [
+            {
+                "host_name": "myServiceIsDown",
+                "event_type": "ALERT",
+                "service_description": "iAmADownService",
+                "time": "2015-06-04T18:55:12Z",
+                "attempts": 1,
+                "output": "Connection refused",
+                "state": "CRITICAL",
+                "state_type": "SOFT",
+                "alert_type": "SERVICE"
+            },
+            {
+                'host_name': 'myServiceIsDown',
+                'event_type': 'ALERT',
+                'service_description': 'iAmADownService',
+                'time': '2015-06-04T18:55:12Z',
+                'attempts': 2,
+                'output': 'Connection refused',
+                'state': 'CRITICAL',
+                'state_type': 'SOFT',
+                'alert_type': 'SERVICE'
+            },
+            {
+                'host_name': 'myServiceIsDown',
+                'event_type': 'ALERT',
+                'service_description': 'iAmADownService',
+                'time': '2015-06-04T18:55:12Z',
+                'attempts': 3,
+                'output': 'Connection refused',
+                'state': 'CRITICAL',
+                'state_type': 'SOFT',
+                'alert_type': 'SERVICE'
+            },
+            {
+                'host_name': 'savoirfairelinux',
+                'event_type': 'ALERT',
+                'service_description': 'CPU',
+                'time': '2015-06-04T18:55:12Z',
+                'attempts': 1,
+                'output': 'Warning - Connection refused',
+                'state': 'CRITICAL',
+                'state_type': 'HARD',
+                'alert_type': 'SERVICE'
+            },
+            {
+                'host_name': 'savoirfairelinux',
+                'event_type': 'ALERT',
+                'service_description': 'CPU',
+                'time': '2015-06-04T18:55:12Z',
+                'attempts': 2,
+                'output': 'Warning - Connection refused',
+                'state': 'WARNING',
+                'state_type': 'HARD',
+                'alert_type': 'HOST'
+            },
+            {
+                'host_name': 'savoirfairelinux',
+                'event_type': 'NOTIFICATION',
+                'service_description': 'CPU',
+                'time': '2015-06-04T18:55:12Z',
+                'notification_type': 'SERVICE',
+                'contact': 'admin',
+                'state': 'CRITICAL',
+                'notification_method': 'notify-service-by-email'
+            },
+            {
+                'host_name': 'savoirfairelinux',
+                'event_type': 'NOTIFICATION',
+                'service_description': 'CPU',
+                'time': '2015-06-04T18:55:12Z',
+                'notification_type': 'SERVICE',
+                'contact': 'admin',
+                'state': 'CRITICAL',
+                'notification_method': 'notify-service-by-email',
+                'acknowledgement': 'ACKNOWLEDGEMENT'
+            },
+            {
+                'host_name': 'Google',
+                'event_type': 'NOTIFICATION',
+                'service_description': 'Load',
+                'time': '2015-06-04T18:55:12Z',
+                'notification_type': 'SERVICE',
+                'contact': 'admin',
+                'state': 'CRITICAL',
+                'notification_method': 'notify-service-by-email'
+            }
+        ]
+
+        with requests_mock.Mocker() as m:
+            m.register_uri(requests_mock.GET,
+                           'http://influxdb:8086/query',
+                           text=self.influxdb_response)
+
+            response = self.get('/v2/status/events')
+
+            self.assert_count_equal_backport(
+                json.loads(response.body.decode()),
+                expected_values
+            )
+
+    def test_get_events_for_host(self):
+        influxdb_google_response = json.dumps({
+            "results": [
+                {
+                    "series": [
+                        {
+                            'name': 'ALERT',
+                            'tags': {
+                                'event_type': 'NOTIFICATION',
+                                'host_name': 'Google',
+                                'service_description': 'Load'
+                            },
+                            'columns': [
+                                'time',
+                                'notification_type',
+                                'contact',
+                                'state',
+                                'notification_method',
+                                'acknowledgement'
+                            ],
+                            'values': [
+                                [
+                                    '2015-06-04T18:55:12Z',
+                                    'SERVICE',
+                                    'admin',
+                                    'CRITICAL',
+                                    'notify-service-by-email',
+                                    None
+                                ]
+                            ]
+                        }
+                    ]
+                }
+            ]
+        })
+
+        with requests_mock.Mocker() as m:
+            m.register_uri(requests_mock.GET,
+                           'http://influxdb:8086/query',
+                           text=influxdb_google_response)
+
+            query = {
+                'filters': json.dumps({
+                    "is": {
+                        "host_name": ['Google']
+                    }
+                })
+            }
+
+            response = self.post_json('/v2/status/events', params=query)
+
+            self.assertEqual(
+                m.last_request.qs['q'],
+                ["select * from event where host_name='google'"]
+            )
+
+            self.assert_count_equal_backport(
+                json.loads(response.body.decode()),
+                [{
+                    'host_name': 'Google',
+                    'event_type': 'NOTIFICATION',
+                    'service_description': 'Load',
+                    'time': '2015-06-04T18:55:12Z',
+                    'notification_type': 'SERVICE',
+                    'contact': 'admin',
+                    'state': 'CRITICAL',
+                    'notification_method': 'notify-service-by-email'
+                }]
+            )
@@ -172,8 +172,8 @@ class TestHostMetric(functionalTest.FunctionalTest):
             ["select * from metric_load1 "
              "where time >= '2015-04-19t00:09:24z' "
              "and time <= '2015-04-19t02:09:25z' "
-             "and host_name ='srv-monitoring-01' "
-             "and service_description ='load' "
+             "and host_name='srv-monitoring-01' "
+             "and service_description='load' "
              "order by time desc"
             ]
        )
@@ -15,6 +15,7 @@
 import json
 
 from surveil.api.datamodel.status import live_query
+from surveil.api.datamodel.status.metrics import time_delta
 from surveil.api.handlers.status import influxdb_query
 from surveil.tests import base
 

@@ -33,9 +34,9 @@ class LiveQueryFilterTest(base.BaseTestCase):
            filters
        )
 
-        expected = "WHERE state=0 AND description='test_keystone'"
+        expected = ["WHERE", "description='test_keystone'", "AND", "state=0"]
 
-        self.assert_count_equal_backport(result, expected)
+        self.assertEqual(result, expected)
 
     def test_build_where_clause_no_filters(self):
         filters = {}

@@ -49,10 +50,7 @@ class LiveQueryFilterTest(base.BaseTestCase):
         self.assert_count_equal_backport(result, expected)
 
     def test_build_influx_query(self):
-        query = live_query.LiveQuery(
-            fields=['host_name', 'last_check'],
-            filters=json.dumps({}),
-        )
+        query = {}
         measurement = 'ALERT'
         group_by = ['*', 'host_name']
         limit = 10

@@ -64,12 +62,11 @@ class LiveQueryFilterTest(base.BaseTestCase):
 
         expected = "SELECT * FROM ALERT GROUP BY *, host_name LIMIT 10"
 
-        self.assert_count_equal_backport(result, expected)
+        self.assertEqual(expected, result)
 
     def test_build_influx_query_orderby(self):
         query = live_query.LiveQuery(
-            fields=['host_name', 'last_check'],
-            filters=json.dumps({}),
+            fields=['host_name', 'last_check']
        )
         measurement = 'ALERT'
         group_by = ['*', 'host_name']

@@ -86,4 +83,86 @@ class LiveQueryFilterTest(base.BaseTestCase):
                     "GROUP BY *, host_name "
                     "ORDER BY time DESC LIMIT 10")
 
-        self.assert_count_equal_backport(result, expected)
+        self.assertEqual(expected, result)
+
+    def test_build_query_basic(self):
+        query_time = time_delta.TimeDelta(begin='2015-01-29T21:50:44Z',
+                                          end='2015-01-29T22:50:44Z')
+
+        query = live_query.LiveQuery()
+        group_by = ['host_name', 'service_description']
+        order_by = ['time DESC']
+
+        result = influxdb_query.build_influxdb_query(query,
+                                                     "metric_pl",
+                                                     time_delta=query_time,
+                                                     group_by=group_by,
+                                                     order_by=order_by
+                                                     )
+        expected = ("SELECT * "
+                    "FROM metric_pl "
+                    "WHERE time >= '2015-01-29T21:50:44Z' "
+                    "AND time <= '2015-01-29T22:50:44Z' "
+                    "GROUP BY host_name, "
+                    "service_description ORDER BY time DESC")
+
+        self.assertEqual(result, expected)
+
+    def test_build_query_host_name(self):
+        query_time = time_delta.TimeDelta(begin='2015-01-29T21:50:44Z',
+                                          end='2015-01-29T22:50:44Z')
+        query = live_query.LiveQuery(
+            fields=['host_name'],
+            filters=json.dumps({
+                "is": {
+                    "host_name": ["localhost"]
+                }
+            })
+        )
+        group_by = ['service_description']
+        order_by = ['time DESC']
+
+        result = influxdb_query.build_influxdb_query(query,
+                                                     "metric_pl",
+                                                     time_delta=query_time,
+                                                     group_by=group_by,
+                                                     order_by=order_by
+                                                     )
+        expected = ("SELECT * "
+                    "FROM metric_pl "
+                    "WHERE time >= '2015-01-29T21:50:44Z' "
+                    "AND time <= '2015-01-29T22:50:44Z' "
+                    "AND host_name='localhost' "
+                    "GROUP BY service_description "
+                    "ORDER BY time DESC")
+
+        self.assertEqual(result, expected)
+
+    def test_build_query_complete(self):
+        query_time = time_delta.TimeDelta(begin='2015-01-29T21:50:44Z',
+                                          end='2015-01-29T22:50:44Z', )
+        query = live_query.LiveQuery(
+            fields=['host_name'],
+            filters=json.dumps({
+                "is": {
+                    "host_name": ["localhost"],
+                    "service_description": ["mySQL"]
+                }
+            })
+        )
+        order_by = ['time DESC']
+        result = influxdb_query.build_influxdb_query(query,
+                                                     "metric_pl",
+                                                     time_delta=query_time,
+                                                     order_by=order_by
+                                                     )
+
+        expected = ("SELECT * "
+                    "FROM metric_pl "
+                    "WHERE time >= '2015-01-29T21:50:44Z' "
+                    "AND time <= '2015-01-29T22:50:44Z' "
+                    "AND host_name='localhost' "
+                    "AND service_description='mySQL' "
+                    "ORDER BY time DESC")
+
+        self.assertEqual(result, expected)
@@ -1,80 +0,0 @@
-# Copyright 2015 - Savoir-Faire Linux inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-from surveil.api.datamodel.status.metrics import time_delta
-from surveil.api.handlers.status.metrics import influxdb_time_query
-from surveil.tests import base
-
-
-class InfluxdbTimeQueryTest(base.BaseTestCase):
-    def test_build_query_basic(self):
-        query_time = time_delta.TimeDelta(begin='2015-01-29T21:50:44Z',
-                                          end='2015-01-29T22:50:44Z', )
-        query_metric_name = 'pl'
-
-        result = influxdb_time_query.build_influxdb_query(query_metric_name,
-                                                          query_time
-                                                          )
-        expected = ("SELECT * "
-                    "FROM metric_pl "
-                    "WHERE time >= '2015-01-29T21:50:44Z' "
-                    "AND time <= '2015-01-29T22:50:44Z' "
-                    "GROUP BY host_name, "
-                    "service_description ORDER BY time DESC")
-
-        self.assert_count_equal_backport(result, expected)
-
-    def test_build_query_host_name(self):
-        query_time = time_delta.TimeDelta(begin='2015-01-29T21:50:44Z',
-                                          end='2015-01-29T22:50:44Z', )
-        query_metric_name = 'pl'
-        query_host_name = 'localhost'
-
-        result = influxdb_time_query.build_influxdb_query(query_metric_name,
-                                                          query_time,
-                                                          query_host_name
-                                                          )
-        expected = ("SELECT * "
-                    "FROM metric_pl "
-                    "WHERE time >= '2015-01-29T21:50:44Z' "
-                    "AND time <= '2015-01-29T22:50:44Z' "
-                    "AND host_name ='localhost' "
-                    "GROUP BY service_description "
-                    "ORDER BY time DESC")
-
-        self.assert_count_equal_backport(result, expected)
-
-    def test_build_query_complete(self):
-        query_time = time_delta.TimeDelta(begin='2015-01-29T21:50:44Z',
-                                          end='2015-01-29T22:50:44Z', )
-        query_metric_name = 'pl'
-        query_host_name = 'localhost'
-        query_service_description = 'mySQL'
-
-        result = influxdb_time_query.build_influxdb_query(
-            query_metric_name,
-            query_time,
-            query_host_name,
-            query_service_description
-        )
-        expected = ("SELECT * "
-                    "FROM metric_pl "
-                    "WHERE time >= '2015-01-29T21:50:44Z' "
-                    "AND time <= '2015-01-29T22:50:44Z' "
-                    "AND host_name ='localhost' "
-                    "AND service_description ='mySQL' "
-                    "ORDER BY time DESC")
-
-        self.assert_count_equal_backport(result, expected)
@@ -42,7 +42,7 @@ RUN cd /tmp && \
 # mod-influxdb
 RUN pip install influxdb==2.3.0
 RUN cd /tmp && \
-    wget -O mod-influxdb.tar.gz https://github.com/savoirfairelinux/mod-influxdb/archive/2.7.4.tar.gz && \
+    wget -O mod-influxdb.tar.gz https://github.com/savoirfairelinux/mod-influxdb/archive/3.0.tar.gz && \
     tar -zxvf mod-influxdb.tar.gz && \
    mv /tmp/mod-influxdb-*/module /var/lib/alignak/modules/mod-influxdb && \
    rm -rfv /tmp/mod-influxdb*