Refactor log files digging

Change-Id: I0771c2ce7f2754c1dfb646645d60b46d1101fdb8
Federico Ressi 2020-10-29 13:10:24 +01:00
parent 3dd71cc486
commit 66435b0448
9 changed files with 209 additions and 150 deletions


@@ -15,12 +15,8 @@
 # under the License.
 
 from __future__ import absolute_import
 
-from tobiko.shell.files import _exception
 from tobiko.shell.files import _logs
 
-LogParserError = _exception.LogParserError
-LogFileNotFound = _exception.LogFileNotFound
-LogFile = _logs.LogFile
-ClusterLogFile = _logs.ClusterLogFile
+LogFileDigger = _logs.LogFileDigger
+MultihostLogFileDigger = _logs.MultihostLogFileDigger
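
Usage note (illustrative, not part of the commit): after this hunk the package exports only the two digger classes, so callers reach them as below. The filename value is made up.

    from tobiko.shell import files

    # LogFile / ClusterLogFile / LogParserError / LogFileNotFound are gone;
    # only the diggers remain exported.
    digger = files.LogFileDigger(filename='/var/log/messages*')
    multi_digger = files.MultihostLogFileDigger(filename='/var/log/messages*')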


@@ -1,26 +0,0 @@
# Copyright (c) 2020 Red Hat, Inc.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import

import tobiko


class LogParserError(tobiko.TobikoException):
    pass


class LogFileNotFound(LogParserError):
    message = 'File {filename} was not found on {host}'


@@ -15,96 +15,71 @@
 # under the License.
 
 from __future__ import absolute_import
 
+import collections
 import os
 
-from tobiko.shell import files
+from tobiko.shell import find
+from tobiko.shell import grep
 from tobiko.shell import sh
-from tobiko.openstack import topology
 
 
-class LogFile(object):
-
-    def __init__(self, hostname, filename):
-        self.filename = filename
-        self.host = topology.get_openstack_node(hostname=hostname)
-        self._list_logfiles()
-        self.cmd = ''
-        self.found = []
-
-    def find(self, regex):
-        self._list_logfiles()
-        self.cmd = f"zgrep -Eh {regex}"
-        self.found = sh.execute(f'{self.cmd} {" ".join(self.logfiles)}',
-                                ssh_client=self.host.ssh_client,
-                                expect_exit_status=None,
-                                sudo=True).stdout.split('\n')
-        try:
-            self.found.remove('')
-        except ValueError:
-            pass
-        return self.found
-
-    def find_new(self):
-        self._list_logfiles()
-        if not self.cmd:
-            err_msg = 'find_new() method can be only executed after find()'
-            raise files.LogParserError(message=err_msg)
-        tmp = sh.execute(f'{self.cmd} {" ".join(self.logfiles)}',
-                         ssh_client=self.host.ssh_client,
-                         expect_exit_status=None,
-                         sudo=True).stdout.split('\n')
-        found = []
-        for log_string in tmp:
-            if log_string not in self.found and log_string != '':
-                found.append(log_string)
-                self.found.append(log_string)
-        return found
-
-    def _list_logfiles(self):
-        file_path, file_name = os.path.split(self.filename)
-        result = sh.execute(f'find {file_path} -name {file_name}*',
-                            ssh_client=self.host.ssh_client,
-                            expect_exit_status=None,
-                            sudo=True)
-        self.logfiles = set(result.stdout.split('\n'))
-        if '' in self.logfiles:
-            self.logfiles.remove('')
-        if self.logfiles == []:
-            raise files.LogFileNotFound(filename=str(self.filename),
-                                        host=str(self.host.name))
-
-
-class ClusterLogFile(object):
-
-    def __init__(self, filename):
-        self.filename = filename
-        self.hostnames = []
-        self.logfiles = []
-
-    def add_host(self, hostname):
-        if hostname in self.hostnames:
-            return
-        self.hostnames.append(hostname)
-        self.logfiles.append(LogFile(hostname, self.filename))
-
-    def add_group(self, group):
-        for host in topology.list_openstack_nodes(group=group):
-            self.add_host(host.name)
-
-    def find(self, regex):
-        for logfile in self.logfiles:
-            logfile.find(regex)
-        return self.found
-
-    def find_new(self):
-        new_lines = []
-        for logfile in self.logfiles:
-            new_lines += logfile.find_new()
-        return new_lines
-
-    @property
-    def found(self):
-        found = []
-        for logfile in self.logfiles:
-            found += logfile.found
-        return found
+class LogFileDigger(object):
+
+    def __init__(self, filename, **execute_params):
+        self.filename = filename
+        self.execute_params = execute_params
+        self.logfiles = set()
+        self.found = set()
+
+    def find_lines(self, pattern, new_lines=False):
+        log_files = self.list_log_files()
+        try:
+            lines = frozenset(
+                grep.grep_files(pattern=pattern, files=log_files,
+                                **self.execute_params))
+        except grep.NoMatchingLinesFound:
+            if new_lines:
+                return frozenset()
+        else:
+            lines -= self.found
+            self.found.update(lines)
+            if new_lines:
+                return lines
+        return frozenset(self.found)
+
+    def find_new_lines(self, pattern):
+        return self.find_lines(pattern=pattern, new_lines=True)
+
+    def list_log_files(self):
+        file_path, file_name = os.path.split(self.filename)
+        return find.find_files(path=file_path,
+                               name=file_name,
+                               **self.execute_params)
+
+
+class MultihostLogFileDigger(object):
+
+    def __init__(self, filename, ssh_clients=None, **execute_params):
+        self.diggers = collections.OrderedDict()
+        self.filename = filename
+        self.execute_params = execute_params
+        if ssh_clients:
+            for ssh_client in ssh_clients:
+                self.add_host(ssh_client=ssh_client)
+
+    def add_host(self, hostname=None, ssh_client=None):
+        hostname = hostname or sh.get_hostname(ssh_client=ssh_client)
+        if hostname not in self.diggers:
+            self.diggers[hostname] = LogFileDigger(filename=self.filename,
+                                                   ssh_client=ssh_client,
+                                                   **self.execute_params)
+
+    def find_lines(self, pattern, new_lines=False):
+        lines = []
+        for hostname, digger in self.diggers.items():
+            for line in digger.find_lines(pattern, new_lines=new_lines):
+                lines.append((hostname, line))
+        return lines
+
+    def find_new_lines(self, pattern):
+        return self.find_lines(pattern=pattern, new_lines=True)


@@ -1,16 +0,0 @@
# Copyright (c) 2020 Red Hat, Inc.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import

tobiko/shell/find.py (new file)

@@ -0,0 +1,59 @@
# Copyright (c) 2020 Red Hat, Inc.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import typing # noqa
import tobiko
from tobiko.shell import sh
from tobiko.shell import ssh
class FilesNotFound(tobiko.TobikoException):
    message = ("Files not found (path={path}, name={name}, login={login}, "
               "exit_status={exit_status}):\n{stderr}")


NameType = typing.Union[None, str, typing.List[str]]
PathType = typing.Union[str, typing.Iterable[str]]


def find_files(path: sh.ShellCommandType,
               name: NameType = None,
               command: sh.ShellCommandType = 'find',
               ssh_client: ssh.SSHClientFixture = None,
               **execute_params) -> typing.List[str]:
    if not path:
        raise ValueError("Path can't be empty")

    command_line = sh.shell_command(command) + path
    if name is not None:
        command_line += ['-name', name]
    result = sh.execute(command_line,
                        ssh_client=ssh_client,
                        expect_exit_status=None,
                        **execute_params)
    if result.exit_status == 0:
        output_lines: typing.List[str] = [
            line.strip()
            for line in result.stdout.splitlines()
            if line.strip()]
        if output_lines:
            return output_lines
    raise FilesNotFound(path=path,
                        name=name,
                        login=ssh_client and ssh_client.login or None,
                        exit_status=result.exit_status,
                        stderr=result.stderr.strip())
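
Usage sketch (illustrative, not part of the commit; the path and name values are made up): this is the helper the digger's list_log_files() delegates to.

    from tobiko.shell import find

    # Searches the local host unless an ssh_client is given; extra keyword
    # arguments such as sudo=True are forwarded to sh.execute(). Raises
    # FilesNotFound when nothing matches.
    log_files = find.find_files(path='/var/log/containers/neutron',
                                name='server.log*')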

tobiko/shell/grep.py (new file)

@@ -0,0 +1,58 @@
# Copyright (c) 2020 Red Hat, Inc.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import typing # noqa
import tobiko
from tobiko.shell import sh
from tobiko.shell import ssh
class NoMatchingLinesFound(tobiko.TobikoException):
    message = ("No matching lines found in files (pattern='{pattern}',"
               " files={files}, login={login})")


def grep_files(pattern: str,
               files: typing.List[str],
               command: sh.ShellCommandType = 'zgrep -Eh',
               ssh_client: ssh.SSHClientFixture = None,
               blank_lines=False,
               **execute_params) -> typing.List[str]:
    if not pattern:
        raise ValueError("Pattern string can't be empty")
    if not files:
        raise ValueError("File list can't be empty")

    command_line = sh.shell_command(command) + ['-e', pattern] + files
    try:
        result = sh.execute(command_line,
                            ssh_client=ssh_client,
                            **execute_params)
    except sh.ShellCommandFailed as ex:
        if ex.exit_status > 1:
            # Some unknown problem occurred
            raise
    else:
        output_lines: typing.List[str] = [
            line
            for line in result.stdout.splitlines()
            if blank_lines or line.strip()]
        if output_lines:
            return output_lines
    raise NoMatchingLinesFound(pattern=pattern,
                               files=files,
                               login=ssh_client and ssh_client.login or None)
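
Usage sketch (illustrative, not part of the commit; the pattern and file list are made up): zgrep exiting with status 1 (no matches) is mapped to NoMatchingLinesFound, while higher exit statuses are re-raised.

    from tobiko.shell import grep

    lines = grep.grep_files(pattern='Nova.+event.+response',
                            files=['/var/log/containers/neutron/server.log',
                                   '/var/log/containers/neutron/server.log.1.gz'],
                            sudo=True)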


@@ -28,6 +28,8 @@ from tobiko.shell.sh import _ssh
 from tobiko.shell.sh import _uptime
 
+ShellCommand = _command.ShellCommand
+ShellCommandType = _command.ShellCommandType
 shell_command = _command.shell_command
 
 ShellError = _exception.ShellError


@@ -15,20 +15,9 @@
 # under the License.
 
 from __future__ import absolute_import
 
+import shlex
 import subprocess
+import typing  # noqa
 
-import six
-
-
-def shell_command(command):
-    if isinstance(command, ShellCommand):
-        return command
-    elif isinstance(command, six.string_types):
-        return ShellCommand(command.split())
-    elif command:
-        return ShellCommand(str(a) for a in command)
-    else:
-        return ShellCommand()
 
 
 class ShellCommand(tuple):
@@ -42,3 +31,15 @@ class ShellCommand(tuple):
 
     def __add__(self, other):
         other = shell_command(other)
         return shell_command(tuple(self) + other)
+
+
+ShellCommandType = typing.Union[ShellCommand, str, typing.Iterable]
+
+
+def shell_command(command: ShellCommandType) -> ShellCommand:
+    if isinstance(command, ShellCommand):
+        return command
+    elif isinstance(command, str):
+        return ShellCommand(shlex.split(command))
+    else:
+        return ShellCommand(str(a) for a in command)
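
Behaviour sketch (illustrative, not part of the commit): strings are now tokenised with shlex.split() instead of str.split(), so quoted arguments stay whole, and non-string iterables keep being stringified element by element.

    from tobiko.shell import sh

    sh.shell_command('zgrep -Eh "some pattern"')   # ('zgrep', '-Eh', 'some pattern')
    sh.shell_command(['ls', '-l', 42])             # ('ls', '-l', '42')
    cmd = sh.shell_command('find /var/log') + ['-name', 'server.log*']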


@@ -24,6 +24,7 @@ from tobiko.shell import ip
 from tobiko.openstack import neutron
 from tobiko.openstack import nova
 from tobiko.openstack import stacks
+from tobiko.openstack import topology
 
 
 class PortTest(testtools.TestCase):
@@ -111,21 +112,30 @@ class PortLogs(testtools.TestCase):
 
     stack = tobiko.required_setup_fixture(PortLogsStack)
 
+    LOG_FILENAME = '/var/log/containers/neutron/server.log*'
+
     def test_nova_port_notification(self):
-        expected_logfile = '/var/log/containers/neutron/server.log'
-        logfile = files.ClusterLogFile(expected_logfile)
-        try:
-            logfile.add_group('controller')
-        except files.LogFileNotFound as ex:
-            tobiko.skip(str(ex))
-        logfile.find(f'Nova.+event.+response.*{self.stack.server_id}')
+        pattern = f'Nova.+event.+response.*{self.stack.server_id}'
+        log_digger = files.MultihostLogFileDigger(filename=self.LOG_FILENAME,
+                                                  sudo=True)
+        for node in topology.list_openstack_nodes(group='controller'):
+            log_digger.add_host(hostname=node.hostname,
+                                ssh_client=node.ssh_client)
+        log_digger.find_lines(pattern=pattern)
 
         nova.shutoff_server(self.stack.server_id)
         nova.activate_server(self.stack.server_id)
 
-        new_events = logfile.find_new()
-        self.assertEqual(len(new_events), 2)
-        self.assertTrue(
-            any('network-vif-unplugged' in event for event in new_events))
-        self.assertTrue(
-            any('network-vif-plugged' in event for event in new_events))
-        self.assertTrue(
-            all(self.stack.port_id in event for event in new_events))
+        new_lines = log_digger.find_new_lines(pattern=pattern)
+
+        plugged_events = [
+            (hostname, line)
+            for hostname, line in new_lines
+            if 'network-vif-plugged' in line and self.stack.port_id in line]
+        self.assertEqual(1, len(plugged_events), new_lines)
+
+        unplugged_events = [
+            (hostname, line)
+            for hostname, line in new_lines
+            if 'network-vif-unplugged' in line and self.stack.port_id in line]
+        self.assertEqual(1, len(unplugged_events), new_lines)