Merge "Add SNAT background ping test"

Zuul 2025-04-02 09:41:08 +00:00 committed by Gerrit Code Review
commit 9f6299472d
14 changed files with 322 additions and 97 deletions

View File

@@ -26,6 +26,7 @@ from tobiko.podified import _openshift
 from tobiko.podified import containers
 from tobiko import rhosp
 from tobiko.shell import iperf3
+from tobiko.shell import ping
 from tobiko.shell import sh
 from tobiko.shell import ssh
@@ -176,10 +177,22 @@ class PodifiedTopology(rhosp.RhospTopology):
             node_type=EDPM_NODE)
         assert isinstance(node, EdpmNode)

-    def check_or_start_background_vm_ping(self, server_ip):
-        _openshift.check_or_start_tobiko_ping_command(
-            server_ip=server_ip
-        )
+    def check_or_start_background_vm_ping(
+            self,
+            server_ip: typing.Union[str, netaddr.IPAddress],
+            ssh_client: ssh.SSHClientType = None):
+        if not ssh_client:
+            _openshift.check_or_start_tobiko_ping_command(
+                server_ip=server_ip
+            )
+        else:
+            sh.check_or_start_external_process(
+                start_function=ping.execute_ping_in_background,
+                check_function=ping.check_ping_results,
+                liveness_function=ping.ping_alive,
+                stop_function=ping.stop_ping,
+                address=server_ip,
+                ssh_client=ssh_client)

     def check_or_start_background_iperf_connection(
             self,
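For context, the reworked method now dispatches on ssh_client: without one it keeps the original behaviour (ping driven from the tobiko/OpenShift side), with one it runs ping on the remote host through sh.check_or_start_external_process. Roughly (the topology, FIP and SSH client objects below are placeholders; both variants appear in the neutron background tests further down):

    # unchanged path: ping started from the test runner against the VM FIP
    topology.check_or_start_background_vm_ping(server_ip=fip_address)

    # new path: ping started inside a VM (e.g. one without a FIP) over SSH
    topology.check_or_start_background_vm_ping(server_ip=external_gateway_ip,
                                               ssh_client=vm_ssh_client)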

View File

@@ -87,6 +87,10 @@ RHOSP_OPTIONS = [
                     "then working for 60 seconds and then again not working "
                     "for another 10 seconds. In such case this total break "
                     "time would be 13 seconds."),
+    cfg.IntOpt('max_ping_loss_allowed',
+               default=10,
+               help="maximum number of unreplied pings during the "
+                    "background ping tests."),
 ]

 TRIPLEO_OPTIONS = [
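The new limit can be overridden from tobiko.conf like the other options in this list; a minimal sketch, assuming the section name is [rhosp] to match the CONF.tobiko.rhosp.max_ping_loss_allowed lookup used later in this change:

    [rhosp]
    # tolerate up to 20 unanswered echo requests before the background ping check fails
    max_ping_loss_allowed = 20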

View File

@@ -15,9 +15,14 @@
 # under the License.

 from __future__ import absolute_import

+from tobiko.shell.files import _files
 from tobiko.shell.files import _logs

+get_home_absolute_filepath = _files.get_home_absolute_filepath
+truncate_client_logfile = _files.truncate_client_logfile
+remove_old_logfile = _files.remove_old_logfile
+
 LogFileDigger = _logs.LogFileDigger
 JournalLogDigger = _logs.JournalLogDigger
 MultihostLogFileDigger = _logs.MultihostLogFileDigger

View File

@@ -0,0 +1,74 @@
+# Copyright (c) 2025 Red Hat, Inc.
+#
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+from __future__ import absolute_import
+
+import os
+
+import tobiko
+from tobiko.shell import sh
+from tobiko.shell import ssh
+
+
+def get_home_absolute_filepath(path: str,
+                               ssh_client: ssh.SSHClientType = None) -> str:
+    if ssh_client is None:
+        return _get_local_filepath(path)
+    else:
+        return _get_remote_filepath(path, ssh_client)
+
+
+def _get_local_filepath(path: str) -> str:
+    final_dir_path = f'{sh.get_user_home_dir()}/{path}'
+    if not os.path.exists(final_dir_path):
+        os.makedirs(final_dir_path)
+    return final_dir_path
+
+
+def _get_remote_filepath(path: str,
+                         ssh_client: ssh.SSHClientType) -> str:
+    homedir = sh.execute('echo ~', ssh_client=ssh_client).stdout.rstrip()
+    final_dir_path = f'{homedir}/{path}'
+    sh.make_remote_dirs(final_dir_path, ssh_client=ssh_client)
+    return final_dir_path
+
+
+def truncate_client_logfile(
+        logfile: str,
+        ssh_client: ssh.SSHClientType = None) -> None:
+    if ssh_client:
+        _truncate_remote_logfile(logfile, ssh_client)
+    else:
+        tobiko.truncate_logfile(logfile)
+
+
+def _truncate_remote_logfile(logfile: str,
+                             ssh_client: ssh.SSHClientType) -> None:
+    truncated_logfile = tobiko.get_truncated_filename(logfile)
+    sh.execute(f'/usr/bin/mv {logfile} {truncated_logfile}',
+               ssh_client=ssh_client)
+
+
+def remove_old_logfile(logfile: str,
+                       ssh_client: ssh.SSHClientType = None):
+    if ssh_client:
+        sh.execute(f'/usr/bin/rm -f {logfile}',
+                   ssh_client=ssh_client)
+    else:
+        try:
+            os.remove(logfile)
+        except FileNotFoundError:
+            pass
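A minimal usage sketch of the new helpers (the directory and log file names are illustrative; ssh_client stands for any tobiko SSHClientType, for example a server stack's ssh_client):

    from tobiko.shell import files

    # without an ssh_client the directory is created under the local user's home
    local_dir = files.get_home_absolute_filepath('tobiko_ping_results')

    # with an ssh_client the same relative path is resolved and created on the remote host
    remote_dir = files.get_home_absolute_filepath('tobiko_ping_results',
                                                  ssh_client=ssh_client)

    logfile = f'{remote_dir}/ping_203.0.113.10.log'
    files.truncate_client_logfile(logfile, ssh_client=ssh_client)  # move the old log aside
    files.remove_old_logfile(logfile, ssh_client=ssh_client)       # or delete any leftover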

View File

@@ -21,6 +21,7 @@ import typing # noqa

 import tobiko
 from tobiko.shell import sh
 from tobiko.shell import ssh
+from tobiko.shell.sh import _command


 class FilesNotFound(tobiko.TobikoException):
@@ -32,9 +33,9 @@ NameType = typing.Union[None, str, typing.List[str]]
 PathType = typing.Union[str, typing.Iterable[str]]


-def find_files(path: sh.ShellCommandType,
+def find_files(path: _command.ShellCommandType,
                name: NameType = None,
-               command: sh.ShellCommandType = 'find',
+               command: _command.ShellCommandType = 'find',
                max_depth: int = None,
                modified_since: tobiko.Seconds = None,
                ssh_client: ssh.SSHClientType = None,

View File

@@ -20,6 +20,7 @@ import typing # noqa

 import tobiko
 from tobiko.shell import sh
 from tobiko.shell import ssh
+from tobiko.shell.sh import _command


 class NoMatchingLinesFound(tobiko.TobikoException):
@@ -28,8 +29,8 @@ class NoMatchingLinesFound(tobiko.TobikoException):

 def grep(pattern: str,
-         command: typing.Optional[sh.ShellCommandType] = None,
-         grep_command: sh.ShellCommandType = 'zgrep -Eh',
+         command: typing.Optional[_command.ShellCommandType] = None,
+         grep_command: _command.ShellCommandType = 'zgrep -Eh',
          files: typing.Optional[typing.List[str]] = None,
          ssh_client: ssh.SSHClientFixture = None,
          blank_lines: bool = True,
@@ -77,6 +78,6 @@ def grep_files(pattern: str,

 def grep_lines(pattern: str,
-               command: sh.ShellCommandType,
+               command: _command.ShellCommandType,
                **grep_params) -> typing.List[str]:
     return grep(pattern=pattern, command=command, **grep_params)

View File

@@ -25,6 +25,7 @@ from oslo_log import log

 import tobiko
 from tobiko import config
+from tobiko.shell import files
 from tobiko.shell.iperf3 import _interface
 from tobiko.shell.iperf3 import _parameters
 from tobiko.shell import sh
@@ -38,58 +39,11 @@ LOG = log.getLogger(__name__)

 def get_iperf3_logs_filepath(address: typing.Union[str, netaddr.IPAddress],
                              path: str,
                              ssh_client: ssh.SSHClientType = None) -> str:
-    if ssh_client:
-        final_dir = _get_remote_filepath(path, ssh_client)
-    else:
-        final_dir = _get_local_filepath(path)
+    final_dir = files.get_home_absolute_filepath(path, ssh_client)
     filename = f'iperf_{address}.log'
     return os.path.join(final_dir, filename)


-def _get_local_filepath(path: str) -> str:
-    final_dir_path = f'{sh.get_user_home_dir()}/{path}'
-    if not os.path.exists(final_dir_path):
-        os.makedirs(final_dir_path)
-    return final_dir_path
-
-
-def _get_remote_filepath(path: str,
-                         ssh_client: ssh.SSHClientType) -> str:
-    homedir = sh.execute('echo ~', ssh_client=ssh_client).stdout.rstrip()
-    final_dir_path = f'{homedir}/{path}'
-    sh.execute(f'/usr/bin/mkdir -p {final_dir_path}',
-               ssh_client=ssh_client)
-    return final_dir_path
-
-
-def _truncate_iperf3_client_logfile(
-        logfile: str,
-        ssh_client: ssh.SSHClientType = None) -> None:
-    if ssh_client:
-        _truncate_remote_logfile(logfile, ssh_client)
-    else:
-        tobiko.truncate_logfile(logfile)
-
-
-def _truncate_remote_logfile(logfile: str,
-                             ssh_client: ssh.SSHClientType) -> None:
-    truncated_logfile = tobiko.get_truncated_filename(logfile)
-    sh.execute(f'/usr/bin/mv {logfile} {truncated_logfile}',
-               ssh_client=ssh_client)
-
-
-def _remove_old_logfile(logfile: str,
-                        ssh_client: ssh.SSHClientType = None):
-    if ssh_client:
-        sh.execute(f'/usr/bin/rm -f {logfile}',
-                   ssh_client=ssh_client)
-    else:
-        try:
-            os.remove(logfile)
-        except FileNotFoundError:
-            pass
-
-
 def get_bandwidth(address: typing.Union[str, netaddr.IPAddress],
                   bitrate: int = None,
                   download: bool = None,
@@ -169,7 +123,7 @@ def execute_iperf3_client_in_background(
     # it needs to be removed, otherwise iperf will append new log
     # to the end of the existing file and this will make json output
     # file to be malformed
-    _remove_old_logfile(output_path, ssh_client=ssh_client)
+    files.remove_old_logfile(output_path, ssh_client=ssh_client)
     # If there is ssh client for the server where iperf3 server is going
     # to run, lets make sure it is started fresh as e.g. in case of
     # failure in the previous run, it may report that is still "busy" thus
@@ -206,26 +160,20 @@ def _get_iperf3_pid(
         port: int = None,
         protocol: str = None,
         ssh_client: ssh.SSHClientType = None) -> typing.Union[int, None]:
-    try:
-        iperf_pids = sh.execute(
-            'pidof iperf3', ssh_client=ssh_client).stdout.rstrip().split(" ")
-    except sh.ShellCommandFailed:
-        return None
-    for iperf_pid in iperf_pids:
-        proc_cmdline = sh.get_command_line(
-            iperf_pid,
-            ssh_client=ssh_client)
-        if address and str(address) in proc_cmdline:
-            # This is looking for the iperf client instance
-            return int(iperf_pid)
-        elif port and protocol:
-            # By looking for port and protocol we are looking
-            # for the iperf3 server's PID
-            if "-s" in proc_cmdline and f"-p {port}" in proc_cmdline:
-                if ((protocol.lower() == 'udp' and "-u" in proc_cmdline) or
-                        (protocol.lower() == 'tcp' and
-                         '-u' not in proc_cmdline)):
-                    return int(iperf_pid)
+    if address:
+        iperf_commands = [f'iperf3 .*{address}']
+    elif protocol and protocol.lower() == 'udp':
+        iperf_commands = [f'iperf3 .*-s .*-u .*-p {port}',
+                          f'iperf3 .*-s .*-p {port} .*-u']
+    else:
+        iperf_commands = [f'iperf3 .*-s .*-p {port}']
+
+    for iperf_command in iperf_commands:
+        iperf_processes = sh.list_processes(command_line=iperf_command,
+                                            ssh_client=ssh_client)
+        if iperf_processes:
+            return iperf_processes.unique.pid
+    LOG.debug('no iperf3 processes were found')
     return None
@@ -286,7 +234,7 @@ def check_iperf3_client_results(address: typing.Union[str, netaddr.IPAddress],
         else:
             current_break = 0

-    _truncate_iperf3_client_logfile(logfile, ssh_client)
+    files.truncate_client_logfile(logfile, ssh_client)

     testcase = tobiko.get_test_case()
     testcase.assertLessEqual(longest_break,
@@ -319,7 +267,7 @@ def stop_iperf3_client(address: typing.Union[str, netaddr.IPAddress],
     if pid:
         LOG.info(f'iperf3 client process to > {address} already running '
                  f'with PID: {pid}')
-        sh.execute(f'sudo kill {pid}', ssh_client=ssh_client)
+        sh.execute(f'kill {pid}', ssh_client=ssh_client, sudo=True)


 def start_iperf3_server(
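For reference, the regex patterns built in the reworked _get_iperf3_pid are meant to match command lines of roughly this shape (illustrative examples, not taken from the commit):

    # client:      iperf3 -c 203.0.113.10 ...   matched by  f'iperf3 .*{address}'
    # UDP server:  iperf3 -s -u -p 5201 ...     matched by  f'iperf3 .*-s .*-u .*-p {port}'
    # TCP server:  iperf3 -s -p 5201 ...        matched by  f'iperf3 .*-s .*-p {port}'

PID discovery is thereby delegated to sh.list_processes(command_line=...) instead of pidof plus /proc cmdline inspection, and .unique.pid is expected to yield the single matching process.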

View File

@@ -65,3 +65,7 @@ PingStatistics = _statistics.PingStatistics
 write_ping_to_file = _ping.write_ping_to_file
 check_ping_statistics = _ping.check_ping_statistics
 skip_check_ping_statistics = _ping.skip_check_ping_statistics
+ping_alive = _ping.ping_alive
+stop_ping = _ping.stop_ping
+check_ping_results = _ping.check_ping_results
+execute_ping_in_background = _ping.execute_ping_in_background
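With these exports the new helpers are reachable through the package namespace; a short sketch (the address and ssh_client are illustrative placeholders):

    from tobiko.shell import ping

    if ping.ping_alive('203.0.113.10', ssh_client=ssh_client):
        ping.stop_ping('203.0.113.10', ssh_client=ssh_client)
        ping.check_ping_results('203.0.113.10', ssh_client=ssh_client)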

View File

@@ -172,8 +172,8 @@ def get_positive_integer(name, value, default=None):
         return get_positive_integer(name, getattr(default, name))
     if value is not None:
         value = int(value)
-        if value <= 0:
-            message = "{!r} value must be positive: {!r}".format(
+        if value < 0:
+            message = "{!r} value must be zero or greater: {!r}".format(
                 name, value)
             raise ValueError(message)
     return value
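The relaxed check now rejects only negative values, so 0 is accepted; this appears to be needed because the background ping added below is started with count=0 and deadline=0 (run until stopped), values presumably validated through this helper. For illustration:

    get_positive_integer('count', 0)    # returns 0 after this change (previously raised ValueError)
    get_positive_integer('count', -1)   # still raises ValueError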

View File

@@ -27,13 +27,17 @@ import netaddr
 from oslo_log import log

 import tobiko
+from tobiko import config
+from tobiko.shell import files
 from tobiko.shell import sh
+from tobiko.shell import ssh
 from tobiko.shell.ping import _interface
 from tobiko.shell.ping import _exception
 from tobiko.shell.ping import _parameters
 from tobiko.shell.ping import _statistics


+CONF = config.CONF
 LOG = log.getLogger(__name__)
@@ -466,12 +470,16 @@ def get_vm_ping_log_files(glob_ping_log_pattern='tobiko_ping_results/ping_'
         yield vm_ping_log_filename


-def check_ping_statistics(failure_limit=10):
+def check_ping_statistics(failure_limit=None):
     """Gets a list of ping_vm_log files and
     iterates their lines, checks if max ping
     failures have been reached per fip=file"""
+    if failure_limit is None:
+        failure_limit = CONF.tobiko.rhosp.max_ping_loss_allowed
+    ping_files_found = 0
     # iterate over ping_vm_log files:
     for filename in list(get_vm_ping_log_files()):
+        ping_files_found += 1
         with io.open(filename, 'rt') as fd:
             LOG.info(f'checking ping log file: {filename}, '
                      f'failure_limit is :{failure_limit}')
@@ -496,8 +504,140 @@ def check_ping_statistics(failure_limit=10):
                         f'to vm fip destination: '
                         f'{ping_failures_list[-1]["destination"]}')
+    if ping_files_found == 0:
+        tobiko.fail('No ping log files found')


 def skip_check_ping_statistics():
     for filename in list(get_vm_ping_log_files()):
         tobiko.truncate_logfile(filename)
         LOG.info(f'skipping ping failures in ping log file: {filename}')
+
+
+def _get_ping_pid(
+        address: typing.Union[str, netaddr.IPAddress, None] = None,
+        ssh_client: ssh.SSHClientType = None) -> typing.Union[int, None]:
+    ping_command = 'ping'
+    if address is not None:
+        ping_command += f' .*{address}'
+    ping_processes = sh.list_processes(command_line=ping_command,
+                                       ssh_client=ssh_client)
+    if not ping_processes:
+        LOG.debug('no ping processes were found')
+        return None
+    else:
+        return ping_processes.unique.pid
+
+
+def ping_alive(address: typing.Union[str, netaddr.IPAddress],  # noqa; pylint: disable=W0613
+               ssh_client: ssh.SSHClientType = None,
+               **kwargs) -> bool:
+    return bool(_get_ping_pid(address=address, ssh_client=ssh_client))
+
+
+def stop_ping(address: typing.Union[str, netaddr.IPAddress],
+              ssh_client: ssh.SSHClientType = None,
+              **kwargs):  # noqa; pylint: disable=W0613
+    pid = _get_ping_pid(address=address, ssh_client=ssh_client)
+    if pid:
+        LOG.info(f'ping process to > {address} already running '
+                 f'with PID: {pid}')
+        # the SIGINT signal makes ping write the "ping statistics" block
+        # before exiting
+        sh.execute(f'kill -s SIGINT {pid}', ssh_client=ssh_client, sudo=True)
+
+
+def _get_ping_logs_filepath(address: typing.Union[str, netaddr.IPAddress],
+                            path: str,
+                            ssh_client: ssh.SSHClientType = None) -> str:
+    final_dir = files.get_home_absolute_filepath(path, ssh_client)
+    filename = f'ping_{address}.log'
+    return os.path.join(final_dir, filename)
+
+
+# TODO(eolivare): replace check_ping_statistics with check_ping_results
+def check_ping_results(address: typing.Union[str, netaddr.IPAddress],
+                       output_dir: str = 'tobiko_ping_results',
+                       ssh_client: ssh.SSHClientType = None,
+                       **kwargs):  # noqa; pylint: disable=W0613
+    testcase = tobiko.get_test_case()
+    testcase.assertFalse(ping_alive(address, ssh_client))
+    # This function expects that the result file is available locally already
+    logfile = _get_ping_logs_filepath(address, output_dir, ssh_client)
+    try:
+        ping_log_raw = sh.execute(
+            f"cat {logfile}", ssh_client=ssh_client).stdout
+    except sh.ShellCommandFailed as err:
+        if config.is_prevent_create():
+            # Tobiko is not expected to create resources in this run
+            # so ping should be already running and log file should
+            # be already there, if it is not, it should fail
+            tobiko.fail('Failed to read ping log from the file. '
+                        f'Ping Destination IP address: {address}; '
+                        f'Logfile: {logfile}')
+        else:
+            # Tobiko is creating resources so it is normal that file was not
+            # there yet
+            LOG.debug(f'Failed to read ping log from the file. '
+                      f'Error: {err}')
+            return
+
+    LOG.debug(f'ping log raw: {ping_log_raw}')
+    if not ping_log_raw:
+        if config.is_prevent_create():
+            # Tobiko is not expected to create resources in this run
+            # so ping should be already running and log file should
+            # be already there, if it is not, it should fail
+            tobiko.fail('Failed empty ping file.')
+        else:
+            LOG.debug('Failed ping log file empty')
+            return
+
+    files.truncate_client_logfile(logfile, ssh_client)
+
+    ping_stats = _statistics.parse_ping_statistics(ping_log_raw)
+    testcase.assertGreater(ping_stats.transmitted, 0)
+    testcase.assertGreater(ping_stats.received, 0)
+    testcase.assertLessEqual(ping_stats.transmitted - ping_stats.received,
+                             CONF.tobiko.rhosp.max_ping_loss_allowed)
+
+
+def start_background_ping(address: typing.Union[str, netaddr.IPAddress],
+                          output_path: str,
+                          ssh_client: ssh.SSHClientType = None):
+    parameters = _parameters.get_ping_parameters(host=address,
+                                                 count=0,
+                                                 deadline=0)
+    command = _interface.get_ping_command(parameters, ssh_client)
+    # both stdout and stderr need to be written to the provided log file
+    command += '2>&1'
+    command += f'> {output_path}'
+    process = sh.process(command, ssh_client=ssh_client)
+    process.execute()
+
+
+# TODO(eolivare): replace write_ping_to_file with execute_ping_in_background
+def execute_ping_in_background(address: typing.Union[str, netaddr.IPAddress],
+                               output_dir: str = 'tobiko_ping_results',
+                               ssh_client: ssh.SSHClientType = None,
+                               **kwargs):  # noqa; pylint: disable=W0613
+    output_path = _get_ping_logs_filepath(address, output_dir, ssh_client)
+    LOG.info(f'starting ping process to > {address} , '
+             f'output file is : {output_path}')
+    # just in case there is some leftover file from previous run,
+    # it needs to be removed, otherwise ping will append new log
+    # to the end of the existing file and this will make output
+    # file to be malformed
+    files.remove_old_logfile(output_path, ssh_client=ssh_client)
+    # Stop ping in case it is running
+    stop_ping(address, ssh_client)
+    # Start ping again
+    start_background_ping(address, output_path, ssh_client)
+    # if ping does not start properly, fail the test
+    if not ping_alive(address, ssh_client):
+        tobiko.fail('background ping process did not start')
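As a rough illustration of what check_ping_results parses: with count=0 and deadline=0 the ping runs until stop_ping sends SIGINT, at which point the log file ends with the standard summary block, for example (made-up numbers):

    --- 203.0.113.10 ping statistics ---
    120 packets transmitted, 118 received, 1.66% packet loss, time 119190ms

check_ping_results then asserts that transmitted minus received (2 here) does not exceed CONF.tobiko.rhosp.max_ping_loss_allowed.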

View File

@@ -23,6 +23,7 @@ from tobiko.shell.sh import _execute
 from tobiko.shell.sh import _hostname
 from tobiko.shell.sh import _io
 from tobiko.shell.sh import _local
+from tobiko.shell.sh import _mkdirs
 from tobiko.shell.sh import _nameservers
 from tobiko.shell.sh import _nmcli
 from tobiko.shell.sh import _path
@@ -141,3 +142,5 @@ find_command = _which.find_command
 get_nm_connection_ids = _nmcli.get_nm_connection_ids
 get_nm_connection_values = _nmcli.get_nm_connection_values
+
+make_remote_dirs = _mkdirs.make_remote_dirs

View File

@@ -23,6 +23,6 @@ from tobiko.shell import ssh
 def make_remote_dirs(file_name: str,
                      ssh_client: ssh.SSHClientType = None,
                      sudo: bool = None):
-    _execute.execute(f'mkdirs -p "{file_name}"',
+    _execute.execute(f'mkdir -p "{file_name}"',
                      ssh_client=ssh_client,
                      sudo=sudo)
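This also fixes the helper itself: the previous command spelled a non-existent mkdirs binary. The call added in _files.py above expands to roughly the following (the path is illustrative):

    sh.make_remote_dirs('/home/cloud-admin/tobiko_ping_results',
                        ssh_client=ssh_client)
    # runs on the remote host:  mkdir -p "/home/cloud-admin/tobiko_ping_results"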

View File

@@ -65,6 +65,12 @@ class NetworkTest(BaseNetworkTest):

 @pytest.mark.background
 class BackgroundProcessTest(BaseNetworkTest):
+    """Test designed to run in the background,
+    then collect results.
+    Logic: checks if process exists, if so stop the process,
+    then execute some check logic i.e. a check function.
+    if the process by name isn't running,
+    start a separate process i.e a background function"""

     stack = tobiko.required_fixture(stacks.AdvancedPeerServerStackFixture)
@@ -77,16 +83,31 @@ class BackgroundProcessTest(BaseNetworkTest):
                 'Background tests not supported by this topology class.')

     def test_check_background_vm_ping(self):
-        """ Tests that are designed to run in the background ,
-        then collect results.
-        Logic: checks if process exists, if so stop the process,
-        then execute some check logic i.e. a check function.
-        if the process by name isn't running,
-        start a separate process i.e a background function"""
+        """Ping from test machine/container/pod to VM with FIP,
+        validating north-south connectivity with SDNAT (source-destination
+        NAT)."""
         self.topology.check_or_start_background_vm_ping(
             self.stack.peer_stack.floating_ip_address)

+    def test_check_background_vm_ping_snat(self):
+        """Ping from a VM without FIP to an external IP,
+        validating north-south connectivity with SNAT (source NAT)."""
+        # make sure the VM does not have any FIP
+        self.assertFalse(self.stack.has_floating_ip)
+        try:
+            ext_subnet = neutron.list_subnets(
+                network=self.stack.network_stack.gateway_network_id,
+                ip_version=4)[0]
+        except IndexError:
+            ext_subnet = neutron.list_subnets(
+                network=self.stack.network_stack.gateway_network_id,
+                ip_version=6)[0]
+        self.topology.check_or_start_background_vm_ping(
+            ext_subnet['gateway_ip'],
+            ssh_client=self.stack.ssh_client)
+
     def test_east_west_tcp_traffic_background_iperf(self):
         """ Test East-West TCP traffic in the existing flow.

View File

@@ -30,18 +30,29 @@ from tobiko.shell import ssh

 # Test is inteded for D/S env
 @overcloud.skip_if_missing_overcloud
-def check_or_start_background_vm_ping(server_ip):
+def check_or_start_background_vm_ping(
+        server_ip: typing.Union[str, netaddr.IPAddress],
+        ssh_client: ssh.SSHClientType = None):
     """Check if process exists, if so stop and check ping health
     if not : start a new separate ping process.
     Executes a Background ping to a vm floating_ip,
     this test is intended to be run and picked up again
     by the next tobiko run. Ping results are parsed
     and a failure is raised if ping failure is above a certain amount"""
-    sh.check_or_start_background_process(
-        bg_function=ping.write_ping_to_file,
-        bg_process_name='tobiko_background_ping',
-        check_function=ping.check_ping_statistics,
-        ping_ip=server_ip)
+    if ssh_client is None:
+        sh.check_or_start_background_process(
+            bg_function=ping.write_ping_to_file,
+            bg_process_name='tobiko_background_ping',
+            check_function=ping.check_ping_statistics,
+            ping_ip=server_ip)
+    else:
+        sh.check_or_start_external_process(
+            start_function=ping.execute_ping_in_background,
+            check_function=ping.check_ping_results,
+            liveness_function=ping.ping_alive,
+            stop_function=ping.stop_ping,
+            address=server_ip,
+            ssh_client=ssh_client)


 # Test is inteded for D/S env