Add support for periodic reporting

Change-Id: If6a38a3c9d53d25d812cc88748e927589cc88f21

commit ae03ef1d35 (parent 8b50d685c6)
@@ -266,8 +266,13 @@ class Router(object):
             network.delete_network()
         # Also delete the shared port and remove it from router interface
         if self.shared_network:
-            self.remove_router_interface(self.shared_network, use_port=True)
-            self.shared_network = None
+            for _ in range(10):
+                try:
+                    self.remove_router_interface(self.shared_network, use_port=True)
+                    self.shared_network = None
+                    break
+                except Exception:
+                    time.sleep(1)

     def create_router(self, router_name, ext_net):
         """
@@ -302,7 +307,19 @@ class Router(object):
         """
         # Delete the network resources first and than delete the router itself
         self.delete_network_resources()
-        self.neutron_client.delete_router(self.router['router']['id'])
+        for _ in range(10):
+            try:
+                self.neutron_client.remove_gateway_router(self.router['router']['id'])
+                self.shared_network = None
+                break
+            except Exception:
+                time.sleep(1)
+        for _ in range(10):
+            try:
+                self.neutron_client.delete_router(self.router['router']['id'])
+                break
+            except Exception:
+                time.sleep(1)

     def _port_create_neutron(self, network_instance):
         """
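Note: both hunks above hand-roll the same retry-on-exception loop around Neutron calls. A minimal sketch of that pattern as a reusable helper (hypothetical name, not part of this change), assuming a one-second pause between attempts as in the diff:

import time

def retry_call(func, attempts=10, delay=1):
    # Call func(); swallow transient Neutron errors and retry up to
    # `attempts` times, sleeping `delay` seconds between attempts,
    # mirroring the loops in the hunks above.
    for _ in range(attempts):
        try:
            return func()
        except Exception:
            time.sleep(delay)

# Example (names taken from the diff context):
# retry_call(lambda: neutron_client.delete_router(router['router']['id']))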
@@ -114,6 +114,8 @@ client:
     timeout: 5
     # Connection Type: "Keep-alive", "New"
     connection_type: 'Keep-alive'
+    # Interval for periodical report
+    report_interval: 5
     # Duration of testing tools (seconds)
     duration: 30

@@ -65,7 +65,7 @@ sed -i "s/^exit\s0/python \/kb_test\/kb_vm_agent.py \&\n\0/g" /etc/rc.local
 pip install redis

 # install the http traffic generator
-git clone git://github.com/giltene/wrk2.git
+git clone git://github.com/yicwang/wrk2.git
 cd wrk2
 make
 mv wrk /usr/local/bin/wrk2
@@ -75,11 +75,14 @@ class KB_Instance(object):
     # Run the HTTP benchmarking tool
     @staticmethod
     def run_http_test(dest_path, target_url, threads, connections,
-                      rate_limit, duration, timeout, connection_type):
+                      rate_limit, duration, timeout, connection_type,
+                      report_interval):
         if not rate_limit:
             rate_limit = 65535
-        cmd = '%s -t%d -c%d -R%d -d%ds --timeout %ds --latency --s /kb_test/kb_wrk2.lua %s' % \
-              (dest_path, threads, connections, rate_limit, duration, timeout, target_url)
+        cmd = '%s -t%d -c%d -R%d -d%ds -p%ds --timeout %ds --latency '\
+              '--s /kb_test/kb_wrk2.lua %s' % \
+              (dest_path, threads, connections, rate_limit, duration,
+               report_interval, timeout, target_url)
         return cmd


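Note: the new report_interval argument adds a -p interval flag to the wrk2 command line, presumably honored by the forked wrk2 (yicwang/wrk2) that the install script above now clones. For illustration only, with assumed values (none of these numbers come from the commit; dest_path matches the install script, the URL is made up):

cmd = KB_Instance.run_http_test(dest_path='/usr/local/bin/wrk2',
                                target_url='http://192.168.1.2/index.html',
                                threads=2, connections=1000, rate_limit=500,
                                duration=30, timeout=5,
                                connection_type='Keep-alive', report_interval=5)
# cmd is now (wrapped here for readability):
#   /usr/local/bin/wrk2 -t2 -c1000 -R500 -d30s -p5s --timeout 5s --latency \
#       --s /kb_test/kb_wrk2.lua http://192.168.1.2/index.html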
@@ -134,6 +137,24 @@ class KB_VM_Agent(object):

         return (p.returncode, stdout, stderr)

+    def exec_command_report(self, cmd):
+        # Execute the command, reporting periodically, and returns the outputs
+        cmds = ['bash', '-c']
+        cmds.append(cmd)
+        p_output = ''
+        p = subprocess.Popen(cmds, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+
+        lines_iterator = iter(p.stdout.readline, b"")
+        for line in lines_iterator:
+            p_output += line
+            if line.strip() == "=== REPORT END ===":
+                cmd_res_dict = dict(zip(("status", "stdout", "stderr"), (0, p_output, '')))
+                self.report('REPORT', 'http', cmd_res_dict)
+                p_output = ''
+
+        stderr = p.communicate()[1]
+        return (p.returncode, p_output, stderr)
+
     def process_cmd(self, message):
         if message['cmd'] == 'ACK':
             # When 'ACK' is received, means the master node
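Note: exec_command_report() streams the tool's stdout line by line and emits one REPORT message each time it sees the "=== REPORT END ===" marker, presumably printed by kb_wrk2.lua after each periodic summary. A self-contained sketch of that splitting logic (Python 2 style to match the agent; the bash loop stands in for wrk2 output and is not part of the commit):

import subprocess

# Fake producer: three report blocks, each terminated by the end-of-report marker.
cmd = 'for i in 1 2 3; do echo "sample $i"; echo "=== REPORT END ==="; done'
p = subprocess.Popen(['bash', '-c', cmd],
                     stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p_output = ''
for line in iter(p.stdout.readline, b""):
    p_output += line
    if line.strip() == "=== REPORT END ===":
        # The real agent wraps p_output in a dict and pushes it via self.report()
        print 'periodic report:\n%s' % p_output
        p_output = ''
stderr = p.communicate()[1]
print 'exit code: %d' % p.returncode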
@@ -190,7 +211,7 @@ class KB_VM_Agent(object):
            dest_path=self.user_data['http_tool']['dest_path'],
            target_url=self.user_data['target_url'],
            **self.user_data['http_tool_configs'])
-        return self.exec_command(self.last_cmd)
+        return self.exec_command_report(self.last_cmd)

 def exec_command(cmd):
     p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
@@ -103,9 +103,12 @@ class KBRunner(object):
         retry_count = max(timeout / polling_interval, 1)
         retry = cnt_succ = cnt_failed = 0
         clist = self.client_dict.copy()
+        samples = []
+        http_tool = self.client_dict.values()[0].http_tool

         while (retry < retry_count and len(clist)):
             time.sleep(polling_interval)
+            sample_count = 0
             while True:
                 msg = self.pubsub.get_message()
                 if not msg:
@@ -127,6 +130,12 @@ class KBRunner(object):
                     clist[vm_name].up_flag = True
                     clist.pop(vm_name)
                     cnt_succ = cnt_succ + 1
+                elif cmd == 'REPORT':
+                    sample_count = sample_count + 1
+                    # Parse the results from HTTP Tools
+                    instance = self.client_dict[vm_name]
+                    self.result[vm_name] = instance.http_client_parser(**payload['data'])
+                    samples.append(self.result[vm_name])
                 elif cmd == 'DONE':
                     self.result[vm_name] = payload['data']
                     clist.pop(vm_name)
@@ -142,9 +151,15 @@ class KBRunner(object):
                 else:
                     LOG.error('[%s] received invalid command: %s' + (vm_name, cmd))

-            LOG.info("%d Succeed, %d Failed, %d Pending... Retry #%d" %
-                     (cnt_succ, cnt_failed, len(clist), retry))
+            log_msg = "%d Succeed, %d Failed, %d Pending... Retry #%d" %\
+                      (cnt_succ, cnt_failed, len(clist), retry)
+            if sample_count != 0:
+                log_msg += " (%d sample(s) received)" % sample_count
+            LOG.info(log_msg)

+            if sample_count != 0:
+                print http_tool.consolidate_samples(samples, len(self.client_dict))
+                samples = []
             retry = retry + 1

         return (cnt_succ, cnt_failed, len(clist))
@@ -126,14 +126,13 @@ class WrkTool(PerfTool):
                 all_res[key] += item['results'][key]
             all_res[key] = int(all_res[key])

+        # for item in results:
+        #     print item['results']['latency_stats']
+
         if 'latency_stats' in results[0]['results']:
             all_res['latency_stats'] = []
             first_result = results[0]['results']['latency_stats']
             latency_counts = len(first_result)
-
-            # for item in results:
-            #     print item['results']['latency_stats']
-
             for i in range(latency_counts):
                 latency_avg = 0
                 for item in results:
@@ -143,3 +142,14 @@ class WrkTool(PerfTool):
                 all_res['latency_stats'].append(latency_tup)

         return all_res
+
+    @staticmethod
+    def consolidate_samples(results, vm_count):
+        all_res = WrkTool.consolidate_results(results)
+        total_count = len(results) / vm_count
+        if not total_count:
+            return all_res
+
+        all_res['http_rps'] = all_res['http_rps'] / total_count
+        all_res['http_throughput_kbytes'] = all_res['http_throughput_kbytes'] / total_count
+        return all_res
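Note: consolidate_samples() turns the per-sample sums produced by consolidate_results() into an aggregate rate per reporting interval by dividing by the number of samples each VM contributed. A worked example with assumed numbers (not taken from the commit):

vm_count = 4            # client VMs polled by KBRunner
samples_received = 8    # len(results): each VM delivered 2 REPORT samples
summed_rps = 8 * 1000   # consolidate_results() sums http_rps over all 8 samples
total_count = samples_received / vm_count   # == 2 (Python 2 integer division)
print summed_rps / total_count              # == 4000 req/s aggregated across the 4 VMs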