From 6a41a81c9eeac9e6ebd20a9d753c75350b4a83cf Mon Sep 17 00:00:00 2001
From: kbespalov <kbespalov@mirantis.com>
Date: Fri, 12 Aug 2016 15:57:23 +0300
Subject: [PATCH] Fix calculation of duration in simulator.py

When we calculate metrics like msg/sec, latency, etc.,
the start and end times should be the times of the
first and last processed messages:

RPC Server life timeline:

[----0..5 sec----][---5..10 sec---][---10..15 sec--]
 waiting clients    10 msg recved     wait sigint

expected: duration 5 sec, 2 msg/sec
actual (incorrect): duration 15 sec, ~0.67 msg/sec

There is no reason to move the boundaries if the server was idle
for a few seconds before the clients started and after they finished.
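
For illustration only (not part of this patch): a minimal standalone
sketch of the intended calculation, using made-up series points that
match the timeline above:

    # hypothetical series points; only the 5..10 sec window carries messages
    points = [
        {'timestamp': 0, 'count': 0},    # waiting for clients
        {'timestamp': 5, 'count': 4},    # first processed messages
        {'timestamp': 10, 'count': 6},   # last processed messages
        {'timestamp': 15, 'count': 0},   # idle until SIGINT
    ]

    start, end, count = float('inf'), 0, 0
    for point in points:
        count += point['count']
        if point['count']:
            # only points that actually carry messages move the boundaries
            start = min(start, point['timestamp'])
            end = max(end, point['timestamp'])

    duration = end - start
    print(duration, count / duration)    # 5 sec, 2.0 msg/sec (not 15, ~0.67)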

Change-Id: I33e0a605b54ea7b89977504892528c41c3b00a68
---
 tools/simulator.py | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/tools/simulator.py b/tools/simulator.py
index 2f3161b98..8a3ff5a70 100755
--- a/tools/simulator.py
+++ b/tools/simulator.py
@@ -125,6 +125,9 @@ class MessageStatsCollector(object):
     def monitor(self):
         global IS_RUNNING
         if IS_RUNNING:
+            # NOTE(kbespalov): this is not precise: the monitor restarts
+            # with a period of 1 sec +/- ~150 ms due to high threading
+            # contention between the rpc clients.
             threading.Timer(1.0, self.monitor).start()
         now = time.time()
 
@@ -187,8 +190,14 @@ class MessageStatsCollector(object):
         for point in itertools.chain(*(c.get_series() for c in collectors)):
             count += point['count']
             size += point['size']
-            start = min(start, point['timestamp'])
-            end = max(end, point['timestamp'])
+            if point['count']:
+                # NOTE(kbespalov):
+                # we take the start and end time as the time of the
+                # first and last processed message; there is no reason
+                # to move the boundaries if the server was idle before
+                # the clients started and after they finished.
+                start = min(start, point['timestamp'])
+                end = max(end, point['timestamp'])
 
             if 'latency' in point:
                 sum_latencies += point['latency'] * point['count']