Generate report in ReST format

With the new option --book, Shaker is able to generate the report
as a book in ReST format. The report contains only the necessary data.

Change-Id: I3bc575214da5cabb7ec9eee7f497a8d5cbb67097
This commit is contained in:
Ilya Shakhat 2015-12-30 16:08:06 +03:00 committed by Ilya Shakhat
parent e481305c5d
commit bdd41cde7e
10 changed files with 375 additions and 23 deletions

View File

@ -1,5 +1,6 @@
usage: shaker-report [-h] [--config-dir DIR] [--config-file PATH] [--debug]
[--input INPUT] [--log-config-append PATH]
usage: shaker-report [-h] [--book BOOK] [--config-dir DIR]
[--config-file PATH] [--debug] [--input INPUT]
[--log-config-append PATH]
[--log-date-format DATE_FORMAT] [--log-dir LOG_DIR]
[--log-file PATH] [--log-format FORMAT] [--nodebug]
[--nouse-syslog] [--nouse-syslog-rfc-format]
@ -11,6 +12,8 @@ usage: shaker-report [-h] [--config-dir DIR] [--config-file PATH] [--debug]
optional arguments:
-h, --help show this help message and exit
--book BOOK Generate report in ReST format and store it into the
specified folder, defaults to env[SHAKER_BOOK].
--config-dir DIR Path to a config directory to pull *.conf files from.
This file set is sorted, so as to provide a
predictable parse order if individual options are

View File

@ -1,9 +1,9 @@
usage: shaker-spot [-h] [--config-dir DIR] [--config-file PATH] [--debug]
[--log-config-append PATH] [--log-date-format DATE_FORMAT]
[--log-dir LOG_DIR] [--log-file PATH] [--log-format FORMAT]
[--matrix MATRIX] [--no-report-on-error] [--nodebug]
[--nono-report-on-error] [--nouse-syslog]
[--nouse-syslog-rfc-format] [--noverbose]
usage: shaker-spot [-h] [--book BOOK] [--config-dir DIR] [--config-file PATH]
[--debug] [--log-config-append PATH]
[--log-date-format DATE_FORMAT] [--log-dir LOG_DIR]
[--log-file PATH] [--log-format FORMAT] [--matrix MATRIX]
[--no-report-on-error] [--nodebug] [--nono-report-on-error]
[--nouse-syslog] [--nouse-syslog-rfc-format] [--noverbose]
[--nowatch-log-file] [--output OUTPUT] [--report REPORT]
[--report-template REPORT_TEMPLATE] [--scenario SCENARIO]
[--subunit SUBUNIT]
@ -13,6 +13,8 @@ usage: shaker-spot [-h] [--config-dir DIR] [--config-file PATH] [--debug]
optional arguments:
-h, --help show this help message and exit
--book BOOK Generate report in ReST format and store it into the
specified folder, defaults to env[SHAKER_BOOK].
--config-dir DIR Path to a config directory to pull *.conf files from.
This file set is sorted, so as to provide a
predictable parse order if individual options are

View File

@ -1,15 +1,16 @@
usage: shaker [-h] [--agent-join-timeout AGENT_JOIN_TIMEOUT]
[--agent-loss-timeout AGENT_LOSS_TIMEOUT] [--cleanup-on-error]
[--config-dir DIR] [--config-file PATH] [--debug]
[--external-net EXTERNAL_NET] [--flavor-name FLAVOR_NAME]
[--image-name IMAGE_NAME] [--log-config-append PATH]
[--log-date-format DATE_FORMAT] [--log-dir LOG_DIR]
[--log-file PATH] [--log-format FORMAT] [--matrix MATRIX]
[--no-report-on-error] [--nocleanup-on-error] [--nodebug]
[--nono-report-on-error] [--noos-insecure] [--nouse-syslog]
[--nouse-syslog-rfc-format] [--noverbose] [--nowatch-log-file]
[--os-auth-url <auth-url>] [--os-cacert <auth-cacert>]
[--os-insecure] [--os-password <auth-password>]
[--agent-loss-timeout AGENT_LOSS_TIMEOUT] [--book BOOK]
[--cleanup-on-error] [--config-dir DIR] [--config-file PATH]
[--debug] [--external-net EXTERNAL_NET]
[--flavor-name FLAVOR_NAME] [--image-name IMAGE_NAME]
[--log-config-append PATH] [--log-date-format DATE_FORMAT]
[--log-dir LOG_DIR] [--log-file PATH] [--log-format FORMAT]
[--matrix MATRIX] [--no-report-on-error] [--nocleanup-on-error]
[--nodebug] [--nono-report-on-error] [--noos-insecure]
[--nouse-syslog] [--nouse-syslog-rfc-format] [--noverbose]
[--nowatch-log-file] [--os-auth-url <auth-url>]
[--os-cacert <auth-cacert>] [--os-insecure]
[--os-password <auth-password>]
[--os-region-name <auth-region-name>]
[--os-tenant-name <auth-tenant-name>]
[--os-username <auth-username>] [--output OUTPUT]
@ -28,6 +29,8 @@ optional arguments:
execution).
--agent-loss-timeout AGENT_LOSS_TIMEOUT
Timeout to treat agent as lost in seconds
--book BOOK Generate report in ReST format and store it into the
specified folder, defaults to env[SHAKER_BOOK].
--cleanup-on-error Cleans up the heat-stack upon any error occurred during
scenario execution.
--config-dir DIR Path to a config directory to pull *.conf files from.

View File

@ -205,6 +205,10 @@
# Subunit stream file name, defaults to env[SHAKER_SUBUNIT]. (string value)
#subunit = <None>
# Generate report in ReST format and store it into the specified folder,
# defaults to env[SHAKER_BOOK]. (string value)
#book = <None>
# File to read test results from, defaults to env[SHAKER_INPUT]. (string value)
#input = <None>

View File

@ -13,6 +13,7 @@ oslo.log>=1.12.0 # Apache-2.0
oslo.serialization>=1.10.0 # Apache-2.0
oslo.utils!=2.6.0,>=2.4.0 # Apache-2.0
psutil<2.0.0,>=1.1.1
pygal
python-glanceclient>=0.18.0
python-keystoneclient!=1.8.0,>=1.6.0
python-neutronclient>=2.6.0

View File

@ -52,7 +52,7 @@ class TrafficAggregator(base.BaseAggregator):
mean_v = collections.defaultdict(list)
units = {}
for record in records:
for record in sorted(records, key=lambda x: x['concurrency']):
xs.append(record['concurrency'])
for k, v in record['stats'].items():
mean_v[k].append(v['mean'])

View File

@ -173,6 +173,10 @@ REPORT_OPTS = [
default=utils.env('SHAKER_SUBUNIT'),
help='Subunit stream file name, defaults to '
'env[SHAKER_SUBUNIT].'),
cfg.StrOpt('book',
default=utils.env('SHAKER_BOOK'),
help='Generate report in ReST format and store it into the '
'specified folder, defaults to env[SHAKER_BOOK]. '),
]
INPUT_OPTS = [

View File

@ -27,6 +27,7 @@ from shaker.engine import aggregators
from shaker.engine import config
from shaker.engine import sla
from shaker.engine import utils
from shaker.engine import writer
LOG = logging.getLogger(__name__)
@ -139,7 +140,8 @@ def save_to_subunit(sla_records, subunit_filename):
fd.close()
def generate_report(data, report_template, report_filename, subunit_filename):
def generate_report(data, report_template, report_filename, subunit_filename,
book_folder=None):
LOG.debug('Generating report, template: %s, output: %s',
report_template, report_filename or '<dummy>')
@ -173,6 +175,9 @@ def generate_report(data, report_template, report_filename, subunit_filename):
except IOError as e:
LOG.error('Failed to write report file: %s', e)
if book_folder:
writer.write_book(book_folder, data)
def main():
utils.init_config_and_logging(config.REPORT_OPTS + config.INPUT_OPTS)
@ -181,7 +186,7 @@ def main():
report_data = json.loads(utils.read_file(cfg.CONF.input))
generate_report(report_data, cfg.CONF.report_template, cfg.CONF.report,
cfg.CONF.subunit)
cfg.CONF.subunit, cfg.CONF.book)
if __name__ == "__main__":

View File

@ -238,7 +238,8 @@ def act():
'no_report_on_error=True')
else:
report.generate_report(output, cfg.CONF.report_template,
cfg.CONF.report, cfg.CONF.subunit)
cfg.CONF.report, cfg.CONF.subunit,
cfg.CONF.book)
def main():

329
shaker/engine/writer.py Normal file
View File

@ -0,0 +1,329 @@
# Copyright (c) 2016 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import numbers
import os
import textwrap
from oslo_log import log as logging
import pygal
from pygal import style
import six
import yaml
from shaker.engine import utils
LOG = logging.getLogger(__name__)
TABLE_FLOAT_PREC = 2
class ReSTPublisher(object):
    """Publishes report data as a ReST book.

    All textual output is appended to ``index.rst`` inside the target
    folder; charts are rendered by pygal into sibling ``<id>.svg`` files
    referenced from the index.
    """
    # underline characters per header level (0 is the top level)
    header_marks = ['*', '=', '-', '^', '~']

    def __init__(self, folder):
        self.folder = folder
        LOG.info('Create ReST book in: %s', folder)
        # create the folder if missing; an existing folder is reused
        # (the OSError from makedirs is only logged, not fatal)
        try:
            os.makedirs(folder)
        except OSError as e:
            LOG.warning(e)
        self.index = open(os.path.join(folder, 'index.rst'), 'w+')

    def __del__(self):
        # NOTE(review): relies on garbage collection to flush/close
        # index.rst; an explicit close() or context manager would be
        # more deterministic — confirm whether callers can be changed
        self.index.close()

    def ref_label(self, text):
        """Emit a ReST cross-reference label so other docs can :ref: it."""
        self.index.write('.. _%s:\n\n' % utils.strict(text))

    def header(self, text, level=0):
        """Emit a section title underlined according to *level*."""
        self.index.write(text)
        self.index.write('\n')
        self.index.write(self.header_marks[level] * len(text))
        self.index.write('\n\n')

    def subheader(self, text):
        """Emit a bold inline sub-heading followed by a colon."""
        self.index.write('**%s**:' % text)
        self.index.write('\n\n')

    def para(self, text):
        """Emit a paragraph wrapped to 79 columns."""
        self.index.write(textwrap.fill(text, width=79))
        self.index.write('\n\n')

    def code(self, text):
        """Emit *text* as a yaml-highlighted literal block (blank lines dropped)."""
        self.index.write('.. code-block:: yaml\n\n')
        for line in text.split('\n'):
            if line:
                self.index.write(' ' * 4 + line + '\n')
        self.index.write('\n')

    def chart_line(self, chart_id, chart, meta, x_title):
        """Render a line chart to <chart_id>.svg and reference it.

        Row 0 of chart/meta describes the x axis, so series start at
        index 1; series index 2 (if present) is drawn on the secondary
        y axis.
        """
        line_chart = pygal.Line(style=style.RedBlueStyle,
                                fill=True,
                                legend_at_bottom=True,
                                include_x_axis=True,
                                x_title=x_title)
        for i in range(1, len(meta)):
            line_title = meta[i][0]
            if meta[i][1]:
                # meta[i][1] holds the unit; append it to the legend title
                line_title += ', %s' % meta[i][1]
            kwargs = dict(secondary=True) if i == 2 else {}
            line_chart.add(line_title, chart[i][1:], **kwargs)
        line_chart.render_to_file(os.path.join(self.folder,
                                               '%s.svg' % chart_id))
        self.index.write('.. image:: %s.*\n\n' % chart_id)

    def chart_xy(self, chart_id, chart, meta, x_title):
        """Render an XY chart (x values taken from chart row 0) to SVG."""
        xy_chart = pygal.XY(style=style.RedBlueStyle,
                            legend_at_bottom=True,
                            fill=True,
                            include_x_axis=True,
                            x_title=x_title)
        for i in range(1, len(meta)):
            line_title = meta[i][0]
            if meta[i][1]:
                # meta[i][1] holds the unit; append it to the legend title
                line_title += ', %s' % meta[i][1]
            # pair each series value with its x value from row 0
            v = [(chart[0][j], chart[i][j]) for j in range(1, len(chart[i]))]
            kwargs = dict(secondary=True) if i == 2 else {}
            xy_chart.add(line_title, v, **kwargs)
        xy_chart.render_to_file(os.path.join(self.folder,
                                             '%s.svg' % chart_id))
        self.index.write('.. image:: %s.*\n\n' % chart_id)

    def _outline(self, widths):
        # horizontal border of a ReST simple table: '=' runs per column
        s = ' '.join('=' * w for w in widths)
        self.index.write(s)
        self.index.write('\n')

    def table(self, t):
        """Emit *t* (first row = headers) as a ReST simple table.

        Column widths are sized to the widest cell; integers and floats
        are right-aligned, floats printed with TABLE_FLOAT_PREC decimals.
        """
        widths = [max(len(c), TABLE_FLOAT_PREC) for c in t[0]]
        for r in t:
            for i in range(len(widths)):
                if isinstance(r[i], six.string_types):
                    widths[i] = max(widths[i], len(r[i]))
        # header
        self._outline(widths)
        self.index.write(' '.join(('{0:<{1}}'.format(t[0][i], widths[i]))
                                  for i in range(len(widths))))
        self.index.write('\n')
        self._outline(widths)
        # body
        for r in t[1:]:
            cells = []
            for i in range(len(widths)):
                c = r[i]
                if isinstance(c, numbers.Integral):
                    c = '{0:>{1}}'.format(c, widths[i])
                elif isinstance(c, numbers.Number):
                    c = '{0:>{1}.{2}f}'.format(c, widths[i], TABLE_FLOAT_PREC)
                else:
                    c = '{0:<{1}}'.format(c, widths[i])
                cells.append(c)
            self.index.write(' '.join(cells).rstrip())
            self.index.write('\n')
        # bottom border
        self._outline(widths)
        self.index.write('\n')
# Serialize objects as block-style YAML for embedding into code blocks.
yamlize = functools.partial(yaml.safe_dump, indent=2, default_flow_style=False)
def filter_records(records, **kwargs):
    """Return records whose fields match all given keyword filters.

    A record matches when ``record.get(param) == value`` for every
    keyword argument (a missing key compares as None); with no kwargs
    every record matches.

    :param records: iterable of record dicts
    :param kwargs: field name -> required value
    :return: new list of matching records
    """
    # all() short-circuits on the first mismatch, unlike the original
    # bit-and accumulation which evaluated every filter for every record
    return [r for r in records
            if all(r.get(param) == value
                   for param, value in kwargs.items())]
def write_scenario_definition(publisher, scenario):
    """Publish the scenario definition as a YAML code block."""
    publisher.subheader('Scenario')
    rendered = yamlize(scenario)
    publisher.code(rendered)
def write_test_definition(data, publisher, test):
    """Publish the specification of *test* as a YAML code block."""
    publisher.subheader('Test Specification')
    definition = data['tests'][test]
    publisher.code(yamlize(definition))
def write_sla(publisher, records, sla_records):
    """Publish an SLA results table.

    Each SLA entry is joined to the first record whose id it references;
    entries with no matching record are dropped. Nothing is emitted when
    no entry matches.
    """
    rows = [['Expression', 'Concurrency', 'Node', 'Result']]
    for item in sla_records:
        # first record with the referenced id wins, as before
        matched = next((r for r in records if r['id'] == item['record']), None)
        if matched is not None:
            rows.append([item['expression'], matched['concurrency'],
                         matched['node'], item['state']])
    if len(rows) > 1:
        publisher.subheader('SLA')
        publisher.table(rows)
def write_errors(publisher, records):
    """Publish lost/errored records as YAML code blocks, if any exist."""
    failed = [item for item in records
              if item.get('status') in {'lost', 'error'}]
    if not failed:
        return
    publisher.subheader('Errors')
    for item in failed:
        publisher.code(yamlize(item))
def write_concurrency_block(publisher, all_records, local_records, sla):
    """Publish one sub-section per concurrency level.

    For every record in *local_records* the matching agent records are
    looked up in *all_records*; errors are reported, then either a
    detailed per-agent view (few agents and few levels) or a per-node
    stats table is written, followed by the SLA table.
    """
    many_levels = len(local_records) > 2
    for rec in local_records:
        level_value = rec['concurrency']
        if many_levels:
            publisher.header('Concurrency %s' % level_value, level=2)
        agents = filter_records(all_records,
                                type='agent',
                                scenario=rec['scenario'],
                                test=rec['test'],
                                concurrency=level_value)
        healthy = filter_records(agents, status='ok')
        write_errors(publisher, agents)
        # few healthy agents and few levels: go into per-agent details,
        # otherwise show aggregated stats only
        if len(healthy) <= 2 and not many_levels:
            write_agent_block_detailed(publisher, healthy)
        else:
            write_stats(publisher, healthy, 'node')
        write_sla(publisher, healthy, sla)
def write_agent_block_detailed(publisher, records):
    """Publish a detailed per-agent section: line chart plus raw stats."""
    show_headers = len(records) > 1
    for rec in records:
        if show_headers:
            publisher.header('Agent %s' % rec['agent'], level=3)
        chart = rec.get('chart')
        if chart:
            publisher.chart_line(rec['id'], chart, rec['meta'],
                                 x_title='time, s')
        publisher.subheader('Stats')
        publisher.code(yamlize(rec['stats']))
def write_stats(publisher, records, row_header, show_all=False):
    """Publish a stats table, one row per record, sorted by *row_header*.

    Columns come from the first record's ``stats`` mapping (iteration
    order preserved); each stat contributes its mean — or min/mean/max
    when *show_all* — with the unit appended to the column title. Stats
    missing from a record are rendered as 'n/a'. Emits nothing for an
    empty record list.

    :param publisher: ReSTPublisher-like object (subheader/table)
    :param records: list of record dicts with a ``stats`` mapping
    :param row_header: record key used as the first column and sort key
    :param show_all: include min and max columns besides the mean
    """
    if not records:
        return
    publisher.subheader('Stats')
    # sorted() instead of list.sort(): don't mutate the caller's list
    ordered = sorted(records, key=lambda rec: rec[row_header])
    keys = ['min', 'mean', 'max'] if show_all else ['mean']
    # collect column titles and (param, key) metadata from the first record
    headers = [row_header]
    meta = []
    for param, values in ordered[0]['stats'].items():
        for key in keys:
            title = ('%s %s' % (key, param)) if show_all else param
            if values['unit']:
                title += ', ' + values['unit']
            headers.append(title)
            meta.append((param, key))
    # fill the table
    table = [headers]
    for rec in ordered:
        row = [rec[row_header]]
        for param, key in meta:
            stats = rec['stats']
            row.append(stats[param][key] if param in stats else 'n/a')
        table.append(row)
    publisher.table(table)
def write_book(doc_folder, data):
    """Write the whole report as a ReST book into *doc_folder*.

    Produces one top-level section per scenario and one sub-section per
    test. When a test was run at two or more concurrency levels, an
    aggregated concurrency chart and summary stats/SLA tables are shown
    before the per-concurrency breakdown.

    :param doc_folder: target folder for index.rst and SVG charts
    :param data: full report data (records, scenarios, tests, sla)
    """
    records = data['records'].values()
    publisher = ReSTPublisher(doc_folder)
    for scenario in data['scenarios'].keys():
        # anchor so other documents can :ref: this scenario section
        publisher.ref_label(scenario)
        publisher.header(scenario)
        scenario_def = data['scenarios'][scenario]
        if 'description' in scenario_def:
            publisher.para(scenario_def['description'])
        write_scenario_definition(publisher, scenario_def)
        write_errors(publisher, filter_records(records, scenario=scenario))
        test_records = filter_records(records, type='test', scenario=scenario)
        test_records.sort(key=lambda x: x['test'])
        for record in test_records:
            test = record['test']
            publisher.header(test, level=1)
            write_test_definition(data, publisher, test)
            concurrency_records = filter_records(records, type='concurrency',
                                                 scenario=scenario, test=test)
            # numeric sort: concurrency may arrive as a string
            concurrency_records.sort(key=lambda x: int(x['concurrency']))
            concurrency_count = len(concurrency_records)
            if concurrency_count >= 2:
                # several levels measured: show the aggregated
                # concurrency chart and summary tables first
                if record.get('chart'):
                    publisher.chart_xy(record['id'], record['chart'],
                                       record['meta'], 'concurrency')
                write_stats(publisher, concurrency_records, 'concurrency')
                write_sla(publisher, concurrency_records, data['sla'])
            write_concurrency_block(publisher, records, concurrency_records,
                                    data['sla'])