aboutsummaryrefslogtreecommitdiffstats
path: root/nfvbench/summarizer.py
diff options
context:
space:
mode:
Diffstat (limited to 'nfvbench/summarizer.py')
-rw-r--r--nfvbench/summarizer.py355
1 files changed, 247 insertions, 108 deletions
diff --git a/nfvbench/summarizer.py b/nfvbench/summarizer.py
index 0ff9c48..7c69f52 100644
--- a/nfvbench/summarizer.py
+++ b/nfvbench/summarizer.py
@@ -22,11 +22,64 @@ import bitmath
import pytz
from tabulate import tabulate
-from specs import ChainType
-
+def _annotate_chain_stats(chain_stats, nodrop_marker='=>'):
+ """Transform a plain chain stats into an annotated one.
+
+ Example:
+ {
+ 0: {'packets': [2000054, 1999996, 1999996, 1999996],
+ 'lat_min_usec': 10,
+ 'lat_max_usec': 187,
+ 'lat_avg_usec': 45},
+ 1: {...},
+ 'total': {...}
+ }
+ should become:
+ {
+ 0: {'packets': [2000054, -58 (-0.034%), '=>', 1999996],
+ 'lat_min_usec': 10,
+ 'lat_max_usec': 187,
+ 'lat_avg_usec': 45},
+ 1: {...},
+ 'total': {...}
+ }
+
+ In the case of shared net, some columns in packets array can have ''.
+    Some columns can also be None which means the data is not available.
+ """
+ for stats in list(chain_stats.values()):
+ packets = stats['packets']
+ count = len(packets)
+ if count > 1:
+ # keep the first counter
+ annotated_packets = [packets[0]]
+ # modify all remaining counters
+ prev_count = packets[0]
+ for index in range(1, count):
+ cur_count = packets[index]
+ if cur_count == '':
+ # an empty string indicates an unknown counter for a shared interface
+ # do not annotate those
+ annotated_value = ''
+ elif cur_count is None:
+ # Not available
+ annotated_value = 'n/a'
+ else:
+ drop = cur_count - prev_count
+ if drop:
+ dr = (drop * 100.0) / prev_count if prev_count else 0
+ annotated_value = '{:+,} ({:+.4f}%)'.format(drop, dr)
+ else:
+ # no drop
+ # if last column we display the value
+ annotated_value = cur_count if index == count - 1 else nodrop_marker
+ prev_count = cur_count
+ annotated_packets.append(annotated_value)
+
+ stats['packets'] = annotated_packets
class Formatter(object):
- """Collection of string formatter methods"""
+ """Collection of string formatter methods."""
@staticmethod
def fixed(data):
@@ -44,7 +97,7 @@ class Formatter(object):
def standard(data):
if isinstance(data, int):
return Formatter.int(data)
- elif isinstance(data, float):
+ if isinstance(data, float):
return Formatter.float(4)(data)
return Formatter.fixed(data)
@@ -77,16 +130,16 @@ class Formatter(object):
def percentage(data):
if data is None:
return ''
- elif math.isnan(data):
+ if math.isnan(data):
return '-'
return Formatter.suffix('%')(Formatter.float(4)(data))
class Table(object):
- """ASCII readable table class"""
+ """ASCII readable table class."""
def __init__(self, header):
- header_row, self.formatters = zip(*header)
+ header_row, self.formatters = list(zip(*header))
self.data = [header_row]
self.columns = len(header_row)
@@ -108,7 +161,7 @@ class Table(object):
class Summarizer(object):
- """Generic summarizer class"""
+ """Generic summarizer class."""
indent_per_level = 2
@@ -142,7 +195,7 @@ class Summarizer(object):
def _put_dict(self, data):
with self._create_block(False):
- for key, value in data.iteritems():
+ for key, value in list(data.items()):
if isinstance(value, dict):
self._put(key + ':')
self._put_dict(value)
@@ -164,58 +217,60 @@ class Summarizer(object):
class NFVBenchSummarizer(Summarizer):
- """Summarize nfvbench json result"""
-
- ndr_pdr_header = [
- ('-', Formatter.fixed),
- ('L2 Frame Size', Formatter.standard),
- ('Rate (fwd+rev)', Formatter.bits),
- ('Rate (fwd+rev)', Formatter.suffix(' pps')),
- ('Avg Drop Rate', Formatter.suffix('%')),
- ('Avg Latency (usec)', Formatter.standard),
- ('Min Latency (usec)', Formatter.standard),
- ('Max Latency (usec)', Formatter.standard)
- ]
-
- single_run_header = [
- ('L2 Frame Size', Formatter.standard),
- ('Drop Rate', Formatter.suffix('%')),
- ('Avg Latency (usec)', Formatter.standard),
- ('Min Latency (usec)', Formatter.standard),
- ('Max Latency (usec)', Formatter.standard)
- ]
-
- config_header = [
- ('Direction', Formatter.standard),
- ('Requested TX Rate (bps)', Formatter.bits),
- ('Actual TX Rate (bps)', Formatter.bits),
- ('RX Rate (bps)', Formatter.bits),
- ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
- ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
- ('RX Rate (pps)', Formatter.suffix(' pps'))
- ]
-
- chain_analysis_header = [
- ('Interface', Formatter.standard),
- ('Device', Formatter.standard),
- ('Packets (fwd)', Formatter.standard),
- ('Drops (fwd)', Formatter.standard),
- ('Drop% (fwd)', Formatter.percentage),
- ('Packets (rev)', Formatter.standard),
- ('Drops (rev)', Formatter.standard),
- ('Drop% (rev)', Formatter.percentage)
- ]
+ """Summarize nfvbench json result."""
direction_keys = ['direction-forward', 'direction-reverse', 'direction-total']
direction_names = ['Forward', 'Reverse', 'Total']
def __init__(self, result, sender):
+ """Create a summarizer instance."""
Summarizer.__init__(self)
self.result = result
self.config = self.result['config']
self.record_header = None
self.record_data = None
self.sender = sender
+
+ self.ndr_pdr_header = [
+ ('-', Formatter.fixed),
+ ('L2 Frame Size', Formatter.standard),
+ ('Rate (fwd+rev)', Formatter.bits),
+ ('Rate (fwd+rev)', Formatter.suffix(' pps')),
+ ('Avg Drop Rate', Formatter.suffix('%')),
+ ('Avg Latency (usec)', Formatter.standard),
+ ('Min Latency (usec)', Formatter.standard),
+ ('Max Latency (usec)', Formatter.standard)
+ ]
+
+ self.single_run_header = [
+ ('L2 Frame Size', Formatter.standard),
+ ('Drop Rate', Formatter.suffix('%')),
+ ('Avg Latency (usec)', Formatter.standard),
+ ('Min Latency (usec)', Formatter.standard),
+ ('Max Latency (usec)', Formatter.standard)
+ ]
+
+ self.config_header = [
+ ('Direction', Formatter.standard),
+ ('Requested TX Rate (bps)', Formatter.bits),
+ ('Actual TX Rate (bps)', Formatter.bits),
+ ('RX Rate (bps)', Formatter.bits),
+ ('Requested TX Rate (pps)', Formatter.suffix(' pps')),
+ ('Actual TX Rate (pps)', Formatter.suffix(' pps')),
+ ('RX Rate (pps)', Formatter.suffix(' pps'))
+ ]
+
+ # add percentiles headers if hdrh enabled
+ if not self.config.disable_hdrh:
+ for percentile in self.config.lat_percentiles:
+ # 'append' expects a single parameter => double parentheses
+ self.ndr_pdr_header.append((str(percentile) + ' %ile lat.', Formatter.standard))
+ self.single_run_header.append((str(percentile) + ' %ile lat.', Formatter.standard))
+
+ if self.config.periodic_gratuitous_arp:
+ self.direction_keys.insert(2, 'garp-direction-total')
+ self.direction_names.insert(2, 'Gratuitous ARP')
+
# if sender is available initialize record
if self.sender:
self.__record_init()
@@ -247,17 +302,14 @@ class NFVBenchSummarizer(Summarizer):
self._put('Components:')
with self._create_block():
- self._put('TOR:')
- with self._create_block(False):
- self._put('Type:', self.config['tor']['type'])
self._put('Traffic Generator:')
with self._create_block(False):
- self._put('Profile:', self.config['generator_config']['name'])
- self._put('Tool:', self.config['generator_config']['tool'])
+ self._put('Profile:', self.config['tg-name'])
+ self._put('Tool:', self.config['tg-tool'])
if network_benchmark['versions']:
self._put('Versions:')
with self._create_block():
- for component, version in network_benchmark['versions'].iteritems():
+ for component, version in list(network_benchmark['versions'].items()):
self._put(component + ':', version)
if self.config['ndr_run'] or self.config['pdr_run']:
@@ -268,15 +320,12 @@ class NFVBenchSummarizer(Summarizer):
if self.config['pdr_run']:
self._put('PDR:', self.config['measurement']['PDR'])
self._put('Service chain:')
- for result in network_benchmark['service_chain'].iteritems():
+ for result in list(network_benchmark['service_chain'].items()):
with self._create_block():
self.__chain_summarize(*result)
def __chain_summarize(self, chain_name, chain_benchmark):
self._put(chain_name + ':')
- if chain_name == ChainType.PVVP:
- self._put('Mode:', chain_benchmark.get('mode'))
- chain_name += "-" + chain_benchmark.get('mode')
self.__record_header_put('service_chain', chain_name)
with self._create_block():
self._put('Traffic:')
@@ -288,13 +337,13 @@ class NFVBenchSummarizer(Summarizer):
self._put('Bidirectional:', traffic_benchmark['bidirectional'])
self._put('Flow count:', traffic_benchmark['flow_count'])
self._put('Service chains count:', traffic_benchmark['service_chain_count'])
- self._put('Compute nodes:', traffic_benchmark['compute_nodes'].keys())
+ self._put('Compute nodes:', list(traffic_benchmark['compute_nodes'].keys()))
self.__record_header_put('profile', traffic_benchmark['profile'])
self.__record_header_put('bidirectional', traffic_benchmark['bidirectional'])
self.__record_header_put('flow_count', traffic_benchmark['flow_count'])
self.__record_header_put('sc_count', traffic_benchmark['service_chain_count'])
- self.__record_header_put('compute_nodes', traffic_benchmark['compute_nodes'].keys())
+ self.__record_header_put('compute_nodes', list(traffic_benchmark['compute_nodes'].keys()))
with self._create_block(False):
self._put()
if not self.config['no_traffic']:
@@ -308,20 +357,15 @@ class NFVBenchSummarizer(Summarizer):
except KeyError:
pass
- for entry in traffic_benchmark['result'].iteritems():
+ for entry in list(traffic_benchmark['result'].items()):
if 'warning' in entry:
continue
self.__chain_analysis_summarize(*entry)
- self.__record_send()
+ self.__record_send()
def __chain_analysis_summarize(self, frame_size, analysis):
self._put()
self._put('L2 frame size:', frame_size)
- if 'analysis_duration_sec' in analysis:
- self._put('Chain analysis duration:',
- Formatter.float(3)(analysis['analysis_duration_sec']), 'seconds')
- self.__record_data_put(frame_size, {'chain_analysis_duration': Formatter.float(3)(
- analysis['analysis_duration_sec'])})
if self.config['ndr_run']:
self._put('NDR search duration:', Formatter.float(0)(analysis['ndr']['time_taken_sec']),
'seconds')
@@ -344,12 +388,13 @@ class NFVBenchSummarizer(Summarizer):
self._put(analysis['run_config']['warning'])
self._put()
- if 'packet_analysis' in analysis:
- self._put('Chain Analysis:')
- self._put()
- with self._create_block(False):
- self._put_table(self.__get_chain_analysis_table(analysis['packet_analysis']))
+ if 'packet_path_stats' in analysis:
+ for dir in ['Forward', 'Reverse']:
+ self._put(dir + ' Chain Packet Counters and Latency:')
self._put()
+ with self._create_block(False):
+ self._put_table(self._get_chain_table(analysis['packet_path_stats'][dir]))
+ self._put()
def __get_summary_table(self, traffic_result):
if self.config['single_run']:
@@ -358,10 +403,11 @@ class NFVBenchSummarizer(Summarizer):
summary_table = Table(self.ndr_pdr_header)
if self.config['ndr_run']:
- for frame_size, analysis in traffic_result.iteritems():
+ for frame_size, analysis in list(traffic_result.items()):
if frame_size == 'warning':
continue
- summary_table.add_row([
+
+ row_data = [
'NDR',
frame_size,
analysis['ndr']['rate_bps'],
@@ -370,21 +416,34 @@ class NFVBenchSummarizer(Summarizer):
analysis['ndr']['stats']['overall']['avg_delay_usec'],
analysis['ndr']['stats']['overall']['min_delay_usec'],
analysis['ndr']['stats']['overall']['max_delay_usec']
- ])
- self.__record_data_put(frame_size, {'ndr': {
+ ]
+ if not self.config.disable_hdrh:
+ self.extract_hdrh_percentiles(
+ analysis['ndr']['stats']['overall']['lat_percentile'], row_data)
+ summary_table.add_row(row_data)
+
+ ndr_data = {
'type': 'NDR',
'rate_bps': analysis['ndr']['rate_bps'],
'rate_pps': analysis['ndr']['rate_pps'],
+ 'offered_tx_rate_bps': analysis['ndr']['stats']['offered_tx_rate_bps'],
+ 'theoretical_tx_rate_pps': analysis['ndr']['stats']['theoretical_tx_rate_pps'],
+ 'theoretical_tx_rate_bps': analysis['ndr']['stats']['theoretical_tx_rate_bps'],
'drop_percentage': analysis['ndr']['stats']['overall']['drop_percentage'],
'avg_delay_usec': analysis['ndr']['stats']['overall']['avg_delay_usec'],
'min_delay_usec': analysis['ndr']['stats']['overall']['min_delay_usec'],
'max_delay_usec': analysis['ndr']['stats']['overall']['max_delay_usec']
- }})
+ }
+ if not self.config.disable_hdrh:
+ self.extract_hdrh_percentiles(
+ analysis['ndr']['stats']['overall']['lat_percentile'], ndr_data, True)
+ self.__record_data_put(frame_size, {'ndr': ndr_data})
if self.config['pdr_run']:
- for frame_size, analysis in traffic_result.iteritems():
+ for frame_size, analysis in list(traffic_result.items()):
if frame_size == 'warning':
continue
- summary_table.add_row([
+
+ row_data = [
'PDR',
frame_size,
analysis['pdr']['rate_bps'],
@@ -393,34 +452,73 @@ class NFVBenchSummarizer(Summarizer):
analysis['pdr']['stats']['overall']['avg_delay_usec'],
analysis['pdr']['stats']['overall']['min_delay_usec'],
analysis['pdr']['stats']['overall']['max_delay_usec']
- ])
- self.__record_data_put(frame_size, {'pdr': {
+ ]
+ if not self.config.disable_hdrh:
+ self.extract_hdrh_percentiles(
+ analysis['pdr']['stats']['overall']['lat_percentile'], row_data)
+ summary_table.add_row(row_data)
+
+ pdr_data = {
'type': 'PDR',
'rate_bps': analysis['pdr']['rate_bps'],
'rate_pps': analysis['pdr']['rate_pps'],
+ 'offered_tx_rate_bps': analysis['pdr']['stats']['offered_tx_rate_bps'],
+ 'theoretical_tx_rate_pps': analysis['pdr']['stats']['theoretical_tx_rate_pps'],
+ 'theoretical_tx_rate_bps': analysis['pdr']['stats']['theoretical_tx_rate_bps'],
'drop_percentage': analysis['pdr']['stats']['overall']['drop_percentage'],
'avg_delay_usec': analysis['pdr']['stats']['overall']['avg_delay_usec'],
'min_delay_usec': analysis['pdr']['stats']['overall']['min_delay_usec'],
'max_delay_usec': analysis['pdr']['stats']['overall']['max_delay_usec']
- }})
+ }
+ if not self.config.disable_hdrh:
+ self.extract_hdrh_percentiles(
+ analysis['pdr']['stats']['overall']['lat_percentile'], pdr_data, True)
+ self.__record_data_put(frame_size, {'pdr': pdr_data})
if self.config['single_run']:
- for frame_size, analysis in traffic_result.iteritems():
- summary_table.add_row([
+ for frame_size, analysis in list(traffic_result.items()):
+ row_data = [
frame_size,
analysis['stats']['overall']['drop_rate_percent'],
analysis['stats']['overall']['rx']['avg_delay_usec'],
analysis['stats']['overall']['rx']['min_delay_usec'],
analysis['stats']['overall']['rx']['max_delay_usec']
- ])
- self.__record_data_put(frame_size, {'single_run': {
+ ]
+ if not self.config.disable_hdrh:
+ self.extract_hdrh_percentiles(
+ analysis['stats']['overall']['rx']['lat_percentile'], row_data)
+ summary_table.add_row(row_data)
+
+ single_run_data = {
'type': 'single_run',
+ 'offered_tx_rate_bps': analysis['stats']['offered_tx_rate_bps'],
+ 'theoretical_tx_rate_pps': analysis['stats']['theoretical_tx_rate_pps'],
+ 'theoretical_tx_rate_bps': analysis['stats']['theoretical_tx_rate_bps'],
'drop_rate_percent': analysis['stats']['overall']['drop_rate_percent'],
'avg_delay_usec': analysis['stats']['overall']['rx']['avg_delay_usec'],
'min_delay_usec': analysis['stats']['overall']['rx']['min_delay_usec'],
'max_delay_usec': analysis['stats']['overall']['rx']['max_delay_usec']
- }})
+ }
+ if not self.config.disable_hdrh:
+ self.extract_hdrh_percentiles(
+ analysis['stats']['overall']['rx']['lat_percentile'], single_run_data, True)
+ self.__record_data_put(frame_size, {'single_run': single_run_data})
return summary_table
+ def extract_hdrh_percentiles(self, lat_percentile, data, add_key=False):
+ if add_key:
+ data['lat_percentile'] = {}
+ for percentile in self.config.lat_percentiles:
+ if add_key:
+ try:
+ data['lat_percentile_' + str(percentile)] = lat_percentile[percentile]
+ except TypeError:
+ data['lat_percentile_' + str(percentile)] = "n/a"
+ else:
+ try:
+ data.append(lat_percentile[percentile])
+ except TypeError:
+ data.append("n/a")
+
def __get_config_table(self, run_config, frame_size):
config_table = Table(self.config_header)
for key, name in zip(self.direction_keys, self.direction_names):
@@ -446,23 +544,64 @@ class NFVBenchSummarizer(Summarizer):
})
return config_table
- def __get_chain_analysis_table(self, packet_analysis):
- chain_analysis_table = Table(self.chain_analysis_header)
- forward_analysis = packet_analysis['direction-forward']
- reverse_analysis = packet_analysis['direction-reverse']
- reverse_analysis.reverse()
- for fwd, rev in zip(forward_analysis, reverse_analysis):
- chain_analysis_table.add_row([
- fwd['interface'],
- fwd['device'],
- fwd['packet_count'],
- fwd.get('packet_drop_count', None),
- fwd.get('packet_drop_percentage', None),
- rev['packet_count'],
- rev.get('packet_drop_count', None),
- rev.get('packet_drop_percentage', None),
- ])
- return chain_analysis_table
+ def _get_chain_table(self, chain_stats):
+ """Retrieve the table for a direction.
+
+ chain_stats: {
+        'interfaces': ['Port0', 'drop %', 'vhost0', 'Port1'],
+ 'chains': {
+ '0': {'packets': [2000054, '-0.023%', 1999996, 1999996],
+ 'lat_min_usec': 10,
+ 'lat_max_usec': 187,
+ 'lat_avg_usec': 45},
+ '1': {...},
+ 'total': {...}
+ }
+ }
+ """
+ chains = chain_stats['chains']
+ _annotate_chain_stats(chains)
+ header = [('Chain', Formatter.standard)] + \
+ [(ifname, Formatter.standard) for ifname in chain_stats['interfaces']]
+ # add latency columns if available Avg, Min, Max and percentiles
+ lat_keys = []
+ lat_map = {'lat_avg_usec': 'Avg lat.',
+ 'lat_min_usec': 'Min lat.',
+ 'lat_max_usec': 'Max lat.'}
+ if 'lat_avg_usec' in chains['0']:
+ lat_keys = ['lat_avg_usec', 'lat_min_usec', 'lat_max_usec']
+
+ if not self.config.disable_hdrh:
+ lat_keys.append('lat_percentile')
+ for percentile in self.config.lat_percentiles:
+ lat_map['lat_' + str(percentile) + '_percentile'] = \
+ str(percentile) + ' %ile lat.'
+
+ for lat_value in lat_map.values():
+ # 'append' expects a single parameter => double parentheses
+ header.append((lat_value, Formatter.standard))
+
+ table = Table(header)
+ for chain in sorted(list(chains.keys()), key=str):
+ row = [chain] + chains[chain]['packets']
+ for lat_key in lat_keys:
+
+ if lat_key != 'lat_percentile':
+ if chains[chain].get(lat_key, None):
+ row.append(Formatter.standard(chains[chain][lat_key]))
+ else:
+ row.append('n/a')
+ else:
+ if not self.config.disable_hdrh:
+ if chains[chain].get(lat_key, None):
+ for percentile in chains[chain][lat_key]:
+ row.append(Formatter.standard(
+ chains[chain][lat_key][percentile]))
+ else:
+ for _ in self.config.lat_percentiles:
+ row.append('n/a')
+ table.add_row(row)
+ return table
def __record_header_put(self, key, value):
if self.sender:
@@ -494,9 +633,9 @@ class NFVBenchSummarizer(Summarizer):
run_specific_data['pdr'] = data['pdr']
run_specific_data['pdr']['drop_limit'] = self.config['measurement']['PDR']
del data['pdr']
- for key in run_specific_data:
+ for data_value in run_specific_data.values():
data_to_send = data.copy()
- data_to_send.update(run_specific_data[key])
+ data_to_send.update(data_value)
self.sender.record_send(data_to_send)
self.__record_init()