aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorzhifeng.jiang <jiang.zhifeng@zte.com.cn>2016-07-12 22:36:43 +0800
committerzhifeng jiang <jiang.zhifeng@zte.com.cn>2016-07-13 01:51:02 +0000
commitf385a6d107b3c5c479583e74e18ef3c5fa55b304 (patch)
treed28028bc5fd404ae560ee6cb21e0dc365ac7c550
parent01c843df1684678072988283b3789e11a34b7499 (diff)
Fix pep8 errors for python files in benchmarks, data, dashboard
JIRA:QTIP-89 Change-Id: I3465221f0bdc9a8eb7c4e26069f7367fb1add729 Signed-off-by: zhifeng.jiang <jiang.zhifeng@zte.com.cn>
-rw-r--r--benchmarks/playbooks/result_transform/dpi/dpi_transform.py3
-rwxr-xr-xbenchmarks/playbooks/result_transform/fio/fio_result_transform.py61
-rw-r--r--benchmarks/playbooks/result_transform/iperf/iperf_transform.py37
-rw-r--r--benchmarks/playbooks/result_transform/ramspd/ramspd_transform.py36
-rw-r--r--benchmarks/playbooks/result_transform/ssl/ssl_transform.py9
-rw-r--r--benchmarks/playbooks/result_transform/ubench_transform.py1
-rw-r--r--dashboard/pushtoDB.py30
-rw-r--r--data/report/Qtip_Report.py125
-rw-r--r--data/report/get_indices.py8
-rw-r--r--data/report/get_results.py47
-rw-r--r--data/report/qtip_graph.py33
11 files changed, 182 insertions, 208 deletions
diff --git a/benchmarks/playbooks/result_transform/dpi/dpi_transform.py b/benchmarks/playbooks/result_transform/dpi/dpi_transform.py
index b95e0e23..622030cd 100644
--- a/benchmarks/playbooks/result_transform/dpi/dpi_transform.py
+++ b/benchmarks/playbooks/result_transform/dpi/dpi_transform.py
@@ -1,5 +1,4 @@
import os
-import json
import pickle
import datetime
@@ -46,4 +45,4 @@ result = {}
result['DPI_benchmark(M pps)'] = round(dpi_result_pps, 3)
result['DPI_benchmark(Gb/s)'] = round(dpi_result_bps, 3)
with open('./result_temp', 'w+') as result_file:
- pickle.dump(result, result_file) \ No newline at end of file
+ pickle.dump(result, result_file)
diff --git a/benchmarks/playbooks/result_transform/fio/fio_result_transform.py b/benchmarks/playbooks/result_transform/fio/fio_result_transform.py
index f9410a62..9929aa18 100755
--- a/benchmarks/playbooks/result_transform/fio/fio_result_transform.py
+++ b/benchmarks/playbooks/result_transform/fio/fio_result_transform.py
@@ -4,22 +4,20 @@ import os
import datetime
with open("fio_result.json") as fio_raw:
- fio_data=json.load(fio_raw)
+ fio_data = json.load(fio_raw)
-r_iops=[];
-r_io_bytes=[];
-r_io_runtime=[];
-r_lat=[];
-w_iops=[];
-w_io_bytes=[];
-w_io_runtime=[];
-w_lat=[];
+r_iops = []
+r_io_bytes = []
+r_io_runtime = []
+r_lat = []
+w_iops = []
+w_io_bytes = []
+w_io_runtime = []
+w_lat = []
+total_jobs = len(fio_data["jobs"])
-
-total_jobs=len(fio_data["jobs"])
-
-for x in range (0,int(total_jobs)):
+for x in range(0, int(total_jobs)):
r_iops.append(fio_data["jobs"][x]["read"]["iops"])
r_io_bytes.append(fio_data["jobs"][x]["read"]["io_bytes"])
r_io_runtime.append(fio_data["jobs"][x]["read"]["runtime"])
@@ -29,29 +27,24 @@ for x in range (0,int(total_jobs)):
w_io_runtime.append(fio_data["jobs"][x]["write"]["runtime"])
w_lat.append(fio_data["jobs"][x]["write"]["lat"]["mean"])
+FIO_result_dict = {}
+for x in range(0, total_jobs):
+ FIO_result_dict['Job_' + str(x)] = {}
+ FIO_result_dict['Job_' + str(x)]['read'] = {}
+ FIO_result_dict['Job_' + str(x)]['read']['Total_IO_Bytes'] = r_io_bytes[x]
+ FIO_result_dict['Job_' + str(x)]['read']['IO/sec'] = r_iops[x]
+ FIO_result_dict['Job_' + str(x)]['read']['IO_runtime (millisec)'] = r_io_runtime[x]
+ FIO_result_dict['Job_' + str(x)]['read']['mean_IO_latenchy (microsec)'] = r_lat[x]
-FIO_result_dict={};
-
-for x in range (0,total_jobs):
- FIO_result_dict['Job_'+str(x)]={};
- FIO_result_dict['Job_'+str(x)]['read']={};
- FIO_result_dict['Job_'+str(x)]['read']['Total_IO_Bytes']=r_io_bytes[x]
- FIO_result_dict['Job_'+str(x)]['read']['IO/sec']=r_iops[x]
- FIO_result_dict['Job_'+str(x)]['read']['IO_runtime (millisec)']=r_io_runtime[x]
- FIO_result_dict['Job_'+str(x)]['read']['mean_IO_latenchy (microsec)']=r_lat[x]
-
- FIO_result_dict['Job_'+str(x)]['write']={};
- FIO_result_dict['Job_'+str(x)]['write']['Total_IO_Bytes']=w_io_bytes[x]
- FIO_result_dict['Job_'+str(x)]['write']['IO/sec']=w_iops[x]
- FIO_result_dict['Job_'+str(x)]['write']['IO_runtime (millisec)']=w_io_runtime[x]
- FIO_result_dict['Job_'+str(x)]['write']['mean_IO_latenchy (microsec)']=w_lat[x]
-
-
+ FIO_result_dict['Job_' + str(x)]['write'] = {}
+ FIO_result_dict['Job_' + str(x)]['write']['Total_IO_Bytes'] = w_io_bytes[x]
+ FIO_result_dict['Job_' + str(x)]['write']['IO/sec'] = w_iops[x]
+ FIO_result_dict['Job_' + str(x)]['write']['IO_runtime (millisec)'] = w_io_runtime[x]
+ FIO_result_dict['Job_' + str(x)]['write']['mean_IO_latenchy (microsec)'] = w_lat[x]
host_name = (os.popen("hostname").read().rstrip())
report_time = str(datetime.datetime.utcnow().isoformat())
-os.system("mv fio_result.json "+str(host_name)+"-"+report_time+".log")
-with open('./result_temp','w+')as out_fio_result:
- pickle.dump(FIO_result_dict,out_fio_result)
-
+os.system("mv fio_result.json " + str(host_name) + "-" + report_time + ".log")
+with open('./result_temp', 'w+') as out_fio_result:
+ pickle.dump(FIO_result_dict, out_fio_result)
diff --git a/benchmarks/playbooks/result_transform/iperf/iperf_transform.py b/benchmarks/playbooks/result_transform/iperf/iperf_transform.py
index 39c5956c..8df5a79a 100644
--- a/benchmarks/playbooks/result_transform/iperf/iperf_transform.py
+++ b/benchmarks/playbooks/result_transform/iperf/iperf_transform.py
@@ -1,30 +1,29 @@
import json
import datetime
import pickle
-with open('iperf_raw.json','r') as ifile:
- raw_iperf_data=json.loads(ifile.read().rstrip())
-
-
-bits_sent= raw_iperf_data['end']['sum_sent']['bits_per_second']
-bits_received= raw_iperf_data['end']['sum_received']['bits_per_second']
-total_byte_sent=raw_iperf_data['end']['sum_sent']['bytes']
-total_byte_received=raw_iperf_data['end']['sum_received']['bytes']
-cpu_host_total_percent=raw_iperf_data['end']['cpu_utilization_percent']['host_total']
-cpu_remote_total_percent=raw_iperf_data['end']['cpu_utilization_percent']['remote_total']
+with open('iperf_raw.json', 'r') as ifile:
+ raw_iperf_data = json.loads(ifile.read().rstrip())
-result={}
+bits_sent = raw_iperf_data['end']['sum_sent']['bits_per_second']
+bits_received = raw_iperf_data['end']['sum_received']['bits_per_second']
+total_byte_sent = raw_iperf_data['end']['sum_sent']['bytes']
+total_byte_received = raw_iperf_data['end']['sum_received']['bytes']
+cpu_host_total_percent = raw_iperf_data['end']['cpu_utilization_percent']['host_total']
+cpu_remote_total_percent = raw_iperf_data['end']['cpu_utilization_percent']['remote_total']
+
+result = {}
time_stamp = str(datetime.datetime.utcnow().isoformat())
-result['1. Version']=raw_iperf_data['start']['version']
-result['2. Bandwidth']={}
+result['1. Version'] = raw_iperf_data['start']['version']
+result['2. Bandwidth'] = {}
result['2. Bandwidth']['1. throughput Sender (b/s)'] = bits_sent
result['2. Bandwidth']['2. throughput Received (b/s)'] = bits_received
-result['3. CPU']={}
-result['3. CPU']['1. CPU host total (%)']=cpu_host_total_percent
-result['3. CPU']['2. CPU remote total (%)']=cpu_remote_total_percent
+result['3. CPU'] = {}
+result['3. CPU']['1. CPU host total (%)'] = cpu_host_total_percent
+result['3. CPU']['2. CPU remote total (%)'] = cpu_remote_total_percent
-with open('iperf_raw-'+time_stamp+'.log','w+') as ofile:
+with open('iperf_raw-' + time_stamp + '.log', 'w+') as ofile:
ofile.write(json.dumps(raw_iperf_data))
-
+
with open('./result_temp', 'w+') as result_file:
- pickle.dump(result,result_file) \ No newline at end of file
+ pickle.dump(result, result_file)
diff --git a/benchmarks/playbooks/result_transform/ramspd/ramspd_transform.py b/benchmarks/playbooks/result_transform/ramspd/ramspd_transform.py
index aed68acf..c3f03dd0 100644
--- a/benchmarks/playbooks/result_transform/ramspd/ramspd_transform.py
+++ b/benchmarks/playbooks/result_transform/ramspd/ramspd_transform.py
@@ -1,9 +1,7 @@
import os
-import json
import pickle
import datetime
-
intmem_copy = os.popen("cat Intmem | grep 'BatchRun Copy' | awk '{print $4}'").read().rstrip()
intmem_scale = os.popen("cat Intmem | grep 'BatchRun Scale' | awk '{print $4}'").read().rstrip()
intmem_add = os.popen("cat Intmem | grep 'BatchRun Add' | awk '{print $4}'").read().rstrip()
@@ -22,35 +20,27 @@ floatmem_average = os.popen("cat Floatmem | grep 'BatchRun AVERAGE' | awk '{pri
print floatmem_copy
print floatmem_average
-
hostname = os.popen("hostname").read().rstrip()
time_stamp = str(datetime.datetime.utcnow().isoformat())
-
os.system("mv Intmem " + hostname + "-" + time_stamp + ".log")
os.system("cp Floatmem >> " + hostname + "-" + time_stamp + ".log")
+result = {}
-result = {};
-
-result['1. INTmem bandwidth'] = {};
-result['1. INTmem bandwidth']['1. Copy (MB/s)']=intmem_copy
-result['1. INTmem bandwidth']['2. Add (MB/s)']=intmem_add
-result['1. INTmem bandwidth']['3. Scale (MB/s)']=intmem_scale
-result['1. INTmem bandwidth']['4. Triad (MB/s)']=intmem_triad
-result['1. INTmem bandwidth']['5. Average (MB/s)']=intmem_average
-
-
-result['2. FLOATmem bandwidth'] = {};
-result['2. FLOATmem bandwidth']['1. Copy (MB/s)']=floatmem_copy
-result['2. FLOATmem bandwidth']['2. Add (MB/s)']=floatmem_add
-result['2. FLOATmem bandwidth']['3. Scale (MB/s)']=floatmem_scale
-result['2. FLOATmem bandwidth']['4. Triad (MB/s)']=floatmem_triad
-result['2. FLOATmem bandwidth']['5. Average (MB/s)']=floatmem_average
-
+result['1. INTmem bandwidth'] = {}
+result['1. INTmem bandwidth']['1. Copy (MB/s)'] = intmem_copy
+result['1. INTmem bandwidth']['2. Add (MB/s)'] = intmem_add
+result['1. INTmem bandwidth']['3. Scale (MB/s)'] = intmem_scale
+result['1. INTmem bandwidth']['4. Triad (MB/s)'] = intmem_triad
+result['1. INTmem bandwidth']['5. Average (MB/s)'] = intmem_average
+result['2. FLOATmem bandwidth'] = {}
+result['2. FLOATmem bandwidth']['1. Copy (MB/s)'] = floatmem_copy
+result['2. FLOATmem bandwidth']['2. Add (MB/s)'] = floatmem_add
+result['2. FLOATmem bandwidth']['3. Scale (MB/s)'] = floatmem_scale
+result['2. FLOATmem bandwidth']['4. Triad (MB/s)'] = floatmem_triad
+result['2. FLOATmem bandwidth']['5. Average (MB/s)'] = floatmem_average
with open('./result_temp', 'w+') as result_file:
pickle.dump(result, result_file)
-
-
diff --git a/benchmarks/playbooks/result_transform/ssl/ssl_transform.py b/benchmarks/playbooks/result_transform/ssl/ssl_transform.py
index 6e632251..029135ac 100644
--- a/benchmarks/playbooks/result_transform/ssl/ssl_transform.py
+++ b/benchmarks/playbooks/result_transform/ssl/ssl_transform.py
@@ -1,10 +1,7 @@
import os
-import json
import pickle
import datetime
-#total_cpu=os.popen("cat $HOME/tempD/nDPI/example/result.txt | tail -1").read()
-
openssl_version = os.popen("cat RSA_dump | head -1").read().rstrip()
rsa_512_sps = os.popen(
"cat RSA_dump | grep '512 bits ' | awk '{print $6}' ").read().rstrip()
@@ -23,7 +20,6 @@ rsa_4096_sps = os.popen(
rsa_4096_vps = os.popen(
"cat RSA_dump | grep '4096 bits ' | awk '{print $7}' ").read().rstrip()
-
aes_16B = os.popen(
"cat AES-128-CBC_dump | grep 'aes-128-cbc ' | awk '{print $2}' ").read().rstrip()
aes_64B = os.popen(
@@ -35,16 +31,12 @@ aes_1024B = os.popen(
aes_8192B = os.popen(
"cat AES-128-CBC_dump | grep 'aes-128-cbc ' | awk '{print $6}' ").read().rstrip()
-
hostname = os.popen("hostname").read().rstrip()
time_stamp = str(datetime.datetime.utcnow().isoformat())
-
os.system("mv RSA_dump " + hostname + "-" + time_stamp + ".log")
os.system("cat AES-128-CBC_dump >> " + hostname + "-" + time_stamp + ".log")
-
-
result = {}
result['1. Version'] = [openssl_version]
@@ -64,4 +56,3 @@ result['3. AES-128-cbc throughput']['5. 8192 Bytes block (B/sec)'] = aes_8192B
with open('./result_temp', 'w+') as result_file:
pickle.dump(result, result_file)
-
diff --git a/benchmarks/playbooks/result_transform/ubench_transform.py b/benchmarks/playbooks/result_transform/ubench_transform.py
index f15943d7..3c8ba1d8 100644
--- a/benchmarks/playbooks/result_transform/ubench_transform.py
+++ b/benchmarks/playbooks/result_transform/ubench_transform.py
@@ -1,7 +1,6 @@
import os
import json
import pickle
-import datetime
total_cpu = os.popen(
"cat $HOME/tempT/UnixBench/results/* | grep 'of tests' | awk '{print $1;}' | awk 'NR==1'").read().rstrip()
diff --git a/dashboard/pushtoDB.py b/dashboard/pushtoDB.py
index 75c1d612..d5458b1d 100644
--- a/dashboard/pushtoDB.py
+++ b/dashboard/pushtoDB.py
@@ -2,21 +2,25 @@ import requests
import json
import datetime
import os
+import sys
TEST_DB = 'http://testresults.opnfv.org/test/api/v1'
-suite_list = [('compute_result.json', 'compute_test_suite'),('network_result.json', 'network_test_suite'),('storage_result.json', 'storage_test_suite')]
-payload_list = { }
+suite_list = [('compute_result.json', 'compute_test_suite'),
+ ('network_result.json', 'network_test_suite'),
+ ('storage_result.json', 'storage_test_suite')]
+payload_list = {}
-def push_results_to_db(db_url, case_name, payload,logger=None, pod_name="dell-pod1"):
+
+def push_results_to_db(db_url, case_name, payload, logger=None, pod_name="dell-pod1"):
url = db_url + "/results"
- creation_date= str(datetime.datetime.utcnow().isoformat())
+ creation_date = str(datetime.datetime.utcnow().isoformat())
installer = os.environ['INSTALLER_TYPE']
pod_name = os.environ['NODE_NAME']
params = {"project_name": "qtip", "case_name": case_name,
"pod_name": pod_name, "installer": installer, "start_date": creation_date,
- "version": "test" , "details": payload}
+ "version": "test", "details": payload}
headers = {'Content-Type': 'application/json'}
print pod_name
@@ -31,13 +35,15 @@ def push_results_to_db(db_url, case_name, payload,logger=None, pod_name="dell-po
print "Error:", sys.exc_info()[0]
return False
+
def populate_payload(suite_list):
global payload_list
- for k,v in suite_list:
+ for k, v in suite_list:
+
+ if os.path.isfile('results/' + str(k)):
+ payload_list[k] = v
- if os.path.isfile('results/'+str(k)):
- payload_list[k]=v
def main():
@@ -45,10 +51,10 @@ def main():
populate_payload(suite_list)
if payload_list:
print payload_list
- for suite,case in payload_list.items():
- with open('results/'+suite,'r') as result_file:
- j=json.load(result_file)
- push_results_to_db(TEST_DB, case , j)
+ for suite, case in payload_list.items():
+ with open('results/' + suite, 'r') as result_file:
+ j = json.load(result_file)
+ push_results_to_db(TEST_DB, case, j)
elif not payload_list:
print 'Results not found'
diff --git a/data/report/Qtip_Report.py b/data/report/Qtip_Report.py
index 9f2226c4..cd20d57c 100644
--- a/data/report/Qtip_Report.py
+++ b/data/report/Qtip_Report.py
@@ -1,113 +1,108 @@
-from reportlab.pdfgen import canvas
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Image
-from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
+from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.units import inch
from reportlab.lib.pagesizes import letter
-from reportlab.platypus import ListFlowable, ListItem
-import qtip_graph as graph
+import qtip_graph as graph
import get_indices as results
from get_results import report_concat
from get_results import generate_result
-def dump_result(Stor,directory, testcase):
+
+def dump_result(Stor, directory, testcase):
try:
- lower_s=testcase.lower()
- Stor.append(Paragraph(testcase,Style['h3']))
- l1=report_concat(directory,lower_s)
- l=1
+ lower_s = testcase.lower()
+ Stor.append(Paragraph(testcase, Style['h3']))
+ l1 = report_concat(directory, lower_s)
+ l = 1
for a in l1:
- Stor.append(Paragraph(testcase+" result_"+str(l),Style['h5']))
- raw_string=generate_result(a,0)
- replaced_string=raw_string.replace('\n', '<br/> ').replace(' ','&nbsp;')
- Stor.append(Paragraph(replaced_string,Style['BodyText']))
- l=l+1
+ Stor.append(Paragraph(testcase + " result_" + str(l), Style['h5']))
+ raw_string = generate_result(a, 0)
+ replaced_string = raw_string.replace('\n', '<br/> ').replace(' ', '&nbsp;')
+ Stor.append(Paragraph(replaced_string, Style['BodyText']))
+ l = l + 1
except OSError:
print "Results for {0} not found".format(testcase)
-doc = SimpleDocTemplate("../../results/QTIP_results.pdf",pagesize=letter,
- rightMargin=72,leftMargin=72,
- topMargin=72,bottomMargin=18)
-Stor=[]
-Style=getSampleStyleSheet()
-Title="QTIP Benchmark Suite"
-Stor.append(Paragraph(Title,Style['Title']))
-H1="Results"
-Stor.append(Spacer(0,36))
+doc = SimpleDocTemplate("../../results/QTIP_results.pdf", pagesize=letter,
+ rightMargin=72, leftMargin=72,
+ topMargin=72, bottomMargin=18)
+Stor = []
+Style = getSampleStyleSheet()
+Title = "QTIP Benchmark Suite"
+Stor.append(Paragraph(Title, Style['Title']))
+H1 = "Results"
+Stor.append(Spacer(0, 36))
Stor.append(Paragraph(H1, Style['h2']))
-compute=0
-storage=0
-network=0
+compute = 0
+storage = 0
+network = 0
try:
- compute=results.get_index('compute_result')
+ compute = results.get_index('compute_result')
except IOError:
pass
try:
- storage=results.get_index('storage_result')
+ storage = results.get_index('storage_result')
except IOError:
pass
try:
- network=results.get_index('network_result')
+ network = results.get_index('network_result')
except IOError:
pass
-Stor.append(Paragraph("Compute Suite: %f" %compute, Style['h5']))
-Stor.append(Paragraph("Storage Suite: %f" %storage, Style['h5']))
-Stor.append(Paragraph("Network Suite: %f" %network, Style['h5']))
-graph.plot_indices(compute,storage,network)
-qtip_graph=('qtip_graph.jpeg')
-im=Image(qtip_graph, 5*inch,4*inch)
+Stor.append(Paragraph("Compute Suite: %f" % compute, Style['h5']))
+Stor.append(Paragraph("Storage Suite: %f" % storage, Style['h5']))
+Stor.append(Paragraph("Network Suite: %f" % network, Style['h5']))
+graph.plot_indices(compute, storage, network)
+qtip_graph = ('qtip_graph.jpeg')
+im = Image(qtip_graph, 5 * inch, 4 * inch)
Stor.append(im)
Stor.append(Spacer(0, 12))
Stor.append(Paragraph("Reference POD", Style['h5']))
-ptext="The Dell OPNFV Lab POD3 has been taken as the reference POD against which the reference results have been collected. The POD consists of 6 identical servers. The details of such a server are:"
-Stor.append(Paragraph(ptext,Style['Normal']))
-ptext="<bullet>&bull;</bullet>Server Type: Dell PowerEdge R630 Server"
-Stor.append(Paragraph(ptext,Style['Bullet']))
-ptext="<bullet>&bull;</bullet>CPU: Intel Xeon E5-2698 @ 2300 MHz"
+ptext = "The Dell OPNFV Lab POD3 has been taken as the reference POD against which the reference results have been collected. The POD consists of 6 identical servers. The details of such a server are:"
+Stor.append(Paragraph(ptext, Style['Normal']))
+ptext = "<bullet>&bull;</bullet>Server Type: Dell PowerEdge R630 Server"
+Stor.append(Paragraph(ptext, Style['Bullet']))
+ptext = "<bullet>&bull;</bullet>CPU: Intel Xeon E5-2698 @ 2300 MHz"
Stor.append(Paragraph(ptext, Style["Bullet"]))
-ptext="<bullet>&bull;</bullet>RAM: 128GB"
+ptext = "<bullet>&bull;</bullet>RAM: 128GB"
Stor.append(Paragraph(ptext, Style["Bullet"]))
-ptext="<bullet>&bull;</bullet>Storage SSD: 420GB"
+ptext = "<bullet>&bull;</bullet>Storage SSD: 420GB"
Stor.append(Paragraph(ptext, Style["Bullet"]))
-ptext="<bullet>&bull;</bullet>Network Card: Intel 2P X520/2P I350 rNDC"
+ptext = "<bullet>&bull;</bullet>Network Card: Intel 2P X520/2P I350 rNDC"
Stor.append(Paragraph(ptext, Style["Bullet"]))
-ptext="Servers interconnected through a DELL S4810 switch using a 10Gbps physical link"
+ptext = "Servers interconnected through a DELL S4810 switch using a 10Gbps physical link"
Stor.append(Paragraph(ptext, Style["Bullet"]))
Stor.append(Spacer(0, 12))
-ptext="For Further Details of the Reference POD hardware, please visit: https://wiki.opnfv.org/reference_pod_hardware_details"
-Stor.append(Paragraph(ptext,Style['Normal']))
+ptext = "For Further Details of the Reference POD hardware, please visit: https://wiki.opnfv.org/reference_pod_hardware_details"
+Stor.append(Paragraph(ptext, Style['Normal']))
Stor.append(Spacer(0, 12))
-ptext="For Details of the Reference POD Results, please visit: https://wiki.opnfv.org/reference_pod_qtip_results"
+ptext = "For Details of the Reference POD Results, please visit: https://wiki.opnfv.org/reference_pod_qtip_results"
Stor.append(Spacer(0, 12))
-Stor.append(Paragraph(ptext,Style['Normal']))
+Stor.append(Paragraph(ptext, Style['Normal']))
Stor.append(Paragraph("RAW Results", Style['h1']))
Stor.append(Paragraph("Compute Results", Style['h2']))
-dump_result(Stor,"../../results/dhrystone/","Dhrystone_bm")
-dump_result(Stor,"../../results/dhrystone/","Dhrystone_vm")
-
-dump_result(Stor,"../../results/whetstone/","Whetstone_bm")
-dump_result(Stor,"../../results/whetstone/","Whetstone_vm")
+dump_result(Stor, "../../results/dhrystone/", "Dhrystone_bm")
+dump_result(Stor, "../../results/dhrystone/", "Dhrystone_vm")
-dump_result(Stor,"../../results/ramspeed/","Ramspeed_bm")
-dump_result(Stor,"../../results/ramspeed/","Ramspeed_vm")
+dump_result(Stor, "../../results/whetstone/", "Whetstone_bm")
+dump_result(Stor, "../../results/whetstone/", "Whetstone_vm")
-dump_result(Stor,"../../results/ssl/","SSL_bm")
-dump_result(Stor,"../../results/ssl/","SSL_vm")
+dump_result(Stor, "../../results/ramspeed/", "Ramspeed_bm")
+dump_result(Stor, "../../results/ramspeed/", "Ramspeed_vm")
-#dump_result(Stor,"../../results/dpi/","DPI_bm")
-#dump_result(Stor,"../../results/dpi/","DPI_vm")
+dump_result(Stor, "../../results/ssl/", "SSL_bm")
+dump_result(Stor, "../../results/ssl/", "SSL_vm")
Stor.append(Paragraph("Network Results", Style['h2']))
-dump_result(Stor,"../../results/iperf/","IPERF_bm")
-dump_result(Stor,"../../results/iperf/","IPERF_vm")
-dump_result(Stor,"../../results/iperf/","IPERF_vm_2")
+dump_result(Stor, "../../results/iperf/", "IPERF_bm")
+dump_result(Stor, "../../results/iperf/", "IPERF_vm")
+dump_result(Stor, "../../results/iperf/", "IPERF_vm_2")
Stor.append(Paragraph("Storage Results", Style['h2']))
-dump_result(Stor,"../../results/fio/","fio_bm")
-dump_result(Stor,"../../results/fio/","fio_vm")
+dump_result(Stor, "../../results/fio/", "fio_bm")
+dump_result(Stor, "../../results/fio/", "fio_vm")
doc.build(Stor)
-#canvas.save()
diff --git a/data/report/get_indices.py b/data/report/get_indices.py
index e23fdb89..91219c0b 100644
--- a/data/report/get_indices.py
+++ b/data/report/get_indices.py
@@ -1,8 +1,8 @@
import json
+
def get_index(suite):
- with open ('../../results/'+suite+'.json') as result_file:
- result_djson=json.load(result_file)
- index=result_djson['index']
-
+ with open('../../results/' + suite + '.json') as result_file:
+ result_djson = json.load(result_file)
+ index = result_djson['index']
return index
diff --git a/data/report/get_results.py b/data/report/get_results.py
index 01fb8080..23fd5383 100644
--- a/data/report/get_results.py
+++ b/data/report/get_results.py
@@ -2,48 +2,49 @@ import os
import json
-def report_concat (targ_dir, testcase):
- machine_temp=[];
- machines=[];
- diction={};
+def report_concat(targ_dir, testcase):
+ machine_temp = []
+ machines = []
for file in os.listdir(targ_dir):
if file.endswith(".json"):
machine_temp.append(file)
- l=len(machine_temp)
+ l = len(machine_temp)
- for x in range (0,l):
- file_t=machine_temp[x]
- with open (targ_dir+file_t) as result_file:
- result_djson=json.load(result_file)
+ for x in range(0, l):
+ file_t = machine_temp[x]
+ with open(targ_dir + file_t) as result_file:
+ result_djson = json.load(result_file)
if result_djson['1 Testcase Name'] == str(testcase):
machines.append(result_djson)
return machines
+
def space_count(l):
- spc=''
+ spc = ''
for x in range(l):
- spc=spc+' '
+ spc = spc + ' '
return spc
-def custom_dict(list1,list2,k):
- string_1=''
- for num_1 in range (0,len(list1)):
- string_1=string_1+space_count(k)+str(list1[num_1][0])+"="+str(list2[num_1])+"\n"
+def custom_dict(list1, list2, k):
+ string_1 = ''
+ for num_1 in range(0, len(list1)):
+ string_1 = string_1 + space_count(k) + str(list1[num_1][0]) + "=" + str(list2[num_1]) + "\n"
return string_1
-def generate_result(dict_a,k):
- list_1=[]
- list_2=[]
- count=0
- for i,j in sorted(dict_a.iteritems()):
+
+def generate_result(dict_a, k):
+ list_1 = []
+ list_2 = []
+ count = 0
+ for i, j in sorted(dict_a.iteritems()):
list_1.append([])
list_1[count].append(i)
if (str(type(dict_a.get(i)))) == "<type 'dict'>":
- list_2.append(str("\n"+generate_result(dict_a.get(i),int(k+1))))
+ list_2.append(str("\n" + generate_result(dict_a.get(i), int(k + 1))))
else:
list_2.append(dict_a.get(i))
- count=count+1
- return custom_dict(list_1,list_2,k)
+ count = count + 1
+ return custom_dict(list_1, list_2, k)
diff --git a/data/report/qtip_graph.py b/data/report/qtip_graph.py
index d7e64140..acbda40c 100644
--- a/data/report/qtip_graph.py
+++ b/data/report/qtip_graph.py
@@ -1,29 +1,30 @@
import matplotlib
-matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
-def plot_indices(a,b,c):
- N=3
- ind= np.arange(N)
- y_axis = (a,b,c )
- width=0.35
- f=plt.figure()
- ax=f.gca()
+matplotlib.use('Agg')
+
+
+def plot_indices(a, b, c):
+ N = 3
+ ind = np.arange(N)
+ y_axis = (a, b, c)
+ width = 0.35
+ f = plt.figure()
+ ax = f.gca()
ax.set_autoscale_on(True)
- my_bars=ax.bar(ind,y_axis,width, color='b')
+ my_bars = ax.bar(ind, y_axis, width, color='b')
ax.set_ylabel('Index Score*')
ax.set_xlabel('Suite')
ax.set_title(' QTIP benchmark scores')
ax.axis('on')
- my_bars=ax.bar(ind,y_axis,width)
- ax.set_xticks(ind+width/2)
- ax.set_xticklabels(['Compute','Storage','Network'])
- ax.axis([0,3,0,1.25])
- f.text(0.7,0.01,'* With Comparison to Refernece POD', fontsize=9)
+ my_bars = ax.bar(ind, y_axis, width)
+ ax.set_xticks(ind + width / 2)
+ ax.set_xticklabels(['Compute', 'Storage', 'Network'])
+ ax.axis([0, 3, 0, 1.25])
+ f.text(0.7, 0.01, '* With Comparison to Refernece POD', fontsize=9)
for rect in my_bars:
height = rect.get_height()
- ax.text(rect.get_x() + rect.get_width()/2., 1.05*height, height , ha='center', va='bottom')
-
+ ax.text(rect.get_x() + rect.get_width() / 2., 1.05 * height, height, ha='center', va='bottom')
f.savefig('qtip_graph.jpeg')