author     zhifeng.jiang <jiang.zhifeng@zte.com.cn>    2016-07-12 22:36:43 +0800
committer  zhifeng jiang <jiang.zhifeng@zte.com.cn>    2016-07-13 01:51:02 +0000
commit     f385a6d107b3c5c479583e74e18ef3c5fa55b304 (patch)
tree       d28028bc5fd404ae560ee6cb21e0dc365ac7c550 /benchmarks/playbooks/result_transform/fio
parent     01c843df1684678072988283b3789e11a34b7499 (diff)
Fix pep8 errors for python files in benchmarks,data,dashboard
JIRA: QTIP-89

Change-Id: I3465221f0bdc9a8eb7c4e26069f7367fb1add729
Signed-off-by: zhifeng.jiang <jiang.zhifeng@zte.com.cn>
Diffstat (limited to 'benchmarks/playbooks/result_transform/fio')
-rwxr-xr-x  benchmarks/playbooks/result_transform/fio/fio_result_transform.py  61
1 file changed, 27 insertions, 34 deletions
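To double-check the cleanup, the style checker can be run directly against the touched file. A minimal sketch, assuming the pep8 package (since renamed pycodestyle) is installed; the path below is the file from this change:

    # Sketch only: count remaining PEP8 violations in the transformed file.
    # Assumes the 'pep8' package (later renamed 'pycodestyle') is available.
    import pep8

    style = pep8.StyleGuide()
    report = style.check_files(
        ['benchmarks/playbooks/result_transform/fio/fio_result_transform.py'])
    print("PEP8 violations found: %d" % report.total_errors)
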
diff --git a/benchmarks/playbooks/result_transform/fio/fio_result_transform.py b/benchmarks/playbooks/result_transform/fio/fio_result_transform.py
index f9410a62..9929aa18 100755
--- a/benchmarks/playbooks/result_transform/fio/fio_result_transform.py
+++ b/benchmarks/playbooks/result_transform/fio/fio_result_transform.py
@@ -4,22 +4,20 @@ import os
import datetime
with open("fio_result.json") as fio_raw:
- fio_data=json.load(fio_raw)
+ fio_data = json.load(fio_raw)
-r_iops=[];
-r_io_bytes=[];
-r_io_runtime=[];
-r_lat=[];
-w_iops=[];
-w_io_bytes=[];
-w_io_runtime=[];
-w_lat=[];
+r_iops = []
+r_io_bytes = []
+r_io_runtime = []
+r_lat = []
+w_iops = []
+w_io_bytes = []
+w_io_runtime = []
+w_lat = []
+total_jobs = len(fio_data["jobs"])
-
-total_jobs=len(fio_data["jobs"])
-
-for x in range (0,int(total_jobs)):
+for x in range(0, int(total_jobs)):
r_iops.append(fio_data["jobs"][x]["read"]["iops"])
r_io_bytes.append(fio_data["jobs"][x]["read"]["io_bytes"])
r_io_runtime.append(fio_data["jobs"][x]["read"]["runtime"])
@@ -29,29 +27,24 @@ for x in range (0,int(total_jobs)):
w_io_runtime.append(fio_data["jobs"][x]["write"]["runtime"])
w_lat.append(fio_data["jobs"][x]["write"]["lat"]["mean"])
+FIO_result_dict = {}
+for x in range(0, total_jobs):
+ FIO_result_dict['Job_' + str(x)] = {}
+ FIO_result_dict['Job_' + str(x)]['read'] = {}
+ FIO_result_dict['Job_' + str(x)]['read']['Total_IO_Bytes'] = r_io_bytes[x]
+ FIO_result_dict['Job_' + str(x)]['read']['IO/sec'] = r_iops[x]
+ FIO_result_dict['Job_' + str(x)]['read']['IO_runtime (millisec)'] = r_io_runtime[x]
+ FIO_result_dict['Job_' + str(x)]['read']['mean_IO_latenchy (microsec)'] = r_lat[x]
-FIO_result_dict={};
-
-for x in range (0,total_jobs):
- FIO_result_dict['Job_'+str(x)]={};
- FIO_result_dict['Job_'+str(x)]['read']={};
- FIO_result_dict['Job_'+str(x)]['read']['Total_IO_Bytes']=r_io_bytes[x]
- FIO_result_dict['Job_'+str(x)]['read']['IO/sec']=r_iops[x]
- FIO_result_dict['Job_'+str(x)]['read']['IO_runtime (millisec)']=r_io_runtime[x]
- FIO_result_dict['Job_'+str(x)]['read']['mean_IO_latenchy (microsec)']=r_lat[x]
-
- FIO_result_dict['Job_'+str(x)]['write']={};
- FIO_result_dict['Job_'+str(x)]['write']['Total_IO_Bytes']=w_io_bytes[x]
- FIO_result_dict['Job_'+str(x)]['write']['IO/sec']=w_iops[x]
- FIO_result_dict['Job_'+str(x)]['write']['IO_runtime (millisec)']=w_io_runtime[x]
- FIO_result_dict['Job_'+str(x)]['write']['mean_IO_latenchy (microsec)']=w_lat[x]
-
-
+ FIO_result_dict['Job_' + str(x)]['write'] = {}
+ FIO_result_dict['Job_' + str(x)]['write']['Total_IO_Bytes'] = w_io_bytes[x]
+ FIO_result_dict['Job_' + str(x)]['write']['IO/sec'] = w_iops[x]
+ FIO_result_dict['Job_' + str(x)]['write']['IO_runtime (millisec)'] = w_io_runtime[x]
+ FIO_result_dict['Job_' + str(x)]['write']['mean_IO_latenchy (microsec)'] = w_lat[x]
host_name = (os.popen("hostname").read().rstrip())
report_time = str(datetime.datetime.utcnow().isoformat())
-os.system("mv fio_result.json "+str(host_name)+"-"+report_time+".log")
-with open('./result_temp','w+')as out_fio_result:
- pickle.dump(FIO_result_dict,out_fio_result)
-
+os.system("mv fio_result.json " + str(host_name) + "-" + report_time + ".log")
+with open('./result_temp', 'w+') as out_fio_result:
+ pickle.dump(FIO_result_dict, out_fio_result)
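
For reference, a minimal sketch (not part of this change) of how the pickled ./result_temp written by the transformed script could be read back. It assumes the script has already run in the current directory under Python 2, matching the text-mode file handling it uses:

    # Sketch only: load the per-job dictionary pickled by fio_result_transform.py.
    import json
    import pickle

    with open('./result_temp') as result_file:
        fio_results = pickle.load(result_file)

    # Each entry mirrors the structure built in the loop above, e.g.
    # fio_results['Job_0']['read']['IO/sec'] or
    # fio_results['Job_0']['write']['Total_IO_Bytes'].
    print(json.dumps(fio_results, indent=2))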