path: root/benchmarks/playbooks/result_transform/fio/fio_result_transform.py
Diffstat (limited to 'benchmarks/playbooks/result_transform/fio/fio_result_transform.py')
-rwxr-xr-x  benchmarks/playbooks/result_transform/fio/fio_result_transform.py | 61
 1 file changed, 27 insertions(+), 34 deletions(-)
diff --git a/benchmarks/playbooks/result_transform/fio/fio_result_transform.py b/benchmarks/playbooks/result_transform/fio/fio_result_transform.py
index f9410a62..9929aa18 100755
--- a/benchmarks/playbooks/result_transform/fio/fio_result_transform.py
+++ b/benchmarks/playbooks/result_transform/fio/fio_result_transform.py
@@ -4,22 +4,20 @@ import os
import datetime
with open("fio_result.json") as fio_raw:
-    fio_data=json.load(fio_raw)
+    fio_data = json.load(fio_raw)
-r_iops=[];
-r_io_bytes=[];
-r_io_runtime=[];
-r_lat=[];
-w_iops=[];
-w_io_bytes=[];
-w_io_runtime=[];
-w_lat=[];
+r_iops = []
+r_io_bytes = []
+r_io_runtime = []
+r_lat = []
+w_iops = []
+w_io_bytes = []
+w_io_runtime = []
+w_lat = []
-
-total_jobs=len(fio_data["jobs"])
-
-for x in range (0,int(total_jobs)):
+total_jobs = len(fio_data["jobs"])
+for x in range(0, int(total_jobs)):
r_iops.append(fio_data["jobs"][x]["read"]["iops"])
r_io_bytes.append(fio_data["jobs"][x]["read"]["io_bytes"])
r_io_runtime.append(fio_data["jobs"][x]["read"]["runtime"])
@@ -29,29 +27,24 @@ for x in range (0,int(total_jobs)):
w_io_runtime.append(fio_data["jobs"][x]["write"]["runtime"])
w_lat.append(fio_data["jobs"][x]["write"]["lat"]["mean"])
-FIO_result_dict={};
-
-for x in range (0,total_jobs):
-    FIO_result_dict['Job_'+str(x)]={};
-    FIO_result_dict['Job_'+str(x)]['read']={};
-    FIO_result_dict['Job_'+str(x)]['read']['Total_IO_Bytes']=r_io_bytes[x]
-    FIO_result_dict['Job_'+str(x)]['read']['IO/sec']=r_iops[x]
-    FIO_result_dict['Job_'+str(x)]['read']['IO_runtime (millisec)']=r_io_runtime[x]
-    FIO_result_dict['Job_'+str(x)]['read']['mean_IO_latenchy (microsec)']=r_lat[x]
-
-    FIO_result_dict['Job_'+str(x)]['write']={};
-    FIO_result_dict['Job_'+str(x)]['write']['Total_IO_Bytes']=w_io_bytes[x]
-    FIO_result_dict['Job_'+str(x)]['write']['IO/sec']=w_iops[x]
-    FIO_result_dict['Job_'+str(x)]['write']['IO_runtime (millisec)']=w_io_runtime[x]
-    FIO_result_dict['Job_'+str(x)]['write']['mean_IO_latenchy (microsec)']=w_lat[x]
-
-
+FIO_result_dict = {}
+for x in range(0, total_jobs):
+    FIO_result_dict['Job_' + str(x)] = {}
+    FIO_result_dict['Job_' + str(x)]['read'] = {}
+    FIO_result_dict['Job_' + str(x)]['read']['Total_IO_Bytes'] = r_io_bytes[x]
+    FIO_result_dict['Job_' + str(x)]['read']['IO/sec'] = r_iops[x]
+    FIO_result_dict['Job_' + str(x)]['read']['IO_runtime (millisec)'] = r_io_runtime[x]
+    FIO_result_dict['Job_' + str(x)]['read']['mean_IO_latenchy (microsec)'] = r_lat[x]
+    FIO_result_dict['Job_' + str(x)]['write'] = {}
+    FIO_result_dict['Job_' + str(x)]['write']['Total_IO_Bytes'] = w_io_bytes[x]
+    FIO_result_dict['Job_' + str(x)]['write']['IO/sec'] = w_iops[x]
+    FIO_result_dict['Job_' + str(x)]['write']['IO_runtime (millisec)'] = w_io_runtime[x]
+    FIO_result_dict['Job_' + str(x)]['write']['mean_IO_latenchy (microsec)'] = w_lat[x]
host_name = (os.popen("hostname").read().rstrip())
report_time = str(datetime.datetime.utcnow().isoformat())
-os.system("mv fio_result.json "+str(host_name)+"-"+report_time+".log")
-with open('./result_temp','w+')as out_fio_result:
-    pickle.dump(FIO_result_dict,out_fio_result)
-
+os.system("mv fio_result.json " + str(host_name) + "-" + report_time + ".log")
+with open('./result_temp', 'w+') as out_fio_result:
+    pickle.dump(FIO_result_dict, out_fio_result)
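For reference, a minimal sketch of how a downstream consumer might read the transformed result back. The './result_temp' path and the 'Job_N' / 'read' / 'write' key layout come from the script above; the consumer itself is hypothetical. Since the script opens the pickle file in text mode ('w+'), Python 2 style protocol-0 pickles and text-mode reading are assumed here.

    # Hypothetical consumer: load the pickled FIO summary written by
    # fio_result_transform.py and print per-job IOPS, using the key
    # names defined in the script above.
    import pickle

    with open('./result_temp') as in_fio_result:
        fio_summary = pickle.load(in_fio_result)

    for job_name in sorted(fio_summary):
        metrics = fio_summary[job_name]
        print("%s: read %s IOPS, write %s IOPS"
              % (job_name, metrics['read']['IO/sec'], metrics['write']['IO/sec']))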