author    | zhifeng.jiang <jiang.zhifeng@zte.com.cn> | 2016-09-23 11:38:35 +0800
committer | zhifeng.jiang <jiang.zhifeng@zte.com.cn> | 2016-09-25 10:56:22 +0800
commit    | 2b172eef7185382ed4283d1868896626ae3ef5f1 (patch)
tree      | 7242ad699db6876264c9ac60f36763cc47f04ad1 /benchmarks/playbooks/result_transform/fio
parent    | c806bbde071596f204782e7182a65173798ad3ca (diff)
Change result keys to lower-case and remove spaces and special characters
QTIP-93
Change-Id: I125163e05b49d9631838e89571d59a967252292e
Signed-off-by: zhifeng.jiang <jiang.zhifeng@zte.com.cn>
Diffstat (limited to 'benchmarks/playbooks/result_transform/fio')
-rwxr-xr-x | benchmarks/playbooks/result_transform/fio/fio_result_transform.py | 51
1 file changed, 15 insertions, 36 deletions
diff --git a/benchmarks/playbooks/result_transform/fio/fio_result_transform.py b/benchmarks/playbooks/result_transform/fio/fio_result_transform.py
index 9929aa18..5ecac823 100755
--- a/benchmarks/playbooks/result_transform/fio/fio_result_transform.py
+++ b/benchmarks/playbooks/result_transform/fio/fio_result_transform.py
@@ -3,48 +3,27 @@
 import pickle
 import os
 import datetime
 
-with open("fio_result.json") as fio_raw:
-    fio_data = json.load(fio_raw)
-
-r_iops = []
-r_io_bytes = []
-r_io_runtime = []
-r_lat = []
-w_iops = []
-w_io_bytes = []
-w_io_runtime = []
-w_lat = []
-total_jobs = len(fio_data["jobs"])
+def get_fio_job_result(fio_job_data):
+    return {'read': {'io_bytes': fio_job_data["read"]["io_bytes"],
+                     'io_ps': fio_job_data["read"]["iops"],
+                     'io_runtime_millisec': fio_job_data["read"]["runtime"],
+                     'mean_io_latenchy_microsec': fio_job_data["read"]["lat"]["mean"]},
+            'write': {'io_bytes': fio_job_data["write"]["io_bytes"],
+                      'io_ps': fio_job_data["write"]["iops"],
+                      'io_runtime_millisec': fio_job_data["write"]["runtime"],
+                      'mean_io_latenchy_microsec': fio_job_data["write"]["lat"]["mean"]}}
 
-for x in range(0, int(total_jobs)):
-    r_iops.append(fio_data["jobs"][x]["read"]["iops"])
-    r_io_bytes.append(fio_data["jobs"][x]["read"]["io_bytes"])
-    r_io_runtime.append(fio_data["jobs"][x]["read"]["runtime"])
-    r_lat.append(fio_data["jobs"][x]["read"]["lat"]["mean"])
-    w_iops.append(fio_data["jobs"][x]["write"]["iops"])
-    w_io_bytes.append(fio_data["jobs"][x]["write"]["io_bytes"])
-    w_io_runtime.append(fio_data["jobs"][x]["write"]["runtime"])
-    w_lat.append(fio_data["jobs"][x]["write"]["lat"]["mean"])
-FIO_result_dict = {}
-
-for x in range(0, total_jobs):
-    FIO_result_dict['Job_' + str(x)] = {}
-    FIO_result_dict['Job_' + str(x)]['read'] = {}
-    FIO_result_dict['Job_' + str(x)]['read']['Total_IO_Bytes'] = r_io_bytes[x]
-    FIO_result_dict['Job_' + str(x)]['read']['IO/sec'] = r_iops[x]
-    FIO_result_dict['Job_' + str(x)]['read']['IO_runtime (millisec)'] = r_io_runtime[x]
-    FIO_result_dict['Job_' + str(x)]['read']['mean_IO_latenchy (microsec)'] = r_lat[x]
+with open("fio_result.json") as fio_raw:
+    fio_data = json.load(fio_raw)
 
-    FIO_result_dict['Job_' + str(x)]['write'] = {}
-    FIO_result_dict['Job_' + str(x)]['write']['Total_IO_Bytes'] = w_io_bytes[x]
-    FIO_result_dict['Job_' + str(x)]['write']['IO/sec'] = w_iops[x]
-    FIO_result_dict['Job_' + str(x)]['write']['IO_runtime (millisec)'] = w_io_runtime[x]
-    FIO_result_dict['Job_' + str(x)]['write']['mean_IO_latenchy (microsec)'] = w_lat[x]
+fio_result_dict = {}
+for x, result in enumerate(map(get_fio_job_result, fio_data["jobs"])):
+    fio_result_dict['job_{0}'.format(x)] = result
 
 host_name = (os.popen("hostname").read().rstrip())
 report_time = str(datetime.datetime.utcnow().isoformat())
 os.system("mv fio_result.json " + str(host_name) + "-" + report_time + ".log")
 
 with open('./result_temp', 'w + ')as out_fio_result:
-    pickle.dump(FIO_result_dict, out_fio_result)
+    pickle.dump(fio_result_dict, out_fio_result)
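As a quick illustration of what the refactored transform produces, the sketch below reuses the `get_fio_job_result()` helper from this patch and runs it over a hand-written, stripped-down stand-in for fio's JSON output. The single job and its numbers are invented for the example, and only the fields the script actually reads are included; this is not real benchmark data.

```python
#!/usr/bin/python
# Minimal sketch: exercise the helper introduced by this patch on a
# made-up fio-style job record to show the new lower-case key layout.
import json


def get_fio_job_result(fio_job_data):
    # Same mapping as the patched script: pick the fields QTIP reports
    # and emit lower-case keys without spaces or punctuation.
    return {'read': {'io_bytes': fio_job_data["read"]["io_bytes"],
                     'io_ps': fio_job_data["read"]["iops"],
                     'io_runtime_millisec': fio_job_data["read"]["runtime"],
                     'mean_io_latenchy_microsec': fio_job_data["read"]["lat"]["mean"]},
            'write': {'io_bytes': fio_job_data["write"]["io_bytes"],
                      'io_ps': fio_job_data["write"]["iops"],
                      'io_runtime_millisec': fio_job_data["write"]["runtime"],
                      'mean_io_latenchy_microsec': fio_job_data["write"]["lat"]["mean"]}}


# Stand-in for json.load(open("fio_result.json")): one job, sample values
# invented purely for illustration, with only the fields the transform reads.
sample_fio_data = {
    "jobs": [
        {"read": {"io_bytes": 409600, "iops": 100, "runtime": 4096,
                  "lat": {"mean": 250.0}},
         "write": {"io_bytes": 204800, "iops": 50, "runtime": 4096,
                   "lat": {"mean": 500.0}}},
    ]
}

# Same aggregation loop as the patch: one lower-case 'job_N' entry per job.
fio_result_dict = {}
for x, result in enumerate(map(get_fio_job_result, sample_fio_data["jobs"])):
    fio_result_dict['job_{0}'.format(x)] = result

# Prints e.g. {"job_0": {"read": {"io_bytes": 409600, "io_ps": 100, ...}, ...}}
print(json.dumps(fio_result_dict, indent=2, sort_keys=True))
```

In the patched script the same dictionary is pickled to `./result_temp` for the downstream result handling; printing it here only makes the renamed keys visible.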