path: root/utils/transform/fio_transform.py
author     wu.zhihui <wu.zhihui1@zte.com.cn>    2016-10-20 20:31:24 +0800
committer  zhihui wu <wu.zhihui1@zte.com.cn>    2016-10-21 07:05:29 +0000
commit     36f6aa40ca02ef9ef1d24e61af337a960b8a76cd (patch)
tree       71d102d0247b1f4f2bbfc236d79ebc27d476d03e /utils/transform/fio_transform.py
parent     7f663c5d49b5cb619d48bb1b8656e9452c860121 (diff)
reorganize playbooks directory
In this patch, the playbooks directory is kept for now and will be deleted later. Its content is reorganized as follows:

benchmarks/
├── fio_jobs
│   └── test_job
└── perftest
    ├── common
    │   ├── git_proxy_pbook.yaml
    │   ├── sys_info_pbook.yaml
    │   └── sys_proxy_pbook.yaml
    ├── dhrystone.yaml
    ├── dpi.yaml
    ├── etc
    │   ├── dpi_average.sh
    │   ├── info_collect.py
    │   └── test_job
    ├── fio.yaml
    ├── iperf.yaml
    ├── ramspeed.yaml
    ├── ssl.yaml
    └── whetstone.yaml

utils/
└── transform
    ├── dpi_transform.py
    ├── final_report.py
    ├── fio_transform.py
    ├── __init__.py
    ├── iperf_transform.py
    ├── ramspeed_transform.py
    ├── ssl_transform.py
    └── ubench_transform.py

JIRA: QTIP-131

Change-Id: I41003f2f1935efd15b6221ac05200f391fa8a6a9
Signed-off-by: wu.zhihui <wu.zhihui1@zte.com.cn>
Diffstat (limited to 'utils/transform/fio_transform.py')
-rwxr-xr-x  utils/transform/fio_transform.py  29
1 file changed, 29 insertions(+), 0 deletions(-)
diff --git a/utils/transform/fio_transform.py b/utils/transform/fio_transform.py
new file mode 100755
index 00000000..5ecac823
--- /dev/null
+++ b/utils/transform/fio_transform.py
@@ -0,0 +1,29 @@
+import json
+import pickle
+import os
+import datetime
+
+
+def get_fio_job_result(fio_job_data):
+    return {'read': {'io_bytes': fio_job_data["read"]["io_bytes"],
+                     'io_ps': fio_job_data["read"]["iops"],
+                     'io_runtime_millisec': fio_job_data["read"]["runtime"],
+                     'mean_io_latency_microsec': fio_job_data["read"]["lat"]["mean"]},
+            'write': {'io_bytes': fio_job_data["write"]["io_bytes"],
+                      'io_ps': fio_job_data["write"]["iops"],
+                      'io_runtime_millisec': fio_job_data["write"]["runtime"],
+                      'mean_io_latency_microsec': fio_job_data["write"]["lat"]["mean"]}}
+
+
+with open("fio_result.json") as fio_raw:
+    fio_data = json.load(fio_raw)
+
+fio_result_dict = {}
+for x, result in enumerate(map(get_fio_job_result, fio_data["jobs"])):
+    fio_result_dict['job_{0}'.format(x)] = result
+
+host_name = os.popen("hostname").read().rstrip()
+report_time = str(datetime.datetime.utcnow().isoformat())
+os.system("mv fio_result.json " + str(host_name) + "-" + report_time + ".log")
+with open('./result_temp', 'w+') as out_fio_result:
+    pickle.dump(fio_result_dict, out_fio_result)
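For context, the transform above only reshapes data: it expects a fio JSON report named fio_result.json whose "jobs" entries carry "read"/"write" dicts with "io_bytes", "iops", "runtime" and "lat"["mean"], renames that file to <hostname>-<timestamp>.log, and pickles the per-job summary into ./result_temp. Below is a minimal sketch, not part of this patch, of how a later reporting step might read that pickle back; the load_fio_result helper name is purely illustrative.

import pickle


def load_fio_result(path='./result_temp'):
    # Hypothetical helper (not in the patch): load the per-job summary
    # dictionary that fio_transform.py pickled above.
    with open(path, 'rb') as pickled:  # 'rb' also reads protocol-0 pickles
        return pickle.load(pickled)


for job_name, metrics in sorted(load_fio_result().items()):
    # e.g. "job_0 read_iops=2048.0 write_iops=1024.0"
    print('{0} read_iops={1} write_iops={2}'.format(
        job_name, metrics['read']['io_ps'], metrics['write']['io_ps']))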