aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorwu.zhihui <wu.zhihui1@zte.com.cn>2016-10-20 20:31:24 +0800
committerzhihui wu <wu.zhihui1@zte.com.cn>2016-10-21 07:05:29 +0000
commit36f6aa40ca02ef9ef1d24e61af337a960b8a76cd (patch)
tree71d102d0247b1f4f2bbfc236d79ebc27d476d03e
parent7f663c5d49b5cb619d48bb1b8656e9452c860121 (diff)
reorganize playbooks directory
In this patch, playbooks will not be deleted temporarily. Directory playbooks will be reorganized like this benchmarks/ ├── fio_jobs │   └── test_job ├── perftest    ├── common    │   ├── git_proxy_pbook.yaml    │   ├── sys_info_pbook.yaml    │   └── sys_proxy_pbook.yaml    ├── dhrystone.yaml    ├── dpi.yaml    ├── etc    │   ├── dpi_average.sh    │   ├── info_collect.py    │   └── test_job    ├── fio.yaml    ├── iperf.yaml    ├── ramspeed.yaml    ├── ssl.yaml    └── whetstone.yaml utils/ └── transform ├── dpi_transform.py ├── final_report.py ├── fio_transform.py ├── __init__.py ├── iperf_transform.py ├── ramspeed_transform.py ├── ssl_transform.py └── ubench_transform.py JIRA: QTIP-131 Change-Id: I41003f2f1935efd15b6221ac05200f391fa8a6a9 Signed-off-by: wu.zhihui <wu.zhihui1@zte.com.cn>
-rw-r--r--benchmarks/perftest/common/git_proxy_pbook.yaml11
-rw-r--r--benchmarks/perftest/common/sys_info_pbook.yaml42
-rw-r--r--benchmarks/perftest/common/sys_proxy_pbook.yaml53
-rw-r--r--benchmarks/perftest/dhrystone.yaml112
-rw-r--r--benchmarks/perftest/dpi.yaml120
-rw-r--r--benchmarks/perftest/etc/dpi_average.sh14
-rw-r--r--benchmarks/perftest/etc/info_collect.py86
-rw-r--r--benchmarks/perftest/etc/test_job13
-rw-r--r--benchmarks/perftest/fio.yaml105
-rw-r--r--benchmarks/perftest/iperf.yaml161
-rw-r--r--benchmarks/perftest/ramspeed.yaml109
-rw-r--r--benchmarks/perftest/ssl.yaml113
-rw-r--r--benchmarks/perftest/whetstone.yaml111
-rw-r--r--utils/transform/__init__.py0
-rw-r--r--utils/transform/dpi_transform.py47
-rw-r--r--utils/transform/final_report.py24
-rwxr-xr-xutils/transform/fio_transform.py29
-rw-r--r--utils/transform/iperf_transform.py27
-rw-r--r--utils/transform/ramspeed_transform.py41
-rw-r--r--utils/transform/ssl_transform.py54
-rw-r--r--utils/transform/ubench_transform.py32
21 files changed, 1304 insertions, 0 deletions
diff --git a/benchmarks/perftest/common/git_proxy_pbook.yaml b/benchmarks/perftest/common/git_proxy_pbook.yaml
new file mode 100644
index 00000000..5cb6f450
--- /dev/null
+++ b/benchmarks/perftest/common/git_proxy_pbook.yaml
@@ -0,0 +1,11 @@
+#git
+- name: set git proxy(http)
+ shell: "git config --global http.proxy {{ http_proxy }}"
+ when: http_proxy is defined
+ ignore_errors: yes
+
+- name: set git proxy(https)
+ shell: "git config --global https.proxy {{https_proxy}}"
+ when: https_proxy is defined
+ ignore_errors: yes
+
diff --git a/benchmarks/perftest/common/sys_info_pbook.yaml b/benchmarks/perftest/common/sys_info_pbook.yaml
new file mode 100644
index 00000000..5c2d8f79
--- /dev/null
+++ b/benchmarks/perftest/common/sys_info_pbook.yaml
@@ -0,0 +1,42 @@
+ - name: Epel Release install when CentOS
+ shell: sudo yum install epel-release -y
+ when: ansible_os_family == "RedHat"
+
+ - name: Inxi install when CentOS
+ shell: sudo yum install inxi -y
+ when: ansible_os_family == "RedHat"
+
+ - name: Software Properties Common
+ shell: sudo apt-get install software-properties-common -y
+ when: ansible_os_family == "Debian"
+
+ - name: adding trusty-backport main repo
+ shell: sudo apt-add-repository "deb http://archive.ubuntu.com/ubuntu/ trusty-backports main restricted universe multiverse"
+ when: ansible_os_family == "Debian"
+
+ - name: adding trusty main repo
+ shell: sudo apt-add-repository "deb http://archive.ubuntu.com/ubuntu/ trusty main restricted universe multiverse"
+ when: ansible_os_family == "Debian"
+
+ - name: system info collection tool install when Ubuntu
+ shell: sudo apt-get update && sudo apt-get install inxi -y
+ when: ansible_os_family == "Debian"
+
+ - name: Install ansible copy dependencies if remote host has selinux enabled
+ shell: sudo yum install libselinux-python -y
+ when: ansible_os_family == "RedHat"
+
+ - name: Install ansible copy dependencies if remote host has selinux enabled
+ shell: sudo apt-get install python-selinux -y
+ when: ansible_os_family == "Debian"
+
+ - name: system_info script copy
+ copy: src=../etc/info_collect.py dest={{home_dir.stdout}}/qtip_result/
+
+ - name: collecting system information for non-network test cases
+ shell: cd $HOME/qtip_result && sudo python info_collect.py c
+ when: not network
+
+ - name: collecting system information for network test cases
+ shell: cd $HOME/qtip_result && sudo python info_collect.py n
+ when: network
diff --git a/benchmarks/perftest/common/sys_proxy_pbook.yaml b/benchmarks/perftest/common/sys_proxy_pbook.yaml
new file mode 100644
index 00000000..bf4a8ccb
--- /dev/null
+++ b/benchmarks/perftest/common/sys_proxy_pbook.yaml
@@ -0,0 +1,53 @@
+#env
+- name: insert shell proxy http
+ lineinfile: dest=/etc/profile.d/proxy.sh state=present create=yes owner=root group=root mode=0644 regexp="export http_proxy={{ http_proxy }}"
+ insertafter=EOF line="export http_proxy={{ http_proxy }}"
+ when: http_proxy is defined
+ ignore_errors: yes
+
+- name: insert shell proxy https
+ lineinfile: dest=/etc/profile.d/proxy.sh state=present create=yes owner=root group=root mode=0644 regexp="export https_proxy={{ https_proxy }}"
+ insertafter=EOF line="export https_proxy={{ https_proxy }}"
+ when: https_proxy is defined
+ ignore_errors: yes
+
+- name: insert no proxy
+ lineinfile: dest=/etc/profile.d/proxy.sh state=present create=yes owner=root group=root mode=0644 regexp="{{ no_proxy }}"
+ insertafter=EOF line="export no_proxy={{ no_proxy }}"
+ when: no_proxy is defined
+ ignore_errors: yes
+
+#wget
+- name: insert wget proxy(http)
+ lineinfile: dest=/etc/wgetrc state=present regexp="http_proxy={{ http_proxy }}"
+ insertafter="^#http_proxy" line="http_proxy={{ http_proxy }}"
+ when: http_proxy is defined
+ ignore_errors: yes
+
+- name: insert wget proxy(https)
+ lineinfile: dest=/etc/wgetrc state=present regexp="https_proxy={{ https_proxy }}"
+ insertafter="^#https_proxy" line="https_proxy={{ https_proxy }}"
+ when: https_proxy is defined
+ ignore_errors: yes
+
+#yum
+- name: insert yum proxy(http)
+ lineinfile: dest=/etc/yum.conf state=present regexp="proxy={{ http_proxy }}"
+ insertafter=EOF line="proxy={{ http_proxy }}"
+ when: ansible_os_family == "RedHat" and http_proxy is defined
+ ignore_errors: yes
+
+#apt
+
+- name: insert apt proxy(http)
+ lineinfile: dest=/etc/apt/apt.conf state=present create=yes regexp="Acquire::http::Proxy \"{{ http_proxy }}\";"
+ insertafter=EOF line="Acquire::http::Proxy \"{{ http_proxy }}\";"
+ when: ansible_os_family == "Debian" and http_proxy is defined
+ ignore_errors: yes
+
+- name: insert apt proxy(https)
+ lineinfile: dest=/etc/apt/apt.conf state=present create=yes regexp="Acquire::https::Proxy \"{{ https_proxy }}\";"
+ insertafter=EOF line="Acquire::https::Proxy \"{{ https_proxy }}\";"
+ when: ansible_os_family == "Debian" and https_proxy is defined
+ ignore_errors: yes
+
diff --git a/benchmarks/perftest/dhrystone.yaml b/benchmarks/perftest/dhrystone.yaml
new file mode 100644
index 00000000..5f83bbb0
--- /dev/null
+++ b/benchmarks/perftest/dhrystone.yaml
@@ -0,0 +1,112 @@
+ - hosts: localhost
+ connection: local
+ gather_facts: no
+
+ tasks:
+ - name: making dhrystone directory
+ file: path={{workingdir}}/{{Dest_dir}}/dhrystone state=directory
+
+ - name: making temporary dhrystone directory
+ file: path={{workingdir}}/{{Dest_dir}}/dhrystone/dhrystone_temp state=directory
+
+ - hosts: "{{role}}"
+ become: yes
+ remote_user: "{{username}}"
+
+ tasks:
+ - name: checking home directory
+ shell: echo $HOME
+ register: home_dir
+
+ - name: cleaning tempT
+ shell: sudo rm -rf $HOME/tempT
+
+ - name: cleaning qtip_result
+ shell: sudo rm -rf $HOME/qtip_result
+
+ - name: make directory
+ shell: sudo mkdir $HOME/qtip_result
+
+ - include: ../common/sys_proxy_pbook.yaml
+
+ - include: ../common/sys_info_pbook.yaml
+ vars:
+ network: false
+
+ - name: Installing UnixBench dependencies if CentOS
+ shell: sudo yum install git gcc patch perl-Time-HiRes -y
+ when: ansible_os_family == "RedHat"
+
+ - name: Installing UnixBench dependencies if Ubuntu
+ shell: sudo apt-get install git gcc patch perl -y
+ when: ansible_os_family == "Debian"
+
+ - include: ../common/git_proxy_pbook.yaml
+
+ - name: Clone unixbench
+ git: repo=https://github.com/kdlucas/byte-unixbench.git
+ dest=$HOME/tempT
+
+ - name: make
+ shell: sudo make --directory $HOME/tempT/UnixBench/
+
+ - name: Run dhrystone
+ shell: cd $HOME/tempT/UnixBench/&& sudo ./Run -v dhrystone
+
+ - name: collecting and transforming result script copy
+ copy: src={{workingdir}}/utils/transform/ubench_transform.py dest={{home_dir.stdout}}/qtip_result/
+
+ - name: transforming result
+ shell: cd $HOME/qtip_result/ && sudo python ubench_transform.py
+
+ - name: copying consolidated report script
+ copy: src={{workingdir}}/utils/transform/final_report.py dest={{home_dir.stdout}}/qtip_result/
+
+ - name: making consolidated report
+ shell: cd $HOME/qtip_result && sudo python final_report.py Dhrystone {{fname}}
+
+ - name: making directory
+ file: path={{home_dir.stdout}}/qtip_result/log state=directory
+
+ - name: copying result to temp directory
+ shell: sudo cp -r $HOME/tempT/UnixBench/results/* $HOME/qtip_result/log/
+
+ - name: registering files
+ shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
+ register: files_to_copy
+
+ - name: copy results
+ fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/dhrystone/dhrystone_temp
+ with_items: "{{files_to_copy.stdout_lines}}"
+
+ - name: registering log files
+ shell: (cd $HOME/qtip_result/log/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
+ register: copy_log_results
+
+ - name: copying log results
+ fetch: src={{home_dir.stdout}}/qtip_result/log/{{item}} dest={{workingdir}}/{{Dest_dir}}/dhrystone/dhrystone_temp
+ with_items: "{{copy_log_results.stdout_lines}}"
+
+ - name: cleaning tempT
+ shell: sudo rm -rf $HOME/tempT
+
+ - name: cleaning_qtip_result
+ shell: sudo rm -rf $HOME/qtip_result
+
+
+ - hosts: localhost
+ connection: local
+ gather_facts: no
+
+ tasks:
+ - name: extracting_json
+ shell: ( find {{workingdir}}/{{Dest_dir}}/dhrystone/dhrystone_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/dhrystone/)
+
+ - name: making_logs_folder
+ shell: mkdir -p {{workingdir}}/{{Dest_dir}}/dhrystone/logs
+
+ - name: extracting_log
+ shell: ( find {{workingdir}}/{{Dest_dir}}/dhrystone/dhrystone_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/dhrystone/logs)
+
+ - name: removing dhrystone_temp
+ shell: rm -rf {{workingdir}}/{{Dest_dir}}/dhrystone/dhrystone_temp
diff --git a/benchmarks/perftest/dpi.yaml b/benchmarks/perftest/dpi.yaml
new file mode 100644
index 00000000..59d01d58
--- /dev/null
+++ b/benchmarks/perftest/dpi.yaml
@@ -0,0 +1,120 @@
+ - hosts: localhost
+ connection: local
+ gather_facts: no
+
+ tasks:
+ - name: making dpi directory
+ file: path={{workingdir}}/{{Dest_dir}}/dpi state=directory
+
+ - name: making temporary dpi directory
+ file: path={{workingdir}}/{{Dest_dir}}/dpi/dpi_temp state=directory
+
+ - hosts: "{{role}}"
+ become: yes
+ remote_user: "{{username}}"
+
+ tasks:
+ - name: echo
+ shell: echo $USER
+
+ - name: checking home directory
+ shell: echo $HOME
+ register: home_dir
+
+ - name: cleaning
+ shell: sudo rm -rf $HOME/tempD
+
+ - name: cleaning previous results
+ shell: sudo rm -rf $HOME/qtip_result
+
+ - name: make qtip_result
+ shell: sudo mkdir $HOME/qtip_result
+
+ - include: ../common/sys_proxy_pbook.yaml
+
+ - include: ../common/sys_info_pbook.yaml
+ vars:
+ network: false
+
+ - name: Installing nDPI dependencies if CentOS
+ shell: sudo yum install git gcc patch perl-Time-HiRes autoconf automake libpcap-devel libtool -y
+ when: ansible_os_family == "RedHat"
+
+ - name: Installing nDPI dependencies if Ubuntu
+ shell: sudo apt-get install git gcc patch autoconf automake libpcap-dev libtool -y
+ when: ansible_os_family == "Debian"
+
+ - name: making nDPI temporary directory
+ shell: sudo mkdir $HOME/tempD
+
+ - include: ../common/git_proxy_pbook.yaml
+
+ - name: Clone nDPI
+ git: repo=https://github.com/ntop/nDPI.git
+ dest=$HOME/tempD/nDPI
+
+ - name: autogen
+ shell: cd $HOME/tempD/nDPI && sudo ./autogen.sh
+
+ - name: configure
+ shell: cd $HOME/tempD/nDPI && sudo ./configure
+
+ - name: make
+ shell: cd $HOME/tempD/nDPI && sudo make
+
+ - name: Fetching Test_pcap file
+ shell: cd $HOME/tempD/nDPI/example && wget http://build.opnfv.org/artifacts.opnfv.org/qtip/utilities/test.pcap
+
+ - name: fetch Averaging script
+ copy: src=../etc/dpi_average.sh dest={{home_dir.stdout}}/tempD/nDPI/example mode=777
+
+ - name: Run nDPI benchmark
+ shell: cd $HOME/tempD/nDPI/example && sudo ./dpi_average.sh
+
+ - name: copy result to temp_direc
+ shell: sudo cp $HOME/tempD/nDPI/example/dpi_dump.txt $HOME/qtip_result
+
+ - name: fetch dpi result transform script
+ copy: src={{workingdir}}/utils/transform/dpi_transform.py dest={{home_dir.stdout}}/qtip_result
+
+ - name: Transforming results
+ shell: cd $HOME/qtip_result && sudo python dpi_transform.py
+
+ - name: copy report formation script
+ copy: src={{workingdir}}/utils/transform/final_report.py dest={{home_dir.stdout}}/qtip_result
+
+ - name: consolidating report
+ shell: cd $HOME/qtip_result && sudo python final_report.py DPI {{fname}}
+
+ - name: registering files
+ shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
+ register: files_to_copy
+
+ - name: copy results
+ fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/dpi/dpi_temp
+ with_items: "{{files_to_copy.stdout_lines}}"
+
+ - name: registering log files
+ shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
+ register: copy_log_results
+
+ - name: copying log results
+ fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/dpi/dpi_temp
+ with_items: "{{copy_log_results.stdout_lines}}"
+
+ - hosts: localhost
+ connection: local
+ gather_facts: no
+
+ tasks:
+ - name: extracting_json
+ shell: ( find {{workingdir}}/{{Dest_dir}}/dpi/dpi_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/dpi/)
+
+ - name: making_logs_folder
+ shell: mkdir -p {{workingdir}}/{{Dest_dir}}/dpi/logs
+
+ - name: extracting_log
+ shell: ( find {{workingdir}}/{{Dest_dir}}/dpi/dpi_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/dpi/logs)
+
+ - name: removing dpi_temp
+ shell: rm -rf {{workingdir}}/{{Dest_dir}}/dpi/dpi_temp
diff --git a/benchmarks/perftest/etc/dpi_average.sh b/benchmarks/perftest/etc/dpi_average.sh
new file mode 100644
index 00000000..405d3ff6
--- /dev/null
+++ b/benchmarks/perftest/etc/dpi_average.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+COUNTER=0
+WDIR=$PWD
+while [ $COUNTER -lt 10 ]; do
+
+ echo $WDIR
+ $( ./ndpiReader -i test.pcap >> $WDIR/dpi_dump.txt )
+ let COUNTER=COUNTER+1
+ echo "Run number: $COUNTER"
+
+done
+
+
diff --git a/benchmarks/perftest/etc/info_collect.py b/benchmarks/perftest/etc/info_collect.py
new file mode 100644
index 00000000..3fc35d5a
--- /dev/null
+++ b/benchmarks/perftest/etc/info_collect.py
@@ -0,0 +1,86 @@
+import os
+import pickle
+import json
+import sys
+
+os.system('inxi -b -c0 -n > $PWD/est_2')
+est_ob = open("est_2", "r+")
+est_ob2 = open("est_1", "w+")
+in_string = est_ob.read().replace('\n', ' ')
+cpu_idle = float(os.popen("""top -bn1 | grep "Cpu(s)" | awk '{print $8}'""").read().rstrip())
+cpu_usage = 100 - cpu_idle
+est_ob2.write(in_string)
+est_ob.close()
+est_ob2.close()
+
+inxi_host = os.popen("""cat $PWD/est_1 | grep -o -P '(?<=Host:).*(?=Kernel)' """).read().lstrip().rstrip()
+inxi_mem = os.popen("""cat $PWD/est_1 | grep -o -P '(?<=Memory:).*(?=MB)' """).read().lstrip().rstrip() + "MB"
+inxi_cpu = os.popen("""cat $PWD/est_1 | grep -o -P '(?<=CPU).*(?=speed)' | cut -f2 -d':'""").read().lstrip().rstrip()
+inxi_distro = os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=Distro:).*(?=Machine:)' """).read().rstrip().lstrip()
+inxi_kernel = os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=Kernel:).*(?=Console:)' """).read().rstrip().lstrip()
+inxi_HD = os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=HDD Total Size:).*(?=Info:)' """).read().rstrip().lstrip()
+inxi_product = os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=product:).*(?=Mobo:)' """).read().rstrip().lstrip()
+
+info_dict = {'hostname': inxi_host,
+ 'product': inxi_product,
+ 'os': inxi_distro,
+ 'kernel': inxi_kernel,
+ 'cpu': inxi_cpu,
+ 'cpu_usage': '{0}%'.format(str(round(cpu_usage, 3))),
+ 'memory_usage': inxi_mem,
+ 'disk_usage': inxi_HD}
+network_flag = str(sys.argv[1]).rstrip()
+
+if (network_flag == 'n'):
+
+ info_dict['network_interfaces'] = {}
+ tem_2 = """ cat $PWD/est_1 | grep -o -P '(?<=Network:).*(?=Info:)'"""
+ print os.system(tem_2 + ' > Hello')
+ i = int(os.popen(tem_2 + " | grep -o 'Card' | wc -l ").read())
+ print i
+
+ for x in range(1, i + 1):
+ tem = """ cat $PWD/est_1 | grep -o -P '(?<=Card-""" + str(x) + """:).*(?=Card-""" + str(x + 1) + """)'"""
+ if i == 1:
+ tem = """ cat $PWD/est_1 | grep -o -P '(?<=Network:).*(?=Info:)'"""
+ inxi_card_1 = ((os.popen(tem + " | grep -o -P '(?<=Card:).*(?=Drives:)'|sed 's/ *driver:.*//'").read().rstrip().lstrip()))
+ print inxi_card_1
+ info_dict['network_interfaces']['interface_' + str(x)] = {}
+ info_dict['network_interfaces']['interface_' + str(x)]['network_card'] = inxi_card_1
+ inxi_card_2 = ((os.popen(tem + "| grep -o -P '(?<=Card:).*(?=Drives:)'|sed -e 's/^.*IF: //'").read())).rstrip().lstrip()
+ info_dict['network_interfaces']['interface_' + str(x)]['interface_info'] = inxi_card_2
+ elif x < (i):
+ print "two"
+ inxi_card_1 = ((os.popen(tem + "| sed 's/ *driver:.*//'").read().rstrip().lstrip()))
+ info_dict['network_interfaces']['interface_' + str(x)] = {}
+ info_dict['network_interfaces']['interface_' + str(x)]['network_Card'] = inxi_card_1
+ inxi_card_2 = ((os.popen(tem + "|sed -e 's/^.*IF: //'").read())).rstrip().lstrip()
+ info_dict['network_interfaces']['interface_' + str(x)]['interface_info'] = inxi_card_2
+ elif x == i:
+ print "Three"
+ info_dict['network_interfaces']['interface_' + str(x)] = {}
+ inxi_card_1 = ((os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=Card-""" + str(x) + """:).*(?=Drives:)'| sed 's/ *driver:.*//' """).read().rstrip().lstrip()))
+ info_dict['network_interfaces']['interface_' + str(x)]['network_Card'] = inxi_card_1
+ inxi_card_2 = ((os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=Card-""" + str(x) + """:).*(?=Drives:)'| sed -e 's/^.*IF: //' """).read().rstrip().lstrip()))
+ info_dict['network_interfaces']['interface_' + str(x)]['interface_info'] = inxi_card_2
+ else:
+ print "No network cards"
+ os.system("bwm-ng -o plain -c 1 | grep -v '=' | grep -v 'iface' | grep -v '-' > bwm_dump")
+ n_interface = int(os.popen(" cat bwm_dump | grep -v 'total' | wc -l ").read().rstrip())
+ interface = {}
+ for x in range(1, n_interface):
+ interface_name = os.popen(" cat bwm_dump | awk 'NR==" + str(x) + "' | awk '{print $1}' ").read().rstrip().replace(':', '')
+ interface[str(interface_name)] = {}
+ interface[str(interface_name)]['Rx (KB/s)'] = os.popen(" cat bwm_dump | awk 'NR==" + str(x) + "' | awk '{print $2}' ").read().rstrip()
+ interface[str(interface_name)]['Tx (KB/s)'] = os.popen(" cat bwm_dump | awk 'NR==" + str(x) + "' | awk '{print $4}' ").read().rstrip()
+ interface[str(interface_name)]['Total (KB/s)'] = os.popen(" cat bwm_dump | awk 'NR== " + str(x) + "' | awk '{print $6}' ").read().rstrip()
+
+ info_dict['interface_io'] = interface
+
+print info_dict
+
+with open('./sys_info_temp', 'w+')as out_info:
+ pickle.dump(info_dict, out_info)
+
+with open('temp', 'w+') as result_json:
+ json.dump(info_dict, result_json, indent=4, sort_keys=True)
diff --git a/benchmarks/perftest/etc/test_job b/benchmarks/perftest/etc/test_job
new file mode 100644
index 00000000..6817abca
--- /dev/null
+++ b/benchmarks/perftest/etc/test_job
@@ -0,0 +1,13 @@
+[global]
+
+runtime= 600
+ioengine=libaio
+iodepth=2
+direct=1
+bs=4k
+rw=randrw
+
+[job1]
+size=5G
+
+
diff --git a/benchmarks/perftest/fio.yaml b/benchmarks/perftest/fio.yaml
new file mode 100644
index 00000000..bd29c127
--- /dev/null
+++ b/benchmarks/perftest/fio.yaml
@@ -0,0 +1,105 @@
+ - hosts: localhost
+ connection: local
+ gather_facts: no
+
+ tasks:
+ - name: making fio directory
+ file: path={{workingdir}}/{{Dest_dir}}/fio state=directory
+
+ - name: making temporary fio directory
+ file: path={{workingdir}}/{{Dest_dir}}/fio/fio_temp state=directory
+
+
+ - hosts: "{{role}}"
+ become: yes
+ remote_user: "{{username}}"
+
+ tasks:
+ - name: checking home directory
+ shell: echo $HOME
+ register: home_dir
+
+ - name: cleaning
+ shell: sudo rm -rf $HOME/fio
+
+ - name: cleaning previous results
+ shell: sudo rm -rf $HOME/qtip_result
+
+ - name: making fio temporary directory
+ shell: sudo mkdir $HOME/fio
+
+ - name: making results temporary directory
+ shell: sudo mkdir $HOME/qtip_result
+
+ - include: ../common/sys_proxy_pbook.yaml
+
+ - include: ../common/sys_info_pbook.yaml
+ vars:
+ network: false
+
+ - name: Installing fio dependencies when CentOS
+ shell: sudo yum install wget gcc libaio-devel -y
+ when: ansible_os_family == "RedHat"
+
+ - name: Installing fio dependencies when Ubuntu
+ shell: sudo apt-get install wget gcc libaio-dev -y
+ when: ansible_os_family == "Debian"
+
+ - name: Fetching fio
+ shell: cd $HOME/fio/ && wget http://freecode.com/urls/3aa21b8c106cab742bf1f20d60629e3f -O fio.tar.gz
+ - name: Untar fio
+ shell: cd $HOME/fio/ && sudo tar -zxvf fio.tar.gz
+ - name: configure
+ shell: cd $HOME/fio/fio-2.1.10 && sudo ./configure && sudo make
+
+ - name: Fetching fio job
+ copy: src=../etc/test_job dest={{home_dir.stdout}}/fio/fio-2.1.10/fio_test_job
+
+ - name: Benchmarking block storage through fio
+ shell: cd $HOME/fio/fio-2.1.10 && sudo ./fio --output-format=json --output=$HOME/qtip_result/fio_result.json fio_test_job
+
+ - name: Fetching result transformation script
+ copy: src={{workingdir}}/utils/transform/fio_transform.py dest={{home_dir.stdout}}/qtip_result
+
+ - name: Transforming result
+ shell: cd $HOME/qtip_result && sudo python fio_transform.py
+
+ - name: copy report formation script
+ copy: src={{workingdir}}/utils/transform/final_report.py dest={{home_dir.stdout}}/qtip_result
+
+ - name: consolidating report
+ shell: cd $HOME/qtip_result && sudo python final_report.py FIO {{fname}}
+
+ - name: registering files
+ shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
+ register: files_to_copy
+
+ - name: copy results
+ fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/fio/fio_temp
+ with_items: "{{files_to_copy.stdout_lines}}"
+
+ - name: registering log files
+ shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
+ register: copy_log_results
+
+ - name: copying log results
+ fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/fio/fio_temp
+ with_items: "{{copy_log_results.stdout_lines}}"
+
+ - hosts: localhost
+ connection: local
+ gather_facts: no
+
+ tasks:
+ - name: extracting_json
+ shell: ( find {{workingdir}}/{{Dest_dir}}/fio/fio_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/fio/)
+
+ - name: making_logs_folder
+ shell: mkdir -p {{workingdir}}/{{Dest_dir}}/fio/logs
+
+ - name: extracting_log
+ shell: ( find {{workingdir}}/{{Dest_dir}}/fio/fio_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/fio/logs)
+
+ - name: removing fio_log
+ shell: rm -rf {{workingdir}}/{{Dest_dir}}/fio/fio_temp
+
diff --git a/benchmarks/perftest/iperf.yaml b/benchmarks/perftest/iperf.yaml
new file mode 100644
index 00000000..af5b836a
--- /dev/null
+++ b/benchmarks/perftest/iperf.yaml
@@ -0,0 +1,161 @@
+ - hosts: localhost
+ connection: local
+ gather_facts: no
+
+ tasks:
+ - name: getting directory
+ shell: sudo echo $PWD
+ register: qtip_dir
+
+ - name: making Iperf directory
+ file: path={{workingdir}}/{{Dest_dir}}/iperf state=directory
+
+ - name: making temporary iperf directory
+ file: path={{workingdir}}/{{Dest_dir}}/iperf/iperf_temp state=directory
+
+
+ - hosts: "{{role}}"
+ become: yes
+ remote_user: "{{username}}"
+
+ tasks:
+ - name: Rolename
+ set_fact:
+ rolename: "{{role}}"
+ when: role is defined
+
+ - name: installertype
+ set_fact:
+ installertype: "{{installer}}"
+
+ - name: Get Hostname
+ shell: echo $HOSTNAME
+ register: hostID
+
+ - name: echo
+ shell: echo index_var
+
+ - name: checking home directory
+ shell: echo $HOME
+ register: home_dir
+
+ - name: cleaning
+ shell: sudo rm -rf $HOME/iperf
+
+ - name: cleaning previous results
+ shell: sudo rm -rf $HOME/qtip_result
+
+ - name: making Iperf temporary directory
+ shell: sudo mkdir $HOME/iperf
+
+ - name: making results temporary directory
+ shell: sudo mkdir $HOME/qtip_result
+
+ - include: ../common/sys_proxy_pbook.yaml
+
+ - include: ../common/sys_info_pbook.yaml
+ vars:
+ network: true
+
+ - name: Installing Epel-release when CentOS
+ shell: sudo yum install epel-release -y
+ when: ansible_os_family == "RedHat"
+
+ - name: Allow iperf server port in iptables input rules
+ shell: iptables -A INPUT -p tcp --dport {{iperf_port}} -j ACCEPT
+ vars:
+ iperf_port: 5201
+ ignore_errors: yes
+ when: rolename == "1-server" and installertype == 'fuel'
+
+ - name: Installing IPERF when Ubuntu
+ shell: sudo apt-get install iperf3 -y
+ when: ansible_os_family == "Debian"
+
+ - name: Installing Iperf3
+ shell: sudo yum install iperf3 -y
+ when: ansible_os_family == "RedHat"
+
+ - name: Running iperf on server
+ shell: iperf3 -s
+ async: 400
+ poll: 0
+ when: rolename == "1-server"
+
+ - name: Running Iperf on Host
+ shell: iperf3 --time {{duration}} -b 0 G -c {{ip1}} -J -O10 >> {{home_dir.stdout}}/qtip_result/iperf_raw.json
+ ignore_errors: yes
+ with_items:
+ - "{{ip1}}"
+ when: rolename == "2-host" and "{{privateip1}}" == "NONE"
+
+ - name: Running Iperf on Host
+ shell: iperf3 --time {{duration}} -b 0 G -c {{privateip1}} -J -O10 >> {{home_dir.stdout}}/qtip_result/iperf_raw.json
+ ignore_errors: yes
+ with_items:
+ - "{{ip1}}"
+ when: rolename == "2-host" and "{{privateip1}}" != "NONE"
+
+ - name: Fetching result transformation script
+ copy: src={{workingdir}}/utils/transform/iperf_transform.py dest={{home_dir.stdout}}/qtip_result
+ - name: Transforming result
+
+ shell: cd $HOME/qtip_result && sudo python iperf_transform.py
+ when: rolename =="2-host" and "{{ip2}}" == ''
+
+ - name: copy report formation script
+ copy: src={{workingdir}}/utils/transform/final_report.py dest={{home_dir.stdout}}/qtip_result
+ when: rolename =="2-host" and "{{ip2}}" == ''
+
+ - name: consolidating report
+ shell: cd $HOME/qtip_result && sudo python final_report.py IPERF {{fname}}
+ when: rolename =="2-host" and "{{ip2}}" == ''
+
+ - name: Files to Copy
+ shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
+ register: files_to_copy
+ when: rolename =="2-host" and "{{ip2}}" == ''
+
+ - name: copy results
+ fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/iperf/iperf_temp
+ with_items: "{{files_to_copy.stdout_lines}}"
+ when: rolename =="2-host" and "{{ip2}}" == ''
+
+ - name: registering log files
+ shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
+ register: copy_log_results
+ when: rolename =="2-host" and "{{ip2}}" == ''
+
+ - name: copying log results
+ fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/iperf/iperf_temp
+ with_items: "{{copy_log_results.stdout_lines}}"
+ when: rolename =="2-host" and "{{ip2}}" == ''
+
+
+ - hosts: localhost
+ connection: local
+ gather_facts: no
+
+ tasks:
+ - name: Rolename
+ set_fact:
+ rolename: "{{role}}"
+ when: role is defined
+
+ - name: extracting_json
+ shell: ( find {{workingdir}}/{{Dest_dir}}/iperf/iperf_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/iperf/)
+ when: rolename == "2-host"
+
+ - name: making_logs_folder
+ shell: mkdir -p {{workingdir}}/{{Dest_dir}}/iperf/logs
+
+ - name: extracting_log
+ shell: ( find {{workingdir}}/{{Dest_dir}}/iperf/iperf_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/iperf/logs)
+ when: rolename == "2-host"
+
+ - name: removing iperf_raw file
+ shell: rm -rf {{workingdir}}/{{Dest_dir}}/iperf/iperf_raw.json
+ when: rolename == "2-host"
+
+ - name: removing iperf_temp
+ shell: rm -rf {{workingdir}}/{{Dest_dir}}/iperf/iperf_temp
diff --git a/benchmarks/perftest/ramspeed.yaml b/benchmarks/perftest/ramspeed.yaml
new file mode 100644
index 00000000..f62c6056
--- /dev/null
+++ b/benchmarks/perftest/ramspeed.yaml
@@ -0,0 +1,109 @@
+ - hosts: localhost
+ connection: local
+ gather_facts: no
+
+ tasks:
+ - name: making ramspeed directory
+ file: path={{workingdir}}/{{Dest_dir}}/ramspeed state=directory
+
+ - name: making temporary ramspeed directory
+ file: path={{workingdir}}/{{Dest_dir}}/ramspeed/ramspeed_temp state=directory
+
+
+ - hosts: "{{role}}"
+ become: yes
+ remote_user: "{{username}}"
+
+ tasks:
+ - name: checking home directory
+ shell: echo $HOME
+ register: home_dir
+
+ - name: cleaning
+ shell: sudo rm -rf $HOME/ramspeed
+
+ - name: cleaning previous results
+ shell: sudo rm -rf $HOME/qtip_result
+
+ - name: making ramspeed temporary directory
+ shell: sudo mkdir $HOME/ramspeed
+
+ - name: making results temporary directory
+ shell: sudo mkdir $HOME/qtip_result
+
+ - include: ../common/sys_proxy_pbook.yaml
+
+ - include: ../common/sys_info_pbook.yaml
+ vars:
+ network: false
+
+ - name: Installing RAM_Speed dependencies when CentOS
+ shell: sudo yum install wget gcc -y
+ when: ansible_os_family == "RedHat"
+
+ - name: Installing RAM_Speed dependencies when Ubuntu
+ shell: sudo apt-get install wget gcc -y
+ when: ansible_os_family == "Debian"
+
+ - name: make dummy file
+ shell: sudo touch $HOME/ramspeed/ramspeed.tar.gz
+
+ - name: Fetching RAM_Speed
+ shell: cd $HOME/ramspeed/ && sudo wget -O ramspeed.tar.gz https://docs.google.com/uc?id=0B92Bp5LZTM7gRFctalZLMktTNDQ
+
+ - name: Untar RAM_Speed
+ shell: cd $HOME/ramspeed/ && sudo tar -zxvf ramspeed.tar.gz
+
+ - name: configure
+ shell: cd $HOME/ramspeed/ramsmp-3.5.0 && ./build.sh
+
+ - name: Benchmarking IntMem Bandwidth
+ shell: cd $HOME/ramspeed/ramsmp-3.5.0 && ./ramsmp -b 3 -l 5 -p 1 >> $HOME/qtip_result/Intmem
+
+ - name: Benchmarking FloatMem Bandwidth
+ shell: cd $HOME/ramspeed/ramsmp-3.5.0 && ./ramsmp -b 6 -l 5 -p 1 >> $HOME/qtip_result/Floatmem
+
+ - name: Fetching result transformation script
+ copy: src={{workingdir}}/utils/transform/ramspeed_transform.py dest={{home_dir.stdout}}/qtip_result
+
+ - name: Transforming result
+ shell: cd $HOME/qtip_result && sudo python ramspeed_transform.py
+
+ - name: copy report formation script
+ copy: src={{workingdir}}/utils/transform/final_report.py dest={{home_dir.stdout}}/qtip_result
+
+ - name: consolidating report
+ shell: cd $HOME/qtip_result && sudo python final_report.py RamSpeed {{fname}}
+
+ - name: registering files
+ shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
+ register: files_to_copy
+
+ - name: copy results
+ fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/ramspeed/ramspeed_temp
+ with_items: "{{files_to_copy.stdout_lines}}"
+
+ - name: registering log files
+ shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
+ register: copy_log_results
+
+ - name: copying log results
+ fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/ramspeed/ramspeed_temp
+ with_items: "{{copy_log_results.stdout_lines}}"
+
+ - hosts: localhost
+ connection: local
+ gather_facts: no
+
+ tasks:
+ - name: extracting_json
+ shell: ( find {{workingdir}}/{{Dest_dir}}/ramspeed/ramspeed_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/ramspeed/)
+
+ - name: making_logs_folder
+ shell: mkdir -p {{workingdir}}/{{Dest_dir}}/ramspeed/logs
+
+ - name: extracting_log
+ shell: ( find {{workingdir}}/{{Dest_dir}}/ramspeed/ramspeed_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/ramspeed/logs)
+
+ - name: removing ramspeed_log
+ shell: rm -rf {{workingdir}}/{{Dest_dir}}/ramspeed/ramspeed_temp
diff --git a/benchmarks/perftest/ssl.yaml b/benchmarks/perftest/ssl.yaml
new file mode 100644
index 00000000..4b5f036a
--- /dev/null
+++ b/benchmarks/perftest/ssl.yaml
@@ -0,0 +1,113 @@
+ - hosts: localhost
+ connection: local
+ gather_facts: no
+
+ tasks:
+ - name: making ssl directory
+ file: path={{workingdir}}/{{Dest_dir}}/ssl state=directory
+
+ - name: making temporary ssl directory
+ file: path={{workingdir}}/{{Dest_dir}}/ssl/ssl_temp state=directory
+
+ - hosts: "{{role}}"
+ become: yes
+ remote_user: "{{username}}"
+
+ tasks:
+ - name: checking home directory
+ shell: sudo echo $HOME
+ register: home_dir
+
+ - name: cleaning
+ shell: sudo rm -rf $HOME/Open_SSL
+
+ - name: cleaning previous results
+ shell: sudo rm -rf $HOME/qtip_result
+
+ - name: making OpenSSL temporary directory
+ shell: sudo mkdir $HOME/Open_SSL
+
+ - name: making results temporary directory
+ shell: sudo mkdir $HOME/qtip_result
+
+ - include: ../common/sys_proxy_pbook.yaml
+
+ - include: ../common/sys_info_pbook.yaml
+ vars:
+ network: false
+
+ - name: Installing OpenSSL dependencies when CentOS
+ shell: sudo yum install git wget gcc patch perl-Time-HiRes autoconf automake libpcap-devel libtool -y
+ when: ansible_os_family == "RedHat"
+
+ - name: Installing OpenSSL dependencies when Ubuntu
+ shell: sudo apt-get install git gcc wget perl autoconf automake libpcap-dev libtool -y
+ when: ansible_os_family == "Debian"
+
+ - name: Fetching OpenSSL
+ shell: cd $HOME/Open_SSL/ && sudo wget http://artifacts.opnfv.org/qtip/utilities/openssl-1.0.2f.tar.gz
+
+ - name: Untar OpenSSL
+ shell: cd $HOME/Open_SSL/ && sudo tar -zxvf openssl-1.0.2f.tar.gz
+ - name: configure
+ shell: cd $HOME/Open_SSL/openssl-1.0.2f && sudo ./config
+
+ - name: make
+ shell: cd $HOME/Open_SSL/openssl-1.0.2f && sudo make
+
+ - name: make install
+ shell: cd $HOME/Open_SSL/openssl-1.0.2f && sudo make install
+
+ - name: Benchmarking RSA signatures
+ shell: cd $HOME/Open_SSL/openssl-1.0.2f/apps && sudo ./openssl speed rsa >> $HOME/qtip_result/RSA_dump
+
+ - name: Benchmarking AES-128-cbc cipher encryption throughput
+ shell: cd $HOME/Open_SSL/openssl-1.0.2f/apps && sudo ./openssl speed -evp aes-128-cbc >> $HOME/qtip_result/AES-128-CBC_dump
+
+ - name: Fetching result transformation script
+ copy: src={{workingdir}}/utils/transform/ssl_transform.py dest={{home_dir.stdout}}/qtip_result
+
+ - name: Transforming result
+ shell: cd $HOME/qtip_result && python ssl_transform.py
+
+ - name: copy report formation script
+ copy: src={{workingdir}}/utils/transform/final_report.py dest={{home_dir.stdout}}/qtip_result
+
+ - name: consolidating report
+ shell: cd $HOME/qtip_result && python final_report.py SSL {{fname}}
+
+ - name: registering files
+ shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
+ register: files_to_copy
+
+ - name: copy results
+ fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/ssl/ssl_temp
+ with_items: "{{files_to_copy.stdout_lines}}"
+
+ - name: registering log files
+ shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
+ register: copy_log_results
+
+ - name: copying log results
+ fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/ssl/ssl_temp
+ with_items: "{{copy_log_results.stdout_lines}}"
+
+ - hosts: localhost
+ connection: local
+ gather_facts: no
+
+ tasks:
+ - name: echo
+ shell: echo $PWD
+
+ - name: extracting_json
+ shell: ( find {{workingdir}}/{{Dest_dir}}/ssl/ssl_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/ssl/)
+
+ - name: making_logs_folder
+ shell: mkdir -p {{workingdir}}/{{Dest_dir}}/ssl/logs
+
+ - name: extracting_log
+ shell: ( find {{workingdir}}/{{Dest_dir}}/ssl/ssl_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/ssl/logs)
+
+ - name: removing ssl_temp
+ shell: rm -rf {{workingdir}}/{{Dest_dir}}/ssl/ssl_temp
diff --git a/benchmarks/perftest/whetstone.yaml b/benchmarks/perftest/whetstone.yaml
new file mode 100644
index 00000000..32a22fe3
--- /dev/null
+++ b/benchmarks/perftest/whetstone.yaml
@@ -0,0 +1,111 @@
+ - hosts: localhost
+ connection: local
+ gather_facts: no
+
+ tasks:
+ - name: making whetstone directory
+ file: path={{workingdir}}/{{Dest_dir}}/whetstone state=directory
+
+ - name: making temporary whetstone directory
+ file: path={{workingdir}}/{{Dest_dir}}/whetstone/whetstone_temp state=directory
+
+ - hosts: "{{role}}"
+ become: yes
+ remote_user: "{{username}}"
+
+ tasks:
+ - name: storing_home
+ shell: echo $HOME
+ register: home_dir
+
+ - name: cleaning tempT
+ shell: sudo rm -rf $HOME/tempT
+
+ - name: cleaning_qtip_result
+ shell: sudo rm -rf $HOME/qtip_result
+
+ - name: make directory
+ shell: sudo mkdir $HOME/qtip_result
+
+ - include: ../common/sys_proxy_pbook.yaml
+
+ - include: ../common/sys_info_pbook.yaml
+ vars:
+ network: false
+
+ - name: Installing UnixBench dependencies if CentOS
+ shell: sudo yum install git gcc patch perl-Time-HiRes -y
+ when: ansible_os_family == "RedHat"
+
+ - name: Installing UnixBench dependencies if Ubuntu
+ shell: sudo apt-get install git gcc patch perl -y
+ when: ansible_os_family == "Debian"
+
+ - include: ../common/git_proxy_pbook.yaml
+
+ - name: Clone unixbench
+ git: repo=https://github.com/kdlucas/byte-unixbench.git
+ dest=$HOME/tempT
+
+ - name: make
+ shell: sudo make --directory $HOME/tempT/UnixBench/
+
+ - name: Run Whetstone
+ shell: cd $HOME/tempT/UnixBench/&&./Run -v whetstone
+
+ - name: collecting and transforming result script copy
+ copy: src={{workingdir}}/utils/transform/ubench_transform.py dest={{home_dir.stdout}}/qtip_result/
+
+ - name: transforming result
+ shell: cd $HOME/qtip_result && sudo python ubench_transform.py
+
+ - name: copying consolidated report script
+ copy: src={{workingdir}}/utils/transform/final_report.py dest={{home_dir.stdout}}/qtip_result/
+
+ - name: making consolidated report
+ shell: cd $HOME/qtip_result && sudo python final_report.py Whetstone {{fname}}
+
+ - name: making directory
+ file: path={{home_dir.stdout}}/qtip_result/log state=directory
+
+ - name: copying result to temp directory
+ shell: sudo cp -r $HOME/tempT/UnixBench/results/* $HOME/qtip_result/log
+
+ - name: registering files
+ shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
+ register: files_to_copy
+
+ - name: copy results
+ fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/whetstone/whetstone_temp
+ with_items: "{{files_to_copy.stdout_lines}}"
+
+ - name: registering log files
+ shell: (cd $HOME/qtip_result/log/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
+ register: copy_log_results
+
+ - name: copying log results
+ fetch: src={{home_dir.stdout}}/qtip_result/log/{{item}} dest={{workingdir}}/{{Dest_dir}}/whetstone/whetstone_temp
+ with_items: "{{copy_log_results.stdout_lines}}"
+
+ - name: cleaning tempT
+ shell: sudo rm -rf $HOME/tempT
+
+ - name: cleaning_qtip_result
+ shell: sudo rm -rf $HOME/qtip_result
+
+ - hosts: localhost
+ connection: local
+ gather_facts: no
+
+ tasks:
+ - name: extracting_json
+ shell: ( find {{workingdir}}/{{Dest_dir}}/whetstone/whetstone_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/whetstone/)
+
+ - name: making_logs_folder
+ shell: mkdir -p {{workingdir}}/{{Dest_dir}}/whetstone/logs
+
+ - name: extracting_log
+ shell: ( find {{workingdir}}/{{Dest_dir}}/whetstone/whetstone_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/whetstone/logs)
+
+ - name: removing whetstone_temp
+ shell: rm -rf {{workingdir}}/{{Dest_dir}}/whetstone/whetstone_temp
diff --git a/utils/transform/__init__.py b/utils/transform/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/utils/transform/__init__.py
diff --git a/utils/transform/dpi_transform.py b/utils/transform/dpi_transform.py
new file mode 100644
index 00000000..ee29d8e2
--- /dev/null
+++ b/utils/transform/dpi_transform.py
@@ -0,0 +1,47 @@
+import os
+import pickle
+import datetime
+
+sum_dpi_pps = float(0)
+sum_dpi_bps = float(0)
+
+for x in range(1, 11):
+ dpi_result_pps = float(
+ os.popen(
+ "cat $HOME/qtip_result/dpi_dump.txt | grep 'nDPI throughput:' | awk 'NR=='" +
+ str(x) +
+ " | awk '{print $3}'").read().lstrip())
+ dpi_result_bps = float(
+ os.popen(
+ "cat $HOME/qtip_result/dpi_dump.txt | grep 'nDPI throughput:' | awk 'NR=='" +
+ str(x) +
+ " | awk '{print $7}'").read().rstrip())
+
+ if (dpi_result_pps > 100):
+ dpi_result_pps = dpi_result_pps / 1000
+
+ if (dpi_result_bps > 100):
+ dpi_result_bps = dpi_result_bps / 1000
+
+ sum_dpi_pps += dpi_result_pps
+ sum_dpi_bps += dpi_result_bps
+
+dpi_result_pps = sum_dpi_pps / 10
+dpi_result_bps = sum_dpi_bps / 10
+
+host = os.popen("hostname").read().rstrip()
+log_time_stamp = str(datetime.datetime.utcnow().isoformat())
+
+os.popen(
+ "cat $HOME/qtip_result/dpi_dump.txt > $HOME/qtip_result/" +
+ host +
+ "-" +
+ log_time_stamp +
+ ".log")
+
+home_dir = str(os.popen("echo $HOME").read().rstrip())
+host = os.popen("echo $HOSTNAME").read().rstrip()
+result = {'pps': round(dpi_result_pps, 3),
+ 'bps': round(dpi_result_bps, 3)}
+with open('./result_temp', 'w+') as result_file:
+ pickle.dump(result, result_file)
diff --git a/utils/transform/final_report.py b/utils/transform/final_report.py
new file mode 100644
index 00000000..274742d4
--- /dev/null
+++ b/utils/transform/final_report.py
@@ -0,0 +1,24 @@
+import pickle
+import json
+import datetime
+import os
+import sys
+
+home_dir = str((os.popen("echo $HOME").read().rstrip()))
+
+with open('./sys_info_temp', 'r') as sys_info_f:
+ sys_info_dict = pickle.load(sys_info_f)
+with open('./result_temp', 'r') as result_f:
+ result_dict = pickle.load(result_f)
+
+host_name = (os.popen("hostname").read().rstrip())
+benchmark_name = str(sys.argv[1])
+testcase_name = str(sys.argv[2])
+report_time_stamp = str(datetime.datetime.utcnow().isoformat())
+final_dict = {"name": testcase_name,
+ "time": report_time_stamp,
+ "system_information": sys_info_dict,
+ "details": result_dict}
+
+with open('./' + host_name + '-' + report_time_stamp + '.json', 'w+') as result_json:
+ json.dump(final_dict, result_json, indent=4, sort_keys=True)
diff --git a/utils/transform/fio_transform.py b/utils/transform/fio_transform.py
new file mode 100755
index 00000000..5ecac823
--- /dev/null
+++ b/utils/transform/fio_transform.py
@@ -0,0 +1,29 @@
+import json
+import pickle
+import os
+import datetime
+
+
+def get_fio_job_result(fio_job_data):
+ return {'read': {'io_bytes': fio_job_data["read"]["io_bytes"],
+ 'io_ps': fio_job_data["read"]["iops"],
+ 'io_runtime_millisec': fio_job_data["read"]["runtime"],
+ 'mean_io_latenchy_microsec': fio_job_data["read"]["lat"]["mean"]},
+ 'write': {'io_bytes': fio_job_data["write"]["io_bytes"],
+ 'io_ps': fio_job_data["write"]["iops"],
+ 'io_runtime_millisec': fio_job_data["write"]["runtime"],
+ 'mean_io_latenchy_microsec': fio_job_data["write"]["lat"]["mean"]}}
+
+
+with open("fio_result.json") as fio_raw:
+ fio_data = json.load(fio_raw)
+
+fio_result_dict = {}
+for x, result in enumerate(map(get_fio_job_result, fio_data["jobs"])):
+ fio_result_dict['job_{0}'.format(x)] = result
+
+host_name = (os.popen("hostname").read().rstrip())
+report_time = str(datetime.datetime.utcnow().isoformat())
+os.system("mv fio_result.json " + str(host_name) + "-" + report_time + ".log")
+with open('./result_temp', 'w+') as out_fio_result:
+ pickle.dump(fio_result_dict, out_fio_result)
diff --git a/utils/transform/iperf_transform.py b/utils/transform/iperf_transform.py
new file mode 100644
index 00000000..b52e4634
--- /dev/null
+++ b/utils/transform/iperf_transform.py
@@ -0,0 +1,27 @@
+import json
+import datetime
+import pickle
+with open('iperf_raw.json', 'r') as ifile:
+ raw_iperf_data = json.loads(ifile.read().rstrip())
+
+bits_sent = raw_iperf_data['end']['sum_sent']['bits_per_second']
+bits_received = raw_iperf_data['end']['sum_received']['bits_per_second']
+total_byte_sent = raw_iperf_data['end']['sum_sent']['bytes']
+total_byte_received = raw_iperf_data['end']['sum_received']['bytes']
+cpu_host_total_percent = raw_iperf_data['end']['cpu_utilization_percent']['host_total']
+cpu_remote_total_percent = raw_iperf_data['end']['cpu_utilization_percent']['remote_total']
+
+time_stamp = str(datetime.datetime.utcnow().isoformat())
+
+result = {'version': raw_iperf_data['start']['version'],
+ 'bandwidth': {'sender_throughput': bits_sent,
+ 'received_throughput': bits_received},
+ 'cpu': {'cpu_host': cpu_host_total_percent,
+ 'cpu_remote': cpu_remote_total_percent}
+ }
+
+with open('iperf_raw-' + time_stamp + '.log', 'w+') as ofile:
+ ofile.write(json.dumps(raw_iperf_data))
+
+with open('./result_temp', 'w+') as result_file:
+ pickle.dump(result, result_file)
diff --git a/utils/transform/ramspeed_transform.py b/utils/transform/ramspeed_transform.py
new file mode 100644
index 00000000..960f84fc
--- /dev/null
+++ b/utils/transform/ramspeed_transform.py
@@ -0,0 +1,41 @@
+import os
+import pickle
+import datetime
+
+intmem_copy = os.popen("cat Intmem | grep 'BatchRun Copy' | awk '{print $4}'").read().rstrip()
+intmem_scale = os.popen("cat Intmem | grep 'BatchRun Scale' | awk '{print $4}'").read().rstrip()
+intmem_add = os.popen("cat Intmem | grep 'BatchRun Add' | awk '{print $4}'").read().rstrip()
+intmem_triad = os.popen("cat Intmem | grep 'BatchRun Triad' | awk '{print $4}'").read().rstrip()
+intmem_average = os.popen("cat Intmem | grep 'BatchRun AVERAGE' | awk '{print $4}'").read().rstrip()
+
+print intmem_copy
+print intmem_average
+
+floatmem_copy = os.popen("cat Floatmem | grep 'BatchRun Copy' | awk '{print $4}'").read().rstrip()
+floatmem_scale = os.popen("cat Floatmem | grep 'BatchRun Scale' | awk '{print $4}'").read().rstrip()
+floatmem_add = os.popen("cat Floatmem | grep 'BatchRun Add' | awk '{print $4}'").read().rstrip()
+floatmem_triad = os.popen("cat Floatmem | grep 'BatchRun Triad' | awk '{print $4}'").read().rstrip()
+floatmem_average = os.popen("cat Floatmem | grep 'BatchRun AVERAGE' | awk '{print $4}'").read().rstrip()
+
+print floatmem_copy
+print floatmem_average
+
+hostname = os.popen("hostname").read().rstrip()
+time_stamp = str(datetime.datetime.utcnow().isoformat())
+
+os.system("mv Intmem " + hostname + "-" + time_stamp + ".log")
+os.system("cat Floatmem >> " + hostname + "-" + time_stamp + ".log")
+
+result = {"int_bandwidth": {"copy": intmem_copy,
+ "add": intmem_add,
+ "scale": intmem_scale,
+ "triad": intmem_triad,
+ "average": intmem_average},
+ "float_bandwidth": {"copy": floatmem_copy,
+ "add": floatmem_add,
+ "scale": floatmem_scale,
+ "triad": floatmem_triad,
+ "average": floatmem_average}}
+
+with open('./result_temp', 'w+') as result_file:
+ pickle.dump(result, result_file)
diff --git a/utils/transform/ssl_transform.py b/utils/transform/ssl_transform.py
new file mode 100644
index 00000000..de84d24b
--- /dev/null
+++ b/utils/transform/ssl_transform.py
@@ -0,0 +1,54 @@
+import os
+import pickle
+import datetime
+
+openssl_version = os.popen("cat RSA_dump | head -1").read().rstrip()
+rsa_512_sps = os.popen(
+ "cat RSA_dump | grep '512 bits ' | awk '{print $6}' ").read().rstrip()
+rsa_512_vps = os.popen(
+ "cat RSA_dump | grep '512 bits ' | awk '{print $7}' ").read().rstrip()
+rsa_1024_sps = os.popen(
+ "cat RSA_dump | grep '1024 bits ' | awk '{print $6}' ").read().rstrip()
+rsa_1024_vps = os.popen(
+ "cat RSA_dump | grep '1024 bits ' | awk '{print $7}' ").read().rstrip()
+rsa_2048_sps = os.popen(
+ "cat RSA_dump | grep '2048 bits ' | awk '{print $6}' ").read().rstrip()
+rsa_2048_vps = os.popen(
+ "cat RSA_dump | grep '2048 bits ' | awk '{print $7}' ").read().rstrip()
+rsa_4096_sps = os.popen(
+ "cat RSA_dump | grep '4096 bits ' | awk '{print $6}' ").read().rstrip()
+rsa_4096_vps = os.popen(
+ "cat RSA_dump | grep '4096 bits ' | awk '{print $7}' ").read().rstrip()
+
+aes_16B = os.popen(
+ "cat AES-128-CBC_dump | grep 'aes-128-cbc ' | awk '{print $2}' ").read().rstrip()
+aes_64B = os.popen(
+ "cat AES-128-CBC_dump | grep 'aes-128-cbc ' | awk '{print $3}' ").read().rstrip()
+aes_256B = os.popen(
+ "cat AES-128-CBC_dump | grep 'aes-128-cbc ' | awk '{print $4}' ").read().rstrip()
+aes_1024B = os.popen(
+ "cat AES-128-CBC_dump | grep 'aes-128-cbc ' | awk '{print $5}' ").read().rstrip()
+aes_8192B = os.popen(
+ "cat AES-128-CBC_dump | grep 'aes-128-cbc ' | awk '{print $6}' ").read().rstrip()
+
+hostname = os.popen("hostname").read().rstrip()
+time_stamp = str(datetime.datetime.utcnow().isoformat())
+
+os.system("mv RSA_dump " + hostname + "-" + time_stamp + ".log")
+os.system("cat AES-128-CBC_dump >> " + hostname + "-" + time_stamp + ".log")
+
+result = {"version": [openssl_version],
+ "rsa_sig": {"512_bits": rsa_512_sps,
+ "1024_bits": rsa_1024_sps,
+ "2048_bits": rsa_2048_sps,
+ "4096_bits": rsa_4096_sps,
+ "unit": "sig/sec"},
+ "aes_128_cbc": {"16B_block": aes_16B,
+ "64B_block": aes_64B,
+ "256B_block": aes_256B,
+ "1024B_block": aes_1024B,
+ "8192B_block": aes_8192B,
+ "unit": "B/sec"}}
+
+with open('./result_temp', 'w+') as result_file:
+ pickle.dump(result, result_file)
diff --git a/utils/transform/ubench_transform.py b/utils/transform/ubench_transform.py
new file mode 100644
index 00000000..ab5fe171
--- /dev/null
+++ b/utils/transform/ubench_transform.py
@@ -0,0 +1,32 @@
+import os
+import json
+import pickle
+
+total_cpu = os.popen(
+ "cat $HOME/tempT/UnixBench/results/* | grep 'of tests' | awk '{print $1;}' | awk 'NR==1'").read().rstrip()
+
+cpu_1 = os.popen(
+ "cat $HOME/tempT/UnixBench/results/* | grep 'of tests' | awk '{print $6;}' | awk 'NR==1'").read().rstrip()
+
+
+cpu_2 = os.popen(
+ "cat $HOME/tempT/UnixBench/results/* | grep 'of tests' | awk '{print $6;}' | awk 'NR==2'").read().rstrip()
+
+
+index_1 = os.popen(
+ "cat $HOME/tempT/UnixBench/results/* | grep 'Index Score (Partial Only) ' | awk '{print $7;}' | awk 'NR==1'").read().rstrip()
+index_2 = os.popen(
+ "cat $HOME/tempT/UnixBench/results/* | grep 'Index Score (Partial Only) ' | awk '{print $7;}' | awk 'NR==2'").read().rstrip()
+
+
+result = {"n_cpu": total_cpu,
+ "single": {"n_para_test": cpu_1,
+ "score": index_1},
+ "multi": {"n_para_test": cpu_2,
+ "score": index_2}
+ }
+
+with open('result_temp', 'w+') as result_file:
+ pickle.dump(result, result_file)
+print json.dumps(result, indent=4, sort_keys=True)
+# print result.items()