author    | Yujun Zhang <zhang.yujunz@zte.com.cn> | 2016-11-03 08:10:41 +0000
committer | Gerrit Code Review <gerrit@opnfv.org> | 2016-11-03 08:10:41 +0000
commit    | 403e8ead12863163573f45c1be07a48c05061f45 (patch)
tree      | 9f0398c9a8e3ebc397e3f30e88a083f09a04d0b5
parent    | 10fa387f3ad554a4c42c580b424f79c2171c64d3 (diff)
parent    | a9d5c798ad0da8494056181cc1b68f85f9b203c9 (diff)
Merge "Remove playbooks directory"
23 files changed, 1 insertion, 1484 deletions
diff --git a/benchmarks/playbooks/cachebench.yaml b/benchmarks/playbooks/cachebench.yaml
deleted file mode 100644
index ca9a709c..00000000
--- a/benchmarks/playbooks/cachebench.yaml
+++ /dev/null
@@ -1,49 +0,0 @@
- - hosts: "{{ roles }}"
-
-   tasks:
-     - name: clean
-       shell: rm -rf /usr/bin/phoronix-test-suite
-       shell: rm -rf /usr/share/doc/phoronix-test-suite/
-       shell: rm -rf /usr/share/phoronix-test-suite
-       shell: rm -rf /root/photmp/
-
-     - name: cleaning results
-       file: path=/root/results state=absent
-
-     - name: make direc
-       file: path=/root/photmp/ state=directory
-
-     - name: Fetch Phornonix
-       shell: cd /root/photmp/ && wget https://www.dropbox.com/s/5kks513ozxy7vvs/phoronix-suite.tar.gz
-
-     - name: unarchive Phoronix
-       shell: cd /root/photmp/ && tar -zxvf phoronix-suite.tar.gz
-
-     - name: Install Phornonix Dependencies
-       shell: yum install php php-xml php-client php-process -y
-
-     - name: Install Phornonix
-       shell: cd /root/photmp/phoronix-test-suite-master/&&./install-sh
-
-     - name: batchSetup
-       shell: phoronix-test-suite batch-setup
-
-     - name: install
-       shell: phoronix-test-suite batch-install cachebench
-
-     - name: run
-       shell: phoronix-test-suite batch-run cachebench
-
-     - name: making directory
-       file: path=/root/results state=directory
-
-     - name: copying result to temp directory
-       shell: cp -r /var/lib/phoronix-test-suite/test-results/* /root/results/
-
-     - name: registering files
-       shell: (cd /root/results/; find . -maxdepth 1 -type f) | cut -d'/' -f2
-       register: files_to_copy
-
-     - name: copy results
-       fetch: src=/root/results/{{item}} dest={{workingdir}}/{{Dest_dir}}/cachebench
-       with_items: "{{files_to_copy.stdout_lines}}"
diff --git a/benchmarks/playbooks/dhrystone.yaml b/benchmarks/playbooks/dhrystone.yaml
deleted file mode 100644
index 8fe6a490..00000000
--- a/benchmarks/playbooks/dhrystone.yaml
+++ /dev/null
@@ -1,111 +0,0 @@
- - hosts: localhost
-   connection: local
-   gather_facts: no
-
-   tasks:
-     - name: making dhrystone directory
-       file: path={{workingdir}}/{{Dest_dir}}/dhrystone state=directory
-
-     - name: making temporary dhrystone directory
-       file: path={{workingdir}}/{{Dest_dir}}/dhrystone/dhrystone_temp state=directory
-
- - hosts: "{{role}}"
-   become: yes
-   remote_user: "{{username}}"
-
-   tasks:
-     - name: checking home directory
-       shell: echo $HOME
-       register: home_dir
-
-     - name: cleaning tempT
-       shell: sudo rm -rf $HOME/tempT
-
-     - name: cleaning qtip_result
-       shell: sudo rm -rf $HOME/qtip_result
-
-     - name: make directory
-       shell: sudo mkdir $HOME/qtip_result
-
-     - include: ./sys_proxy_pbook.yaml
-
-     - include: ./sys_info_pbook.yaml
-       vars:
-         network: false
-
-     - name: Installing UnixBench dependencies if CentOS
-       shell: sudo yum install git gcc patch perl-Time-HiRes -y
-       when: ansible_os_family == "RedHat"
-
-     - name: Installing UnixBench dependencies if Ubuntu
-       shell: sudo apt-get install git gcc patch perl -y
-       when: ansible_os_family == "Debian"
-
-     - include: ./git_proxy_pbook.yaml
-
-     - name: Clone unixbench
-       git: repo=https://github.com/kdlucas/byte-unixbench.git
-            dest=$HOME/tempT
-
-     - name: make
-       shell: sudo make --directory $HOME/tempT/UnixBench/
-
-     - name: Run dhrystone
-       shell: cd $HOME/tempT/UnixBench/&& sudo ./Run -v dhrystone
-
-     - name: collecting and transforming result script copy
-       copy: src=./result_transform/ubench_transform.py dest={{home_dir.stdout}}/qtip_result/
-
-     - name: transforming result
-       shell: cd $HOME/qtip_result/ && sudo python ubench_transform.py
-
-     - name: copying consolidated report script
-       copy: src=./result_transform/final_report.py dest={{home_dir.stdout}}/qtip_result/
-
-     - name: making consolidated report
-       shell: cd $HOME/qtip_result && sudo python final_report.py Dhrystone {{fname}}
-
-     - name: making directory
-       file: path={{home_dir.stdout}}/qtip_result/log state=directory
-
-     - name: copying result to temp directory
-       shell: sudo cp -r $HOME/tempT/UnixBench/results/* $HOME/qtip_result/log/
-
-     - name: registering files
-       shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
-       register: files_to_copy
-
-     - name: copy results
-       fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/dhrystone/dhrystone_temp
-       with_items: "{{files_to_copy.stdout_lines}}"
-
-     - name: registering log files
-       shell: (cd $HOME/qtip_result/log/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
-       register: copy_log_results
-
-     - name: copying log results
-       fetch: src={{home_dir.stdout}}/qtip_result/log/{{item}} dest={{workingdir}}/{{Dest_dir}}/dhrystone/dhrystone_temp
-       with_items: "{{copy_log_results.stdout_lines}}"
-
-     - name: cleaning tempT
-       shell: sudo rm -rf $HOME/tempT
-
-     - name: cleaning_qtip_result
-       shell: sudo rm -rf $HOME/qtip_result
-
- - hosts: localhost
-   connection: local
-   gather_facts: no
-
-   tasks:
-     - name: extracting_json
-       shell: ( find {{workingdir}}/{{Dest_dir}}/dhrystone/dhrystone_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/dhrystone/)
-
-     - name: making_logs_folder
-       shell: mkdir -p {{workingdir}}/{{Dest_dir}}/dhrystone/logs
-
-     - name: extracting_log
-       shell: ( find {{workingdir}}/{{Dest_dir}}/dhrystone/dhrystone_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/dhrystone/logs)
-
-     - name: removing dhrystone_temp
-       shell: rm -rf {{workingdir}}/{{Dest_dir}}/dhrystone/dhrystone_temp
diff --git a/benchmarks/playbooks/dpi.yaml b/benchmarks/playbooks/dpi.yaml
deleted file mode 100644
index 46e065a1..00000000
--- a/benchmarks/playbooks/dpi.yaml
+++ /dev/null
@@ -1,120 +0,0 @@
- - hosts: localhost
-   connection: local
-   gather_facts: no
-
-   tasks:
-     - name: making dpi directory
-       file: path={{workingdir}}/{{Dest_dir}}/dpi state=directory
-
-     - name: making temporary whetstone directory
-       file: path={{workingdir}}/{{Dest_dir}}/dpi/dpi_temp state=directory
-
- - hosts: "{{role}}"
-   become: yes
-   remote_user: "{{username}}"
-
-   tasks:
-     - name: echo
-       shell: echo $USER
-
-     - name: checking home directory
-       shell: echo $HOME
-       register: home_dir
-
-     - name: cleaning
-       shell: sudo rm -rf $HOME/tempD
-
-     - name: cleaning previous results
-       shell: sudo rm -rf $HOME/qtip_result
-
-     - name: make qtip_result
-       shell: sudo mkdir $HOME/qtip_result
-
-     - include: ./sys_proxy_pbook.yaml
-
-     - include: ./sys_info_pbook.yaml
-       vars:
-         network: false
-
-     - name: Installing nDPI dependencies if CentOS
-       shell: sudo yum install git gcc patch perl-Time-HiRes autofconf automake libpcap-devel libtool -y
-       when: ansible_os_family == "RedHat"
-
-     - name: Installing nDPI dependcies if Ubuntu
-       shell: sudo apt-get install git gcc patch autoconf automake libpcap-dev libtool -y
-       when: ansible_os_family == "Debian"
-
-     - name: making nDPI temporary directory
-       shell: sudo mkdir $HOME/tempD
-
-     - include: ./git_proxy_pbook.yaml
-
-     - name: Clone nDPI
-       git: repo=https://github.com/ntop/nDPI.git
-            dest=$HOME/tempD/nDPI
-
-     - name: autogen
-       shell: cd $HOME/tempD/nDPI && sudo ./autogen.sh
-
-     - name: configure
-       shell: cd $HOME/tempD/nDPI && sudo ./configure
-
-     - name: make
-       shell: cd $HOME/tempD/nDPI && sudo make
-
-     - name: Fetching Test_pcap file
-       shell: cd $HOME/tempD/nDPI/example && wget http://build.opnfv.org/artifacts.opnfv.org/qtip/utilities/test.pcap
-
-     - name: fetch Averaging script
-       copy: src=./result_transform/dpi/dpi_average.sh dest={{home_dir.stdout}}/tempD/nDPI/example mode=777
-
-     - name: Run nDPI benchmark
-       shell: cd $HOME/tempD/nDPI/example && sudo ./dpi_average.sh
-
-     - name: copy result to temp_direc
-       shell: sudo cp $HOME/tempD/nDPI/example/dpi_dump.txt $HOME/qtip_result
-
-     - name: fetch dpi result transform script
-       copy: src=./result_transform/dpi/dpi_transform.py dest={{home_dir.stdout}}/qtip_result
-
-     - name: Transforming results
-       shell: cd $HOME/qtip_result && sudo python dpi_transform.py
-
-     - name: copy report formation script
-       copy: src=./result_transform/final_report.py dest={{home_dir.stdout}}/qtip_result
-
-     - name: consolidating report
-       shell: cd $HOME/qtip_result && sudo python final_report.py DPI {{fname}}
-
-     - name: registering files
-       shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
-       register: files_to_copy
-
-     - name: copy results
-       fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/dpi/dpi_temp
-       with_items: "{{files_to_copy.stdout_lines}}"
-
-     - name: registering log files
-       shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
-       register: copy_log_results
-
-     - name: copying log results
-       fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/dpi/dpi_temp
-       with_items: "{{copy_log_results.stdout_lines}}"
-
- - hosts: localhost
-   connection: local
-   gather_facts: no
-
-   tasks:
-     - name: extracting_json
-       shell: ( find {{workingdir}}/{{Dest_dir}}/dpi/dpi_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/dpi/)
-
-     - name: making_logs_folder
-       shell: mkdir -p {{workingdir}}/{{Dest_dir}}/dpi/logs
-
-     - name: extracting_log
-       shell: ( find {{workingdir}}/{{Dest_dir}}/dpi/dpi_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/dpi/logs)
-
-     - name: removing dpi_temp
-       shell: rm -rf {{workingdir}}/{{Dest_dir}}/dpi/dpi_temp
diff --git a/benchmarks/playbooks/fio.yaml b/benchmarks/playbooks/fio.yaml
deleted file mode 100644
index 813dc411..00000000
--- a/benchmarks/playbooks/fio.yaml
+++ /dev/null
@@ -1,105 +0,0 @@
- - hosts: localhost
-   connection: local
-   gather_facts: no
-
-   tasks:
-     - name: making fio directory
-       file: path={{workingdir}}/{{Dest_dir}}/fio state=directory
-
-     - name: making temporary fio directory
-       file: path={{workingdir}}/{{Dest_dir}}/fio/fio_temp state=directory
-
- - hosts: "{{role}}"
-   become: yes
-   remote_user: "{{username}}"
-
-   tasks:
-     - name: checking home directory
-       shell: echo $HOME
-       register: home_dir
-
-     - name: cleaning
-       shell: sudo rm -rf $HOME/fio
-
-     - name: cleaning previous results
-       shell: sudo rm -rf $HOME/qtip_result
-
-     - name: making fio temporary directory
-       shell: sudo mkdir $HOME/fio
-
-     - name: making results temporary directory
-       shell: sudo mkdir $HOME/qtip_result
-
-     - include: ./sys_proxy_pbook.yaml
-
-     - include: ./sys_info_pbook.yaml
-       vars:
-         network: false
-
-     - name: Installing fio dependencies when CentOS
-       shell: sudo yum install wget gcc libaio-devel -y
-       when: ansible_os_family == "RedHat"
-
-     - name: Installing fio dependencies when Ubuntu
-       shell: sudo apt-get install wget gcc libaio-dev -y
-       when: ansible_os_family == "Debian"
-
-     - name: Fetching fio
-       shell: cd $HOME/fio/ && wget http://freecode.com/urls/3aa21b8c106cab742bf1f20d60629e3f -O fio.tar.gz
-
-     - name: Untar fio
-       shell: cd $HOME/fio/ && sudo tar -zxvf fio.tar.gz
-
-     - name: configure
-       shell: cd $HOME/fio/fio-2.1.10 && sudo ./configure && sudo make
-
-     - name: Fetching fio job
-       copy: src=./../fio_jobs/test_job dest={{home_dir.stdout}}/fio/fio-2.1.10/
-
-     - name: Benchmarking block storage through fio
-       shell: cd $HOME/fio/fio-2.1.10 && sudo ./fio --output-format=json --output=$HOME/qtip_result/fio_result.json test_job
-
-     - name: Fetching result transformation script
-       copy: src=./result_transform/fio/fio_result_transform.py dest={{home_dir.stdout}}/qtip_result
-
-     - name: Transforming result
-       shell: cd $HOME/qtip_result && sudo python fio_result_transform.py
-
-     - name: copy report formation script
-       copy: src=./result_transform/final_report.py dest={{home_dir.stdout}}/qtip_result
-
-     - name: consolidating report
-       shell: cd $HOME/qtip_result && sudo python final_report.py FIO {{fname}}
-
-     - name: registering files
-       shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
-       register: files_to_copy
-
-     - name: copy results
-       fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/fio/fio_temp
-       with_items: "{{files_to_copy.stdout_lines}}"
-
-     - name: registering log files
-       shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
-       register: copy_log_results
-
-     - name: copying log results
-       fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/fio/fio_temp
-       with_items: "{{copy_log_results.stdout_lines}}"
-
- - hosts: localhost
-   connection: local
-   gather_facts: no
-
-   tasks:
-     - name: extracting_json
-       shell: ( find {{workingdir}}/{{Dest_dir}}/fio/fio_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/fio/)
-
-     - name: making_logs_folder
-       shell: mkdir -p {{workingdir}}/{{Dest_dir}}/fio/logs
-
-     - name: extracting_log
-       shell: ( find {{workingdir}}/{{Dest_dir}}/fio/fio_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/fio/logs)
-
-     - name: removing fio_log
-       shell: rm -rf {{workingdir}}/{{Dest_dir}}/fio/fio_temp
diff --git a/benchmarks/playbooks/git_proxy_pbook.yaml b/benchmarks/playbooks/git_proxy_pbook.yaml
deleted file mode 100644
index 5cb6f450..00000000
--- a/benchmarks/playbooks/git_proxy_pbook.yaml
+++ /dev/null
@@ -1,11 +0,0 @@
-#git
-- name: set git proxy(http)
-  shell: "git config --global http.proxy {{ http_proxy }}"
-  when: http_proxy is defined
-  ignore_errors: yes
-
-- name: set git proxy(https)
-  shell: "git config --global https.proxy {{https_proxy}}"
-  when: https_proxy is defined
-  ignore_errors: yes
-
diff --git a/benchmarks/playbooks/info_script/info_collect.py b/benchmarks/playbooks/info_script/info_collect.py
deleted file mode 100644
index 3fc35d5a..00000000
--- a/benchmarks/playbooks/info_script/info_collect.py
+++ /dev/null
@@ -1,86 +0,0 @@
-import os
-import pickle
-import json
-import sys
-
-os.system('inxi -b -c0 -n > $PWD/est_2')
-est_ob = open("est_2", "r+")
-est_ob2 = open("est_1", "w+")
-in_string = est_ob.read().replace('\n', ' ')
-cpu_idle = float(os.popen("""top -bn1 | grep "Cpu(s)" | awk '{print $8}'""").read().rstrip())
-cpu_usage = 100 - cpu_idle
-est_ob2.write(in_string)
-est_ob.close()
-est_ob2.close()
-
-inxi_host = os.popen("""cat $PWD/est_1 | grep -o -P '(?<=Host:).*(?=Kernel)' """).read().lstrip().rstrip()
-inxi_mem = os.popen("""cat $PWD/est_1 | grep -o -P '(?<=Memory:).*(?=MB)' """).read().lstrip().rstrip() + "MB"
-inxi_cpu = os.popen("""cat $PWD/est_1 | grep -o -P '(?<=CPU).*(?=speed)' | cut -f2 -d':'""").read().lstrip().rstrip()
-inxi_distro = os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=Distro:).*(?=Machine:)' """).read().rstrip().lstrip()
-inxi_kernel = os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=Kernel:).*(?=Console:)' """).read().rstrip().lstrip()
-inxi_HD = os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=HDD Total Size:).*(?=Info:)' """).read().rstrip().lstrip()
-inxi_product = os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=product:).*(?=Mobo:)' """).read().rstrip().lstrip()
-
-info_dict = {'hostname': inxi_host,
-             'product': inxi_product,
-             'os': inxi_distro,
-             'kernel': inxi_kernel,
-             'cpu': inxi_cpu,
-             'cpu_usage': '{0}%'.format(str(round(cpu_usage, 3))),
-             'memory_usage': inxi_mem,
-             'disk_usage': inxi_HD}
-network_flag = str(sys.argv[1]).rstrip()
-
-if (network_flag == 'n'):
-
-    info_dict['network_interfaces'] = {}
-    tem_2 = """ cat $PWD/est_1 | grep -o -P '(?<=Network:).*(?=Info:)'"""
-    print os.system(tem_2 + ' > Hello')
-    i = int(os.popen(tem_2 + " | grep -o 'Card' | wc -l ").read())
-    print i
-
-    for x in range(1, i + 1):
-        tem = """ cat $PWD/est_1 | grep -o -P '(?<=Card-""" + str(x) + """:).*(?=Card-""" + str(x + 1) + """)'"""
-        if i == 1:
-            tem = """ cat $PWD/est_1 | grep -o -P '(?<=Network:).*(?=Info:)'"""
-            inxi_card_1 = ((os.popen(tem + " | grep -o -P '(?<=Card:).*(?=Drives:)'|sed 's/ *driver:.*//'").read().rstrip().lstrip()))
-            print inxi_card_1
-            info_dict['network_interfaces']['interface_' + str(x)] = {}
-            info_dict['network_interfaces']['interface_' + str(x)]['network_card'] = inxi_card_1
-            inxi_card_2 = ((os.popen(tem + "| grep -o -P '(?<=Card:).*(?=Drives:)'|sed -e 's/^.*IF: //'").read())).rstrip().lstrip()
-            info_dict['network_interfaces']['interface_' + str(x)]['interface_info'] = inxi_card_2
-        elif x < (i):
-            print "two"
-            inxi_card_1 = ((os.popen(tem + "| sed 's/ *driver:.*//'").read().rstrip().lstrip()))
-            info_dict['network_interfaces']['interface_' + str(x)] = {}
-            info_dict['network_interfaces']['interface_' + str(x)]['network_Card'] = inxi_card_1
-            inxi_card_2 = ((os.popen(tem + "|sed -e 's/^.*IF: //'").read())).rstrip().lstrip()
-            info_dict['network_interfaces']['interface_' + str(x)]['interface_info'] = inxi_card_2
-        elif x == i:
-            print "Three"
-            info_dict['network_interfaces']['interface_' + str(x)] = {}
-            inxi_card_1 = ((os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=Card-""" + str(x) + """:).*(?=Drives:)'| sed 's/ *driver:.*//' """).read().rstrip().lstrip()))
-            info_dict['network_interfaces']['interface_' + str(x)]['network_Card'] = inxi_card_1
-            inxi_card_2 = ((os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=Card-""" + str(x) + """:).*(?=Drives:)'| sed -e 's/^.*IF: //' """).read().rstrip().lstrip()))
-            info_dict['network_interfaces']['interface_' + str(x)]['interface_info'] = inxi_card_2
-else:
-    print "No network cards"
-
-os.system("bwm-ng -o plain -c 1 | grep -v '=' | grep -v 'iface' | grep -v '-' > bwm_dump")
-n_interface = int(os.popen(" cat bwm_dump | grep -v 'total' | wc -l ").read().rstrip())
-interface = {}
-for x in range(1, n_interface):
-    interface_name = os.popen(" cat bwm_dump | awk 'NR==" + str(x) + "' | awk '{print $1}' ").read().rstrip().replace(':', '')
-    interface[str(interface_name)] = {}
-    interface[str(interface_name)]['Rx (KB/s)'] = os.popen(" cat bwm_dump | awk 'NR==" + str(x) + "' | awk '{print $2}' ").read().rstrip()
-    interface[str(interface_name)]['Tx (KB/s)'] = os.popen(" cat bwm_dump | awk 'NR==" + str(x) + "' | awk '{print $4}' ").read().rstrip()
-    interface[str(interface_name)]['Total (KB/s)'] = os.popen(" cat bwm_dump | awk 'NR== " + str(x) + "' | awk '{print $6}' ").read().rstrip()
-
-info_dict['interface_io'] = interface
-
-print info_dict
-
-with open('./sys_info_temp', 'w+')as out_info:
-    pickle.dump(info_dict, out_info)
-
-with open('temp', 'w+') as result_json:
-    json.dump(info_dict, result_json, indent=4, sort_keys=True)
diff --git a/benchmarks/playbooks/iperf.yaml b/benchmarks/playbooks/iperf.yaml
deleted file mode 100644
index b6fd75c5..00000000
--- a/benchmarks/playbooks/iperf.yaml
+++ /dev/null
@@ -1,161 +0,0 @@
- - hosts: localhost
-   connection: local
-   gather_facts: no
-
-   tasks:
-     - name: getting directory
-       shell: sudo echo $PWD
-       register: qtip_dir
-
-     - name: making Iperf directory
-       file: path={{workingdir}}/{{Dest_dir}}/iperf state=directory
-
-     - name: making temporary iperf directory
-       file: path={{workingdir}}/{{Dest_dir}}/iperf/iperf_temp state=directory
-
- - hosts: "{{role}}"
-   become: yes
-   remote_user: "{{username}}"
-
-   tasks:
-     - name: Rolename
-       set_fact:
-         rolename: "{{role}}"
-       when: role is defined
-
-     - name: installertype
-       set_fact:
-         installertype: "{{installer}}"
-
-     - name: Get Hostname
-       shell: echo $HOSTNAME
-       register: hostID
-
-     - name: echo
-       shell: echo index_var
-
-     - name: checking home directory
-       shell: echo $HOME
-       register: home_dir
-
-     - name: cleaning
-       shell: sudo rm -rf $HOME/iperf
-
-     - name: cleaning previous results
-       shell: sudo rm -rf $HOME/qtip_result
-
-     - name: making Iperf temporary directory
-       shell: sudo mkdir $HOME/iperf
-
-     - name: making results temporary directory
-       shell: sudo mkdir $HOME/qtip_result
-
-     - include: ./sys_proxy_pbook.yaml
-
-     - include: ./sys_info_pbook.yaml
-       vars:
-         network: true
-
-     - name: Installing Epel-release when CentOS
-       shell: sudo yum install epel-release -y
-       when: ansible_os_family == "RedHat"
-
-     - name: Allow iperf server port in iptables input rules
-       shell: iptables -A INPUT -p tcp --dport {{iperf_port}} -j ACCEPT
-       vars:
-         iperf_port: 5201
-       ignore_errors: yes
-       when: rolename == "1-server" and installertype == 'fuel'
-
-     - name: Installing IPERF when Ubuntu
-       shell: sudo apt-get install iperf3 -y
-       when: ansible_os_family == "Debian"
-
-     - name: Installing Iperf3
-       shell: sudo yum install iperf3 -y
-       when: ansible_os_family == "RedHat"
-
-     - name: Running iperf on server
-       shell: iperf3 -s
-       async: 400
-       poll: 0
-       when: rolename == "1-server"
-
-     - name: Running Iperf on Host
-       shell: iperf3 --time {{duration}} -b 0 G -c {{ip1}} -J -O10 >> {{home_dir.stdout}}/qtip_result/iperf_raw.json
-       ignore_errors: yes
-       with_items:
-         - "{{ip1}}"
-       when: rolename == "2-host" and "{{privateip1}}" == "NONE"
-
-     - name: Running Iperf on Host
-       shell: iperf3 --time {{duration}} -b 0 G -c {{privateip1}} -J -O10 >> {{home_dir.stdout}}/qtip_result/iperf_raw.json
-       ignore_errors: yes
-       with_items:
-         - "{{ip1}}"
-       when: rolename == "2-host" and "{{privateip1}}" != "NONE"
-
-     - name: Fetching result transformation script
-       copy: src=./result_transform/iperf/iperf_transform.py dest={{home_dir.stdout}}/qtip_result
-
-     - name: Transforming result
-       shell: cd $HOME/qtip_result && sudo python iperf_transform.py
-       when: rolename =="2-host" and "{{ip2}}" == ''
-
-     - name: copy report formation script
-       copy: src=./result_transform/final_report.py dest={{home_dir.stdout}}/qtip_result
-       when: rolename =="2-host" and "{{ip2}}" == ''
-
-     - name: consolidating report
-       shell: cd $HOME/qtip_result && sudo python final_report.py IPERF {{fname}}
-       when: rolename =="2-host" and "{{ip2}}" == ''
rolename =="2-host" and "{{ip2}}" == '' - - - name: Files to Copy - shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2 - register: files_to_copy - when: rolename =="2-host" and "{{ip2}}" == '' - - - name: copy results - fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/iperf/iperf_temp - with_items: files_to_copy.stdout_lines - when: rolename =="2-host" and "{{ip2}}" == '' - - - name: registering log files - shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2 - register: copy_log_results - when: rolename =="2-host" and "{{ip2}}" == '' - - - name: copying log results - fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/iperf/iperf_temp - with_items: copy_log_results.stdout_lines - when: rolename =="2-host" and "{{ip2}}" == '' - - - - hosts: localhost - connection: local - gather_facts: no - - tasks: - - name: Rolename - set_fact: - rolename: "{{role}}" - when: role is defined - - - name: extracting_json - shell: ( find {{workingdir}}/{{Dest_dir}}/iperf/iperf_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/iperf/) - when: rolename == "2-host" - - - name: making_logs_folder - shell: mkdir -p {{workingdir}}/{{Dest_dir}}/iperf/logs - - - name: extracting_log - shell: ( find {{workingdir}}/{{Dest_dir}}/iperf/iperf_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/iperf/logs) - when: rolename == "2-host" - - - name: removing iperf_raw file - shell: rm -rf {{workingdir}}/{{Dest_dir}}/iperf/iperf_raw.json - when: rolename == "2-host" - - - name: removing iperf_temp - shell: rm -rf {{workingdir}}/{{Dest_dir}}/iperf/iperf_temp diff --git a/benchmarks/playbooks/netperf.yaml b/benchmarks/playbooks/netperf.yaml deleted file mode 100644 index a07752cb..00000000 --- a/benchmarks/playbooks/netperf.yaml +++ /dev/null @@ -1,97 +0,0 @@ - - hosts: localhost - connection: local - gather_facts: no - - tasks: - - name: getting directory - shell: echo $PWD - register: qtip_dir - - - name: making Netperf directory - file: path={{workingdir}}/{{Dest_dir}}/netperf state=directory - - - name: making temporary netperf directory - file: path={{workingdir}}/{{Dest_dir}}/netperf/netperf_temp state=directory - - - - hosts: "{{role}}" - become: yes - remote_user: "{{username}}" - - tasks: - - name: Rolename - set_fact: - rolename: "{{role}}" - when: role is defined - - - name: Get Hostname - shell: echo $HOSTNAME - register: hostID - - - name: checking home directory - shell: echo $HOME - register: home_dir - - - name: cleaning - shell: rm -rf $HOME/netperf - - - name: cleaning previous results - shell: rm -rf $HOME/qtip_result - - - name: making netperf temporary directory - shell: mkdir $HOME/netperf - - - name: making results temporary directory - shell: mkdir $HOME/qtip_result - - - name: Fetching netperf - shell: wget ftp://ftp.netperf.org/netperf/netperf-2.7.0.tar.gz - when: ansible_os_family == "RedHat" - - - name: Extracting Netperf - shell: tar -xvf netperf-2.7.0.tar.gz - - - name: configuring netperf - shell: cd $HOME/netperf-2.7.0 && ./configure - - - name: Making Netperf - shell: cd $HOME/netperf-2.7.0 && make - - - name: Installing Netperf - shell: cd $HOME/netperf-2.7.0 && make install - - - name: Running netperf on server - shell: /usr/local/bin/netserver -p 4000 - when: rolename == "1-server" - - - name: Running netperf on Host - shell: /usr/local/bin/netperf -H {{privateip1}} -p 4000 -l {{duration}} -t {{teststream}} -fG >> 
-       ignore_errors: yes
-       with_items:
-         - "{{ip1}}"
-       when: rolename == "2-host" and "{{ip2}}" == ''
-
-     - name: Files to Copy
-       shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
-       register: files_to_copy
-
-     - name: copy results
-       fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/netperf/netperf_temp
-       with_items: "{{files_to_copy.stdout_lines}}"
-
- - hosts: localhost
-   connection: local
-   gather_facts: no
-
-   tasks:
-     - name: Rolename
-       set_fact:
-         rolename: "{{role}}"
-       when: role is defined
-
-     - name: extracting_json
-       shell: ( find {{workingdir}}/{{Dest_dir}}/netperf/netperf_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/netperf/)
-       when: rolename == "2-host"
-
-     - name: removing netperf_temp
-       shell: rm -rf {{workingdir}}/{{Dest_dir}}/netperf/netperf_temp
diff --git a/benchmarks/playbooks/pktgen.yaml b/benchmarks/playbooks/pktgen.yaml
deleted file mode 100644
index f466129b..00000000
--- a/benchmarks/playbooks/pktgen.yaml
+++ /dev/null
@@ -1,47 +0,0 @@
- - hosts: localhost
-   connection: local
-   gather_facts: no
-
-   tasks:
-     - name: getting directory
-       shell: echo $PWD
-       register: qtip_dir
-
-     - name: making pktgen directory
-       file: path={{workingdir}}/{{Dest_dir}}/pktgen state=directory
-
-     - name: making temporary pktgen directory
-       file: path={{workingdir}}/{{Dest_dir}}/pktgen/pktgen_temp state=directory
-
- - hosts: "{{role}}"
-
-   tasks:
-     - name: Rolename
-       set_fact:
-         rolename: "{{role}}"
-       when: role is defined
-
-     - name: IPTABLE Setup
-       shell: iptables -F
-       when: rolename == '1-server'
-
-     - name: iptables
-       shell: sudo iptables -A INPUT -p udp --dport 1000 -j DROP
-       when: rolename == '1-server'
-
-     - name: run pktgen
-       shell: bash pktgen.sh {{privateip1}} 1 {{packetsize}} {{duration}} >> .json
-       when: rolename == '2-host'
-
- - hosts: "{{role}}"
-
-   tasks:
-     - name: Rolename
-       set_fact:
-         rolename: "{{role}}"
-       when: role is defined
-
-     - name: Get server packetsize
-       shell: iptables -L -vnx >> pktgenServer{{privateip1}}.txt
-       when: rolename == '1-server'
diff --git a/benchmarks/playbooks/ramspeed.yaml b/benchmarks/playbooks/ramspeed.yaml
deleted file mode 100644
index 3fb78cb5..00000000
--- a/benchmarks/playbooks/ramspeed.yaml
+++ /dev/null
@@ -1,109 +0,0 @@
- - hosts: localhost
-   connection: local
-   gather_facts: no
-
-   tasks:
-     - name: making ramspeed directory
-       file: path={{workingdir}}/{{Dest_dir}}/ramspeed state=directory
-
-     - name: making temporary ramspeed directory
-       file: path={{workingdir}}/{{Dest_dir}}/ramspeed/ramspeed_temp state=directory
-
- - hosts: "{{role}}"
-   become: yes
-   remote_user: "{{username}}"
-
-   tasks:
-     - name: checking home directory
-       shell: echo $HOME
-       register: home_dir
-
-     - name: cleaning
-       shell: sudo rm -rf $HOME/ramspeed
-
-     - name: cleaning previous results
-       shell: sudo rm -rf $HOME/qtip_result
-
-     - name: making ramspeed temporary directory
-       shell: sudo mkdir $HOME/ramspeed
-
-     - name: making results temporary directory
-       shell: sudo mkdir $HOME/qtip_result
-
-     - include: ./sys_proxy_pbook.yaml
-
-     - include: ./sys_info_pbook.yaml
-       vars:
-         network: false
-
-     - name: Installing RAM_Speed dependencies when CentOS
-       shell: sudo yum install wget gcc -y
-       when: ansible_os_family == "RedHat"
-
-     - name: Installing RAM_Speed dependencies when Ubuntu
-       shell: sudo apt-get install wget gcc -y
-       when: ansible_os_family == "Debian"
-
-     - name: make dummy file
-       shell: sudo touch $HOME/ramspeed/ramspeed.tar.gz
-
-     - name: Fetching RAM_Speed
-       shell: cd $HOME/ramspeed/ && sudo wget -O ramspeed.tar.gz https://docs.google.com/uc?id=0B92Bp5LZTM7gRFctalZLMktTNDQ
-
-     - name: Untar RAM_SPeed
-       shell: cd $HOME/ramspeed/ && sudo tar -zxvf ramspeed.tar.gz
-
-     - name: configure
-       shell: cd $HOME/ramspeed/ramsmp-3.5.0 && ./build.sh
-
-     - name: Benchmarking IntMem Bandwidth
-       shell: cd $HOME/ramspeed/ramsmp-3.5.0 && ./ramsmp -b 3 -l 5 -p 1 >> $HOME/qtip_result/Intmem
-
-     - name: Benchmarking FloatMem Bandwidth
-       shell: cd $HOME/ramspeed/ramsmp-3.5.0 && ./ramsmp -b 6 -l 5 -p 1 >> $HOME/qtip_result/Floatmem
-
-     - name: Fetching result transformation script
-       copy: src=./result_transform/ramspd/ramspd_transform.py dest={{home_dir.stdout}}/qtip_result
-
-     - name: Transforming result
-       shell: cd $HOME/qtip_result && sudo python ramspd_transform.py
-
-     - name: copy report formation script
-       copy: src=./result_transform/final_report.py dest={{home_dir.stdout}}/qtip_result
-
-     - name: consolidating report
-       shell: cd $HOME/qtip_result && sudo python final_report.py RamSpeed {{fname}}
-
-     - name: registering files
-       shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
-       register: files_to_copy
-
-     - name: copy results
-       fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/ramspeed/ramspeed_temp
-       with_items: "{{files_to_copy.stdout_lines}}"
-
-     - name: registering log files
-       shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
-       register: copy_log_results
-
-     - name: copying log results
-       fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/ramspeed/ramspeed_temp
-       with_items: "{{copy_log_results.stdout_lines}}"
-
- - hosts: localhost
-   connection: local
-   gather_facts: no
-
-   tasks:
-     - name: extracting_json
-       shell: ( find {{workingdir}}/{{Dest_dir}}/ramspeed/ramspeed_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/ramspeed/)
-
-     - name: making_logs_folder
-       shell: mkdir -p {{workingdir}}/{{Dest_dir}}/ramspeed/logs
-
-     - name: extracting_log
-       shell: ( find {{workingdir}}/{{Dest_dir}}/ramspeed/ramspeed_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/ramspeed/logs)
-
-     - name: removing ramspeed_log
-       shell: rm -rf {{workingdir}}/{{Dest_dir}}/ramspeed/ramspeed_temp
diff --git a/benchmarks/playbooks/result_transform/dpi/dpi_average.sh b/benchmarks/playbooks/result_transform/dpi/dpi_average.sh
deleted file mode 100644
index 405d3ff6..00000000
--- a/benchmarks/playbooks/result_transform/dpi/dpi_average.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-COUNTER=0
-WDIR=$PWD
-while [ $COUNTER -lt 10 ]; do
-
-  echo $WDIR
-  $( ./ndpiReader -i test.pcap >> $WDIR/dpi_dump.txt )
-  let COUNTER=COUNTER+1
-  echo "Run number: $COUNTER"
-
-done
-
-
diff --git a/benchmarks/playbooks/result_transform/dpi/dpi_transform.py b/benchmarks/playbooks/result_transform/dpi/dpi_transform.py
deleted file mode 100644
index ee29d8e2..00000000
--- a/benchmarks/playbooks/result_transform/dpi/dpi_transform.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import os
-import pickle
-import datetime
-
-sum_dpi_pps = float(0)
-sum_dpi_bps = float(0)
-
-for x in range(1, 11):
-    dpi_result_pps = float(
-        os.popen(
-            "cat $HOME/qtip_result/dpi_dump.txt | grep 'nDPI throughput:' | awk 'NR=='" +
-            str(x) +
-            " | awk '{print $3}'").read().lstrip())
-    dpi_result_bps = float(
-        os.popen(
-            "cat $HOME/qtip_result/dpi_dump.txt | grep 'nDPI throughput:' | awk 'NR=='" +
-            str(x) +
-            " | awk '{print $7}'").read().rstrip())
$7}'").read().rstrip()) - - if (dpi_result_pps > 100): - dpi_result_pps = dpi_result_pps / 1000 - - if (dpi_result_bps > 100): - dpi_result_bps = dpi_result_bps / 1000 - - sum_dpi_pps += dpi_result_pps - sum_dpi_bps += dpi_result_bps - -dpi_result_pps = sum_dpi_pps / 10 -dpi_result_bps = sum_dpi_bps / 10 - -host = os.popen("hostname").read().rstrip() -log_time_stamp = str(datetime.datetime.utcnow().isoformat()) - -os.popen( - "cat $HOME/qtip_result/dpi_dump.txt > $HOME/qtip_result/" + - host + - "-" + - log_time_stamp + - ".log") - -home_dir = str(os.popen("echo $HOME").read().rstrip()) -host = os.popen("echo $HOSTNAME") -result = {'pps': round(dpi_result_pps, 3), - 'bps': round(dpi_result_bps, 3)} -with open('./result_temp', 'w+') as result_file: - pickle.dump(result, result_file) diff --git a/benchmarks/playbooks/result_transform/final_report.py b/benchmarks/playbooks/result_transform/final_report.py deleted file mode 100644 index 274742d4..00000000 --- a/benchmarks/playbooks/result_transform/final_report.py +++ /dev/null @@ -1,24 +0,0 @@ -import pickle -import json -import datetime -import os -import sys - -home_dir = str((os.popen("echo $HOME").read().rstrip())) - -with open('./sys_info_temp', 'r') as sys_info_f: - sys_info_dict = pickle.load(sys_info_f) -with open('./result_temp', 'r') as result_f: - result_dict = pickle.load(result_f) - -host_name = (os.popen("hostname").read().rstrip()) -benchmark_name = str(sys.argv[1]) -testcase_name = str(sys.argv[2]) -report_time_stamp = str(datetime.datetime.utcnow().isoformat()) -final_dict = {"name": testcase_name, - "time": report_time_stamp, - "system_information": sys_info_dict, - "details": result_dict} - -with open('./' + host_name + '-' + report_time_stamp + '.json', 'w+') as result_json: - json.dump(final_dict, result_json, indent=4, sort_keys=True) diff --git a/benchmarks/playbooks/result_transform/fio/fio_result_transform.py b/benchmarks/playbooks/result_transform/fio/fio_result_transform.py deleted file mode 100755 index 5ecac823..00000000 --- a/benchmarks/playbooks/result_transform/fio/fio_result_transform.py +++ /dev/null @@ -1,29 +0,0 @@ -import json -import pickle -import os -import datetime - - -def get_fio_job_result(fio_job_data): - return {'read': {'io_bytes': fio_job_data["read"]["io_bytes"], - 'io_ps': fio_job_data["read"]["iops"], - 'io_runtime_millisec': fio_job_data["read"]["runtime"], - 'mean_io_latenchy_microsec': fio_job_data["read"]["lat"]["mean"]}, - 'write': {'io_bytes': fio_job_data["write"]["io_bytes"], - 'io_ps': fio_job_data["write"]["iops"], - 'io_runtime_millisec': fio_job_data["write"]["runtime"], - 'mean_io_latenchy_microsec': fio_job_data["write"]["lat"]["mean"]}} - - -with open("fio_result.json") as fio_raw: - fio_data = json.load(fio_raw) - -fio_result_dict = {} -for x, result in enumerate(map(get_fio_job_result, fio_data["jobs"])): - fio_result_dict['job_{0}'.format(x)] = result - -host_name = (os.popen("hostname").read().rstrip()) -report_time = str(datetime.datetime.utcnow().isoformat()) -os.system("mv fio_result.json " + str(host_name) + "-" + report_time + ".log") -with open('./result_temp', 'w + ')as out_fio_result: - pickle.dump(fio_result_dict, out_fio_result) diff --git a/benchmarks/playbooks/result_transform/iperf/iperf_transform.py b/benchmarks/playbooks/result_transform/iperf/iperf_transform.py deleted file mode 100644 index b52e4634..00000000 --- a/benchmarks/playbooks/result_transform/iperf/iperf_transform.py +++ /dev/null @@ -1,27 +0,0 @@ -import json
-import datetime
-import pickle
-with open('iperf_raw.json', 'r') as ifile:
- raw_iperf_data = json.loads(ifile.read().rstrip())
-
-bits_sent = raw_iperf_data['end']['sum_sent']['bits_per_second']
-bits_received = raw_iperf_data['end']['sum_received']['bits_per_second']
-total_byte_sent = raw_iperf_data['end']['sum_sent']['bytes']
-total_byte_received = raw_iperf_data['end']['sum_received']['bytes']
-cpu_host_total_percent = raw_iperf_data['end']['cpu_utilization_percent']['host_total']
-cpu_remote_total_percent = raw_iperf_data['end']['cpu_utilization_percent']['remote_total']
-
-time_stamp = str(datetime.datetime.utcnow().isoformat())
-
-result = {'version': raw_iperf_data['start']['version'],
- 'bandwidth': {'sender_throughput': bits_sent,
- 'received_throughput': bits_received},
- 'cpu': {'cpu_host': cpu_host_total_percent,
- 'cpu_remote': cpu_remote_total_percent}
- }
-
-with open('iperf_raw-' + time_stamp + '.log', 'w+') as ofile:
- ofile.write(json.dumps(raw_iperf_data))
-
-with open('./result_temp', 'w+') as result_file:
- pickle.dump(result, result_file)
diff --git a/benchmarks/playbooks/result_transform/ramspd/ramspd_transform.py b/benchmarks/playbooks/result_transform/ramspd/ramspd_transform.py
deleted file mode 100644
index 960f84fc..00000000
--- a/benchmarks/playbooks/result_transform/ramspd/ramspd_transform.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import os
-import pickle
-import datetime
-
-intmem_copy = os.popen("cat Intmem | grep 'BatchRun Copy' | awk '{print $4}'").read().rstrip()
-intmem_scale = os.popen("cat Intmem | grep 'BatchRun Scale' | awk '{print $4}'").read().rstrip()
-intmem_add = os.popen("cat Intmem | grep 'BatchRun Add' | awk '{print $4}'").read().rstrip()
-intmem_triad = os.popen("cat Intmem | grep 'BatchRun Triad' | awk '{print $4}'").read().rstrip()
-intmem_average = os.popen("cat Intmem | grep 'BatchRun AVERAGE' | awk '{print $4}'").read().rstrip()
-
-print intmem_copy
-print intmem_average
-
-floatmem_copy = os.popen("cat Floatmem | grep 'BatchRun Copy' | awk '{print $4}'").read().rstrip()
-floatmem_scale = os.popen("cat Floatmem | grep 'BatchRun Scale' | awk '{print $4}'").read().rstrip()
-floatmem_add = os.popen("cat Floatmem | grep 'BatchRun Add' | awk '{print $4}'").read().rstrip()
-floatmem_triad = os.popen("cat Floatmem | grep 'BatchRun Triad' | awk '{print $4}'").read().rstrip()
-floatmem_average = os.popen("cat Floatmem | grep 'BatchRun AVERAGE' | awk '{print $4}'").read().rstrip()
-
-print floatmem_copy
-print floatmem_average
-
-hostname = os.popen("hostname").read().rstrip()
-time_stamp = str(datetime.datetime.utcnow().isoformat())
-
-os.system("mv Intmem " + hostname + "-" + time_stamp + ".log")
-os.system("cp Floatmem >> " + hostname + "-" + time_stamp + ".log")
-
-result = {"int_bandwidth": {"copy": intmem_copy,
-                            "add": intmem_add,
-                            "scale": intmem_scale,
-                            "triad": intmem_triad,
-                            "average": intmem_average},
-          "float_bandwidth": {"copy": floatmem_copy,
-                              "add": floatmem_add,
-                              "scale": floatmem_scale,
-                              "triad": floatmem_triad,
-                              "average": floatmem_average}}
-
-with open('./result_temp', 'w+') as result_file:
-    pickle.dump(result, result_file)
diff --git a/benchmarks/playbooks/result_transform/ssl/ssl_transform.py b/benchmarks/playbooks/result_transform/ssl/ssl_transform.py
deleted file mode 100644
index de84d24b..00000000
--- a/benchmarks/playbooks/result_transform/ssl/ssl_transform.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import os
-import pickle
-import datetime
-
-openssl_version = os.popen("cat RSA_dump | head -1").read().rstrip()
-rsa_512_sps = os.popen(
-    "cat RSA_dump | grep '512 bits ' | awk '{print $6}' ").read().rstrip()
-rsa_512_vps = os.popen(
-    "cat RSA_dump | grep '512 bits ' | awk '{print $7}' ").read().rstrip()
-rsa_1024_sps = os.popen(
-    "cat RSA_dump | grep '1024 bits ' | awk '{print $6}' ").read().rstrip()
-rsa_1024_vps = os.popen(
-    "cat RSA_dump | grep '1024 bits ' | awk '{print $7}' ").read().rstrip()
-rsa_2048_sps = os.popen(
-    "cat RSA_dump | grep '2048 bits ' | awk '{print $6}' ").read().rstrip()
-rsa_2048_vps = os.popen(
-    "cat RSA_dump | grep '2048 bits ' | awk '{print $7}' ").read().rstrip()
-rsa_4096_sps = os.popen(
-    "cat RSA_dump | grep '4096 bits ' | awk '{print $6}' ").read().rstrip()
-rsa_4096_vps = os.popen(
-    "cat RSA_dump | grep '4096 bits ' | awk '{print $7}' ").read().rstrip()
-
-aes_16B = os.popen(
-    "cat AES-128-CBC_dump | grep 'aes-128-cbc ' | awk '{print $2}' ").read().rstrip()
-aes_64B = os.popen(
-    "cat AES-128-CBC_dump | grep 'aes-128-cbc ' | awk '{print $3}' ").read().rstrip()
-aes_256B = os.popen(
-    "cat AES-128-CBC_dump | grep 'aes-128-cbc ' | awk '{print $4}' ").read().rstrip()
$4}' ").read().rstrip() -aes_1024B = os.popen( - "cat AES-128-CBC_dump | grep 'aes-128-cbc ' | awk '{print $5}' ").read().rstrip() -aes_8192B = os.popen( - "cat AES-128-CBC_dump | grep 'aes-128-cbc ' | awk '{print $6}' ").read().rstrip() - -hostname = os.popen("hostname").read().rstrip() -time_stamp = str(datetime.datetime.utcnow().isoformat()) - -os.system("mv RSA_dump " + hostname + "-" + time_stamp + ".log") -os.system("cat AES-128-CBC_dump >> " + hostname + "-" + time_stamp + ".log") - -result = {"version": [openssl_version], - "rsa_sig": {"512_bits": rsa_512_sps, - "1024_bits": rsa_1024_sps, - "2048_bits": rsa_2048_sps, - "4096_bits": rsa_4096_sps, - "unit": "sig/sec"}, - "aes_128_cbc": {"16B_block": aes_16B, - "64B_block": aes_64B, - "256B_block": aes_256B, - "1024B_block": aes_1024B, - "8192B_block": aes_8192B, - "unit": "B/sec"}} - -with open('./result_temp', 'w+') as result_file: - pickle.dump(result, result_file) diff --git a/benchmarks/playbooks/result_transform/ubench_transform.py b/benchmarks/playbooks/result_transform/ubench_transform.py deleted file mode 100644 index ab5fe171..00000000 --- a/benchmarks/playbooks/result_transform/ubench_transform.py +++ /dev/null @@ -1,32 +0,0 @@ -import os -import json -import pickle - -total_cpu = os.popen( - "cat $HOME/tempT/UnixBench/results/* | grep 'of tests' | awk '{print $1;}' | awk 'NR==1'").read().rstrip() - -cpu_1 = os.popen( - "cat $HOME/tempT/UnixBench/results/* | grep 'of tests' | awk '{print $6;}' | awk 'NR==1'").read().rstrip() - - -cpu_2 = os.popen( - "cat $HOME/tempT/UnixBench/results/* | grep 'of tests' | awk '{print $6;}' | awk 'NR==2'").read().rstrip() - - -index_1 = os.popen( - "cat $HOME/tempT/UnixBench/results/* | grep 'Index Score (Partial Only) ' | awk '{print $7;}' | awk 'NR==1'").read().rstrip() -index_2 = os.popen( - "cat $HOME/tempT/UnixBench/results/* | grep 'Index Score (Partial Only) ' | awk '{print $7;}' | awk 'NR==2'").read().rstrip() - - -result = {"n_cpu": total_cpu, - "single": {"n_para_test": cpu_1, - "score": index_1}, - "multi": {"n_para_test": cpu_2, - "score": index_2} - } - -with open('result_temp', 'w+') as result_file: - pickle.dump(result, result_file) -print json.dumps(result, indent=4, sort_keys=True) -# print result.items() diff --git a/benchmarks/playbooks/ssl.yaml b/benchmarks/playbooks/ssl.yaml deleted file mode 100644 index 55e0af9b..00000000 --- a/benchmarks/playbooks/ssl.yaml +++ /dev/null @@ -1,113 +0,0 @@ - - hosts: localhost - connection: local - gather_facts: no - - tasks: - - name: making ssl directory - file: path={{workingdir}}/{{Dest_dir}}/ssl state=directory - - - name: making temporary ssl directory - file: path={{workingdir}}/{{Dest_dir}}/ssl/ssl_temp state=directory - - - hosts: "{{role}}" - become: yes - remote_user: "{{username}}" - - tasks: - - name: checking home directory - shell: sudo echo $HOME - register: home_dir - - - name: cleaning - shell: sudo rm -rf $HOME/Open_SSL - - - name: cleaning previous results - shell: sudo rm -rf $HOME/qtip_result - - - name: making OpenSSL temporary directory - shell: sudo mkdir $HOME/Open_SSL - - - name: making results temporary directory - shell: sudo mkdir $HOME/qtip_result - - - include: ./sys_proxy_pbook.yaml - - - include: ./sys_info_pbook.yaml - vars: - network: false - - - name: Installing OpenSSL dependencies when CentOS - shell: sudo yum install git wget gcc patch perl-Time-HiRes autofconf automake libpcap-devel libtool -y - when: ansible_os_family == "RedHat" - - - name: Installing OpenSSL dependencies when Ubuntu - shell: 
-       when: ansible_os_family == "Debian"
-
-     - name: Fetching OpenSSL
-       shell: cd $HOME/Open_SSL/ && sudo wget http://artifacts.opnfv.org/qtip/utilities/openssl-1.0.2f.tar.gz
-
-     - name: Untar OpenSSL
-       shell: cd $HOME/Open_SSL/ && sudo tar -zxvf openssl-1.0.2f.tar.gz
-
-     - name: configure
-       shell: cd $HOME/Open_SSL/openssl-1.0.2f && sudo ./config
-
-     - name: make
-       shell: cd $HOME/Open_SSL/openssl-1.0.2f && sudo make
-
-     - name: make install
-       shell: cd $HOME/Open_SSL/openssl-1.0.2f && sudo make install
-
-     - name: Benchmarking RSA signatures
-       shell: cd $HOME/Open_SSL/openssl-1.0.2f/apps && sudo ./openssl speed rsa >> $HOME/qtip_result/RSA_dump
-
-     - name: Benchmaring AES-128-cbc cipher encryption throughput
-       shell: cd $HOME/Open_SSL/openssl-1.0.2f/apps && sudo ./openssl speed -evp aes-128-cbc >> $HOME/qtip_result/AES-128-CBC_dump
-
-     - name: Fetching result transformation script
-       copy: src=./result_transform/ssl/ssl_transform.py dest={{home_dir.stdout}}/qtip_result
-
-     - name: Transforming result
-       shell: cd $HOME/qtip_result && python ssl_transform.py
-
-     - name: copy report formation script
-       copy: src=./result_transform/final_report.py dest={{home_dir.stdout}}/qtip_result
-
-     - name: consolidating report
-       shell: cd $HOME/qtip_result && python final_report.py SSL {{fname}}
-
-     - name: registering files
-       shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
-       register: files_to_copy
-
-     - name: copy results
-       fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/ssl/ssl_temp
-       with_items: "{{files_to_copy.stdout_lines}}"
-
-     - name: registering log files
-       shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
-       register: copy_log_results
-
-     - name: copying log results
-       fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/ssl/ssl_temp
-       with_items: "{{copy_log_results.stdout_lines}}"
-
- - hosts: localhost
-   connection: local
-   gather_facts: no
-
-   tasks:
-     - name: echo
-       shell: echo $PWD
-
-     - name: extracting_json
-       shell: ( find {{workingdir}}/{{Dest_dir}}/ssl/ssl_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/ssl/)
-
-     - name: making_logs_folder
-       shell: mkdir -p {{workingdir}}/{{Dest_dir}}/ssl/logs
-
-     - name: extracting_log
-       shell: ( find {{workingdir}}/{{Dest_dir}}/ssl/ssl_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/ssl/logs)
-
-     - name: removing ssl_temp
-       shell: rm -rf {{workingdir}}/{{Dest_dir}}/ssl/ssl_temp
diff --git a/benchmarks/playbooks/sys_info_pbook.yaml b/benchmarks/playbooks/sys_info_pbook.yaml
deleted file mode 100644
index cfcad119..00000000
--- a/benchmarks/playbooks/sys_info_pbook.yaml
+++ /dev/null
@@ -1,42 +0,0 @@
- - name: Epel Release install when CentOS
-   shell: sudo yum install epel-release -y
-   when: ansible_os_family == "RedHat"
-
- - name: Inxi install when CentOS
-   shell: sudo yum install inxi -y
-   when: ansible_os_family == "RedHat"
-
- - name: Software Properties Common
-   shell: sudo apt-get install software-properties-common -y
-   when: ansible_os_family == "Debian"
-
- - name: adding trusty-backport main repo
-   shell: sudo apt-add-repository "deb http://archive.ubuntu.com/ubuntu/ trusty-backports main restricted universe multiverse"
-   when: ansible_os_family == "Debian"
-
- - name: adding trusty main repo
-   shell: sudo apt-add-repository "deb http://archive.ubuntu.com/ubuntu/ trusty main restricted universe multiverse"
-   when: ansible_os_family == "Debian"
when: ansible_os_family == "Debian" - - - name: system info collection tool install when Ubuntu - shell: sudo apt-get update && apt-get install inxi -y - when: ansible_os_family == "Debian" - - - name: Install ansible copy dependencies if remote host has selinux enabled - shell: sudo yum install libselinux-python -y - when: ansible_os_family == "RedHat" - - - name: Install ansiblle copy dependencies if remote host has selinux enaled - shell: sudo apt-get install python-selinux -y - when: ansible_os_family == "Debian" - - - name: system_info script copy - copy: src=./info_script/info_collect.py dest={{home_dir.stdout}}/qtip_result/ - - - name: collecting system informaton for non-network test cases - shell: cd $HOME/qtip_result && sudo python info_collect.py c - when: not network - - - name: collecting system information for network test cases - shell: cd $HOME/qtip_result && sudo python info_collect.py n - when: network diff --git a/benchmarks/playbooks/sys_proxy_pbook.yaml b/benchmarks/playbooks/sys_proxy_pbook.yaml deleted file mode 100644 index bf4a8ccb..00000000 --- a/benchmarks/playbooks/sys_proxy_pbook.yaml +++ /dev/null @@ -1,53 +0,0 @@ -#env -- name: insert shell proxy http - lineinfile: dest=/etc/profile.d/proxy.sh state=present create=yes owner=root group=root mode=0644 regexp="export http_proxy={{ http_proxy }}" - insertafter=EOF line="export http_proxy={{ http_proxy }}" - when: http_proxy is defined - ignore_errors: yes - -- name: insert shell proxy https - lineinfile: dest=/etc/profile.d/proxy.sh state=present create=yes owner=root group=root mode=0644 regexp="export https_proxy={{ https_proxy }}" - insertafter=EOF line="export https_proxy={{ https_proxy }}" - when: https_proxy is defined - ignore_errors: yes - -- name: insert no proxy - lineinfile: dest=/etc/profile.d/proxy.sh state=present create=yes owner=root group=root mode=0644 regexp="{{ no_proxy }}" - insertafter=EOF line="export no_proxy={{ no_proxy }}" - when: no_proxy is defined - ignore_errors: yes - -#wget -- name: insert wget proxy(http) - lineinfile: dest=/etc/wgetrc state=present regexp="http_proxy={{ http_proxy }}" - insertafter="^#http_proxy" line="http_proxy={{ http_proxy }}" - when: http_proxy is defined - ignore_errors: yes - -- name: insert wget proxy(https) - lineinfile: dest=/etc/wgetrc state=present regexp="https_proxy={{ https_proxy }}" - insertafter="^#https_proxy" line="https_proxy={{ https_proxy }}" - when: https_proxy is defined - ignore_errors: yes - -#yum -- name: insert yum proxy(http) - lineinfile: dest=/etc/yum.conf state=present regexp="proxy={{ http_proxy }}" - insertafter=EOF line="proxy={{ http_proxy }}" - when: ansible_os_family == "RedHat" and http_proxy is defined - ignore_errors: yes - -#apt - -- name: insert apt proxy(http) - lineinfile: dest=/etc/apt/apt.conf state=present create=yes regexp="Acquire::http::Proxy \"{{ http_proxy }}\";" - insertafter=EOF line="Acquire::http::Proxy \"{{ http_proxy }}\";" - when: ansible_os_family == "Debian" and http_proxy is defined - ignore_errors: yes - -- name: insert apt proxy(https) - lineinfile: dest=/etc/apt/apt.conf state=present create=yes regexp="Acquire::https::Proxy \"{{ https_proxy }}\";" - insertafter=EOF line="Acquire::https::Proxy \"{{ https_proxy }}\";" - when: ansible_os_family == "Debian" and https_proxy is defined - ignore_errors: yes - diff --git a/benchmarks/playbooks/whetstone.yaml b/benchmarks/playbooks/whetstone.yaml deleted file mode 100644 index 0b1f89a7..00000000 --- a/benchmarks/playbooks/whetstone.yaml +++ /dev/null @@ 
- - hosts: localhost
-   connection: local
-   gather_facts: no
-
-   tasks:
-     - name: making whetstone directory
-       file: path={{workingdir}}/{{Dest_dir}}/whetstone state=directory
-
-     - name: making temporary whetstone directory
-       file: path={{workingdir}}/{{Dest_dir}}/whetstone/whetstone_temp state=directory
-
- - hosts: "{{role}}"
-   become: yes
-   remote_user: "{{username}}"
-
-   tasks:
-     - name: storing_home
-       shell: echo $HOME
-       register: home_dir
-
-     - name: cleaning tempT
-       shell: sudo rm -rf $HOME/tempT
-
-     - name: cleaning_qtip_result
-       shell: sudo rm -rf $HOME/qtip_result
-
-     - name: make directory
-       shell: sudo mkdir $HOME/qtip_result
-
-     - include: ./sys_proxy_pbook.yaml
-
-     - include: ./sys_info_pbook.yaml
-       vars:
-         network: false
-
-     - name: Installing UnixBench dependencies if CentOS
-       shell: sudo yum install git gcc patch perl-Time-HiRes -y
-       when: ansible_os_family == "RedHat"
-
-     - name: Installing UnixBench dependencies if Ubuntu
-       shell: sudo apt-get install git gcc patch perl -y
-       when: ansible_os_family == "Debian"
-
-     - include: ./git_proxy_pbook.yaml
-
-     - name: Clone unixbench
-       git: repo=https://github.com/kdlucas/byte-unixbench.git
-            dest=$HOME/tempT
-
-     - name: make
-       shell: sudo make --directory $HOME/tempT/UnixBench/
-
-     - name: Run Whetstone
-       shell: cd $HOME/tempT/UnixBench/&&./Run -v whetstone
-
-     - name: collecting and transforming result script copy
-       copy: src=./result_transform/ubench_transform.py dest={{home_dir.stdout}}/qtip_result/
-
-     - name: transforming result
-       shell: cd $HOME/qtip_result && sudo python ubench_transform.py
-
-     - name: copying consolidated report script
-       copy: src=./result_transform/final_report.py dest={{home_dir.stdout}}/qtip_result/
-
-     - name: making consolidated report
-       shell: cd $HOME/qtip_result && sudo python final_report.py Whetstone {{fname}}
-
-     - name: making directory
-       file: path={{home_dir.stdout}}/qtip_result/log state=directory
-
-     - name: copying result to temp directory
-       shell: sudo cp -r $HOME/tempT/UnixBench/results/* $HOME/qtip_result/log
-
-     - name: registering files
-       shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
-       register: files_to_copy
-
-     - name: copy results
-       fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/whetstone/whetstone_temp
-       with_items: "{{files_to_copy.stdout_lines}}"
-
-     - name: registering log files
-       shell: (cd $HOME/qtip_result/log/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
-maxdepth 1 -name "*.log") | cut -d'/' -f2 - register: copy_log_results - - - name: copying log results - fetch: src={{home_dir.stdout}}/qtip_result/log/{{item}} dest={{workingdir}}/{{Dest_dir}}/whetstone/whetstone_temp - with_items: "{{copy_log_results.stdout_lines}}" - - - name: cleaning tempT - shell: sudo rm -rf $HOME/tempT - - - name: cleaning_qtip_result - shell: sudo rm -rf $HOME/qtip_result - - - hosts: localhost - connection: local - gather_facts: no - - tasks: - - name: extracting_json - shell: ( find {{workingdir}}/{{Dest_dir}}/whetstone/whetstone_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/whetstone/) - - - name: making_logs_folder - shell: mkdir -p {{workingdir}}/{{Dest_dir}}/whetstone/logs - - - name: extracting_log - shell: ( find {{workingdir}}/{{Dest_dir}}/whetstone/whetstone_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/whetstone/logs) - - - name: removing whetstone_temp - shell: rm -rf {{workingdir}}/{{Dest_dir}}/whetstone/whetstone_temp diff --git a/func/driver.py b/func/driver.py index 9a011c2a..47d00f1f 100644 --- a/func/driver.py +++ b/func/driver.py @@ -65,7 +65,7 @@ class Driver: logger.info(extra_vars) ansible_api = AnsibleApi() ansible_api.execute_playbook('./config/hosts', - './benchmarks/playbooks/{0}.yaml'.format(benchmark), + './benchmarks/perftest/{0}.yaml'.format(benchmark), './config/QtipKey', extra_vars) return self.get_ansible_result(extra_vars['role'], ansible_api.get_detail_playbook_stats()) |