-rw-r--r--  CONTRIBUTING.rst | 19
-rw-r--r--  INFO | 7
-rw-r--r--  benchmarks/fio_jobs/test_job | 13
-rw-r--r--  benchmarks/playbooks/cachebench.yaml | 49
-rw-r--r--  benchmarks/playbooks/dhrystone.yaml | 111
-rw-r--r--  benchmarks/playbooks/dpi.yaml | 120
-rw-r--r--  benchmarks/playbooks/fio.yaml | 105
-rw-r--r--  benchmarks/playbooks/git_proxy_pbook.yaml | 11
-rw-r--r--  benchmarks/playbooks/info_script/info_collect.py | 86
-rw-r--r--  benchmarks/playbooks/iperf.yaml | 161
-rw-r--r--  benchmarks/playbooks/netperf.yaml | 97
-rw-r--r--  benchmarks/playbooks/pktgen.yaml | 47
-rw-r--r--  benchmarks/playbooks/ramspeed.yaml | 109
-rw-r--r--  benchmarks/playbooks/result_transform/dpi/dpi_average.sh | 14
-rw-r--r--  benchmarks/playbooks/result_transform/dpi/dpi_transform.py | 47
-rw-r--r--  benchmarks/playbooks/result_transform/final_report.py | 24
-rwxr-xr-x  benchmarks/playbooks/result_transform/fio/fio_result_transform.py | 29
-rw-r--r--  benchmarks/playbooks/result_transform/iperf/iperf_transform.py | 27
-rw-r--r--  benchmarks/playbooks/result_transform/ramspd/ramspd_transform.py | 41
-rw-r--r--  benchmarks/playbooks/result_transform/ssl/ssl_transform.py | 54
-rw-r--r--  benchmarks/playbooks/result_transform/ubench_transform.py | 32
-rw-r--r--  benchmarks/playbooks/ssl.yaml | 113
-rw-r--r--  benchmarks/playbooks/sys_info_pbook.yaml | 42
-rw-r--r--  benchmarks/playbooks/sys_proxy_pbook.yaml | 53
-rw-r--r--  benchmarks/playbooks/whetstone.yaml | 111
-rw-r--r--  docs/apidocs/index.rst | 4
-rw-r--r--  docs/apidocs/qtip_restful_api.rst | 4
-rw-r--r--  docs/configguide/configuration.rst | 8
-rw-r--r--  docs/configguide/index.rst | 4
-rw-r--r--  docs/designspec/dashboard.rst | 70
-rw-r--r--  docs/designspec/index.rst | 13
-rw-r--r--  docs/overview/index.rst | 5
-rw-r--r--  docs/overview/overview.rst | 4
-rw-r--r--  docs/userguide/index.rst | 4
-rw-r--r--  docs/userguide/introduction.rst | 4
-rw-r--r--  func/driver.py | 2
-rwxr-xr-x [-rw-r--r--]  scripts/cleanup_creds.sh | 3
-rw-r--r--  scripts/ref_results/compute_benchmarks_indices.py | 21
-rw-r--r--  scripts/ref_results/generator_ref_json.py | 81
-rw-r--r--  scripts/ref_results/index_calculation.py | 2
-rw-r--r--  supporting/servers/elk.yml | 1
-rw-r--r--  supporting/servers/inventory | 7
-rw-r--r--  supporting/servers/ngnix.yml | 4
-rw-r--r--  supporting/servers/roles/docker/handlers/main.yml (renamed from supporting/servers/roles/elk/handlers/main.yml) | 4
-rw-r--r--  supporting/servers/roles/docker/tasks/main.yml | 39
-rw-r--r--  supporting/servers/roles/elk/tasks/main.yml | 50
-rw-r--r--  supporting/servers/roles/ngnix/defaults/main.yml (renamed from supporting/servers/roles/elk/defaults/main.yml) | 0
-rw-r--r--  supporting/servers/roles/ngnix/handlers/main.yml | 4
-rw-r--r--  supporting/servers/roles/ngnix/tasks/main.yml | 11
-rw-r--r--  supporting/servers/roles/ngnix/templates/elk.conf.j2 (renamed from supporting/servers/roles/elk/templates/elk.conf.j2) | 0
-rw-r--r--  supporting/servers/roles/ssh/files/yujunz.authorized_keys | 2
51 files changed, 201 insertions, 1672 deletions
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index d3e21b80..6350a862 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -56,6 +56,22 @@ Specially, it is recommended to link each patch set with a JIRA issue. Put
in commit message to create an automatic link.
+*************
+Documentation
+*************
+
+The documents are built automatically by Sphinx from reStructuredText (reST).
+Please read `reStructuredText Primer`_ if you are not familiar with it.
+
+A cheat sheet for headings is as follows:
+
+* # with overline, for parts
+* * with overline, for chapters
+* =, for sections
+* -, for subsections
+* ^, for subsubsections
+* ", for paragraphs
+
************************
Frequent Asked Questions
************************
@@ -76,4 +92,5 @@ to submit. The current members are listed in `INFO`_.
.. _OPNFV Releases: https://wiki.opnfv.org/display/SWREL
.. _Issue Types: https://jira.opnfv.org/secure/ShowConstantsHelp.jspa?decorator=popup#IssueTypes
.. _OpenStack Style Guidelines: http://docs.openstack.org/developer/hacking/
-.. _INFO: https://git.opnfv.org/cgit/qtip/tree/INFO
\ No newline at end of file
+.. _INFO: https://git.opnfv.org/cgit/qtip/tree/INFO
+.. _reStructuredText Primer: http://www.sphinx-doc.org/en/stable/rest.html
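The heading cheat sheet added to CONTRIBUTING.rst above maps each punctuation character to a section level. A minimal reST sketch of that hierarchy, with placeholder titles and shown here only for illustration, would look roughly like this:

    ##########
    Part Title
    ##########

    *************
    Chapter Title
    *************

    Section Title
    =============

    Subsection Title
    ----------------

    Subsubsection Title
    ^^^^^^^^^^^^^^^^^^^

    Paragraph Title
    """""""""""""""

Per the cheat sheet, only parts and chapters carry an overline; the underline (and overline) must be at least as long as the title text for Sphinx to accept it.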
diff --git a/INFO b/INFO
index 9a7229cd..842a92b7 100644
--- a/INFO
+++ b/INFO
@@ -12,7 +12,6 @@ IRC: #opnfv-qtip@freenode
Committers:
-Michael Haugh mhaugh@ixiacom.com
Morgan Richomme morgan.richomme@orange.com
Prabu Kuppuswamy prabu.kuppuswamy@spirent.com
Prakash Ramchandran prakash.ramchandran@huawei.com
@@ -33,8 +32,8 @@ Link to TSC approval of the project:
Link(s) to approval of committers update:
-- http://lists.opnfv.org/pipermail/opnfv-tsc/2016-July/002745.html
-- http://lists.opnfv.org/pipermail/opnfv-tsc/2016-July/002755.html
-- http://lists.opnfv.org/pipermail/opnfv-tsc/2016-August/002848.html
+- https://lists.opnfv.org/pipermail/opnfv-tsc/2016-July/002745.html
+- https://lists.opnfv.org/pipermail/opnfv-tsc/2016-July/002755.html
+- https://lists.opnfv.org/pipermail/opnfv-tsc/2016-August/002848.html
- https://lists.opnfv.org/pipermail/opnfv-tsc/2016-October/003104.html
- https://lists.opnfv.org/pipermail/opnfv-tsc/2016-October/003118.html
diff --git a/benchmarks/fio_jobs/test_job b/benchmarks/fio_jobs/test_job
deleted file mode 100644
index 6817abca..00000000
--- a/benchmarks/fio_jobs/test_job
+++ /dev/null
@@ -1,13 +0,0 @@
-[global]
-
-runtime= 600
-ioengine=libaio
-iodepth=2
-direct=1
-bs=4k
-rw=randrw
-
-[job1]
-size=5G
-
-
diff --git a/benchmarks/playbooks/cachebench.yaml b/benchmarks/playbooks/cachebench.yaml
deleted file mode 100644
index ca9a709c..00000000
--- a/benchmarks/playbooks/cachebench.yaml
+++ /dev/null
@@ -1,49 +0,0 @@
- - hosts: "{{ roles }}"
-
- tasks:
- - name: clean
- shell: rm -rf /usr/bin/phoronix-test-suite
- shell: rm -rf /usr/share/doc/phoronix-test-suite/
- shell: rm -rf /usr/share/phoronix-test-suite
- shell: rm -rf /root/photmp/
-
- - name: cleaning results
- file: path=/root/results state=absent
-
- - name: make direc
- file: path=/root/photmp/ state=directory
-
- - name: Fetch Phornonix
- shell: cd /root/photmp/ && wget https://www.dropbox.com/s/5kks513ozxy7vvs/phoronix-suite.tar.gz
-
- - name: unarchive Phoronix
- shell: cd /root/photmp/ && tar -zxvf phoronix-suite.tar.gz
-
- - name: Install Phornonix Dependencies
- shell: yum install php php-xml php-client php-process -y
-
- - name: Install Phornonix
- shell: cd /root/photmp/phoronix-test-suite-master/&&./install-sh
-
- - name: batchSetup
- shell: phoronix-test-suite batch-setup
-
- - name: install
- shell: phoronix-test-suite batch-install cachebench
-
- - name: run
- shell: phoronix-test-suite batch-run cachebench
-
- - name: making directory
- file: path=/root/results state=directory
-
- - name: copying result to temp directory
- shell: cp -r /var/lib/phoronix-test-suite/test-results/* /root/results/
-
- - name: registering files
- shell: (cd /root/results/; find . -maxdepth 1 -type f) | cut -d'/' -f2
- register: files_to_copy
-
- - name: copy results
- fetch: src=/root/results/{{item}} dest={{workingdir}}/{{Dest_dir}}/cachebench
- with_items: "{{files_to_copy.stdout_lines}}"
diff --git a/benchmarks/playbooks/dhrystone.yaml b/benchmarks/playbooks/dhrystone.yaml
deleted file mode 100644
index 8fe6a490..00000000
--- a/benchmarks/playbooks/dhrystone.yaml
+++ /dev/null
@@ -1,111 +0,0 @@
- - hosts: localhost
- connection: local
- gather_facts: no
-
- tasks:
- - name: making dhrystone directory
- file: path={{workingdir}}/{{Dest_dir}}/dhrystone state=directory
-
- - name: making temporary dhrystone directory
- file: path={{workingdir}}/{{Dest_dir}}/dhrystone/dhrystone_temp state=directory
-
- - hosts: "{{role}}"
- become: yes
- remote_user: "{{username}}"
-
- tasks:
- - name: checking home directory
- shell: echo $HOME
- register: home_dir
-
- - name: cleaning tempT
- shell: sudo rm -rf $HOME/tempT
-
- - name: cleaning qtip_result
- shell: sudo rm -rf $HOME/qtip_result
-
- - name: make directory
- shell: sudo mkdir $HOME/qtip_result
-
- - include: ./sys_proxy_pbook.yaml
-
- - include: ./sys_info_pbook.yaml
- vars:
- network: false
-
- - name: Installing UnixBench dependencies if CentOS
- shell: sudo yum install git gcc patch perl-Time-HiRes -y
- when: ansible_os_family == "RedHat"
-
- - name: Installing UnixBench dependencies if Ubuntu
- shell: sudo apt-get install git gcc patch perl -y
- when: ansible_os_family == "Debian"
-
- - include: ./git_proxy_pbook.yaml
-
- - name: Clone unixbench
- git: repo=https://github.com/kdlucas/byte-unixbench.git
- dest=$HOME/tempT
-
- - name: make
- shell: sudo make --directory $HOME/tempT/UnixBench/
-
- - name: Run dhrystone
- shell: cd $HOME/tempT/UnixBench/&& sudo ./Run -v dhrystone
-
- - name: collecting and transforming result script copy
- copy: src=./result_transform/ubench_transform.py dest={{home_dir.stdout}}/qtip_result/
-
- - name: transforming result
- shell: cd $HOME/qtip_result/ && sudo python ubench_transform.py
- - name: copying consolidated report script
- copy: src=./result_transform/final_report.py dest={{home_dir.stdout}}/qtip_result/
-
- - name: making consolidated report
- shell: cd $HOME/qtip_result && sudo python final_report.py Dhrystone {{fname}}
-
- - name: making directory
- file: path={{home_dir.stdout}}/qtip_result/log state=directory
-
- - name: copying result to temp directory
- shell: sudo cp -r $HOME/tempT/UnixBench/results/* $HOME/qtip_result/log/
-
- - name: registering files
- shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
- register: files_to_copy
-
- - name: copy results
- fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/dhrystone/dhrystone_temp
- with_items: "{{files_to_copy.stdout_lines}}"
-
- - name: registering log files
- shell: (cd $HOME/qtip_result/log/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
- register: copy_log_results
-
- - name: copying log results
- fetch: src={{home_dir.stdout}}/qtip_result/log/{{item}} dest={{workingdir}}/{{Dest_dir}}/dhrystone/dhrystone_temp
- with_items: "{{copy_log_results.stdout_lines}}"
-
- - name: cleaning tempT
- shell: sudo rm -rf $HOME/tempT
-
- - name: cleaning_qtip_result
- shell: sudo rm -rf $HOME/qtip_result
-
-
- - hosts: localhost
- connection: local
- gather_facts: no
-
- tasks:
- - name: extracting_json
- shell: ( find {{workingdir}}/{{Dest_dir}}/dhrystone/dhrystone_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/dhrystone/)
-
- - name: making_logs_folder
- shell: mkdir -p {{workingdir}}/{{Dest_dir}}/dhrystone/logs
-
- - name: extracting_log
- shell: ( find {{workingdir}}/{{Dest_dir}}/dhrystone/dhrystone_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/dhrystone/logs)
-
- - name: removing dhrystone_temp
- shell: rm -rf {{workingdir}}/{{Dest_dir}}/dhrystone/dhrystone_temp
diff --git a/benchmarks/playbooks/dpi.yaml b/benchmarks/playbooks/dpi.yaml
deleted file mode 100644
index 46e065a1..00000000
--- a/benchmarks/playbooks/dpi.yaml
+++ /dev/null
@@ -1,120 +0,0 @@
- - hosts: localhost
- connection: local
- gather_facts: no
-
- tasks:
- - name: making dpi directory
- file: path={{workingdir}}/{{Dest_dir}}/dpi state=directory
-
- - name: making temporary whetstone directory
- file: path={{workingdir}}/{{Dest_dir}}/dpi/dpi_temp state=directory
-
- - hosts: "{{role}}"
- become: yes
- remote_user: "{{username}}"
-
- tasks:
- - name: echo
- shell: echo $USER
-
- - name: checking home directory
- shell: echo $HOME
- register: home_dir
-
- - name: cleaning
- shell: sudo rm -rf $HOME/tempD
-
- - name: cleaning previous results
- shell: sudo rm -rf $HOME/qtip_result
-
- - name: make qtip_result
- shell: sudo mkdir $HOME/qtip_result
-
- - include: ./sys_proxy_pbook.yaml
-
- - include: ./sys_info_pbook.yaml
- vars:
- network: false
-
- - name: Installing nDPI dependencies if CentOS
- shell: sudo yum install git gcc patch perl-Time-HiRes autofconf automake libpcap-devel libtool -y
- when: ansible_os_family == "RedHat"
-
- - name: Installing nDPI dependcies if Ubuntu
- shell: sudo apt-get install git gcc patch autoconf automake libpcap-dev libtool -y
- when: ansible_os_family == "Debian"
-
- - name: making nDPI temporary directory
- shell: sudo mkdir $HOME/tempD
-
- - include: ./git_proxy_pbook.yaml
-
- - name: Clone nDPI
- git: repo=https://github.com/ntop/nDPI.git
- dest=$HOME/tempD/nDPI
-
- - name: autogen
- shell: cd $HOME/tempD/nDPI && sudo ./autogen.sh
-
- - name: configure
- shell: cd $HOME/tempD/nDPI && sudo ./configure
-
- - name: make
- shell: cd $HOME/tempD/nDPI && sudo make
-
- - name: Fetching Test_pcap file
- shell: cd $HOME/tempD/nDPI/example && wget http://build.opnfv.org/artifacts.opnfv.org/qtip/utilities/test.pcap
-
- - name: fetch Averaging script
- copy: src=./result_transform/dpi/dpi_average.sh dest={{home_dir.stdout}}/tempD/nDPI/example mode=777
-
- - name: Run nDPI benchmark
- shell: cd $HOME/tempD/nDPI/example && sudo ./dpi_average.sh
-
- - name: copy result to temp_direc
- shell: sudo cp $HOME/tempD/nDPI/example/dpi_dump.txt $HOME/qtip_result
-
- - name: fetch dpi result transform script
- copy: src=./result_transform/dpi/dpi_transform.py dest={{home_dir.stdout}}/qtip_result
-
- - name: Transforming results
- shell: cd $HOME/qtip_result && sudo python dpi_transform.py
-
- - name: copy report formation script
- copy: src=./result_transform/final_report.py dest={{home_dir.stdout}}/qtip_result
-
- - name: consolidating report
- shell: cd $HOME/qtip_result && sudo python final_report.py DPI {{fname}}
-
- - name: registering files
- shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
- register: files_to_copy
-
- - name: copy results
- fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/dpi/dpi_temp
- with_items: "{{files_to_copy.stdout_lines}}"
-
- - name: registering log files
- shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
- register: copy_log_results
-
- - name: copying log results
- fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/dpi/dpi_temp
- with_items: "{{copy_log_results.stdout_lines}}"
-
- - hosts: localhost
- connection: local
- gather_facts: no
-
- tasks:
- - name: extracting_json
- shell: ( find {{workingdir}}/{{Dest_dir}}/dpi/dpi_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/dpi/)
-
- - name: making_logs_folder
- shell: mkdir -p {{workingdir}}/{{Dest_dir}}/dpi/logs
-
- - name: extracting_log
- shell: ( find {{workingdir}}/{{Dest_dir}}/dpi/dpi_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/dpi/logs)
-
- - name: removing dpi_temp
- shell: rm -rf {{workingdir}}/{{Dest_dir}}/dpi/dpi_temp
diff --git a/benchmarks/playbooks/fio.yaml b/benchmarks/playbooks/fio.yaml
deleted file mode 100644
index 813dc411..00000000
--- a/benchmarks/playbooks/fio.yaml
+++ /dev/null
@@ -1,105 +0,0 @@
- - hosts: localhost
- connection: local
- gather_facts: no
-
- tasks:
- - name: making fio directory
- file: path={{workingdir}}/{{Dest_dir}}/fio state=directory
-
- - name: making temporary fio directory
- file: path={{workingdir}}/{{Dest_dir}}/fio/fio_temp state=directory
-
-
- - hosts: "{{role}}"
- become: yes
- remote_user: "{{username}}"
-
- tasks:
- - name: checking home directory
- shell: echo $HOME
- register: home_dir
-
- - name: cleaning
- shell: sudo rm -rf $HOME/fio
-
- - name: cleaning previous results
- shell: sudo rm -rf $HOME/qtip_result
-
- - name: making fio temporary directory
- shell: sudo mkdir $HOME/fio
-
- - name: making results temporary directory
- shell: sudo mkdir $HOME/qtip_result
-
- - include: ./sys_proxy_pbook.yaml
-
- - include: ./sys_info_pbook.yaml
- vars:
- network: false
-
- - name: Installing fio dependencies when CentOS
- shell: sudo yum install wget gcc libaio-devel -y
- when: ansible_os_family == "RedHat"
-
- - name: Installing fio dependencies when Ubuntu
- shell: sudo apt-get install wget gcc libaio-dev -y
- when: ansible_os_family == "Debian"
-
- - name: Fetching fio
- shell: cd $HOME/fio/ && wget http://freecode.com/urls/3aa21b8c106cab742bf1f20d60629e3f -O fio.tar.gz
- - name: Untar fio
- shell: cd $HOME/fio/ && sudo tar -zxvf fio.tar.gz
- - name: configure
- shell: cd $HOME/fio/fio-2.1.10 && sudo ./configure && sudo make
-
- - name: Fetching fio job
- copy: src=./../fio_jobs/test_job dest={{home_dir.stdout}}/fio/fio-2.1.10/
-
- - name: Benchmarking block storage through fio
- shell: cd $HOME/fio/fio-2.1.10 && sudo ./fio --output-format=json --output=$HOME/qtip_result/fio_result.json test_job
-
- - name: Fetching result transformation script
- copy: src=./result_transform/fio/fio_result_transform.py dest={{home_dir.stdout}}/qtip_result
-
- - name: Transforming result
- shell: cd $HOME/qtip_result && sudo python fio_result_transform.py
-
- - name: copy report formation script
- copy: src=./result_transform/final_report.py dest={{home_dir.stdout}}/qtip_result
-
- - name: consolidating report
- shell: cd $HOME/qtip_result && sudo python final_report.py FIO {{fname}}
-
- - name: registering files
- shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
- register: files_to_copy
-
- - name: copy results
- fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/fio/fio_temp
- with_items: "{{files_to_copy.stdout_lines}}"
-
- - name: registering log files
- shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
- register: copy_log_results
-
- - name: copying log results
- fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/fio/fio_temp
- with_items: "{{copy_log_results.stdout_lines}}"
-
- - hosts: localhost
- connection: local
- gather_facts: no
-
- tasks:
- - name: extracting_json
- shell: ( find {{workingdir}}/{{Dest_dir}}/fio/fio_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/fio/)
-
- - name: making_logs_folder
- shell: mkdir -p {{workingdir}}/{{Dest_dir}}/fio/logs
-
- - name: extracting_log
- shell: ( find {{workingdir}}/{{Dest_dir}}/fio/fio_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/fio/logs)
-
- - name: removing fio_log
- shell: rm -rf {{workingdir}}/{{Dest_dir}}/fio/fio_temp
-
diff --git a/benchmarks/playbooks/git_proxy_pbook.yaml b/benchmarks/playbooks/git_proxy_pbook.yaml
deleted file mode 100644
index 5cb6f450..00000000
--- a/benchmarks/playbooks/git_proxy_pbook.yaml
+++ /dev/null
@@ -1,11 +0,0 @@
-#git
-- name: set git proxy(http)
- shell: "git config --global http.proxy {{ http_proxy }}"
- when: http_proxy is defined
- ignore_errors: yes
-
-- name: set git proxy(https)
- shell: "git config --global https.proxy {{https_proxy}}"
- when: https_proxy is defined
- ignore_errors: yes
-
diff --git a/benchmarks/playbooks/info_script/info_collect.py b/benchmarks/playbooks/info_script/info_collect.py
deleted file mode 100644
index 3fc35d5a..00000000
--- a/benchmarks/playbooks/info_script/info_collect.py
+++ /dev/null
@@ -1,86 +0,0 @@
-import os
-import pickle
-import json
-import sys
-
-os.system('inxi -b -c0 -n > $PWD/est_2')
-est_ob = open("est_2", "r+")
-est_ob2 = open("est_1", "w+")
-in_string = est_ob.read().replace('\n', ' ')
-cpu_idle = float(os.popen("""top -bn1 | grep "Cpu(s)" | awk '{print $8}'""").read().rstrip())
-cpu_usage = 100 - cpu_idle
-est_ob2.write(in_string)
-est_ob.close()
-est_ob2.close()
-
-inxi_host = os.popen("""cat $PWD/est_1 | grep -o -P '(?<=Host:).*(?=Kernel)' """).read().lstrip().rstrip()
-inxi_mem = os.popen("""cat $PWD/est_1 | grep -o -P '(?<=Memory:).*(?=MB)' """).read().lstrip().rstrip() + "MB"
-inxi_cpu = os.popen("""cat $PWD/est_1 | grep -o -P '(?<=CPU).*(?=speed)' | cut -f2 -d':'""").read().lstrip().rstrip()
-inxi_distro = os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=Distro:).*(?=Machine:)' """).read().rstrip().lstrip()
-inxi_kernel = os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=Kernel:).*(?=Console:)' """).read().rstrip().lstrip()
-inxi_HD = os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=HDD Total Size:).*(?=Info:)' """).read().rstrip().lstrip()
-inxi_product = os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=product:).*(?=Mobo:)' """).read().rstrip().lstrip()
-
-info_dict = {'hostname': inxi_host,
- 'product': inxi_product,
- 'os': inxi_distro,
- 'kernel': inxi_kernel,
- 'cpu': inxi_cpu,
- 'cpu_usage': '{0}%'.format(str(round(cpu_usage, 3))),
- 'memory_usage': inxi_mem,
- 'disk_usage': inxi_HD}
-network_flag = str(sys.argv[1]).rstrip()
-
-if (network_flag == 'n'):
-
- info_dict['network_interfaces'] = {}
- tem_2 = """ cat $PWD/est_1 | grep -o -P '(?<=Network:).*(?=Info:)'"""
- print os.system(tem_2 + ' > Hello')
- i = int(os.popen(tem_2 + " | grep -o 'Card' | wc -l ").read())
- print i
-
- for x in range(1, i + 1):
- tem = """ cat $PWD/est_1 | grep -o -P '(?<=Card-""" + str(x) + """:).*(?=Card-""" + str(x + 1) + """)'"""
- if i == 1:
- tem = """ cat $PWD/est_1 | grep -o -P '(?<=Network:).*(?=Info:)'"""
- inxi_card_1 = ((os.popen(tem + " | grep -o -P '(?<=Card:).*(?=Drives:)'|sed 's/ *driver:.*//'").read().rstrip().lstrip()))
- print inxi_card_1
- info_dict['network_interfaces']['interface_' + str(x)] = {}
- info_dict['network_interfaces']['interface_' + str(x)]['network_card'] = inxi_card_1
- inxi_card_2 = ((os.popen(tem + "| grep -o -P '(?<=Card:).*(?=Drives:)'|sed -e 's/^.*IF: //'").read())).rstrip().lstrip()
- info_dict['network_interfaces']['interface_' + str(x)]['interface_info'] = inxi_card_2
- elif x < (i):
- print "two"
- inxi_card_1 = ((os.popen(tem + "| sed 's/ *driver:.*//'").read().rstrip().lstrip()))
- info_dict['network_interfaces']['interface_' + str(x)] = {}
- info_dict['network_interfaces']['interface_' + str(x)]['network_Card'] = inxi_card_1
- inxi_card_2 = ((os.popen(tem + "|sed -e 's/^.*IF: //'").read())).rstrip().lstrip()
- info_dict['network_interfaces']['interface_' + str(x)]['interface_info'] = inxi_card_2
- elif x == i:
- print "Three"
- info_dict['network_interfaces']['interface_' + str(x)] = {}
- inxi_card_1 = ((os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=Card-""" + str(x) + """:).*(?=Drives:)'| sed 's/ *driver:.*//' """).read().rstrip().lstrip()))
- info_dict['network_interfaces']['interface_' + str(x)]['network_Card'] = inxi_card_1
- inxi_card_2 = ((os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=Card-""" + str(x) + """:).*(?=Drives:)'| sed -e 's/^.*IF: //' """).read().rstrip().lstrip()))
- info_dict['network_interfaces']['interface_' + str(x)]['interface_info'] = inxi_card_2
- else:
- print "No network cards"
- os.system("bwm-ng -o plain -c 1 | grep -v '=' | grep -v 'iface' | grep -v '-' > bwm_dump")
- n_interface = int(os.popen(" cat bwm_dump | grep -v 'total' | wc -l ").read().rstrip())
- interface = {}
- for x in range(1, n_interface):
- interface_name = os.popen(" cat bwm_dump | awk 'NR==" + str(x) + "' | awk '{print $1}' ").read().rstrip().replace(':', '')
- interface[str(interface_name)] = {}
- interface[str(interface_name)]['Rx (KB/s)'] = os.popen(" cat bwm_dump | awk 'NR==" + str(x) + "' | awk '{print $2}' ").read().rstrip()
- interface[str(interface_name)]['Tx (KB/s)'] = os.popen(" cat bwm_dump | awk 'NR==" + str(x) + "' | awk '{print $4}' ").read().rstrip()
- interface[str(interface_name)]['Total (KB/s)'] = os.popen(" cat bwm_dump | awk 'NR== " + str(x) + "' | awk '{print $6}' ").read().rstrip()
-
- info_dict['interface_io'] = interface
-
-print info_dict
-
-with open('./sys_info_temp', 'w+')as out_info:
- pickle.dump(info_dict, out_info)
-
-with open('temp', 'w+') as result_json:
- json.dump(info_dict, result_json, indent=4, sort_keys=True)
diff --git a/benchmarks/playbooks/iperf.yaml b/benchmarks/playbooks/iperf.yaml
deleted file mode 100644
index b6fd75c5..00000000
--- a/benchmarks/playbooks/iperf.yaml
+++ /dev/null
@@ -1,161 +0,0 @@
- - hosts: localhost
- connection: local
- gather_facts: no
-
- tasks:
- - name: getting directory
- shell: sudo echo $PWD
- register: qtip_dir
-
- - name: making Iperf directory
- file: path={{workingdir}}/{{Dest_dir}}/iperf state=directory
-
- - name: making temporary iperf directory
- file: path={{workingdir}}/{{Dest_dir}}/iperf/iperf_temp state=directory
-
-
- - hosts: "{{role}}"
- become: yes
- remote_user: "{{username}}"
-
- tasks:
- - name: Rolename
- set_fact:
- rolename: "{{role}}"
- when: role is defined
-
- - name: installertype
- set_fact:
- installertype: "{{installer}}"
-
- - name: Get Hostname
- shell: echo $HOSTNAME
- register: hostID
-
- - name: echo
- shell: echo index_var
-
- - name: checking home directory
- shell: echo $HOME
- register: home_dir
-
- - name: cleaning
- shell: sudo rm -rf $HOME/iperf
-
- - name: cleaning previous results
- shell: sudo rm -rf $HOME/qtip_result
-
- - name: making Iperf temporary directory
- shell: sudo mkdir $HOME/iperf
-
- - name: making results temporary directory
- shell: sudo mkdir $HOME/qtip_result
-
- - include: ./sys_proxy_pbook.yaml
-
- - include: ./sys_info_pbook.yaml
- vars:
- network: true
-
- - name: Installing Epel-release when CentOS
- shell: sudo yum install epel-release -y
- when: ansible_os_family == "RedHat"
-
- - name: Allow iperf server port in iptables input rules
- shell: iptables -A INPUT -p tcp --dport {{iperf_port}} -j ACCEPT
- vars:
- iperf_port: 5201
- ignore_errors: yes
- when: rolename == "1-server" and installertype == 'fuel'
-
- - name: Installing IPERF when Ubuntu
- shell: sudo apt-get install iperf3 -y
- when: ansible_os_family == "Debian"
-
- - name: Installing Iperf3
- shell: sudo yum install iperf3 -y
- when: ansible_os_family == "RedHat"
-
- - name: Running iperf on server
- shell: iperf3 -s
- async: 400
- poll: 0
- when: rolename == "1-server"
-
- - name: Running Iperf on Host
- shell: iperf3 --time {{duration}} -b 0 G -c {{ip1}} -J -O10 >> {{home_dir.stdout}}/qtip_result/iperf_raw.json
- ignore_errors: yes
- with_items:
- - "{{ip1}}"
- when: rolename == "2-host" and "{{privateip1}}" == "NONE"
-
- - name: Running Iperf on Host
- shell: iperf3 --time {{duration}} -b 0 G -c {{privateip1}} -J -O10 >> {{home_dir.stdout}}/qtip_result/iperf_raw.json
- ignore_errors: yes
- with_items:
- - "{{ip1}}"
- when: rolename == "2-host" and "{{privateip1}}" != "NONE"
-
- - name: Fetching result transformation script
- copy: src=./result_transform/iperf/iperf_transform.py dest={{home_dir.stdout}}/qtip_result
- - name: Transforming result
-
- shell: cd $HOME/qtip_result && sudo python iperf_transform.py
- when: rolename =="2-host" and "{{ip2}}" == ''
-
- - name: copy report formation script
- copy: src=./result_transform/final_report.py dest={{home_dir.stdout}}/qtip_result
- when: rolename =="2-host" and "{{ip2}}" == ''
-
- - name: consolidating report
- shell: cd $HOME/qtip_result && sudo python final_report.py IPERF {{fname}}
- when: rolename =="2-host" and "{{ip2}}" == ''
-
- - name: Files to Copy
- shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
- register: files_to_copy
- when: rolename =="2-host" and "{{ip2}}" == ''
-
- - name: copy results
- fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/iperf/iperf_temp
- with_items: files_to_copy.stdout_lines
- when: rolename =="2-host" and "{{ip2}}" == ''
-
- - name: registering log files
- shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
- register: copy_log_results
- when: rolename =="2-host" and "{{ip2}}" == ''
-
- - name: copying log results
- fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/iperf/iperf_temp
- with_items: copy_log_results.stdout_lines
- when: rolename =="2-host" and "{{ip2}}" == ''
-
-
- - hosts: localhost
- connection: local
- gather_facts: no
-
- tasks:
- - name: Rolename
- set_fact:
- rolename: "{{role}}"
- when: role is defined
-
- - name: extracting_json
- shell: ( find {{workingdir}}/{{Dest_dir}}/iperf/iperf_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/iperf/)
- when: rolename == "2-host"
-
- - name: making_logs_folder
- shell: mkdir -p {{workingdir}}/{{Dest_dir}}/iperf/logs
-
- - name: extracting_log
- shell: ( find {{workingdir}}/{{Dest_dir}}/iperf/iperf_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/iperf/logs)
- when: rolename == "2-host"
-
- - name: removing iperf_raw file
- shell: rm -rf {{workingdir}}/{{Dest_dir}}/iperf/iperf_raw.json
- when: rolename == "2-host"
-
- - name: removing iperf_temp
- shell: rm -rf {{workingdir}}/{{Dest_dir}}/iperf/iperf_temp
diff --git a/benchmarks/playbooks/netperf.yaml b/benchmarks/playbooks/netperf.yaml
deleted file mode 100644
index a07752cb..00000000
--- a/benchmarks/playbooks/netperf.yaml
+++ /dev/null
@@ -1,97 +0,0 @@
- - hosts: localhost
- connection: local
- gather_facts: no
-
- tasks:
- - name: getting directory
- shell: echo $PWD
- register: qtip_dir
-
- - name: making Netperf directory
- file: path={{workingdir}}/{{Dest_dir}}/netperf state=directory
-
- - name: making temporary netperf directory
- file: path={{workingdir}}/{{Dest_dir}}/netperf/netperf_temp state=directory
-
-
- - hosts: "{{role}}"
- become: yes
- remote_user: "{{username}}"
-
- tasks:
- - name: Rolename
- set_fact:
- rolename: "{{role}}"
- when: role is defined
-
- - name: Get Hostname
- shell: echo $HOSTNAME
- register: hostID
-
- - name: checking home directory
- shell: echo $HOME
- register: home_dir
-
- - name: cleaning
- shell: rm -rf $HOME/netperf
-
- - name: cleaning previous results
- shell: rm -rf $HOME/qtip_result
-
- - name: making netperf temporary directory
- shell: mkdir $HOME/netperf
-
- - name: making results temporary directory
- shell: mkdir $HOME/qtip_result
-
- - name: Fetching netperf
- shell: wget ftp://ftp.netperf.org/netperf/netperf-2.7.0.tar.gz
- when: ansible_os_family == "RedHat"
-
- - name: Extracting Netperf
- shell: tar -xvf netperf-2.7.0.tar.gz
-
- - name: configuring netperf
- shell: cd $HOME/netperf-2.7.0 && ./configure
-
- - name: Making Netperf
- shell: cd $HOME/netperf-2.7.0 && make
-
- - name: Installing Netperf
- shell: cd $HOME/netperf-2.7.0 && make install
-
- - name: Running netperf on server
- shell: /usr/local/bin/netserver -p 4000
- when: rolename == "1-server"
-
- - name: Running netperf on Host
- shell: /usr/local/bin/netperf -H {{privateip1}} -p 4000 -l {{duration}} -t {{teststream}} -fG >> ./qtip_result/server{{hostID.stdout}}-{{item}}.json
- ignore_errors: yes
- with_items:
- - "{{ip1}}"
- when: rolename == "2-host" and "{{ip2}}" == ''
-
- - name: Files to Copy
- shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
- register: files_to_copy
-
- - name: copy results
- fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/netperf/netperf_temp
- with_items: "{{files_to_copy.stdout_lines}}"
-
- - hosts: localhost
- connection: local
- gather_facts: no
-
- tasks:
- - name: Rolename
- set_fact:
- rolename: "{{role}}"
- when: role is defined
-
- - name: extracting_json
- shell: ( find {{workingdir}}/{{Dest_dir}}/netperf/netperf_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/netperf/)
- when: rolename == "2-host"
-
- - name: removing netperf_temp
- shell: rm -rf {{workingdir}}/{{Dest_dir}}/netperf/netperf_temp
diff --git a/benchmarks/playbooks/pktgen.yaml b/benchmarks/playbooks/pktgen.yaml
deleted file mode 100644
index f466129b..00000000
--- a/benchmarks/playbooks/pktgen.yaml
+++ /dev/null
@@ -1,47 +0,0 @@
- - hosts: localhost
- connection: local
- gather_facts: no
-
- tasks:
- - name: getting directory
- shell: echo $PWD
- register: qtip_dir
-
- - name: making pktgen directory
- file: path={{workingdir}}/{{Dest_dir}}/pktgen state=directory
-
- - name: making temporary pktgen directory
- file: path={{workingdir}}/{{Dest_dir}}/pktgen/pktgen_temp state=directory
-
-
- - hosts: "{{role}}"
-
- tasks:
- - name: Rolename
- set_fact:
- rolename: "{{role}}"
- when: role is defined
-
- - name: IPTABLE Setup
- shell: iptables -F
- when: rolename == '1-server'
-
- - name: iptables
- shell: sudo iptables -A INPUT -p udp --dport 1000 -j DROP
- when: rolename == '1-server'
-
- - name: run pktgen
- shell: bash pktgen.sh {{privateip1}} 1 {{packetsize}} {{duration}} >> .json
- when: rolename == '2-host'
-
- - hosts: "{{role}}"
-
- tasks:
- - name: Rolename
- set_fact:
- rolename: "{{role}}"
- when: role is defined
-
- - name: Get server packetsize
- shell: iptables -L -vnx >> pktgenServer{{privateip1}}.txt
- when: rolename == '1-server'
diff --git a/benchmarks/playbooks/ramspeed.yaml b/benchmarks/playbooks/ramspeed.yaml
deleted file mode 100644
index 3fb78cb5..00000000
--- a/benchmarks/playbooks/ramspeed.yaml
+++ /dev/null
@@ -1,109 +0,0 @@
- - hosts: localhost
- connection: local
- gather_facts: no
-
- tasks:
- - name: making ramspeed directory
- file: path={{workingdir}}/{{Dest_dir}}/ramspeed state=directory
-
- - name: making temporary ramspeed directory
- file: path={{workingdir}}/{{Dest_dir}}/ramspeed/ramspeed_temp state=directory
-
-
- - hosts: "{{role}}"
- become: yes
- remote_user: "{{username}}"
-
- tasks:
- - name: checking home directory
- shell: echo $HOME
- register: home_dir
-
- - name: cleaning
- shell: sudo rm -rf $HOME/ramspeed
-
- - name: cleaning previous results
- shell: sudo rm -rf $HOME/qtip_result
-
- - name: making ramspeed temporary directory
- shell: sudo mkdir $HOME/ramspeed
-
- - name: making results temporary directory
- shell: sudo mkdir $HOME/qtip_result
-
- - include: ./sys_proxy_pbook.yaml
-
- - include: ./sys_info_pbook.yaml
- vars:
- network: false
-
- - name: Installing RAM_Speed dependencies when CentOS
- shell: sudo yum install wget gcc -y
- when: ansible_os_family == "RedHat"
-
- - name: Installing RAM_Speed dependencies when Ubuntu
- shell: sudo apt-get install wget gcc -y
- when: ansible_os_family == "Debian"
-
- - name: make dummy file
- shell: sudo touch $HOME/ramspeed/ramspeed.tar.gz
-
- - name: Fetching RAM_Speed
- shell: cd $HOME/ramspeed/ && sudo wget -O ramspeed.tar.gz https://docs.google.com/uc?id=0B92Bp5LZTM7gRFctalZLMktTNDQ
-
- - name: Untar RAM_SPeed
- shell: cd $HOME/ramspeed/ && sudo tar -zxvf ramspeed.tar.gz
-
- - name: configure
- shell: cd $HOME/ramspeed/ramsmp-3.5.0 && ./build.sh
-
- - name: Benchmarking IntMem Bandwidth
- shell: cd $HOME/ramspeed/ramsmp-3.5.0 && ./ramsmp -b 3 -l 5 -p 1 >> $HOME/qtip_result/Intmem
-
- - name: Benchmarking FloatMem Bandwidth
- shell: cd $HOME/ramspeed/ramsmp-3.5.0 && ./ramsmp -b 6 -l 5 -p 1 >> $HOME/qtip_result/Floatmem
-
- - name: Fetching result transformation script
- copy: src=./result_transform/ramspd/ramspd_transform.py dest={{home_dir.stdout}}/qtip_result
-
- - name: Transforming result
- shell: cd $HOME/qtip_result && sudo python ramspd_transform.py
-
- - name: copy report formation script
- copy: src=./result_transform/final_report.py dest={{home_dir.stdout}}/qtip_result
-
- - name: consolidating report
- shell: cd $HOME/qtip_result && sudo python final_report.py RamSpeed {{fname}}
-
- - name: registering files
- shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
- register: files_to_copy
-
- - name: copy results
- fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/ramspeed/ramspeed_temp
- with_items: "{{files_to_copy.stdout_lines}}"
-
- - name: registering log files
- shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
- register: copy_log_results
-
- - name: copying log results
- fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/ramspeed/ramspeed_temp
- with_items: "{{copy_log_results.stdout_lines}}"
-
- - hosts: localhost
- connection: local
- gather_facts: no
-
- tasks:
- - name: extracting_json
- shell: ( find {{workingdir}}/{{Dest_dir}}/ramspeed/ramspeed_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/ramspeed/)
-
- - name: making_logs_folder
- shell: mkdir -p {{workingdir}}/{{Dest_dir}}/ramspeed/logs
-
- - name: extracting_log
- shell: ( find {{workingdir}}/{{Dest_dir}}/ramspeed/ramspeed_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/ramspeed/logs)
-
- - name: removing ramspeed_log
- shell: rm -rf {{workingdir}}/{{Dest_dir}}/ramspeed/ramspeed_temp
diff --git a/benchmarks/playbooks/result_transform/dpi/dpi_average.sh b/benchmarks/playbooks/result_transform/dpi/dpi_average.sh
deleted file mode 100644
index 405d3ff6..00000000
--- a/benchmarks/playbooks/result_transform/dpi/dpi_average.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/bash
-
-COUNTER=0
-WDIR=$PWD
-while [ $COUNTER -lt 10 ]; do
-
- echo $WDIR
- $( ./ndpiReader -i test.pcap >> $WDIR/dpi_dump.txt )
- let COUNTER=COUNTER+1
- echo "Run number: $COUNTER"
-
-done
-
-
diff --git a/benchmarks/playbooks/result_transform/dpi/dpi_transform.py b/benchmarks/playbooks/result_transform/dpi/dpi_transform.py
deleted file mode 100644
index ee29d8e2..00000000
--- a/benchmarks/playbooks/result_transform/dpi/dpi_transform.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import os
-import pickle
-import datetime
-
-sum_dpi_pps = float(0)
-sum_dpi_bps = float(0)
-
-for x in range(1, 11):
- dpi_result_pps = float(
- os.popen(
- "cat $HOME/qtip_result/dpi_dump.txt | grep 'nDPI throughput:' | awk 'NR=='" +
- str(x) +
- " | awk '{print $3}'").read().lstrip())
- dpi_result_bps = float(
- os.popen(
- "cat $HOME/qtip_result/dpi_dump.txt | grep 'nDPI throughput:' | awk 'NR=='" +
- str(x) +
- " | awk '{print $7}'").read().rstrip())
-
- if (dpi_result_pps > 100):
- dpi_result_pps = dpi_result_pps / 1000
-
- if (dpi_result_bps > 100):
- dpi_result_bps = dpi_result_bps / 1000
-
- sum_dpi_pps += dpi_result_pps
- sum_dpi_bps += dpi_result_bps
-
-dpi_result_pps = sum_dpi_pps / 10
-dpi_result_bps = sum_dpi_bps / 10
-
-host = os.popen("hostname").read().rstrip()
-log_time_stamp = str(datetime.datetime.utcnow().isoformat())
-
-os.popen(
- "cat $HOME/qtip_result/dpi_dump.txt > $HOME/qtip_result/" +
- host +
- "-" +
- log_time_stamp +
- ".log")
-
-home_dir = str(os.popen("echo $HOME").read().rstrip())
-host = os.popen("echo $HOSTNAME")
-result = {'pps': round(dpi_result_pps, 3),
- 'bps': round(dpi_result_bps, 3)}
-with open('./result_temp', 'w+') as result_file:
- pickle.dump(result, result_file)
diff --git a/benchmarks/playbooks/result_transform/final_report.py b/benchmarks/playbooks/result_transform/final_report.py
deleted file mode 100644
index 274742d4..00000000
--- a/benchmarks/playbooks/result_transform/final_report.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import pickle
-import json
-import datetime
-import os
-import sys
-
-home_dir = str((os.popen("echo $HOME").read().rstrip()))
-
-with open('./sys_info_temp', 'r') as sys_info_f:
- sys_info_dict = pickle.load(sys_info_f)
-with open('./result_temp', 'r') as result_f:
- result_dict = pickle.load(result_f)
-
-host_name = (os.popen("hostname").read().rstrip())
-benchmark_name = str(sys.argv[1])
-testcase_name = str(sys.argv[2])
-report_time_stamp = str(datetime.datetime.utcnow().isoformat())
-final_dict = {"name": testcase_name,
- "time": report_time_stamp,
- "system_information": sys_info_dict,
- "details": result_dict}
-
-with open('./' + host_name + '-' + report_time_stamp + '.json', 'w+') as result_json:
- json.dump(final_dict, result_json, indent=4, sort_keys=True)
diff --git a/benchmarks/playbooks/result_transform/fio/fio_result_transform.py b/benchmarks/playbooks/result_transform/fio/fio_result_transform.py
deleted file mode 100755
index 5ecac823..00000000
--- a/benchmarks/playbooks/result_transform/fio/fio_result_transform.py
+++ /dev/null
@@ -1,29 +0,0 @@
-import json
-import pickle
-import os
-import datetime
-
-
-def get_fio_job_result(fio_job_data):
- return {'read': {'io_bytes': fio_job_data["read"]["io_bytes"],
- 'io_ps': fio_job_data["read"]["iops"],
- 'io_runtime_millisec': fio_job_data["read"]["runtime"],
- 'mean_io_latenchy_microsec': fio_job_data["read"]["lat"]["mean"]},
- 'write': {'io_bytes': fio_job_data["write"]["io_bytes"],
- 'io_ps': fio_job_data["write"]["iops"],
- 'io_runtime_millisec': fio_job_data["write"]["runtime"],
- 'mean_io_latenchy_microsec': fio_job_data["write"]["lat"]["mean"]}}
-
-
-with open("fio_result.json") as fio_raw:
- fio_data = json.load(fio_raw)
-
-fio_result_dict = {}
-for x, result in enumerate(map(get_fio_job_result, fio_data["jobs"])):
- fio_result_dict['job_{0}'.format(x)] = result
-
-host_name = (os.popen("hostname").read().rstrip())
-report_time = str(datetime.datetime.utcnow().isoformat())
-os.system("mv fio_result.json " + str(host_name) + "-" + report_time + ".log")
-with open('./result_temp', 'w + ')as out_fio_result:
- pickle.dump(fio_result_dict, out_fio_result)
diff --git a/benchmarks/playbooks/result_transform/iperf/iperf_transform.py b/benchmarks/playbooks/result_transform/iperf/iperf_transform.py
deleted file mode 100644
index b52e4634..00000000
--- a/benchmarks/playbooks/result_transform/iperf/iperf_transform.py
+++ /dev/null
@@ -1,27 +0,0 @@
-import json
-import datetime
-import pickle
-with open('iperf_raw.json', 'r') as ifile:
- raw_iperf_data = json.loads(ifile.read().rstrip())
-
-bits_sent = raw_iperf_data['end']['sum_sent']['bits_per_second']
-bits_received = raw_iperf_data['end']['sum_received']['bits_per_second']
-total_byte_sent = raw_iperf_data['end']['sum_sent']['bytes']
-total_byte_received = raw_iperf_data['end']['sum_received']['bytes']
-cpu_host_total_percent = raw_iperf_data['end']['cpu_utilization_percent']['host_total']
-cpu_remote_total_percent = raw_iperf_data['end']['cpu_utilization_percent']['remote_total']
-
-time_stamp = str(datetime.datetime.utcnow().isoformat())
-
-result = {'version': raw_iperf_data['start']['version'],
- 'bandwidth': {'sender_throughput': bits_sent,
- 'received_throughput': bits_received},
- 'cpu': {'cpu_host': cpu_host_total_percent,
- 'cpu_remote': cpu_remote_total_percent}
- }
-
-with open('iperf_raw-' + time_stamp + '.log', 'w+') as ofile:
- ofile.write(json.dumps(raw_iperf_data))
-
-with open('./result_temp', 'w+') as result_file:
- pickle.dump(result, result_file)
diff --git a/benchmarks/playbooks/result_transform/ramspd/ramspd_transform.py b/benchmarks/playbooks/result_transform/ramspd/ramspd_transform.py
deleted file mode 100644
index 960f84fc..00000000
--- a/benchmarks/playbooks/result_transform/ramspd/ramspd_transform.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import os
-import pickle
-import datetime
-
-intmem_copy = os.popen("cat Intmem | grep 'BatchRun Copy' | awk '{print $4}'").read().rstrip()
-intmem_scale = os.popen("cat Intmem | grep 'BatchRun Scale' | awk '{print $4}'").read().rstrip()
-intmem_add = os.popen("cat Intmem | grep 'BatchRun Add' | awk '{print $4}'").read().rstrip()
-intmem_triad = os.popen("cat Intmem | grep 'BatchRun Triad' | awk '{print $4}'").read().rstrip()
-intmem_average = os.popen("cat Intmem | grep 'BatchRun AVERAGE' | awk '{print $4}'").read().rstrip()
-
-print intmem_copy
-print intmem_average
-
-floatmem_copy = os.popen("cat Floatmem | grep 'BatchRun Copy' | awk '{print $4}'").read().rstrip()
-floatmem_scale = os.popen("cat Floatmem | grep 'BatchRun Scale' | awk '{print $4}'").read().rstrip()
-floatmem_add = os.popen("cat Floatmem | grep 'BatchRun Add' | awk '{print $4}'").read().rstrip()
-floatmem_triad = os.popen("cat Floatmem | grep 'BatchRun Triad' | awk '{print $4}'").read().rstrip()
-floatmem_average = os.popen("cat Floatmem | grep 'BatchRun AVERAGE' | awk '{print $4}'").read().rstrip()
-
-print floatmem_copy
-print floatmem_average
-
-hostname = os.popen("hostname").read().rstrip()
-time_stamp = str(datetime.datetime.utcnow().isoformat())
-
-os.system("mv Intmem " + hostname + "-" + time_stamp + ".log")
-os.system("cp Floatmem >> " + hostname + "-" + time_stamp + ".log")
-
-result = {"int_bandwidth": {"copy": intmem_copy,
- "add": intmem_add,
- "scale": intmem_scale,
- "triad": intmem_triad,
- "average": intmem_average},
- "float_bandwidth": {"copy": floatmem_copy,
- "add": floatmem_add,
- "scale": floatmem_scale,
- "triad": floatmem_triad,
- "average": floatmem_average}}
-
-with open('./result_temp', 'w+') as result_file:
- pickle.dump(result, result_file)
diff --git a/benchmarks/playbooks/result_transform/ssl/ssl_transform.py b/benchmarks/playbooks/result_transform/ssl/ssl_transform.py
deleted file mode 100644
index de84d24b..00000000
--- a/benchmarks/playbooks/result_transform/ssl/ssl_transform.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import os
-import pickle
-import datetime
-
-openssl_version = os.popen("cat RSA_dump | head -1").read().rstrip()
-rsa_512_sps = os.popen(
- "cat RSA_dump | grep '512 bits ' | awk '{print $6}' ").read().rstrip()
-rsa_512_vps = os.popen(
- "cat RSA_dump | grep '512 bits ' | awk '{print $7}' ").read().rstrip()
-rsa_1024_sps = os.popen(
- "cat RSA_dump | grep '1024 bits ' | awk '{print $6}' ").read().rstrip()
-rsa_1024_vps = os.popen(
- "cat RSA_dump | grep '1024 bits ' | awk '{print $7}' ").read().rstrip()
-rsa_2048_sps = os.popen(
- "cat RSA_dump | grep '2048 bits ' | awk '{print $6}' ").read().rstrip()
-rsa_2048_vps = os.popen(
- "cat RSA_dump | grep '2048 bits ' | awk '{print $7}' ").read().rstrip()
-rsa_4096_sps = os.popen(
- "cat RSA_dump | grep '4096 bits ' | awk '{print $6}' ").read().rstrip()
-rsa_4096_vps = os.popen(
- "cat RSA_dump | grep '4096 bits ' | awk '{print $7}' ").read().rstrip()
-
-aes_16B = os.popen(
- "cat AES-128-CBC_dump | grep 'aes-128-cbc ' | awk '{print $2}' ").read().rstrip()
-aes_64B = os.popen(
- "cat AES-128-CBC_dump | grep 'aes-128-cbc ' | awk '{print $3}' ").read().rstrip()
-aes_256B = os.popen(
- "cat AES-128-CBC_dump | grep 'aes-128-cbc ' | awk '{print $4}' ").read().rstrip()
-aes_1024B = os.popen(
- "cat AES-128-CBC_dump | grep 'aes-128-cbc ' | awk '{print $5}' ").read().rstrip()
-aes_8192B = os.popen(
- "cat AES-128-CBC_dump | grep 'aes-128-cbc ' | awk '{print $6}' ").read().rstrip()
-
-hostname = os.popen("hostname").read().rstrip()
-time_stamp = str(datetime.datetime.utcnow().isoformat())
-
-os.system("mv RSA_dump " + hostname + "-" + time_stamp + ".log")
-os.system("cat AES-128-CBC_dump >> " + hostname + "-" + time_stamp + ".log")
-
-result = {"version": [openssl_version],
- "rsa_sig": {"512_bits": rsa_512_sps,
- "1024_bits": rsa_1024_sps,
- "2048_bits": rsa_2048_sps,
- "4096_bits": rsa_4096_sps,
- "unit": "sig/sec"},
- "aes_128_cbc": {"16B_block": aes_16B,
- "64B_block": aes_64B,
- "256B_block": aes_256B,
- "1024B_block": aes_1024B,
- "8192B_block": aes_8192B,
- "unit": "B/sec"}}
-
-with open('./result_temp', 'w+') as result_file:
- pickle.dump(result, result_file)
diff --git a/benchmarks/playbooks/result_transform/ubench_transform.py b/benchmarks/playbooks/result_transform/ubench_transform.py
deleted file mode 100644
index ab5fe171..00000000
--- a/benchmarks/playbooks/result_transform/ubench_transform.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import os
-import json
-import pickle
-
-total_cpu = os.popen(
- "cat $HOME/tempT/UnixBench/results/* | grep 'of tests' | awk '{print $1;}' | awk 'NR==1'").read().rstrip()
-
-cpu_1 = os.popen(
- "cat $HOME/tempT/UnixBench/results/* | grep 'of tests' | awk '{print $6;}' | awk 'NR==1'").read().rstrip()
-
-
-cpu_2 = os.popen(
- "cat $HOME/tempT/UnixBench/results/* | grep 'of tests' | awk '{print $6;}' | awk 'NR==2'").read().rstrip()
-
-
-index_1 = os.popen(
- "cat $HOME/tempT/UnixBench/results/* | grep 'Index Score (Partial Only) ' | awk '{print $7;}' | awk 'NR==1'").read().rstrip()
-index_2 = os.popen(
- "cat $HOME/tempT/UnixBench/results/* | grep 'Index Score (Partial Only) ' | awk '{print $7;}' | awk 'NR==2'").read().rstrip()
-
-
-result = {"n_cpu": total_cpu,
- "single": {"n_para_test": cpu_1,
- "score": index_1},
- "multi": {"n_para_test": cpu_2,
- "score": index_2}
- }
-
-with open('result_temp', 'w+') as result_file:
- pickle.dump(result, result_file)
-print json.dumps(result, indent=4, sort_keys=True)
-# print result.items()
diff --git a/benchmarks/playbooks/ssl.yaml b/benchmarks/playbooks/ssl.yaml
deleted file mode 100644
index 55e0af9b..00000000
--- a/benchmarks/playbooks/ssl.yaml
+++ /dev/null
@@ -1,113 +0,0 @@
- - hosts: localhost
- connection: local
- gather_facts: no
-
- tasks:
- - name: making ssl directory
- file: path={{workingdir}}/{{Dest_dir}}/ssl state=directory
-
- - name: making temporary ssl directory
- file: path={{workingdir}}/{{Dest_dir}}/ssl/ssl_temp state=directory
-
- - hosts: "{{role}}"
- become: yes
- remote_user: "{{username}}"
-
- tasks:
- - name: checking home directory
- shell: sudo echo $HOME
- register: home_dir
-
- - name: cleaning
- shell: sudo rm -rf $HOME/Open_SSL
-
- - name: cleaning previous results
- shell: sudo rm -rf $HOME/qtip_result
-
- - name: making OpenSSL temporary directory
- shell: sudo mkdir $HOME/Open_SSL
-
- - name: making results temporary directory
- shell: sudo mkdir $HOME/qtip_result
-
- - include: ./sys_proxy_pbook.yaml
-
- - include: ./sys_info_pbook.yaml
- vars:
- network: false
-
- - name: Installing OpenSSL dependencies when CentOS
- shell: sudo yum install git wget gcc patch perl-Time-HiRes autofconf automake libpcap-devel libtool -y
- when: ansible_os_family == "RedHat"
-
- - name: Installing OpenSSL dependencies when Ubuntu
- shell: sudo apt-get install git gcc wget perl autoconf automake libpcap-dev libtool -y
- when: ansible_os_family == "Debian"
-
- - name: Fetching OpenSSL
- shell: cd $HOME/Open_SSL/ && sudo wget http://artifacts.opnfv.org/qtip/utilities/openssl-1.0.2f.tar.gz
-
- - name: Untar OpenSSL
- shell: cd $HOME/Open_SSL/ && sudo tar -zxvf openssl-1.0.2f.tar.gz
- - name: configure
- shell: cd $HOME/Open_SSL/openssl-1.0.2f && sudo ./config
-
- - name: make
- shell: cd $HOME/Open_SSL/openssl-1.0.2f && sudo make
-
- - name: make install
- shell: cd $HOME/Open_SSL/openssl-1.0.2f && sudo make install
-
- - name: Benchmarking RSA signatures
- shell: cd $HOME/Open_SSL/openssl-1.0.2f/apps && sudo ./openssl speed rsa >> $HOME/qtip_result/RSA_dump
-
- - name: Benchmaring AES-128-cbc cipher encryption throughput
- shell: cd $HOME/Open_SSL/openssl-1.0.2f/apps && sudo ./openssl speed -evp aes-128-cbc >> $HOME/qtip_result/AES-128-CBC_dump
-
- - name: Fetching result transformation script
- copy: src=./result_transform/ssl/ssl_transform.py dest={{home_dir.stdout}}/qtip_result
-
- - name: Transforming result
- shell: cd $HOME/qtip_result && python ssl_transform.py
-
- - name: copy report formation script
- copy: src=./result_transform/final_report.py dest={{home_dir.stdout}}/qtip_result
-
- - name: consolidating report
- shell: cd $HOME/qtip_result && python final_report.py SSL {{fname}}
-
- - name: registering files
- shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
- register: files_to_copy
-
- - name: copy results
- fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/ssl/ssl_temp
- with_items: "{{files_to_copy.stdout_lines}}"
-
- - name: registering log files
- shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
- register: copy_log_results
-
- - name: copying log results
- fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/ssl/ssl_temp
- with_items: "{{copy_log_results.stdout_lines}}"
-
- - hosts: localhost
- connection: local
- gather_facts: no
-
- tasks:
- - name: echo
- shell: echo $PWD
-
- - name: extracting_json
- shell: ( find {{workingdir}}/{{Dest_dir}}/ssl/ssl_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/ssl/)
-
- - name: making_logs_folder
- shell: mkdir -p {{workingdir}}/{{Dest_dir}}/ssl/logs
-
- - name: extracting_log
- shell: ( find {{workingdir}}/{{Dest_dir}}/ssl/ssl_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/ssl/logs)
-
- - name: removing ssl_temp
- shell: rm -rf {{workingdir}}/{{Dest_dir}}/ssl/ssl_temp
diff --git a/benchmarks/playbooks/sys_info_pbook.yaml b/benchmarks/playbooks/sys_info_pbook.yaml
deleted file mode 100644
index cfcad119..00000000
--- a/benchmarks/playbooks/sys_info_pbook.yaml
+++ /dev/null
@@ -1,42 +0,0 @@
- - name: Epel Release install when CentOS
- shell: sudo yum install epel-release -y
- when: ansible_os_family == "RedHat"
-
- - name: Inxi install when CentOS
- shell: sudo yum install inxi -y
- when: ansible_os_family == "RedHat"
-
- - name: Software Properties Common
- shell: sudo apt-get install software-properties-common -y
- when: ansible_os_family == "Debian"
-
- - name: adding trusty-backport main repo
- shell: sudo apt-add-repository "deb http://archive.ubuntu.com/ubuntu/ trusty-backports main restricted universe multiverse"
- when: ansible_os_family == "Debian"
-
- - name: adding trusty main repo
- shell: sudo apt-add-repository "deb http://archive.ubuntu.com/ubuntu/ trusty main restricted universe multiverse"
- when: ansible_os_family == "Debian"
-
- - name: system info collection tool install when Ubuntu
- shell: sudo apt-get update && apt-get install inxi -y
- when: ansible_os_family == "Debian"
-
- - name: Install ansible copy dependencies if remote host has selinux enabled
- shell: sudo yum install libselinux-python -y
- when: ansible_os_family == "RedHat"
-
- - name: Install ansiblle copy dependencies if remote host has selinux enaled
- shell: sudo apt-get install python-selinux -y
- when: ansible_os_family == "Debian"
-
- - name: system_info script copy
- copy: src=./info_script/info_collect.py dest={{home_dir.stdout}}/qtip_result/
-
- - name: collecting system informaton for non-network test cases
- shell: cd $HOME/qtip_result && sudo python info_collect.py c
- when: not network
-
- - name: collecting system information for network test cases
- shell: cd $HOME/qtip_result && sudo python info_collect.py n
- when: network
diff --git a/benchmarks/playbooks/sys_proxy_pbook.yaml b/benchmarks/playbooks/sys_proxy_pbook.yaml
deleted file mode 100644
index bf4a8ccb..00000000
--- a/benchmarks/playbooks/sys_proxy_pbook.yaml
+++ /dev/null
@@ -1,53 +0,0 @@
-#env
-- name: insert shell proxy http
- lineinfile: dest=/etc/profile.d/proxy.sh state=present create=yes owner=root group=root mode=0644 regexp="export http_proxy={{ http_proxy }}"
- insertafter=EOF line="export http_proxy={{ http_proxy }}"
- when: http_proxy is defined
- ignore_errors: yes
-
-- name: insert shell proxy https
- lineinfile: dest=/etc/profile.d/proxy.sh state=present create=yes owner=root group=root mode=0644 regexp="export https_proxy={{ https_proxy }}"
- insertafter=EOF line="export https_proxy={{ https_proxy }}"
- when: https_proxy is defined
- ignore_errors: yes
-
-- name: insert no proxy
- lineinfile: dest=/etc/profile.d/proxy.sh state=present create=yes owner=root group=root mode=0644 regexp="{{ no_proxy }}"
- insertafter=EOF line="export no_proxy={{ no_proxy }}"
- when: no_proxy is defined
- ignore_errors: yes
-
-#wget
-- name: insert wget proxy(http)
- lineinfile: dest=/etc/wgetrc state=present regexp="http_proxy={{ http_proxy }}"
- insertafter="^#http_proxy" line="http_proxy={{ http_proxy }}"
- when: http_proxy is defined
- ignore_errors: yes
-
-- name: insert wget proxy(https)
- lineinfile: dest=/etc/wgetrc state=present regexp="https_proxy={{ https_proxy }}"
- insertafter="^#https_proxy" line="https_proxy={{ https_proxy }}"
- when: https_proxy is defined
- ignore_errors: yes
-
-#yum
-- name: insert yum proxy(http)
- lineinfile: dest=/etc/yum.conf state=present regexp="proxy={{ http_proxy }}"
- insertafter=EOF line="proxy={{ http_proxy }}"
- when: ansible_os_family == "RedHat" and http_proxy is defined
- ignore_errors: yes
-
-#apt
-
-- name: insert apt proxy(http)
- lineinfile: dest=/etc/apt/apt.conf state=present create=yes regexp="Acquire::http::Proxy \"{{ http_proxy }}\";"
- insertafter=EOF line="Acquire::http::Proxy \"{{ http_proxy }}\";"
- when: ansible_os_family == "Debian" and http_proxy is defined
- ignore_errors: yes
-
-- name: insert apt proxy(https)
- lineinfile: dest=/etc/apt/apt.conf state=present create=yes regexp="Acquire::https::Proxy \"{{ https_proxy }}\";"
- insertafter=EOF line="Acquire::https::Proxy \"{{ https_proxy }}\";"
- when: ansible_os_family == "Debian" and https_proxy is defined
- ignore_errors: yes
-
diff --git a/benchmarks/playbooks/whetstone.yaml b/benchmarks/playbooks/whetstone.yaml
deleted file mode 100644
index 0b1f89a7..00000000
--- a/benchmarks/playbooks/whetstone.yaml
+++ /dev/null
@@ -1,111 +0,0 @@
- - hosts: localhost
- connection: local
- gather_facts: no
-
- tasks:
- - name: making whetstone directory
- file: path={{workingdir}}/{{Dest_dir}}/whetstone state=directory
-
- - name: making temporary whetstone directory
- file: path={{workingdir}}/{{Dest_dir}}/whetstone/whetstone_temp state=directory
-
- - hosts: "{{role}}"
- become: yes
- remote_user: "{{username}}"
-
- tasks:
- - name: storing_home
- shell: echo $HOME
- register: home_dir
-
- - name: cleaning tempT
- shell: sudo rm -rf $HOME/tempT
-
- - name: cleaning_qtip_result
- shell: sudo rm -rf $HOME/qtip_result
-
- - name: make directory
- shell: sudo mkdir $HOME/qtip_result
-
- - include: ./sys_proxy_pbook.yaml
-
- - include: ./sys_info_pbook.yaml
- vars:
- network: false
-
- - name: Installing UnixBench dependencies if CentOS
- shell: sudo yum install git gcc patch perl-Time-HiRes -y
- when: ansible_os_family == "RedHat"
-
- - name: Installing UnixBench dependencies if Ubuntu
- shell: sudo apt-get install git gcc patch perl -y
- when: ansible_os_family == "Debian"
-
- - include: ./git_proxy_pbook.yaml
-
- - name: Clone unixbench
- git: repo=https://github.com/kdlucas/byte-unixbench.git
- dest=$HOME/tempT
-
- - name: make
- shell: sudo make --directory $HOME/tempT/UnixBench/
-
- - name: Run Whetstone
- shell: cd $HOME/tempT/UnixBench/&&./Run -v whetstone
-
- - name: collecting and transforming result script copy
- copy: src=./result_transform/ubench_transform.py dest={{home_dir.stdout}}/qtip_result/
-
- - name: transforming result
- shell: cd $HOME/qtip_result && sudo python ubench_transform.py
-
- - name: copying consolidated report script
- copy: src=./result_transform/final_report.py dest={{home_dir.stdout}}/qtip_result/
-
- - name: making consolidated report
- shell: cd $HOME/qtip_result && sudo python final_report.py Whetstone {{fname}}
-
- - name: making directory
- file: path={{home_dir.stdout}}/qtip_result/log state=directory
-
- - name: copying result to temp directory
- shell: sudo cp -r $HOME/tempT/UnixBench/results/* $HOME/qtip_result/log
-
- - name: registering files
- shell: (cd $HOME/qtip_result/; find . -maxdepth 1 -name "*.json") | cut -d'/' -f2
- register: files_to_copy
-
- - name: copy results
- fetch: src={{home_dir.stdout}}/qtip_result/{{item}} dest={{workingdir}}/{{Dest_dir}}/whetstone/whetstone_temp
- with_items: "{{files_to_copy.stdout_lines}}"
-
- - name: registering log files
- shell: (cd $HOME/qtip_result/log/; find . -maxdepth 1 -name "*.log") | cut -d'/' -f2
- register: copy_log_results
-
- - name: copying log results
- fetch: src={{home_dir.stdout}}/qtip_result/log/{{item}} dest={{workingdir}}/{{Dest_dir}}/whetstone/whetstone_temp
- with_items: "{{copy_log_results.stdout_lines}}"
-
- - name: cleaning tempT
- shell: sudo rm -rf $HOME/tempT
-
- - name: cleaning_qtip_result
- shell: sudo rm -rf $HOME/qtip_result
-
- - hosts: localhost
- connection: local
- gather_facts: no
-
- tasks:
- - name: extracting_json
- shell: ( find {{workingdir}}/{{Dest_dir}}/whetstone/whetstone_temp/ -name "*.json" | xargs cp -t {{workingdir}}/{{Dest_dir}}/whetstone/)
-
- - name: making_logs_folder
- shell: mkdir -p {{workingdir}}/{{Dest_dir}}/whetstone/logs
-
- - name: extracting_log
- shell: ( find {{workingdir}}/{{Dest_dir}}/whetstone/whetstone_temp/ -name "*.log" | xargs cp -t {{workingdir}}/{{Dest_dir}}/whetstone/logs)
-
- - name: removing whetstone_temp
- shell: rm -rf {{workingdir}}/{{Dest_dir}}/whetstone/whetstone_temp
diff --git a/docs/apidocs/index.rst b/docs/apidocs/index.rst
index 916fab08..241a2680 100644
--- a/docs/apidocs/index.rst
+++ b/docs/apidocs/index.rst
@@ -3,9 +3,9 @@
.. (c) 2015 Dell Inc.
.. (c) 2016 ZTE Corp.
-****************
+################
QTIP Configguide
-****************
+################
.. toctree::
:maxdepth: 2
diff --git a/docs/apidocs/qtip_restful_api.rst b/docs/apidocs/qtip_restful_api.rst
index ca77224c..3f3297d5 100644
--- a/docs/apidocs/qtip_restful_api.rst
+++ b/docs/apidocs/qtip_restful_api.rst
@@ -3,8 +3,8 @@
.. (c) 2015 Dell Inc.
.. (c) 2016 ZTE Corp.
-
+****************
Qtip restful api
-================
+****************
You can browse all of the QTIP RESTful APIs at http://qtip_server_ip:5000/api/spec.html.
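
A quick way to verify the endpoint above is to fetch the spec page. This is only an illustrative sketch (Python 3 stdlib shown; substitute urllib2 on Python 2), and ``qtip_server_ip`` must be replaced with the real server address:

    import urllib.request

    QTIP_API_SPEC = 'http://qtip_server_ip:5000/api/spec.html'  # replace qtip_server_ip

    with urllib.request.urlopen(QTIP_API_SPEC) as resp:
        print(resp.status, resp.reason)          # expect 200 when the server is up
        print(resp.read(200).decode('utf-8'))    # first bytes of the API spec page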
diff --git a/docs/configguide/configuration.rst b/docs/configguide/configuration.rst
index d6d2fd5d..78e96492 100644
--- a/docs/configguide/configuration.rst
+++ b/docs/configguide/configuration.rst
@@ -16,7 +16,7 @@ to configure OPNFV with this specific installer
Installing QTIP using Docker
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+============================
QTIP has a Docker image on Docker Hub. Pull the opnfv/qtip Docker image
from Docker Hub:
@@ -50,11 +50,11 @@ be navigated to using the following command.
OpenStack parameters and credentials
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+====================================
Environment variables
-"""""""""""""""""""""
+---------------------
Before running QTIP it is necessary to export OpenStack environment variables
from the OpenStack *openrc* file. This can be done by running the following
@@ -69,7 +69,7 @@ environment variables.
QTIP default key pair
-""""""""""""""""""""""
+----------------------
QTIP uses an SSH key pair to connect to the guest image. You should generate the
key pair before running a QTIP test and put it in the ``config/`` directory.
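
A minimal sketch of generating that key pair, assuming the ``QtipKey`` name used elsewhere in this tree (e.g. ``./config/QtipKey`` in ``func/driver.py``); the key type and options are illustrative, not mandated by QTIP:

    # create config/QtipKey and config/QtipKey.pub with an empty passphrase
    import subprocess

    subprocess.check_call([
        'ssh-keygen',
        '-t', 'rsa',            # RSA key type
        '-N', '',               # no passphrase
        '-f', 'config/QtipKey'  # private key path; the public key is written alongside
    ])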
diff --git a/docs/configguide/index.rst b/docs/configguide/index.rst
index 291a809b..d5e05d63 100644
--- a/docs/configguide/index.rst
+++ b/docs/configguide/index.rst
@@ -4,9 +4,9 @@
.. (c) 2016 ZTE Corp.
-=================
+#################
QTIP Config Guide
-=================
+#################
.. toctree::
:maxdepth: 2
diff --git a/docs/designspec/dashboard.rst b/docs/designspec/dashboard.rst
new file mode 100644
index 00000000..ad5520b6
--- /dev/null
+++ b/docs/designspec/dashboard.rst
@@ -0,0 +1,70 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+.. (c) 2016 ZTE Corp.
+
+
+*********
+Dashboard
+*********
+
+The dashboard gives users an intuitive view of benchmark results.
+
+Purpose
+=======
+
+The basic element to be displayed is the QPI, a.k.a. the QTIP Performance Index.
+But it is also important to show the user
+
+#. How is the final score calculated?
+#. Under what conditions is the test plan executed?
+#. How many runs of a performance test have been executed and is there any deviation?
+#. How does the benchmark result compare across different PODs or configurations?
+
+Templates
+=========
+
+Different dashboard templates are created to satisfy the above requirements.
+
+Composition
+-----------
+
+QTIP gives a single score, but a more complex formula lies behind it. This
+view explains how the QPI is composed; a minimal roll-up sketch follows this
+document.
+
+Condition
+---------
+
+The conditions of a benchmark result include
+
+* System Under Test
+
+ * Hardware environment
+ * Hypervisor version
+ * Operating System release version
+ * System Configuration
+
+* Test Tools
+
+ * Release version
+ * Configuration
+
+* Test Facility
+
+ * Laboratory
+ * Engineer
+ * Date
+
+Conditions that do NOT have an obvious effect on the test result may be ignored,
+e.g. temperature and power supply.
+
+Deviation
+---------
+
+Performance tests are usually repeated many times to reduce the effect of random
+disturbance. This view shall show an overview of the deviation among different runs.
+
+Comparison
+----------
+
+Comparison can be done between different PODs or between different
+configurations on the same POD.
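
A minimal sketch of the roll-up that the Composition and Deviation views would visualise, borrowing the equal-weight averaging used by ``scripts/ref_results/compute_benchmarks_indices.py`` later in this change; all numbers are hypothetical:

    from statistics import mean, pstdev

    # hypothetical per-benchmark indices for one POD
    indices = {'dhrystone': 0.95, 'whetstone': 0.97, 'ssl': 0.91, 'ramspeed': 0.99}
    qpi = mean(indices.values())          # composite score shown on the dashboard
    print('QPI = %.3f' % qpi)

    # deviation view: spread of one benchmark index over repeated runs
    runs = [0.94, 0.96, 0.95, 0.97]
    print('mean = %.3f, deviation = %.3f' % (mean(runs), pstdev(runs)))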
diff --git a/docs/designspec/index.rst b/docs/designspec/index.rst
new file mode 100644
index 00000000..e9b3f9fd
--- /dev/null
+++ b/docs/designspec/index.rst
@@ -0,0 +1,13 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+.. (c) 2016 ZTE Corp.
+
+
+##########################
+QTIP Design Specifications
+##########################
+
+.. toctree::
+ :maxdepth: 2
+
+ dashboard.rst
diff --git a/docs/overview/index.rst b/docs/overview/index.rst
index 731b8d49..9a387360 100644
--- a/docs/overview/index.rst
+++ b/docs/overview/index.rst
@@ -3,9 +3,10 @@
.. (c) 2016 ZTE Corp.
-=====================
+
+#####################
QTIP Project Overview
-=====================
+#####################
.. toctree::
:maxdepth: 2
diff --git a/docs/overview/overview.rst b/docs/overview/overview.rst
index aa09c7c3..4fd42356 100644
--- a/docs/overview/overview.rst
+++ b/docs/overview/overview.rst
@@ -3,9 +3,9 @@
.. (c) 2015 Dell Inc.
.. (c) 2016 ZTE Corp.
-========
+********
Overview
-========
+********
.. _QTIP: https://wiki.opnfv.org/platform_performance_benchmarking
diff --git a/docs/userguide/index.rst b/docs/userguide/index.rst
index 5ae4f345..4be3e498 100644
--- a/docs/userguide/index.rst
+++ b/docs/userguide/index.rst
@@ -4,9 +4,9 @@
.. (c) 2016 ZTE Corp.
-===============
+###############
QTIP User Guide
-===============
+###############
.. toctree::
:maxdepth: 2
diff --git a/docs/userguide/introduction.rst b/docs/userguide/introduction.rst
index 4876d0e2..d0d9f3c1 100644
--- a/docs/userguide/introduction.rst
+++ b/docs/userguide/introduction.rst
@@ -13,8 +13,8 @@ run QTIP the first time when the user pull QTIP image on to their host machine.
In order to install and config QTIP please follow the instructions in the
configuration.rst located in docs/configguide/configuration.rst.
-QTIP Directory structure:
--------------------------
+QTIP Directory structure
+========================
The QTIP directory has been sectioned off into multiple folders to facilitate
segmenting information into relevant categories. The folders that concern
diff --git a/func/driver.py b/func/driver.py
index 9a011c2a..47d00f1f 100644
--- a/func/driver.py
+++ b/func/driver.py
@@ -65,7 +65,7 @@ class Driver:
logger.info(extra_vars)
ansible_api = AnsibleApi()
ansible_api.execute_playbook('./config/hosts',
- './benchmarks/playbooks/{0}.yaml'.format(benchmark),
+ './benchmarks/perftest/{0}.yaml'.format(benchmark),
'./config/QtipKey', extra_vars)
return self.get_ansible_result(extra_vars['role'], ansible_api.get_detail_playbook_stats())
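
The only change here is the playbook directory. For a hypothetical benchmark name such as ``dhrystone``, the formatted path now resolves as follows:

    benchmark = 'dhrystone'  # illustrative name
    path = './benchmarks/perftest/{0}.yaml'.format(benchmark)
    assert path == './benchmarks/perftest/dhrystone.yaml'  # was ./benchmarks/playbooks/...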
diff --git a/scripts/cleanup_creds.sh b/scripts/cleanup_creds.sh
index 9bf44305..b4eee924 100644..100755
--- a/scripts/cleanup_creds.sh
+++ b/scripts/cleanup_creds.sh
@@ -9,6 +9,3 @@ case "$INSTALLER_TYPE" in
ssh $sshoptions -i ./config/QtipKey root@$DEST_IP "sed -i '/root@$HOSTNAME/d' /root/.ssh/authorized_keys"
;;
esac
-
-
-
diff --git a/scripts/ref_results/compute_benchmarks_indices.py b/scripts/ref_results/compute_benchmarks_indices.py
index 9012cefc..0b6eae36 100644
--- a/scripts/ref_results/compute_benchmarks_indices.py
+++ b/scripts/ref_results/compute_benchmarks_indices.py
@@ -10,7 +10,6 @@ def dpi_index():
dpi_vm_ref = get_reference('compute', 'dpi_vm')
dpi_vm_index = get_index(dpi_dict, 'dpi_vm', dpi_vm_ref, 'details', 'bps')
-
dpi_index = (dpi_bm_index + dpi_vm_index) / 2
dpi_dict_i = {}
dpi_dict_i['index'] = dpi_index
@@ -118,11 +117,11 @@ def ssl_index():
ssl_RSA4096b_bm_index = get_index(ssl_dict, "ssl_bm", ssl_RSA4096b_bm_ref, 'details', 'rsa_sig', '4096_bits')
ssl_RSA_bm_index = (ssl_RSA512b_bm_index + ssl_RSA1024b_bm_index + ssl_RSA2048b_bm_index + ssl_RSA4096b_bm_index) / 4
- ssl_AES16B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES16B_bm_ref, 'details', 'aes_128_cbc', '16_block')
- ssl_AES64B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES64B_bm_ref, 'details', 'aes_128_cbc', '64_block')
- ssl_AES256B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES256B_bm_ref, 'details', 'aes_128_cbc', '256_block')
- ssl_AES1024B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES1024B_bm_ref, 'details', 'aes_128_cbc', '1024_block')
- ssl_AES8192B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES8192B_bm_ref, 'details', 'aes_128_cbc', '8192_block')
+ ssl_AES16B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES16B_bm_ref, 'details', 'aes_128_cbc', '16B_block')
+ ssl_AES64B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES64B_bm_ref, 'details', 'aes_128_cbc', '64B_block')
+ ssl_AES256B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES256B_bm_ref, 'details', 'aes_128_cbc', '256B_block')
+ ssl_AES1024B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES1024B_bm_ref, 'details', 'aes_128_cbc', '1024B_block')
+ ssl_AES8192B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES8192B_bm_ref, 'details', 'aes_128_cbc', '8192B_block')
ssl_AES_bm_index = (ssl_AES16B_bm_index + ssl_AES64B_bm_index + ssl_AES256B_bm_index + ssl_AES1024B_bm_index + ssl_AES8192B_bm_index) / 5
ssl_bm_index = (ssl_RSA_bm_index + ssl_AES_bm_index) / 2
@@ -144,11 +143,11 @@ def ssl_index():
ssl_RSA4096b_vm_index = get_index(ssl_dict, "ssl_vm", ssl_RSA4096b_vm_ref, 'details', 'rsa_sig', '4096_bits')
ssl_RSA_vm_index = (ssl_RSA512b_vm_index + ssl_RSA1024b_vm_index + ssl_RSA2048b_vm_index + ssl_RSA4096b_vm_index) / 4
- ssl_AES16B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES16B_vm_ref, 'details', 'aes_128_cbc', '16_block')
- ssl_AES64B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES64B_vm_ref, 'details', 'aes_128_cbc', '64_block')
- ssl_AES256B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES256B_vm_ref, 'details', 'aes_128_cbc', '256_block')
- ssl_AES1024B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES1024B_vm_ref, 'details', 'aes_128_cbc', '1024_block')
- ssl_AES8192B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES8192B_vm_ref, 'details', 'aes_128_cbc', '8192_block')
+ ssl_AES16B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES16B_vm_ref, 'details', 'aes_128_cbc', '16B_block')
+ ssl_AES64B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES64B_vm_ref, 'details', 'aes_128_cbc', '64B_block')
+ ssl_AES256B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES256B_vm_ref, 'details', 'aes_128_cbc', '256B_block')
+ ssl_AES1024B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES1024B_vm_ref, 'details', 'aes_128_cbc', '1024B_block')
+ ssl_AES8192B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES8192B_vm_ref, 'details', 'aes_128_cbc', '8192B_block')
ssl_AES_vm_index = (ssl_AES16B_vm_index + ssl_AES64B_vm_index + ssl_AES256B_vm_index + ssl_AES1024B_vm_index + ssl_AES8192B_vm_index) / 5
ssl_vm_index = (ssl_RSA_vm_index + ssl_AES_vm_index) / 2
diff --git a/scripts/ref_results/generator_ref_json.py b/scripts/ref_results/generator_ref_json.py
deleted file mode 100644
index 6b2d813c..00000000
--- a/scripts/ref_results/generator_ref_json.py
+++ /dev/null
@@ -1,81 +0,0 @@
-import json
-
-dict_ref = {}
-dict_ref['compute'] = {}
-dict_ref['compute']['dpi_bm'] = 8.12
-dict_ref['compute']['dpi_vm'] = 22.12
-
-dict_ref['compute']['whetstone_bm'] = {}
-dict_ref['compute']['whetstone_vm'] = {}
-dict_ref['compute']['whetstone_bm']['single_cpu'] = 806.1
-dict_ref['compute']['whetstone_bm']['multi_cpu'] = 41483.3
-dict_ref['compute']['whetstone_vm']['single_cpu'] = 789.0
-dict_ref['compute']['whetstone_vm']['multi_cpu'] = 2950.6
-
-dict_ref['compute']['dhrystone_bm'] = {}
-dict_ref['compute']['dhrystone_vm'] = {}
-dict_ref['compute']['dhrystone_bm']['single_cpu'] = 3231.7
-dict_ref['compute']['dhrystone_bm']['multi_cpu'] = 103362.1
-dict_ref['compute']['dhrystone_vm']['single_cpu'] = 2953.6
-dict_ref['compute']['dhrystone_vm']['multi_cpu'] = 10585.8
-
-dict_ref['compute']['ssl_bm'] = {}
-dict_ref['compute']['ssl_bm']['RSA'] = {}
-dict_ref['compute']['ssl_bm']['AES'] = {}
-dict_ref['compute']['ssl_bm']['RSA']['512b'] = 22148.9
-dict_ref['compute']['ssl_bm']['RSA']['1024b'] = 7931.44
-dict_ref['compute']['ssl_bm']['RSA']['2048b'] = 1544.3
-dict_ref['compute']['ssl_bm']['RSA']['4096b'] = 161.92
-dict_ref['compute']['ssl_bm']['AES']['16B'] = 735490250
-dict_ref['compute']['ssl_bm']['AES']['64B'] = 788429210
-dict_ref['compute']['ssl_bm']['AES']['256B'] = 803323650
-dict_ref['compute']['ssl_bm']['AES']['1024B'] = 808861020
-dict_ref['compute']['ssl_bm']['AES']['8192B'] = 807701160
-
-dict_ref['compute']['ssl_vm'] = {}
-dict_ref['compute']['ssl_vm']['RSA'] = {}
-dict_ref['compute']['ssl_vm']['AES'] = {}
-dict_ref['compute']['ssl_vm']['RSA']['512b'] = 22148.9
-dict_ref['compute']['ssl_vm']['RSA']['1024b'] = 7931.44
-dict_ref['compute']['ssl_vm']['RSA']['2048b'] = 1544.3
-dict_ref['compute']['ssl_vm']['RSA']['4096b'] = 161.92
-dict_ref['compute']['ssl_vm']['AES']['16B'] = 735490250
-dict_ref['compute']['ssl_vm']['AES']['64B'] = 788429210
-dict_ref['compute']['ssl_vm']['AES']['256B'] = 803323650
-dict_ref['compute']['ssl_vm']['AES']['1024B'] = 808861020
-dict_ref['compute']['ssl_vm']['AES']['8192B'] = 807701160
-
-dict_ref['compute']['ramspeed_bm'] = {}
-dict_ref['compute']['ramspeed_bm']['INTmem'] = {}
-dict_ref['compute']['ramspeed_bm']['FLOATmem'] = {}
-dict_ref['compute']['ramspeed_bm']['INTmem']['Average (MB/s)'] = 12268.38
-dict_ref['compute']['ramspeed_bm']['FLOATmem']['Average (MB/s)'] = 9758.79
-
-dict_ref['compute']['ramspeed_vm'] = {}
-dict_ref['compute']['ramspeed_vm']['INTmem'] = {}
-dict_ref['compute']['ramspeed_vm']['FLOATmem'] = {}
-dict_ref['compute']['ramspeed_vm']['INTmem']['Average (MB/s)'] = 12147.59
-dict_ref['compute']['ramspeed_vm']['FLOATmem']['Average (MB/s)'] = 9064.09
-
-dict_ref['storage'] = {}
-dict_ref['storage']['fio_bm'] = {}
-dict_ref['storage']['fio_bm']['read'] = {}
-dict_ref['storage']['fio_bm']['write'] = {}
-dict_ref['storage']['fio_bm']['read']['IOPS'] = 6693
-dict_ref['storage']['fio_bm']['write']['IOPS'] = 6688
-
-dict_ref['storage']['fio_vm'] = {}
-dict_ref['storage']['fio_vm']['read'] = {}
-dict_ref['storage']['fio_vm']['write'] = {}
-dict_ref['storage']['fio_vm']['read']['IOPS'] = 2239
-dict_ref['storage']['fio_vm']['write']['IOPS'] = 2237
-
-dict_ref['network'] = {}
-dict_ref['network']['iperf_bm'] = {}
-dict_ref['network']['iperf_vm'] = {}
-dict_ref['network']['iperf_vm_2'] = {}
-dict_ref['network']['iperf_bm']['throughput received(b/s)'] = 944473000.0
-dict_ref['network']['iperf_vm']['throughput received(b/s)'] = 14416700000.0
-dict_ref['network']['iperf_vm_2']['throughput received(b/s)'] = 2461530000.0
-with open('reference.json', 'w+') as result_json:
- json.dump(dict_ref, result_json, indent=4, sort_keys=True)
diff --git a/scripts/ref_results/index_calculation.py b/scripts/ref_results/index_calculation.py
index 4ead79ff..95c3c4a6 100644
--- a/scripts/ref_results/index_calculation.py
+++ b/scripts/ref_results/index_calculation.py
@@ -28,7 +28,7 @@ def generic_index(dict_gen, testcase, reference_num, *args):
result = 0
for k, v in dict_gen.iteritems():
dict_temp = dict_gen[k]
- if dict_gen[k]['name'] == str(testcase):
+ if dict_gen[k]['name'] == '{0}.yaml'.format(testcase):
count = count + 1
for arg in args:
if arg == args[c - 1]:
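
The functional change is the name match: result entries apparently carry the playbook filename rather than the bare testcase name, so the old comparison would no longer hit. An illustrative check with hypothetical result data (the dict layout is an assumption based on the loop above):

    dict_gen = {
        '0': {'name': 'dhrystone.yaml', 'details': {'single_cpu': 3200.0}},
        '1': {'name': 'whetstone.yaml', 'details': {'single_cpu': 800.0}},
    }
    testcase = 'dhrystone'

    new_match = [v for v in dict_gen.values()
                 if v['name'] == '{0}.yaml'.format(testcase)]
    old_match = [v for v in dict_gen.values() if v['name'] == str(testcase)]

    assert len(new_match) == 1   # matches dhrystone.yaml
    assert old_match == []       # the bare name never matches the stored filename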
diff --git a/supporting/servers/elk.yml b/supporting/servers/elk.yml
index 406ba93f..e2696573 100644
--- a/supporting/servers/elk.yml
+++ b/supporting/servers/elk.yml
@@ -1,4 +1,5 @@
---
- hosts: elk-servers
roles:
+ - docker
- elk
diff --git a/supporting/servers/inventory b/supporting/servers/inventory
index aa9388fe..1414fa7c 100644
--- a/supporting/servers/inventory
+++ b/supporting/servers/inventory
@@ -1,4 +1,7 @@
-qtip-dev ansible_host=qtip
+qtip-dev ansible_host=qtip.openzero.net
[elk-servers]
-qtip-dev
\ No newline at end of file
+qtip-dev
+
+[ngnix-servers]
+qtip-dev
diff --git a/supporting/servers/ngnix.yml b/supporting/servers/ngnix.yml
new file mode 100644
index 00000000..d57a5603
--- /dev/null
+++ b/supporting/servers/ngnix.yml
@@ -0,0 +1,4 @@
+---
+- hosts: ngnix-servers
+ roles:
+ - ngnix
diff --git a/supporting/servers/roles/elk/handlers/main.yml b/supporting/servers/roles/docker/handlers/main.yml
index 46bd76c7..985f8d41 100644
--- a/supporting/servers/roles/elk/handlers/main.yml
+++ b/supporting/servers/roles/docker/handlers/main.yml
@@ -2,7 +2,3 @@
- name: starting docker service
become: true
service: name=docker state=started
-
-- name: restart nginx
- become: true
- service: name=nginx state=restarted
diff --git a/supporting/servers/roles/docker/tasks/main.yml b/supporting/servers/roles/docker/tasks/main.yml
new file mode 100644
index 00000000..881650dd
--- /dev/null
+++ b/supporting/servers/roles/docker/tasks/main.yml
@@ -0,0 +1,39 @@
+- name: adding keys
+ become: true
+ apt_key: keyserver=hkp://p80.pool.sks-keyservers.net id=0X58118E89F3A912897C070ADBF76221572C52609D
+
+- name: adding entry for ubuntu
+ become: true
+ apt_repository: repo='deb https://apt.dockerproject.org/repo ubuntu-xenial main' state=present filename='docker'
+ when: ansible_distribution == "Ubuntu" and ansible_distribution_version == "16.04"
+
+- name: installing from docker repo
+ become: true
+ shell: apt-cache policy docker-engine
+ when: ansible_distribution == "Ubuntu" and ansible_distribution_version== "16.04"
+
+- name: adding entry for ubuntu
+ become: true
+ apt_repository: repo='deb https://apt.dockerproject.org/repo ubuntu-trusty main' state=present filename='docker'
+ when: ansible_distribution == "Ubuntu" and ansible_distribution_version == "14.04"
+
+- name: updating
+ become: true
+ apt: update_cache=yes
+
+- name: installing docker engine
+ become: true
+ apt: name=docker-engine
+ notify:
+ - starting docker service
+
+- name: install pip
+ apt: pkg={{item}} state=installed
+ with_items:
+ - python-dev
+ - python-pip
+
+- name: install docker-py
+ pip:
+ name: docker-py
+
diff --git a/supporting/servers/roles/elk/tasks/main.yml b/supporting/servers/roles/elk/tasks/main.yml
index cc544be5..ed987f1a 100644
--- a/supporting/servers/roles/elk/tasks/main.yml
+++ b/supporting/servers/roles/elk/tasks/main.yml
@@ -1,43 +1,4 @@
---
-- name: adding keys
- become: true
- apt_key: keyserver=hkp://p80.pool.sks-keyservers.net id=0X58118E89F3A912897C070ADBF76221572C52609D
-
-- name: adding entry for ubuntu
- become: true
- apt_repository: repo='deb https://apt.dockerproject.org/repo ubuntu-xenial main' state=present filename='docker'
- when: ansible_distribution == "Ubuntu" and ansible_distribution_version == "16.04"
-
-- name: installing from docker repo
- become: true
- shell: apt-cache policy docker-engine
- when: ansible_distribution == "Ubuntu" and ansible_distribution_version== "16.04"
-
-- name: adding entry for ubuntu
- become: true
- apt_repository: repo='deb https://apt.dockerproject.org/repo ubuntu-trusty main' state=present filename='docker'
- when: ansible_distribution == "Ubuntu" and ansible_distribution_version == "14.04"
-
-- name: updating
- become: true
- apt: update_cache=yes
-
-- name: installing docker engine
- become: true
- apt: name=docker-engine
-
-- name: install pip
- apt:
- pkg:{{ item }} state=installed
- state: installed
- with_items:
- - python-dev
- - python-pip
-
-- name: install docker-py
- pip:
- name: docker-py
-
- name: pulling elasticsearch and kibana
become: true
docker_image: name={{ item }} state=present
@@ -59,14 +20,3 @@
image: kibana
published_ports: 5601:5601
links: esearch:elasticsearch
-
-- name: nginx is installed
- become: true
- package: name=nginx state=present
-- name: qtip server configuration is generated
- become: true
- template: src={{ item }}.conf.j2 dest=/etc/nginx/sites-enabled/{{ item }}.conf
- with_items:
- - elk
- notify:
- - restart nginx
diff --git a/supporting/servers/roles/elk/defaults/main.yml b/supporting/servers/roles/ngnix/defaults/main.yml
index 855fc7ea..855fc7ea 100644
--- a/supporting/servers/roles/elk/defaults/main.yml
+++ b/supporting/servers/roles/ngnix/defaults/main.yml
diff --git a/supporting/servers/roles/ngnix/handlers/main.yml b/supporting/servers/roles/ngnix/handlers/main.yml
new file mode 100644
index 00000000..35585363
--- /dev/null
+++ b/supporting/servers/roles/ngnix/handlers/main.yml
@@ -0,0 +1,4 @@
+---
+- name: restart nginx
+ become: true
+ service: name=nginx state=restarted
diff --git a/supporting/servers/roles/ngnix/tasks/main.yml b/supporting/servers/roles/ngnix/tasks/main.yml
new file mode 100644
index 00000000..8673b3d2
--- /dev/null
+++ b/supporting/servers/roles/ngnix/tasks/main.yml
@@ -0,0 +1,11 @@
+---
+- name: nginx is installed
+ become: true
+ package: name=nginx state=present
+- name: qtip server configuration is generated
+ become: true
+ template: src={{ item }}.conf.j2 dest=/etc/nginx/sites-enabled/{{ item }}.conf
+ with_items:
+ - elk
+ notify:
+ - restart nginx
diff --git a/supporting/servers/roles/elk/templates/elk.conf.j2 b/supporting/servers/roles/ngnix/templates/elk.conf.j2
index 271690e0..271690e0 100644
--- a/supporting/servers/roles/elk/templates/elk.conf.j2
+++ b/supporting/servers/roles/ngnix/templates/elk.conf.j2
diff --git a/supporting/servers/roles/ssh/files/yujunz.authorized_keys b/supporting/servers/roles/ssh/files/yujunz.authorized_keys
index 168979c3..83ed540d 100644
--- a/supporting/servers/roles/ssh/files/yujunz.authorized_keys
+++ b/supporting/servers/roles/ssh/files/yujunz.authorized_keys
@@ -1 +1,3 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC0o+WARhbI5DeZK9eiZKVprY3oQ910npo/EPFI9prfpN7zeF0UA4TwT9rqsrwFgRQeJkKvbDTOPt5Ox9vLIEnxT4PdirvrYNxtyJHHfbL4ZxKemYhBi2BA6CAbI3f9CR+dtbfOBxtTpXAj1Y5oA9y59o1hqHpuVmM8dBpcQt/ELdYCHZ0khxft5WaSOURsslTZN18bikVX9WHlKflVyRA8efxqzTZ4w5ufHl1Fv9i/G0u7iGbUtvlI8X7+Z+ePuysZhHKErQSGfv2NybDi+r8xM5hL2LYXxjAu0PV58olUl1SX40OY9yA2Yx+PVzIGhv6EQ8snUFnsoMy0ZIbQ+ysB yujunz@silver.mbp
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDs9VhmmGg0c7FTLMtoZlPue4N1DdSCh6r6YssmEY3e1GHPXJTGGTX7QKXEBlWd7EXq+uedwN+lU9C/FClwneP0M+4vprgqdmUcmlCoSI/RffQAymcuGbxvfptNcLpBApHn4C+e93H/5ryaypaG+6n8WtbDZDtvWgn2ZJY6hDdJSw6Y++C+zDI0QliTsrCHmnGnYcRuDWUNgaiERvW3cvpeF5duFJcDZ0NtMl2UjeGMYL0JU6YfIOIeeFwD0Su86Nf3RtoKF+RzO4LcrGLvywzM7C/iCncIdj6GZjpVBYnB5U6YUh8VqLjHVHjmjTpAjmsWcc8NnwNG9rHDbG6nrGWd yujunz@silver
+