Diffstat (limited to 'utils')
-rwxr-xr-x  utils/docu-build-new.sh                                         | 114
-rwxr-xr-x  utils/jenkins-jnlp-connect.sh                                   |   2
-rwxr-xr-x  utils/test/result_collection_api/dashboard/vsperf2Dashboard.py  | 121
3 files changed, 122 insertions(+), 115 deletions(-)
diff --git a/utils/docu-build-new.sh b/utils/docu-build-new.sh
deleted file mode 100755
index 67a62e381..000000000
--- a/utils/docu-build-new.sh
+++ /dev/null
@@ -1,114 +0,0 @@
-#!/bin/bash
-set -e
-set -o pipefail
-
-export PATH=$PATH:/usr/local/bin/
-git_sha1="$(git rev-parse HEAD)"
-
-clean() {
-if [[ -d docs/output ]]; then
-rm -rf docs/output
-echo "cleaning up output directory"
-fi
-}
-
-trap clean EXIT TERM INT SIGTERM SIGHUP
-
-#set git_sha1
-files=()
-while read -r -d ''; do
- files+=("$REPLY")
-done < <(find docs/ -type f -iname '*.rst' -print0)
-for file in "${files[@]}"; do
- sed -i "s/_sha1_/$git_sha1/g" $file
-done
-
-directories=()
-while read -d $'\n'; do
- directories+=("$REPLY")
-done < <(find docs/ -name 'index.rst' -printf '%h\n' | sort -u )
-
-for dir in "${directories[@]}"; do
-    _name="${dir##*/}"
-    _build="${dir}/build"
-    _output="docs/output/${_name}"
-    echo
-    echo "#################${_name//?/#}"
-    echo "Building DOCS in ${_name}"
-    echo "#################${_name//?/#}"
-    echo
-
-    mkdir -p "${_output}"
-
-    sphinx-build -b html -E -c docs/etc "${dir}" "${_output}"
-
-    sphinx-build -b latex -E -c docs/etc "${dir}" "${_build}"
-    make -C "${_build}" LATEXOPTS='--interaction=nonstopmode' all-pdf
-    mv "${_build}"/*.pdf "${_output}"
-
-done
-
-# NOTE: make sure source parameters for GS paths are not empty.
-[[ $GERRIT_CHANGE_NUMBER =~ .+ ]]
-[[ $GERRIT_PROJECT =~ .+ ]]
-[[ $GERRIT_BRANCH =~ .+ ]]
-
-gs_path_review="artifacts.opnfv.org/review/$GERRIT_CHANGE_NUMBER"
-
-if [[ $GERRIT_BRANCH = "master" ]] ; then
-    gs_path_branch="artifacts.opnfv.org/$GERRIT_PROJECT"
-else
-    gs_path_branch="artifacts.opnfv.org/$GERRIT_PROJECT/${GERRIT_BRANCH##*/}"
-fi
-
-for dir in "${directories[@]}"; do
-    echo
-    echo "#############################"
-    echo "UPLOADING DOCS in ${dir##*/}"
-    echo "#############################"
-    echo
-
-
-    if [[ $JOB_NAME =~ "verify" ]] ; then
-
-        #upload artifacts for verify job
-        gsutil cp -r docs/output/"${dir##*/}/" "gs://$gs_path_review/"
-
-        # post link to gerrit as comment
-        gerrit_comment="$(echo '"Document is available at 'http://$gs_path_review/"${dir##*/}"/index.html' for review"')"
-        echo "$gerrit_comment"
-        ssh -p 29418 gerrit.opnfv.org gerrit review -p $GERRIT_PROJECT -m \
-        "$gerrit_comment" $GERRIT_PATCHSET_REVISION
-
-        #set cache to 0
-        for x in $(gsutil ls gs://$gs_path_review/"${dir##*/}" | grep html);
-        do
-            gsutil setmeta -h "Content-Type:text/html" \
-            -h "Cache-Control:private, max-age=0, no-transform" \
-            "$x"
-        done
-
-    else
-
-        #upload artifacts for merge job
-        gsutil cp -r docs/output/"${dir##*/}" "gs://$gs_path_branch/docs/"
-        echo "Latest document is available at http://$gs_path_branch/docs/"${dir##*/}"/index.html"
-
-        #set cache to 0
-        for x in $(gsutil ls gs://$gs_path_branch/"${dir}" | grep html);
-        do
-            gsutil setmeta -h "Content-Type:text/html" \
-            -h "Cache-Control:private, max-age=0, no-transform" \
-            "$x"
-        done
-
-        #Clean up review when merging
-        if gsutil ls "gs://$gs_path_review" > /dev/null 2>&1 ; then
-            echo
-            echo "Deleting Out-of-dated Documents..."
-            gsutil rm -r "gs://$gs_path_review"
-        fi
-
-    fi
-
-done
diff --git a/utils/jenkins-jnlp-connect.sh b/utils/jenkins-jnlp-connect.sh
index d263b198a..00c19971f 100755
--- a/utils/jenkins-jnlp-connect.sh
+++ b/utils/jenkins-jnlp-connect.sh
@@ -100,7 +100,7 @@ fi
 if [[ $started_monit == "true" ]]; then
 wget --timestamping https://build.opnfv.org/ci/jnlpJars/slave.jar && true
-chown $jenkinsuser $jenkinsuser slave.jar
+chown $jenkinsuser:$jenkinsuser slave.jar
 if [[ -f /var/run/$jenkinsuser/jenkins_jnlp_pid ]];
 then echo "pid file found"
diff --git a/utils/test/result_collection_api/dashboard/vsperf2Dashboard.py b/utils/test/result_collection_api/dashboard/vsperf2Dashboard.py
new file mode 100755
index 000000000..323d3915c
--- /dev/null
+++ b/utils/test/result_collection_api/dashboard/vsperf2Dashboard.py
@@ -0,0 +1,121 @@
+#!/usr/bin/python
+
+# Copyright 2015 Intel Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+def get_vsperf_cases():
+ """
+ get the list of the supported test cases
+ TODO: update the list when adding a new test case for the dashboard
+ """
+ return ["tput_ovsdpdk", "tput_ovs",
+ "b2b_ovsdpdk", "b2b_ovs",
+ "tput_mod_vlan_ovsdpdk", "tput_mod_vlan_ovs",
+ "cont_ovsdpdk", "cont_ovs",
+ "pvp_cont_ovsdpdkuser", "pvp_cont_ovsdpdkcuse", "pvp_cont_ovsvirtio",
+ "pvvp_cont_ovsdpdkuser", "pvvp_cont_ovsdpdkcuse", "pvvp_cont_ovsvirtio",
+ "scalability_ovsdpdk", "scalability_ovs",
+ "pvp_tput_ovsdpdkuser", "pvp_tput_ovsdpdkcuse", "pvp_tput_ovsvirtio",
+ "pvp_b2b_ovsdpdkuser", "pvp_b2b_ovsdpdkcuse", "pvp_b2b_ovsvirtio",
+ "pvvp_tput_ovsdpdkuser", "pvvp_tput_ovsdpdkcuse", "pvvp_tput_ovsvirtio",
+ "pvvp_b2b_ovsdpdkuser", "pvvp_b2b_ovsdpdkcuse", "pvvp_b2b_ovsvirtio",
+ "cpu_load_ovsdpdk", "cpu_load_ovs",
+ "mem_load_ovsdpdk", "mem_load_ovs"]
+
+
+def check_vsperf_case_exist(case):
+    """
+    Check whether the test case exists.
+    Return False if the test case is undefined
+    or not declared in the list.
+    """
+ vsperf_cases = get_vsperf_cases()
+
+ if (case is None or case not in vsperf_cases):
+ return False
+ else:
+ return True
+
+
+def format_vsperf_for_dashboard(case, results):
+    """
+    Generic entry point for dashboard formatting:
+    check that the test case is properly declared first,
+    then call the common formatting method.
+    """
+ if check_vsperf_case_exist(case):
+ res = format_common_for_dashboard(case, results)
+ else:
+ res = []
+        print "Test case not declared"
+ return res
+
+
+def format_common_for_dashboard(case, results):
+ """
+ Common post processing
+ """
+ test_data_description = case + " results for Dashboard"
+ test_data = [{'description': test_data_description}]
+
+ graph_name = ''
+ if "b2b" in case:
+ graph_name = "B2B frames"
+ else:
+ graph_name = "Rx frames per second"
+
+ # Graph 1: Rx fps = f(time)
+ # ********************************
+ new_element = []
+ for data in results:
+ new_element.append({'x': data['creation_date'],
+ 'y1': data['details']['64'],
+ 'y2': data['details']['128'],
+ 'y3': data['details']['512'],
+ 'y4': data['details']['1024'],
+ 'y5': data['details']['1518']})
+
+ test_data.append({'name': graph_name,
+ 'info': {'type': "graph",
+ 'xlabel': 'time',
+ 'y1label': 'frame size 64B',
+ 'y2label': 'frame size 128B',
+ 'y3label': 'frame size 512B',
+ 'y4label': 'frame size 1024B',
+ 'y5label': 'frame size 1518B'},
+ 'data_set': new_element})
+
+ return test_data
+
+
+
+
+############################ For local test ################################
+import os
+
+def _test():
+ ans = [{'creation_date': '2015-09-12', 'project_name': 'vsperf', 'version': 'ovs_master', 'pod_name': 'pod1-vsperf', 'case_name': 'tput_ovsdpdk', 'installer': 'build_sie', 'details': {'64': '26.804', '1024': '1097.284', '512': '178.137', '1518': '12635.860', '128': '100.564'}},
+           {'creation_date': '2015-09-13', 'project_name': 'vsperf', 'version': 'ovs_master', 'pod_name': 'pod1-vsperf', 'case_name': 'tput_ovsdpdk', 'installer': 'build_sie', 'details': {'64': '16.804', '1024': '1087.284', '512': '168.137', '1518': '12625.860', '128': '99.564'}}]
+
+ result = format_vsperf_for_dashboard("pvp_cont_ovsdpdkcuse", ans)
+ print result
+
+ result = format_vsperf_for_dashboard("b2b_ovsdpdk", ans)
+ print result
+
+ result = format_vsperf_for_dashboard("non_existing", ans)
+ print result
+
+if __name__ == '__main__':
+ _test()