aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorchenliangyjy <chenliangyjy@chinamobile.com>2020-05-08 12:38:30 +0800
committerchenliangyjy <chenliangyjy@chinamobile.com>2020-05-08 12:56:31 +0800
commit06bf8cf347d8af6740ab5df842abebf17dabac30 (patch)
tree9002d105565ddfd4d69d8cfcc2338746f941a008
parentae7bfeed7d4a02938068e4f72b962f56cdf9041c (diff)
migrate hdv code from cirv to subproject cirv-hdv project
Signed-off-by: chenliangyjy <chenliangyjy@chinamobile.com> Change-Id: Ieb02ccf45bdf7b0b46cb204c864a521d547a5231 Signed-off-by: chenliangyjy <chenliangyjy@chinamobile.com>
-rw-r--r--.gitignore12
-rw-r--r--.gitreview4
-rw-r--r--AUTHORS1
-rw-r--r--CONTRIBUTING.md70
-rw-r--r--ChangeLog4
-rw-r--r--INFO.yaml66
-rw-r--r--LICENSE13
-rw-r--r--README.md30
-rw-r--r--check179
-rw-r--r--ci/build-cirv.sh124
-rw-r--r--hdv/__init__.py0
-rw-r--r--hdv/redfish/__init__.py0
-rw-r--r--hdv/redfish/conf/cases.xlsxbin0 -> 17263 bytes
-rw-r--r--hdv/redfish/conf/cases.yaml517
-rw-r--r--hdv/redfish/conf/config.yaml17
-rw-r--r--hdv/redfish/conf/depends.yaml33
-rw-r--r--hdv/redfish/conf/report.yaml832
-rw-r--r--hdv/redfish/docs/readme.md129
-rw-r--r--hdv/redfish/errors.py47
-rw-r--r--hdv/redfish/excel_2_yaml.py62
-rw-r--r--hdv/redfish/hdv.py60
-rw-r--r--hdv/redfish/hdv_redfish.py676
-rw-r--r--hdv/redfish/http_handler.py129
-rw-r--r--hdv/redfish/log_utils.py33
-rw-r--r--hdv/redfish/yaml_utils.py28
-rw-r--r--pylintrc396
-rw-r--r--requirements.txt17
-rw-r--r--setup.cfg24
-rw-r--r--setup.py29
-rw-r--r--test-requirements.txt8
-rw-r--r--tox.ini25
31 files changed, 3565 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..884471c
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,12 @@
+*.pyc
+*.log
+.project
+.pydevproject
+nosetests.xml
+coverage.xml
+.coverage
+cover
+.settings
+docs_output
+.tox
+docs/_build/*
diff --git a/.gitreview b/.gitreview
new file mode 100644
index 0000000..74a0693
--- /dev/null
+++ b/.gitreview
@@ -0,0 +1,4 @@
+[gerrit]
+host=gerrit.opnfv.org
+port=29418
+project=cirv-hdv.git
diff --git a/AUTHORS b/AUTHORS
new file mode 100644
index 0000000..47122e5
--- /dev/null
+++ b/AUTHORS
@@ -0,0 +1 @@
+DW Talton <dtalton@contractor.linuxfoundation.org>
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..1aa6108
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,70 @@
+<!---
+This work is licensed under a Creative Commons Attribution 4.0 International License.
+http://creativecommons.org/licenses/by/4.0
+-->
+
+# General Coding Style
+
+## Code
+
+Abide by [PEP-8] for general code. Some particular points to note:
+
+* Wrap code at 79 characters.
+* Use only spaces - no tabs.
+* Use implicit string concatenation where possible. Don't use the escape
+ character unless absolutely necessary.
+* Be liberal in your use of whitespace to group related statements together.
+ However, don't leave a space after the docstring and the first statement.
+* Use single quotes for all string literals.
+
+## Documentation
+
+Follow [PEP-257] and the [Sphinx guidelines] for documentation. In particular:
+
+* Wrap docstrings at 72 characters.
+* Use double-quotes for all docstrings.
+* Write all inline comments in lower-case, except where using a name/initialism.
+* Document **all** library functions/classes completely. Tests, however, only need a test case docstring.
+
+To summarise the docstring conventions:
+
+```python
+def my_function(athing, stuff=5):
+ """
+ Summary line here in imperative tense.
+
+ Longer description here...
+
+    :param athing: Details about this parameter here
+ :param stuff: Ditto
+
+ :returns: None
+ """
+ pass # code here...
+```
+
+### Validation
+
+All code should be checked with the PyLint linter and PEP8 style guide checker.
+Pylint can be run like so:
+
+```bash
+pylint <file or directory>
+```
+
+Most PyLint errors should be resolved. You will need to do this manually.
+However, there are cases where they may not make sense (e.g. you **need** to
+pass `N` parameters to a function). In this case, disable the relevant
+case using an inline `disable` like so:
+
+```python
+# pylint: disable=[code]
+```
+
+On the other hand, all PEP8 errors should be resolved.
+
+---
+
+[PEP-8]: http://legacy.python.org/dev/peps/pep-0008/
+[PEP-257]: http://legacy.python.org/dev/peps/pep-0257/
+[Sphinx guidelines]: https://pythonhosted.org/an_example_pypi_project/sphinx.html
diff --git a/ChangeLog b/ChangeLog
new file mode 100644
index 0000000..982973f
--- /dev/null
+++ b/ChangeLog
@@ -0,0 +1,4 @@
+CHANGES
+=======
+
+* Initial empty repository
diff --git a/INFO.yaml b/INFO.yaml
new file mode 100644
index 0000000..c721070
--- /dev/null
+++ b/INFO.yaml
@@ -0,0 +1,66 @@
+---
+project: 'Common Infrastructure Realization & Validation (CIRV)'
+project_creation_date: '2019-09-17'
+project_category: 'Integration and Testing'
+lifecycle_state: 'Incubation'
+project_lead: &opnfv_cirv_ptl
+ name: 'Chen Liang'
+ email: 'chenliangyjy@chinamobile.com'
+ company: 'chinamobile.com'
+ id: 'chenliangyjy'
+ timezone: 'CST'
+primary_contact: *opnfv_cirv_ptl
+issue_tracking:
+ type: 'jira'
+ url: 'https://jira.opnfv.org/projects/CIRV'
+ key: 'CIRV'
+mailing_list:
+ type: 'groups.io'
+ url: 'opnfv-tech-discuss@lists.opnfv.org'
+ tag: '[CIRV]'
+realtime_discussion:
+ type: irc
+ server: 'freenode.net'
+ channel: '#opnfv-cirv'
+meetings:
+ - type: 'Zoom'
+ # yamllint disable rule:line-length
+ agenda: 'https://wiki.lfnetworking.org/pages/viewpage.action?pageId=25364128'
+ url: https://zoom.us/j/694881078
+ repeats: 'weekly'
+ time: 'Wed 1300-1400 UTC'
+repositories:
+ - 'cirv'
+committers:
+ - <<: *opnfv_cirv_ptl
+ - name: 'Cedric Ollivier'
+ email: 'cedric.ollivier@orange.com'
+ company: 'orange.com'
+ id: 'ollivier'
+ - name: 'Trevor Cooper'
+ email: 'trevor.cooper@intel.com'
+ company: 'intel.com'
+ id: 'trev'
+ - name: 'Sridhar Rao'
+ email: 'sridhar.rao@spirent.com'
+ company: 'spirent.com'
+ id: 'sridharkn'
+ - name: 'Lincoln Lavoie'
+ email: 'lylavoie@iol.unh.edu'
+ company: 'iol.unh.edu'
+ id: 'lylavoie'
+ - name: 'Fu Qiao'
+ email: 'fuqiao@chinamobile.com'
+ company: 'chinamobile.com'
+ id: 'fuqiao'
+ - name: 'Chen Liang'
+ email: 'chenliangyjy@chinamobile.com'
+ company: 'chinamobile.com'
+ id: 'chenliangyjy'
+tsc:
+ approval: 'http://ircbot.wl.linuxfoundation.org/meetings/opnfv-meeting/2019/opnfv-meeting.2019-09-17-13.01.log.html'
+ changes:
+ - type: 'rename'
+ name: 'CNTT-RI -> CIRV'
+ link: 'http://ircbot.wl.linuxfoundation.org/meetings/opnfv-meeting/2019/opnfv-meeting.2019-10-29-13.00.log.html'
+ # yamllint enable rule:line-length
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..3922ffe
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2020 Open Platform for NFV Project, Inc. and its contributors
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..a49796a
--- /dev/null
+++ b/README.md
@@ -0,0 +1,30 @@
+<!---
+This work is licensed under a Creative Commons Attribution 4.0 International License.
+http://creativecommons.org/licenses/by/4.0
+-->
+
+# CIRV - Common Infrastructure Realization & Validation
+
+Documentation for this project is contained under the **./docs** directory.
+Additional information about CIRV project are available at [project wiki].
+
+---
+
+[project wiki]: https://wiki.opnfv.org/display/CIRV
+
+This is CIRV hardware delivery validation tool repos.
+You can find the hdv readme about redfish implementation at ./hdv/redfish/docs/readme.md
+
+
+TODO:
+1. refactor the config.yaml parsing by a friendly input with comments explanation support
+2. adding a case flag (enable/disable) to run some cases, e.g. reboot the server.
+3. readme.txt: adding instructions on how to extend the function tests.
+4. check more redfish interfaces.
+5. support large scale nodes.
+6. extend for multi vendors for redfish validation
+7. adding efficient unit test for code
+8. implement ipmi interface
+
+
+ \ No newline at end of file
diff --git a/check b/check
new file mode 100644
index 0000000..5b10198
--- /dev/null
+++ b/check
@@ -0,0 +1,179 @@
+#!/bin/bash
+
+# Copyright 2017 Intel Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# CIRV code checker
+
+PYLINT="pylint"
+PYLINT_RC='pylintrc'
+PYLINT_RATING_GATE="10"
+PYLINT_RATING_MIN=$PYLINT_RATING_GATE
+FILE_REGEX="(^valid|\.py)$"
+FILE_LIST="/tmp/cirv_check_list.txt"
+BC=`which bc`
+
+# print usage if requested
+function usage() {
+ cat <<EOM
+Usage: $0 [TARGET]...
+
+Performs code check for defined TARGETs. Target can be file or directory.
+In case that directory is specified, then it will be searched recursively
+for all python files.
+If TARGET is not specified, then all python files from current CIRV
+repository will be checked.
+File will pass check if its pylint rating is greater or equal to $PYLINT_RATING_GATE.
+Otherwise gained pylint rating will be displayed.
+
+
+ -h, --help Script usage
+ -b, --black Suppress colours. Output will be black&white.
+ -m, --modified Script will check python files, which have
+ been modified within current repository.
+
+Examples:
+ ./check
+
+ Check all python files in current CIRV repository
+
+ ./check tools/sdv/valid
+
+ Check just one file.
+
+ ./check -m
+
+ Check all modified files in current repository
+
+ ./check tools/sdv
+
+ Check all python files in given directories
+
+EOM
+}
+
+# compare pylint result with predefined gate
+function rating_is_ok() {
+ # bc is not part of basic Centos installation
+ # so let us check if it is available
+ if [ "x$BC" == "x" ] ; then
+ # no bc, so do integer comparison only
+ int_rating=`echo $1 | sed -e 's/\..*$//'`
+ int_rating_min=`echo $PYLINT_RATING_MIN | sed -e 's/\..*$//'`
+ [ $int_rating -lt $int_rating_min ] && PYLINT_RATING_MIN=$int_rating
+ if [ $int_rating -lt $PYLINT_RATING_GATE ] ; then
+ return 1
+ else
+ return 0
+ fi
+ else
+ if (( $(echo "$1<$PYLINT_RATING_MIN" | bc -l) )) ; then
+ PYLINT_RATING_MIN=$1
+ fi
+ if (( $(echo "$1<$PYLINT_RATING_GATE" | bc -l) )) ; then
+ return 1
+ else
+ return 0
+ fi
+ fi
+}
+
+##### MAIN #####
+# check if help is requested
+if [ "x$1" == "x-h" -o "x$1" == "x--help" ] ; then
+ usage
+ exit 0
+fi
+
+# set colours
+if [ "x$1" == "x-b" -o "x$1" == "x--black" ] ; then
+ shift
+ RED=""
+ GREEN=""
+ BLACK=""
+else
+ RED="\e[31m"
+ GREEN="\e[32m"
+ BLACK="\e[0m"
+fi
+
+
+# check if pylint is available
+if ! which $PYLINT &>/dev/null ; then
+ echo "$PYLINT is not available, thus check can't be executed"
+ exit 1
+fi
+
+# check if we were run within cirv directory
+if [ ! -f INFO.yaml 2> /dev/null ] ; then
+ echo "`basename $0` must be run from root directory"
+ exit 2
+fi
+
+# get list of files to be checked
+rm $FILE_LIST &> /dev/null
+if [ "x$1" == "x-m" -o "x$1" == "x--modified" ] ; then
+ # check of modified files requested
+ git status --porcelain | cut -b4- | egrep -i "${FILE_REGEX}" | sort > $FILE_LIST
+elif [ "x$*" == "x" ] ; then
+ # list is empty, check all python files
+ git ls-tree --name-only -r HEAD | egrep -i "${FILE_REGEX}" | sort > $FILE_LIST
+else
+ for item in $* ; do
+ if [ -d $item ] ; then
+ git ls-tree --name-only -r HEAD $item | egrep -i "${FILE_REGEX}" | sort >> $FILE_LIST
+ elif [ -f $item ] ; then
+ echo $item >> $FILE_LIST
+ else
+ echo "$item doesn't exist, thus check was aborted"
+ exit 3
+ fi
+ done
+fi
+
+# check if there is anything to check
+echo "Execution of pylint checks:"
+if [ -s $FILE_LIST ] ; then
+ for pyfile in `cat $FILE_LIST | sort` ; do
+ # get base name
+ pyfile_basename="'"`basename $pyfile .py`"'"
+ # run pylint and extract final rating
+ output=`$PYLINT --rcfile $PYLINT_RC $pyfile 2>/dev/null`
+ rating=`echo -e $output | tail -n3 | grep rated | sed -e 's/^.*rated at \(-\?[0-9.]*\).*$/\1/'`
+        # evaluate and display acquired rating
+ if [ "x$rating" == "x" ] ; then
+ # rating is not available for files without python statements
+ printf " %-70s %-6s\n" $pyfile "NA"
+ elif rating_is_ok $rating ; then
+ printf " %-70s ${GREEN}%-6s${BLACK}\n" $pyfile "OK"
+ else
+ echo -e "$output" | awk '/^\*+ Module|^[A-Z]\:/'
+ printf " %-70s ${RED}%-6s${BLACK}\n" $pyfile $rating
+ fi
+ done
+else
+ echo "Nothing to check."
+ exit 4
+fi
+
+# clean up
+rm $FILE_LIST &> /dev/null
+
+if [ "$PYLINT_RATING_MIN" != "$PYLINT_RATING_GATE" ] ; then
+ echo -e "Pylint check has failed. All files must have score ${PYLINT_RATING_GATE}.\n"
+ exit 1
+else
+ exit 0
+fi
+##### MAIN end #####
diff --git a/ci/build-cirv.sh b/ci/build-cirv.sh
new file mode 100644
index 0000000..3ff7b47
--- /dev/null
+++ b/ci/build-cirv.sh
@@ -0,0 +1,124 @@
+#!/bin/bash
+#
+# Copyright 2020-2018 Spirent Communications, Intel Corporation., Tieto
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# CIRV build execution script
+
+# Usage:
+# build-cirv.sh job_type
+# where job_type is one of "verify", "merge", "daily"
+# Version-1: 'verify'
+
+#
+# exit codes
+#
+
+EXIT=0
+EXIT_SANITY_FAILED=1
+EXIT_PYLINT_FAILED=2
+
+#
+# configuration
+#
+
+SWV_BIN="./hdv/redfish/hdv.py"
+LOG_FILE_PREFIX="/tmp/cirv_build"
+DATE=$(date -u +"%Y-%m-%d_%H-%M-%S")
+BRANCH=${GIT_BRANCH##*/}
+CIRVENV_DIR="$HOME/cirvenv"
+# WORKSPACE="./"
+
+#
+# main
+#
+
+echo
+
+# enter workspace dir
+cd $WORKSPACE
+
+
+# create virtualenv if needed
+if [ ! -e $CIRVENV_DIR ] ; then
+ echo "Create CIRV environment"
+ echo "========================="
+ virtualenv --python=python3 "$CIRVENV_DIR"
+ echo
+fi
+
+# activate and update virtualenv
+echo "Update CIRV environment"
+echo "========================="
+source "$CIRVENV_DIR"/bin/activate
+pip install -r ./requirements.txt
+echo
+
+
+# execute pylint to check code quality
+function execute_cirv_pylint_check {
+ if ! ./check -b ; then
+ EXIT=$EXIT_PYLINT_FAILED
+ fi
+}
+
+# verify basic cirv functionality
+function execute_cirv_sanity {
+ DATE_SUFFIX=$(date -u +"%Y-%m-%d_%H-%M-%S")
+ LOG_FILE="${LOG_FILE_PREFIX}_sanity_${DATE_SUFFIX}.log"
+ echo "Execution of CIRV sanity checks:"
+ for PARAM in '--version' '--help'; do
+ echo -e "------------------------------------------------" >> $LOG_FILE
+ echo "$SWV_BIN $PARAM " >> $LOG_FILE
+ echo -e "------------------------------------------------" >> $LOG_FILE
+ $SWV_BIN $PARAM &>> $LOG_FILE
+ if $SWV_BIN $PARAM &>> $LOG_FILE ; then
+ printf " %-70s %-6s\n" "$SWV_BIN $PARAM" "OK"
+ else
+ printf " %-70s %-6s\n" "$SWV_BIN $PARAM" "FAILED"
+ EXIT=$EXIT_SANITY_TC_FAILED
+ fi
+ echo >> $LOG_FILE
+ done
+}
+
+# execute job based on passed parameter
+case $1 in
+ "verify")
+ echo "================="
+ echo "CIRV verify job"
+ echo "================="
+
+ #execute_cirv_pylint_check
+ execute_cirv_sanity
+
+ exit $EXIT
+ ;;
+ "merge")
+ echo "================"
+ echo "CIRV merge job"
+ echo "================"
+
+ exit $EXIT
+ ;;
+ *)
+ echo "================"
+ echo "CIRV daily job"
+ echo "================"
+
+ exit $EXIT
+ ;;
+esac
+
+exit $EXIT
diff --git a/hdv/__init__.py b/hdv/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/hdv/__init__.py
diff --git a/hdv/redfish/__init__.py b/hdv/redfish/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/hdv/redfish/__init__.py
diff --git a/hdv/redfish/conf/cases.xlsx b/hdv/redfish/conf/cases.xlsx
new file mode 100644
index 0000000..e7fc61d
--- /dev/null
+++ b/hdv/redfish/conf/cases.xlsx
Binary files differ
diff --git a/hdv/redfish/conf/cases.yaml b/hdv/redfish/conf/cases.yaml
new file mode 100644
index 0000000..5609708
--- /dev/null
+++ b/hdv/redfish/conf/cases.yaml
@@ -0,0 +1,517 @@
+---
+- case_name: set asset code
+ case_sn: 1
+ expected_code: 200
+ expected_result: '{"AssetTag": "CM_cc@1234"}'
+ group: asset managment
+ header: null
+ method: PATCH
+ request_body: '{"AssetTag": "CM_cc@1234"}'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
+- case_name: get asset code
+ case_sn: 2
+ expected_code: 200
+ expected_result: '{"AssetTag": "CM_cc@1234"}'
+ group: asset managment
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
+- case_name: set host name
+ case_sn: 3
+ expected_code: 200
+ expected_result: '{"HostName": "NFV-RPZJHZ-01B"}'
+ group: asset managment
+ header: null
+ method: PATCH
+ request_body: '{"HostName": "NFV-RPZJHZ-01B"}'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
+- case_name: check host name
+ case_sn: 4
+ expected_code: 200
+ expected_result: '{"HostName": "NFV-RPZJHZ-01B"}'
+ group: asset managment
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
+- case_name: check manufacturer
+ case_sn: 5
+ expected_code: 200
+ expected_result: '{"Manufacturer": "New H3C Technologies Co., Ltd."}'
+ group: asset managment
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
+- case_name: check model
+ case_sn: 6
+ expected_code: 200
+ expected_result: '{"Model": "UniServer R4900 G3"}'
+ group: asset managment
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
+- case_name: check serial number
+ case_sn: 7
+ expected_code: 200
+ expected_result: '{"SerialNumber": "N/A"}'
+ group: asset managment
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
+- case_name: check main board name
+ case_sn: 8
+ expected_code: 200
+ expected_result: '{"Oem":{"Mainboard": {"BoardName": "RS33M2C9S"}}}'
+ group: asset managment
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}
+- case_name: check main board serial number
+ case_sn: 9
+ expected_code: 200
+ expected_result: '{"Oem": {"Mainboard": {"SerialNumber": "N/A"}}}'
+ group: asset managment
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}
+- case_name: check BIOS version
+ case_sn: 10
+ expected_code: 200
+ expected_result: '{"BiosVersion": "2.00.35P01 V100R001B02D035SP01"}'
+ group: asset managment
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
+- case_name: check CPU amount
+ case_sn: 11
+ expected_code: 200
+ expected_result: '{"Members@odata.count": 2}'
+ group: compoment management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Processors
+- case_name: check CPU info
+ case_sn: 12
+ expected_code: 200
+ expected_result: '{ "count": 2, "Manufacturer": "Intel(R) Corporation", "MaxSpeedMHz":
+ 2300, "Model": "Intel(R) Xeon(R) Gold 5218N CPU @ 2.30GHz", "ProcessorArchitecture":
+ ["x86", "IA-64", "ARM", "MIPS", "OEM"], "Socket": [1, 2], "Status": { "Health":
+ "OK", "State": "Enabled" }, "TotalCores": 16, "TotalThreads":
+ 32}'
+ group: compoment management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}{cpu_id}
+- case_name: check memory mount
+ case_sn: 13
+ expected_code: 200
+ expected_result: '{"Members@odata.count": 12}'
+ group: compoment management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Memory
+- case_name: check memory information
+ case_sn: 14
+ expected_code: 200
+ expected_result: '{ "count": 12, "BaseModuleType": "RDIMM", "CapacityMiB":
+ 32768, "DeviceLocator": "N/A", "Manufacturer": ["Hynix Semiconductor", "Micron"], "MemoryDeviceType":
+ "DDR4", "OperatingSpeedMhz": 2666, "PartNumber": ["HMA84GR7AFR4N-VK","36ASF4G72PZ-2G6D1"], "Status":
+ { "Health": "OK", "State": "Enabled" }}'
+ group: compoment management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}{memory_id}
+- case_name: check raid card amount
+ case_sn: 15
+ expected_code: 200
+ expected_result: '{"Members@odata.count": 1}'
+ group: compoment management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Storages
+- case_name: check raid card information
+ case_sn: 16
+ expected_code: 200
+ expected_result: '{ "count": 1, "StorageControllers": [ { "FirmwareVersion":
+ "2.62", "Manufacturer": "H3C", "Model": "N/A", "Status":
+ { "Health": "OK", "State": "Enabled" } } ]}'
+ group: compoment management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}{storage_id}
+- case_name: check harddisk information
+ case_sn: 17
+ expected_code: 200
+ expected_result: '{ "count": 4, "CapacityBytes": [480102187008, 960193626112], "Location":
+ { "Info": "N/A", "InfoFormat": "DeviceName" }, "Manufacturer":
+ "ATA", "MediaType": "SSD", "Model": ["INTEL SSDSC2KB48", "INTEL SSDSC2KB96"], "Protocol":
+ "SATA", "Status": { "Health": "OK", "State": "Enabled" }}'
+ group: compoment management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}{drives_id}
+- case_name: check network interface adapter information
+ case_sn: 18
+ expected_code: 200
+ expected_result: '{ "count": 3, "Manufacturer": "Mellanox", "Model": "NIC-620F-B2-25Gb-2P-1-X", "Name":
+ ["PCIeSlot2", "PCIeSlot3", "PCIeSlot6"], "Oem": { "Public": { "CardModel":
+ "2*25GE", "RootBDF": ["0000:17:00.0", "0000:17:02.0", "0000:AE:02.0"], } }, "Status":
+ { "Health": "OK", "State": "Enabled" }}'
+ group: compoment management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}{networkadapters_id}
+- case_name: check network interface adapter port information
+ case_sn: 19
+ expected_code: 200
+ expected_result: '{ "count": 6, "AssociatedNetworkAddresses": [ "N/A" ], "Oem":
+ { "Public": { "BDF": "N/A", "PortType": "OpticalPort" } }, "PhysicalPortNumber":
+ ["1", "2"]}'
+ group: compoment management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}{networkports_id}
+- case_name: check fans information
+ case_sn: 20
+ expected_code: 200
+ expected_result: '{ "FanSummary": { "Count": 6 }, "Fans": [ { "MemberId":
+ "N/A", "Oem": { "Public": { "SpeedRatio":
+ "N/A" } }, "Status": { "Health":
+ "OK", "State": "Enabled" } }, { "MemberId":
+ "N/A", "Oem": { "Public": { "SpeedRatio":
+ "N/A" } }, "Status": { "Health":
+ "OK", "State": "Enabled" } },{ "MemberId":
+ "N/A", "Oem": { "Public": { "SpeedRatio":
+ "N/A" } }, "Status": { "Health":
+ "OK", "State": "Enabled" } },{ "MemberId":
+ "N/A", "Oem": { "Public": { "SpeedRatio":
+ "N/A" } }, "Status": { "Health":
+ "OK", "State": "Enabled" } },{ "MemberId":
+ "N/A", "Oem": { "Public": { "SpeedRatio":
+ "N/A" } }, "Status": { "Health":
+ "OK", "State": "Enabled" } },{ "MemberId":
+ "N/A", "Oem": { "Public": { "SpeedRatio":
+ "N/A" } }, "Status": { "Health":
+ "OK", "State": "Enabled" } } ],}'
+ group: compoment management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}/Thermal
+- case_name: check power amount
+ case_sn: 21
+ expected_code: 200
+ expected_result: '{ "DeviceMaxNum": { "PowerSupplyNum": 2},}'
+ group: compoment management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}
+- case_name: check power detail info
+ case_sn: 22
+ expected_code: 200
+ expected_result: '{ "PowerControl": [ { "PowerConsumedWatts":
+ "N/A","Status":{ "Health": "OK", "State": "Enabled" } }, ], "PowerSupplies":
+ [ { "LineInputVoltage": "N/A", "MemberId": "1", "PowerCapacityWatts":
+ 800,"Status": { "Health": "OK", "State": "Enabled" } }, { "LineInputVoltage":
+ "N/A", "MemberId": "2", "PowerCapacityWatts": 800,"Status":
+ { "Health": "OK", "State": "Enabled" } } ],}'
+ group: compoment management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}/Power
+- case_name: check logical dirve health status
+ case_sn: 23
+ expected_code: 200
+ expected_result: '{ "count": 2, "Name": "N/A", "Status": { "Health":
+ ["OK", "Critical"], "State": "Enabled" }}'
+ group: compoment management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}{volume_id}
+- case_name: check server temperature air intake
+ case_sn: 24
+ expected_code: 200
+ expected_result: '{ "Temperatures": [ { "Name": "INPUT_TEMP", "ReadingCelsius":
+ "N/A", } ]}'
+ group: sensor management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}/Thermal
+- case_name: check cpu temperature
+ case_sn: 25
+ expected_code: 200
+ expected_result: '{ "Temperatures": [ { "Name": "INPUT_TEMP", "ReadingCelsius":
+ "N/A", }, { "Name": "CPU1_TEMP", "ReadingCelsius":
+ "N/A", }, { "Name": "CPU2_TEMP", "ReadingCelsius":
+ "N/A", }, ]}'
+ group: sensor management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}/Thermal
+- case_name: check server power state
+ case_sn: 26
+ expected_code: 200
+ expected_result: '{"PowerState": "On"}'
+ group: power management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
+- case_name: remote power on server
+ case_sn: 27
+ expected_code: 200
+ expected_result: '{"error":{"@Message.ExtendedInfo": [{"Message": "Successfully
+ Completed Request", "Severity":"OK"}]}}'
+ group: power management
+ header: null
+ method: POST
+ request_body: '{"ResetType": "On"}'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Actions/ComputerSystem.Reset
+- case_name: remote power off server
+ case_sn: 28
+ expected_code: 200
+ expected_result: '{"error":{"@Message.ExtendedInfo": [{"Message": "Successfully
+ Completed Request", "Severity":"OK"}]}}'
+ group: power management
+ header: null
+ method: POST
+ request_body: '{"ResetType": "GracefulShutdown"}'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Actions/ComputerSystem.Reset
+- case_name: remote reset server
+ case_sn: 29
+ expected_code: 200
+ expected_result: '{"error":{"@Message.ExtendedInfo": [{"Message": "Successfully
+ Completed Request", "Severity":"OK"}]}}'
+ group: power management
+ header: null
+ method: POST
+ request_body: '{"ResetType": "ForceRestart"}'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Actions/ComputerSystem.Reset
+- case_name: remote configure CPU in hyperthreading disabled
+ case_sn: 30
+ expected_code: 200
+ expected_result: '{"Attributes": {"ProcessorHyperThreading": "Disabled"}}'
+ group: remote configure
+ header: null
+ method: PATCH
+ request_body: '{ "Attributes": { "ProcessorHyperThreading": "Disabled" }}'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
+- case_name: remote get CPU hyperthreading in disabled
+ case_sn: 31
+ expected_code: 200
+ expected_result: '{"Attributes": {"ProcessorHyperThreading": "Disabled"}}'
+ group: remote configure
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
+- case_name: remote configure CPU in hyperthreading enabled
+ case_sn: 32
+ expected_code: 200
+ expected_result: '{"Attributes": {"ProcessorHyperThreading": "Enabled"}}'
+ group: remote configure
+ header: null
+ method: PATCH
+ request_body: '{ "Attributes": { "ProcessorHyperThreading": "Enabled" }}'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
+- case_name: remote get CPU hyperthreading in enabled
+ case_sn: 33
+ expected_code: 200
+ expected_result: '{"Attributes": {"ProcessorHyperThreading": "Enabled"}}'
+ group: remote configure
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
+- case_name: disable PXE mode
+ case_sn: 34
+ expected_code: 200
+ expected_result: '{ "Attributes": { "IPv4PXESupport": "Disabled" }}'
+ group: remote configure
+ header: null
+ method: PATCH
+ request_body: '{ "Attributes": { "IPv4PXESupport": "Disabled" }}'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
+- case_name: check IPV4 PXE mode in disabled
+ case_sn: 35
+ expected_code: 200
+ expected_result: '{ "Attributes": { "IPv4PXESupport": "Disabled" }}'
+ group: remote configure
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
+- case_name: enable PXE mode
+ case_sn: 36
+ expected_code: 200
+ expected_result: '{ "Attributes": { "IPv4PXESupport": "Enabled" }}'
+ group: remote configure
+ header: null
+ method: PATCH
+ request_body: '{ "Attributes": { "IPv4PXESupport": "Enabled" }}'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
+- case_name: check ipv4 PXE mode in enabled
+ case_sn: 37
+ expected_code: 200
+ expected_result: '{ "Attributes": { "IPv4PXESupport": "Enabled" }}'
+ group: remote configure
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
+- case_name: set boot type order
+ case_sn: 38
+ expected_code: 200
+ expected_result: '{ "Attributes": { "BootTypeOrder0": "HardDiskDrive", "BootTypeOrder1":
+ "DVDROMDrive", "BootTypeOrder2": "PXE", "BootTypeOrder3":
+ "Others", }}'
+ group: remote interface management
+ header: null
+ method: PATCH
+ request_body: '{ "Attributes": { "BootTypeOrder0": "HardDiskDrive", "BootTypeOrder1":
+ "DVDROMDrive", "BootTypeOrder2": "PXE", "BootTypeOrder3":
+ "Others", }}'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
+- case_name: check boot order
+ case_sn: 39
+ expected_code: 200
+ expected_result: '{ "Attributes": { "BootTypeOrder0": "HardDiskDrive", "BootTypeOrder1":
+ "DVDROMDrive", "BootTypeOrder2": "PXE", "BootTypeOrder3":
+ "Others", }}'
+ group: remote interface management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
+- case_name: configure boot order
+ case_sn: 40
+ expected_code: 200
+ expected_result: '{ "Attributes": { "BootTypeOrder0": "DVDROMDrive", "BootTypeOrder1":
+ "HardDiskDrive", "BootTypeOrder2": "Others", "BootTypeOrder3":
+ "PXE", }}'
+ group: remote interface management
+ header: null
+ method: PATCH
+ request_body: '{ "Attributes": { "BootTypeOrder0": "DVDROMDrive", "BootTypeOrder1":
+ "HardDiskDrive", "BootTypeOrder2": "Others", "BootTypeOrder3":
+ "PXE", }}'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
+- case_name: check boot order
+ case_sn: 41
+ expected_code: 200
+ expected_result: '{ "Attributes": { "BootTypeOrder0": "DVDROMDrive", "BootTypeOrder1":
+ "HardDiskDrive", "BootTypeOrder2": "Others", "BootTypeOrder3":
+ "PXE", }}'
+ group: remote interface management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
+- case_name: configure new boot PXE order first
+ case_sn: 42
+ expected_code: 200
+ expected_result: '{ "Attributes": { "BootTypeOrder0": "PXE", "BootTypeOrder1":
+ "HardDiskDrive", "BootTypeOrder2": "DVDROMDrive", "BootTypeOrder3":
+ "Others", }}'
+ group: remote interface management
+ header: null
+ method: PATCH
+ request_body: '{ "Attributes": { "BootTypeOrder0": "PXE", "BootTypeOrder1":
+ "HardDiskDrive", "BootTypeOrder2": "DVDROMDrive", "BootTypeOrder3":
+ "Others", }}'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
+- case_name: check boot order PEX order first
+ case_sn: 43
+ expected_code: 200
+ expected_result: '{ "Attributes": { "BootTypeOrder0": "PXE", "BootTypeOrder1":
+ "HardDiskDrive", "BootTypeOrder2": "DVDROMDrive", "BootTypeOrder3":
+ "Others", }}'
+ group: remote interface management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
+- case_name: check BMC Firmware version
+ case_sn: 44
+ expected_code: 200
+ expected_result: '{"count": 1, "FirmwareVersion": "1.30.11P01 HDM V100R001B03D011SP01"}'
+ group: remote interface management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}{manager_id}
+- case_name: change BMC account
+ case_sn: 45
+ expected_code: 200
+ expected_result: '{"UserName": "CM_cc@1234","RoleId": "Administrator",}'
+ group: remote interface management
+ header: null
+ method: PATCH
+ request_body: '{"UserName": "CM_cc@1234","Password": "1234@CM_cc","RoleId": "Administrator",}'
+ url: https://{bmc_ip}/redfish/v1/AccountService/Accounts/3
+- case_name: configure BMC ip in static, ipv4
+ case_sn: 46
+ expected_code: 200
+ expected_result: '{"count": 1, "IPv4Addresses": [ { "Address":
+ "192.168.66.120", "AddressOrigin": "Static", "Gateway":
+ "192.168.66.1", "SubnetMask": "255.255.255.128" } ]}'
+ group: remote interface management
+ header: null
+ method: PATCH
+ request_body: '{ "IPv4Addresses": [ { "Address": "192.168.66.120", "AddressOrigin":
+ "Static", "Gateway": "192.168.66.1", "SubnetMask": "255.255.255.128" } ]}'
+ url: https://{bmc_ip}{manager_id}/EthernetInterfaces/eth1
+- case_name: configure BMC ip in DHCP, gateway and subnet mask ipv4
+ case_sn: 47
+ expected_code: 200
+ expected_result: '{"count": 1, "IPv4Addresses": [ { "Address":
+ "192.168.66.120", "AddressOrigin": "DHCP", "Gateway": "192.168.66.1", "SubnetMask":
+ "255.255.255.128" } ]}'
+ group: remote interface management
+ header: null
+ method: PATCH
+ request_body: '{ "IPv4Addresses": [ { "AddressOrigin": "DHCP" } ]}'
+ url: https://{bmc_ip}{manager_id}/EthernetInterfaces/eth1
+- case_name: configure BMC ip in static, ipv4
+ case_sn: 48
+ expected_code: 200
+ expected_result: '{"count": 1, "IPv4Addresses": [ { "AddressOrigin":
+ "DHCP", } ]}'
+ group: remote interface management
+ header: null
+ method: PATCH
+ request_body: '{ "IPv4Addresses": [ { "AddressOrigin": "DHCP" } ]}'
+ url: https://{bmc_ip}{manager_id}/EthernetInterfaces/eth1
+- case_name: configure BMC ip in static, ipv6
+ case_sn: 49
+ expected_code: 200
+ expected_result: '{"count": 1, "IPv6Addresses": [ { "Address":
+ "N/A", "AddressOrigin": "N/A", "PrefixLength": 64 }, { "Address":
+ "2019::11", "AddressOrigin": "Static", "PrefixLength": 64 } ]}'
+ group: remote interface management
+ header: null
+ method: PATCH
+ request_body: '{ "IPv6Addresses": [ { "Address": "2019::11", "AddressOrigin":
+ "Static", "PrefixLength": 64 } ]}'
+ url: https://{bmc_ip}{manager_id}/EthernetInterfaces/eth1
diff --git a/hdv/redfish/conf/config.yaml b/hdv/redfish/conf/config.yaml
new file mode 100644
index 0000000..b57b71c
--- /dev/null
+++ b/hdv/redfish/conf/config.yaml
@@ -0,0 +1,17 @@
+---
+bmc_ip: 172.29.160.22
+bmc_user: root
+bmc_pwd: Huawei12#$
+system_id: 1
+chassis_id: 1
+attr_name: 3
+pro_seq: 4
+url_seq: 5
+req_header_seq: 6
+req_body_seq: 7
+expect_return_code_seq: 8
+expect_return_value_seq: 9
+return_code_seq: 10
+return_value_seq: 11
+detail_result: 12
+final_result: 13
diff --git a/hdv/redfish/conf/depends.yaml b/hdv/redfish/conf/depends.yaml
new file mode 100644
index 0000000..eecdcd4
--- /dev/null
+++ b/hdv/redfish/conf/depends.yaml
@@ -0,0 +1,33 @@
+---
+- component_id: cpu_id
+ key_flags: Members
+ pro_value: GET
+ url_value: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Processors
+- component_id: memory_id
+ key_flags: Members
+ pro_value: GET
+ url_value: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Memory
+- component_id: storage_id
+ key_flags: Members
+ pro_value: GET
+ url_value: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Storages
+- component_id: drives_id
+ key_flags: Drives
+ pro_value: GET
+ url_value: https://{bmc_ip}{storage_id}
+- component_id: networkadapters_id
+ key_flags: Members
+ pro_value: GET
+ url_value: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}/NetworkAdapters
+- component_id: networkports_id
+ key_flags: Controllers:Link:NetworkPorts
+ pro_value: GET
+ url_value: https://{bmc_ip}{networkadapters_id}
+- component_id: volume_id
+ key_flags: Members
+ pro_value: GET
+ url_value: https://{bmc_ip}{storage_id}/Volumes
+- component_id: manager_id
+ key_flags: Members
+ pro_value: GET
+ url_value: https://{bmc_ip}/redfish/v1/Managers
diff --git a/hdv/redfish/conf/report.yaml b/hdv/redfish/conf/report.yaml
new file mode 100644
index 0000000..d396360
--- /dev/null
+++ b/hdv/redfish/conf/report.yaml
@@ -0,0 +1,832 @@
+---
+- case_name: set asset code
+ case_sn: 1
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure'', ''AssetTag'': "Failure, expect value: CM_cc@1234, return value: Can''t
+ find the key AssetTag in return value"}]}'
+ expected_code: 200
+ expected_result: '{"AssetTag": "CM_cc@1234"}'
+ final_rst: Failure
+ group: asset management
+ header: null
+ method: PATCH
+ request_body: '{"AssetTag": "CM_cc@1234"}'
+ return_code_seq: '[''N/A'']'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
+- case_name: get asset code
+ case_sn: 2
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Success'', ''AssetTag'': ''Success''}]}'
+ expected_code: 200
+ expected_result: '{"AssetTag": "CM_cc@1234"}'
+ final_rst: Success
+ group: asset management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
+- case_name: set host name
+ case_sn: 3
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure'', ''HostName'': "Failure, expect value: NFV-RPZJHZ-01B, return value:
+ Can''t find the key HostName in return value"}]}'
+ expected_code: 200
+ expected_result: '{"HostName": "NFV-RPZJHZ-01B"}'
+ final_rst: Failure
+ group: asset management
+ header: null
+ method: PATCH
+ request_body: '{"HostName": "NFV-RPZJHZ-01B"}'
+ return_code_seq: '[''N/A'']'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
+- case_name: check host name
+ case_sn: 4
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Success'', ''HostName'': "Failure, expect value: NFV-RPZJHZ-01B, return value:
+ Can''t find the key HostName in return value"}]}'
+ expected_code: 200
+ expected_result: '{"HostName": "NFV-RPZJHZ-01B"}'
+ final_rst: Failure
+ group: asset management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
+- case_name: check manufacturer
+ case_sn: 5
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Success'', ''Manufacturer'': ''Failure, expect value: New H3C Technologies Co.,
+ Ltd., return value: Huawei''}]}'
+ expected_code: 200
+ expected_result: '{"Manufacturer": "New H3C Technologies Co., Ltd."}'
+ final_rst: Failure
+ group: asset management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
+- case_name: check model
+ case_sn: 6
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Success'', ''Model'': ''Failure, expect value: UniServer R4900 G3, return value:
+ RH2288H V3''}]}'
+ expected_code: 200
+ expected_result: '{"Model": "UniServer R4900 G3"}'
+ final_rst: Failure
+ group: asset management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
+- case_name: check serial number
+ case_sn: 7
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Success'', ''SerialNumber'': ''Success''}]}'
+ expected_code: 200
+ expected_result: '{"SerialNumber": "N/A"}'
+ final_rst: Success
+ group: asset management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
+- case_name: check main board name
+ case_sn: 8
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Success'', ''Oem'': "Failure, expect value: {''Mainboard'': {''BoardName'':
+ ''RS33M2C9S''}}, return value: Can''t find the key Oem in return value"}]}'
+ expected_code: 200
+ expected_result: '{"Oem":{"Mainboard": {"BoardName": "RS33M2C9S"}}}'
+ final_rst: Failure
+ group: asset management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200]'
+ url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}
+- case_name: check main board serial number
+ case_sn: 9
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Success'', ''Oem'': "Failure, expect value: {''Mainboard'': {''SerialNumber'':
+ ''N/A''}}, return value: Can''t find the key Oem in return value"}]}'
+ expected_code: 200
+ expected_result: '{"Oem": {"Mainboard": {"SerialNumber": "N/A"}}}'
+ final_rst: Failure
+ group: asset management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200]'
+ url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}
+- case_name: check BIOS version
+ case_sn: 10
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Success'', ''BiosVersion'': ''Failure, expect value: 2.00.35P01 V100R001B02D035SP01,
+ return value: 3.63''}]}'
+ expected_code: 200
+ expected_result: '{"BiosVersion": "2.00.35P01 V100R001B02D035SP01"}'
+ final_rst: Failure
+ group: asset management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
+- case_name: check CPU amount
+ case_sn: 11
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Success'', ''Members@odata.count'': ''Success''}]}'
+ expected_code: 200
+ expected_result: '{"Members@odata.count": 2}'
+ final_rst: Success
+ group: component management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Processors
+- case_name: check CPU info
+ case_sn: 12
+ details_result: '{''count'': ''Success'', ''info'': [{''return_code'': ''Success'',
+ ''Manufacturer'': ''Success'', ''MaxSpeedMHz'': ''Failure, expect value: 2300,
+ return value: 3500'', ''Model'': ''Failure, expect value: Intel(R) Xeon(R) Gold
+ 5218N CPU @ 2.30GHz, return value: Intel(R) Xeon(R) CPU E5-2620 v4 @ 2.10GHz'',
+ ''ProcessorArchitecture'': ''Success'', ''Socket'': ''Failure, expect value: [1,
+ 2], return value: 0'', ''Status'': {''Health'': ''Success'', ''State'': ''Success''},
+ ''TotalCores'': ''Failure, expect value: 16, return value: 8'', ''TotalThreads'':
+ ''Failure, expect value: 32, return value: 16''}, {''return_code'': ''Success'',
+ ''Manufacturer'': ''Success'', ''MaxSpeedMHz'': ''Failure, expect value: 2300,
+ return value: 3500'', ''Model'': ''Failure, expect value: Intel(R) Xeon(R) Gold
+ 5218N CPU @ 2.30GHz, return value: Intel(R) Xeon(R) CPU E5-2620 v4 @ 2.10GHz'',
+ ''ProcessorArchitecture'': ''Success'', ''Socket'': ''Success'', ''Status'': {''Health'':
+ ''Success'', ''State'': ''Success''}, ''TotalCores'': ''Failure, expect value:
+ 16, return value: 8'', ''TotalThreads'': ''Failure, expect value: 32, return value:
+ 16''}]}'
+ expected_code: 200
+ expected_result: '{ "count": 2, "Manufacturer": "Intel(R) Corporation", "MaxSpeedMHz":
+ 2300, "Model": "Intel(R) Xeon(R) Gold 5218N CPU @ 2.30GHz", "ProcessorArchitecture":
+ ["x86", "IA-64", "ARM", "MIPS", "OEM"], "Socket": [1, 2], "Status": { "Health":
+ "OK", "State": "Enabled" }, "TotalCores": 16, "TotalThreads":
+ 32}'
+ final_rst: Failure
+ group: component management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200, 200]'
+ url: https://{bmc_ip}{cpu_id}
+- case_name: check memory amount
+ case_sn: 13
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Success'', ''Members@odata.count'': ''Failure, expect value: 12, return value:
+ 4''}]}'
+ expected_code: 200
+ expected_result: '{"Members@odata.count": 12}'
+ final_rst: Failure
+ group: component management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Memory
+- case_name: check memory information
+ case_sn: 14
+ details_result: '{''count'': ''Failure, the actual num is 4'', ''info'': [{''return_code'':
+ ''Success'', ''BaseModuleType'': "Failure, expect value: RDIMM, return value:
+ Can''t find the key BaseModuleType in return value", ''CapacityMiB'': ''Success'',
+ ''DeviceLocator'': ''Success'', ''Manufacturer'': ''Success'', ''MemoryDeviceType'':
+ ''Success'', ''OperatingSpeedMhz'': ''Failure, expect value: 2666, return value:
+ 2400'', ''PartNumber'': "Failure, expect value: [''HMA84GR7AFR4N-VK'', ''36ASF4G72PZ-2G6D1''],
+ return value: Can''t find the key PartNumber in return value", ''Status'': {''Health'':
+ ''Success'', ''State'': ''Success''}}, {''return_code'': ''Success'', ''BaseModuleType'':
+ "Failure, expect value: RDIMM, return value: Can''t find the key BaseModuleType
+ in return value", ''CapacityMiB'': ''Success'', ''DeviceLocator'': ''Success'',
+ ''Manufacturer'': ''Success'', ''MemoryDeviceType'': ''Success'', ''OperatingSpeedMhz'':
+ ''Failure, expect value: 2666, return value: 2400'', ''PartNumber'': "Failure,
+ expect value: [''HMA84GR7AFR4N-VK'', ''36ASF4G72PZ-2G6D1''], return value: Can''t
+ find the key PartNumber in return value", ''Status'': {''Health'': ''Success'',
+ ''State'': ''Success''}}, {''return_code'': ''Success'', ''BaseModuleType'': "Failure,
+ expect value: RDIMM, return value: Can''t find the key BaseModuleType in return
+ value", ''CapacityMiB'': ''Success'', ''DeviceLocator'': ''Success'', ''Manufacturer'':
+ ''Success'', ''MemoryDeviceType'': ''Success'', ''OperatingSpeedMhz'': ''Failure,
+ expect value: 2666, return value: 2400'', ''PartNumber'': "Failure, expect value:
+ [''HMA84GR7AFR4N-VK'', ''36ASF4G72PZ-2G6D1''], return value: Can''t find the key
+ PartNumber in return value", ''Status'': {''Health'': ''Success'', ''State'':
+ ''Success''}}, {''return_code'': ''Success'', ''BaseModuleType'': "Failure, expect
+ value: RDIMM, return value: Can''t find the key BaseModuleType in return value",
+ ''CapacityMiB'': ''Success'', ''DeviceLocator'': ''Success'', ''Manufacturer'':
+ ''Success'', ''MemoryDeviceType'': ''Success'', ''OperatingSpeedMhz'': ''Failure,
+ expect value: 2666, return value: 2400'', ''PartNumber'': "Failure, expect value:
+ [''HMA84GR7AFR4N-VK'', ''36ASF4G72PZ-2G6D1''], return value: Can''t find the key
+ PartNumber in return value", ''Status'': {''Health'': ''Success'', ''State'':
+ ''Success''}}]}'
+ expected_code: 200
+ expected_result: '{ "count": 12, "BaseModuleType": "RDIMM", "CapacityMiB":
+ 32768, "DeviceLocator": "N/A", "Manufacturer": ["Hynix Semiconductor", "Micron"], "MemoryDeviceType":
+ "DDR4", "OperatingSpeedMhz": 2666, "PartNumber": ["HMA84GR7AFR4N-VK","36ASF4G72PZ-2G6D1"], "Status":
+ { "Health": "OK", "State": "Enabled" }}'
+ final_rst: Failure
+ group: component management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200, 200, 200, 200]'
+ url: https://{bmc_ip}{memory_id}
+- case_name: check raid card amount
+ case_sn: 15
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Success'', ''Members@odata.count'': ''Success''}]}'
+ expected_code: 200
+ expected_result: '{"Members@odata.count": 1}'
+ final_rst: Success
+ group: component management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Storages
+- case_name: check raid card information
+ case_sn: 16
+ details_result: '{''count'': ''Success'', ''info'': [{''return_code'': ''Success'',
+ ''StorageControllers'': [{''FirmwareVersion'': ''2.62'', ''Manufacturer'': ''H3C'',
+ ''Model'': ''N/A'', ''Status'': {''Health'': ''Success'', ''State'': ''Success''}}]}]}'
+ expected_code: 200
+ expected_result: '{ "count": 1, "StorageControllers": [ { "FirmwareVersion":
+ "2.62", "Manufacturer": "H3C", "Model": "N/A", "Status":
+ { "Health": "OK", "State": "Enabled" } } ]}'
+ final_rst: Failure
+ group: component management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200]'
+ url: https://{bmc_ip}{storage_id}
+- case_name: check harddisk information
+ case_sn: 17
+ details_result: '{''count'': ''Failure, the actual num is 6'', ''info'': [{''return_code'':
+ ''Success'', ''CapacityBytes'': ''Failure, expect value: [480102187008, 960193626112],
+ return value: None'', ''Location'': {''Info'': ''N/A'', ''InfoFormat'': ''DeviceName''},
+ ''Manufacturer'': ''ATA'', ''MediaType'': ''SSD'', ''Model'': "Failure, expect
+ value: [''INTEL SSDSC2KB48'', ''INTEL SSDSC2KB96''], return value: None", ''Protocol'':
+ ''Success'', ''Status'': {''Health'': ''Success'', ''State'': ''Enabled''}}, {''return_code'':
+ ''Success'', ''CapacityBytes'': ''Failure, expect value: [480102187008, 960193626112],
+ return value: None'', ''Location'': {''Info'': ''N/A'', ''InfoFormat'': ''DeviceName''},
+ ''Manufacturer'': ''ATA'', ''MediaType'': ''SSD'', ''Model'': "Failure, expect
+ value: [''INTEL SSDSC2KB48'', ''INTEL SSDSC2KB96''], return value: None", ''Protocol'':
+ ''Success'', ''Status'': {''Health'': ''Success'', ''State'': ''Enabled''}}, {''return_code'':
+ ''Success'', ''CapacityBytes'': ''Failure, expect value: [480102187008, 960193626112],
+ return value: None'', ''Location'': {''Info'': ''N/A'', ''InfoFormat'': ''DeviceName''},
+ ''Manufacturer'': ''ATA'', ''MediaType'': ''SSD'', ''Model'': "Failure, expect
+ value: [''INTEL SSDSC2KB48'', ''INTEL SSDSC2KB96''], return value: None", ''Protocol'':
+ ''Success'', ''Status'': {''Health'': ''Success'', ''State'': ''Enabled''}}, {''return_code'':
+ ''Success'', ''CapacityBytes'': ''Failure, expect value: [480102187008, 960193626112],
+ return value: None'', ''Location'': {''Info'': ''N/A'', ''InfoFormat'': ''DeviceName''},
+ ''Manufacturer'': ''ATA'', ''MediaType'': ''SSD'', ''Model'': "Failure, expect
+ value: [''INTEL SSDSC2KB48'', ''INTEL SSDSC2KB96''], return value: None", ''Protocol'':
+ ''Success'', ''Status'': {''Health'': ''Success'', ''State'': ''Enabled''}}, {''return_code'':
+ ''Success'', ''CapacityBytes'': ''Failure, expect value: [480102187008, 960193626112],
+ return value: None'', ''Location'': {''Info'': ''N/A'', ''InfoFormat'': ''DeviceName''},
+ ''Manufacturer'': ''ATA'', ''MediaType'': ''SSD'', ''Model'': "Failure, expect
+ value: [''INTEL SSDSC2KB48'', ''INTEL SSDSC2KB96''], return value: None", ''Protocol'':
+ ''Success'', ''Status'': {''Health'': ''Success'', ''State'': ''Enabled''}}, {''return_code'':
+ ''Success'', ''CapacityBytes'': ''Failure, expect value: [480102187008, 960193626112],
+ return value: None'', ''Location'': {''Info'': ''N/A'', ''InfoFormat'': ''DeviceName''},
+ ''Manufacturer'': ''ATA'', ''MediaType'': ''SSD'', ''Model'': "Failure, expect
+ value: [''INTEL SSDSC2KB48'', ''INTEL SSDSC2KB96''], return value: None", ''Protocol'':
+ ''Success'', ''Status'': {''Health'': ''Success'', ''State'': ''Enabled''}}]}'
+ expected_code: 200
+ expected_result: '{ "count": 4, "CapacityBytes": [480102187008, 960193626112], "Location":
+ { "Info": "N/A", "InfoFormat": "DeviceName" }, "Manufacturer":
+ "ATA", "MediaType": "SSD", "Model": ["INTEL SSDSC2KB48", "INTEL SSDSC2KB96"], "Protocol":
+ "SATA", "Status": { "Health": "OK", "State": "Enabled" }}'
+ final_rst: Failure
+ group: component management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200, 200, 200, 200, 200, 200]'
+ url: https://{bmc_ip}{drives_id}
+- case_name: check network interface adapter information
+ case_sn: 18
+ details_result: N/A
+ expected_code: 200
+ expected_result: '{ "count": 3, "Manufacturer": "Mellanox", "Model": "NIC-620F-B2-25Gb-2P-1-X", "Name":
+ ["PCIeSlot2", "PCIeSlot3", "PCIeSlot6"], "Oem": { "Public": { "CardModel":
+ "2*25GE", "RootBDF": ["0000:17:00.0", "0000:17:02.0", "0000:AE:02.0"], } }, "Status":
+ { "Health": "OK", "State": "Enabled" }}'
+ final_rst: Failure
+ group: component management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}{networkadapters_id}
+- case_name: check network interface adapter port information
+ case_sn: 19
+ details_result: N/A
+ expected_code: 200
+ expected_result: '{ "count": 6, "AssociatedNetworkAddresses": [ "N/A" ], "Oem":
+ { "Public": { "BDF": "N/A", "PortType": "OpticalPort" } }, "PhysicalPortNumber":
+ ["1", "2"]}'
+ final_rst: Failure
+ group: component management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}{networkports_id}
+- case_name: check fans information
+ case_sn: 20
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Success'', ''FanSummary'': "Failure, expect value: {''Count'': 6}, return value:
+ Can''t find the key FanSummary in return value", ''Fans'': [{''MemberId'': ''Success'',
+ ''Oem'': {''Public'': {''SpeedRatio'': ''N/A''}}, ''Status'': {''Health'': ''Success'',
+ ''State'': ''Success''}}, {''MemberId'': ''Success'', ''Oem'': {''Public'': {''SpeedRatio'':
+ ''N/A''}}, ''Status'': {''Health'': ''Success'', ''State'': ''Success''}}, {''MemberId'':
+ ''Success'', ''Oem'': {''Public'': {''SpeedRatio'': ''N/A''}}, ''Status'': {''Health'':
+ ''Success'', ''State'': ''Success''}}, {''MemberId'': ''Success'', ''Oem'': {''Public'':
+ {''SpeedRatio'': ''N/A''}}, ''Status'': {''Health'': ''Success'', ''State'': ''Success''}},
+ {''MemberId'': ''Success'', ''Oem'': {''Public'': {''SpeedRatio'': ''N/A''}},
+ ''Status'': {''Health'': ''Success'', ''State'': ''Success''}}, {''MemberId'':
+ ''Success'', ''Oem'': {''Public'': {''SpeedRatio'': ''N/A''}}, ''Status'': {''Health'':
+ ''Success'', ''State'': ''Success''}}]}]}'
+ expected_code: 200
+ expected_result: '{ "FanSummary": { "Count": 6 }, "Fans": [ { "MemberId":
+ "N/A", "Oem": { "Public": { "SpeedRatio":
+ "N/A" } }, "Status": { "Health":
+ "OK", "State": "Enabled" } }, { "MemberId":
+ "N/A", "Oem": { "Public": { "SpeedRatio":
+ "N/A" } }, "Status": { "Health":
+ "OK", "State": "Enabled" } },{ "MemberId":
+ "N/A", "Oem": { "Public": { "SpeedRatio":
+ "N/A" } }, "Status": { "Health":
+ "OK", "State": "Enabled" } },{ "MemberId":
+ "N/A", "Oem": { "Public": { "SpeedRatio":
+ "N/A" } }, "Status": { "Health":
+ "OK", "State": "Enabled" } },{ "MemberId":
+ "N/A", "Oem": { "Public": { "SpeedRatio":
+ "N/A" } }, "Status": { "Health":
+ "OK", "State": "Enabled" } },{ "MemberId":
+ "N/A", "Oem": { "Public": { "SpeedRatio":
+ "N/A" } }, "Status": { "Health":
+ "OK", "State": "Enabled" } } ],}'
+ final_rst: Failure
+ group: component management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200]'
+ url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}/Thermal
+- case_name: check power amount
+ case_sn: 21
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Success'', ''DeviceMaxNum'': "Failure, expect value: {''PowerSupplyNum'': 2},
+ return value: Can''t find the key DeviceMaxNum in return value"}]}'
+ expected_code: 200
+ expected_result: '{ "DeviceMaxNum": { "PowerSupplyNum": 2},}'
+ final_rst: Failure
+ group: component management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200]'
+ url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}
+- case_name: check power detail info
+ case_sn: 22
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Success'', ''PowerControl'': [{''PowerConsumedWatts'': ''Success'', ''Status'':
+ {''Health'': ''OK'', ''State'': ''Enabled''}}], ''PowerSupplies'': [{''LineInputVoltage'':
+ ''Success'', ''MemberId'': ''Failure, expect value: 1, return value: 0'', ''PowerCapacityWatts'':
+ ''Failure, expect value: 800, return value: 460'', ''Status'': {''Health'': ''Success'',
+ ''State'': ''Success''}}, {''LineInputVoltage'': ''Success'', ''MemberId'': ''Failure,
+ expect value: 2, return value: 1'', ''PowerCapacityWatts'': ''Failure, expect
+ value: 800, return value: 460'', ''Status'': {''Health'': ''Failure, expect value:
+ OK, return value: Critical'', ''State'': ''Success''}}]}]}'
+ expected_code: 200
+ expected_result: '{ "PowerControl": [ { "PowerConsumedWatts":
+ "N/A","Status":{ "Health": "OK", "State": "Enabled" } }, ], "PowerSupplies":
+ [ { "LineInputVoltage": "N/A", "MemberId": "1", "PowerCapacityWatts":
+ 800,"Status": { "Health": "OK", "State": "Enabled" } }, { "LineInputVoltage":
+ "N/A", "MemberId": "2", "PowerCapacityWatts": 800,"Status":
+ { "Health": "OK", "State": "Enabled" } } ],}'
+ final_rst: Failure
+ group: component management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200]'
+ url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}/Power
+- case_name: check logical drive health status
+ case_sn: 23
+ details_result: N/A
+ expected_code: 200
+ expected_result: '{ "count": 2, "Name": "N/A", "Status": { "Health":
+ ["OK", "Critical"], "State": "Enabled" }}'
+ final_rst: Failure
+ group: component management
+ header: null
+ method: GET
+ request_body: null
+ url: https://{bmc_ip}{volume_id}
+- case_name: check server temperature air intake
+ case_sn: 24
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Success'', ''Temperatures'': [{''Name'': ''Failure, expect value: INPUT_TEMP,
+ return value: Inlet Temp'', ''ReadingCelsius'': ''Success''}]}]}'
+ expected_code: 200
+ expected_result: '{ "Temperatures": [ { "Name": "INPUT_TEMP", "ReadingCelsius":
+ "N/A", } ]}'
+ final_rst: Failure
+ group: sensor management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200]'
+ url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}/Thermal
+- case_name: check cpu temperature
+ case_sn: 25
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Success'', ''Temperatures'': [{''Name'': ''Failure, expect value: INPUT_TEMP,
+ return value: Inlet Temp'', ''ReadingCelsius'': ''Success''}, {''Name'': ''Failure,
+ expect value: CPU1_TEMP, return value: Outlet Temp'', ''ReadingCelsius'': ''Success''},
+ {''Name'': ''Failure, expect value: CPU2_TEMP, return value: PCH Temp'', ''ReadingCelsius'':
+ ''N/A''}]}]}'
+ expected_code: 200
+ expected_result: '{ "Temperatures": [ { "Name": "INPUT_TEMP", "ReadingCelsius":
+ "N/A", }, { "Name": "CPU1_TEMP", "ReadingCelsius":
+ "N/A", }, { "Name": "CPU2_TEMP", "ReadingCelsius":
+ "N/A", }, ]}'
+ final_rst: Failure
+ group: sensor management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200]'
+ url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}/Thermal
+- case_name: check server power state
+ case_sn: 26
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Success'', ''PowerState'': ''Failure, expect value: On, return value: Off''}]}'
+ expected_code: 200
+ expected_result: '{"PowerState": "On"}'
+ final_rst: Failure
+ group: power management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
+- case_name: remote power on server
+ case_sn: 27
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure''}]}'
+ expected_code: 200
+ expected_result: '{"error":{"@Message.ExtendedInfo": [{"Message": "Successfully
+ Completed Request", "Severity":"OK"}]}}'
+ final_rst: Failure
+ group: power management
+ header: null
+ method: POST
+ request_body: '{"ResetType": "On"}'
+ return_code_seq: '[]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Actions/ComputerSystem.Reset
+- case_name: remote power off server
+ case_sn: 28
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure''}]}'
+ expected_code: 200
+ expected_result: '{"error":{"@Message.ExtendedInfo": [{"Message": "Successfully
+ Completed Request", "Severity":"OK"}]}}'
+ final_rst: Failure
+ group: power management
+ header: null
+ method: POST
+ request_body: '{"ResetType": "GracefulShutdown"}'
+ return_code_seq: '[]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Actions/ComputerSystem.Reset
+- case_name: remote reset server
+ case_sn: 29
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure''}]}'
+ expected_code: 200
+ expected_result: '{"error":{"@Message.ExtendedInfo": [{"Message": "Successfully
+ Completed Request", "Severity":"OK"}]}}'
+ final_rst: Failure
+ group: power management
+ header: null
+ method: POST
+ request_body: '{"ResetType": "ForceRestart"}'
+ return_code_seq: '[]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Actions/ComputerSystem.Reset
+- case_name: remote configure CPU in hyperthreading disabled
+ case_sn: 30
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure'', ''Attributes'': "Failure, expect value: {''ProcessorHyperThreading'':
+ ''Disabled''}, return value: Can''t find the key Attributes in return value"}]}'
+ expected_code: 200
+ expected_result: '{"Attributes": {"ProcessorHyperThreading": "Disabled"}}'
+ final_rst: Failure
+ group: remote configure
+ header: null
+ method: PATCH
+ request_body: '{ "Attributes": { "ProcessorHyperThreading": "Disabled" }}'
+ return_code_seq: '[''N/A'']'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
+- case_name: remote get CPU hyperthreading in disabled
+ case_sn: 31
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure''}]}'
+ expected_code: 200
+ expected_result: '{"Attributes": {"ProcessorHyperThreading": "Disabled"}}'
+ final_rst: Failure
+ group: remote configure
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
+- case_name: remote configure CPU in hyperthreading enabled
+ case_sn: 32
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure'', ''Attributes'': "Failure, expect value: {''ProcessorHyperThreading'':
+ ''Enabled''}, return value: Can''t find the key Attributes in return value"}]}'
+ expected_code: 200
+ expected_result: '{"Attributes": {"ProcessorHyperThreading": "Enabled"}}'
+ final_rst: Failure
+ group: remote configure
+ header: null
+ method: PATCH
+ request_body: '{ "Attributes": { "ProcessorHyperThreading": "Enabled" }}'
+ return_code_seq: '[''N/A'']'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
+- case_name: remote get CPU hyperthreading in enabled
+ case_sn: 33
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure''}]}'
+ expected_code: 200
+ expected_result: '{"Attributes": {"ProcessorHyperThreading": "Enabled"}}'
+ final_rst: Failure
+ group: remote configure
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
+- case_name: disable PXE mode
+ case_sn: 34
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure'', ''Attributes'': "Failure, expect value: {''IPv4PXESupport'': ''Disabled''},
+ return value: Can''t find the key Attributes in return value"}]}'
+ expected_code: 200
+ expected_result: '{ "Attributes": { "IPv4PXESupport": "Disabled" }}'
+ final_rst: Failure
+ group: remote configure
+ header: null
+ method: PATCH
+ request_body: '{ "Attributes": { "IPv4PXESupport": "Disabled" }}'
+ return_code_seq: '[''N/A'']'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
+- case_name: check IPV4 PXE mode in disabled
+ case_sn: 35
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure''}]}'
+ expected_code: 200
+ expected_result: '{ "Attributes": { "IPv4PXESupport": "Disabled" }}'
+ final_rst: Failure
+ group: remote configure
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
+- case_name: enable PXE mode
+ case_sn: 36
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure'', ''Attributes'': "Failure, expect value: {''IPv4PXESupport'': ''Enabled''},
+ return value: Can''t find the key Attributes in return value"}]}'
+ expected_code: 200
+ expected_result: '{ "Attributes": { "IPv4PXESupport": "Enabled" }}'
+ final_rst: Failure
+ group: remote configure
+ header: null
+ method: PATCH
+ request_body: '{ "Attributes": { "IPv4PXESupport": "Enabled" }}'
+ return_code_seq: '[''N/A'']'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
+- case_name: check ipv4 PXE mode in enabled
+ case_sn: 37
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure''}]}'
+ expected_code: 200
+ expected_result: '{ "Attributes": { "IPv4PXESupport": "Enabled" }}'
+ final_rst: Failure
+ group: remote configure
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
+- case_name: set boot type order
+ case_sn: 38
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure'', ''Attributes'': "Failure, expect value: {''BootTypeOrder0'': ''HardDiskDrive'',
+ ''BootTypeOrder1'': ''DVDROMDrive'', ''BootTypeOrder2'': ''PXE'', ''BootTypeOrder3'':
+ ''Others''}, return value: Can''t find the key Attributes in return value"}]}'
+ expected_code: 200
+ expected_result: '{ "Attributes": { "BootTypeOrder0": "HardDiskDrive", "BootTypeOrder1":
+ "DVDROMDrive", "BootTypeOrder2": "PXE", "BootTypeOrder3":
+ "Others", }}'
+ final_rst: Failure
+ group: remote interface management
+ header: null
+ method: PATCH
+ request_body: '{ "Attributes": { "BootTypeOrder0": "HardDiskDrive", "BootTypeOrder1":
+ "DVDROMDrive", "BootTypeOrder2": "PXE", "BootTypeOrder3":
+ "Others", }}'
+ return_code_seq: '[''N/A'']'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
+- case_name: check boot order
+ case_sn: 39
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure''}]}'
+ expected_code: 200
+ expected_result: '{ "Attributes": { "BootTypeOrder0": "HardDiskDrive", "BootTypeOrder1":
+ "DVDROMDrive", "BootTypeOrder2": "PXE", "BootTypeOrder3":
+ "Others", }}'
+ final_rst: Failure
+ group: remote interface management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
+- case_name: configure boot order
+ case_sn: 40
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure'', ''Attributes'': "Failure, expect value: {''BootTypeOrder0'': ''DVDROMDrive'',
+ ''BootTypeOrder1'': ''HardDiskDrive'', ''BootTypeOrder2'': ''Others'', ''BootTypeOrder3'':
+ ''PXE''}, return value: Can''t find the key Attributes in return value"}]}'
+ expected_code: 200
+ expected_result: '{ "Attributes": { "BootTypeOrder0": "DVDROMDrive", "BootTypeOrder1":
+ "HardDiskDrive", "BootTypeOrder2": "Others", "BootTypeOrder3":
+ "PXE", }}'
+ final_rst: Failure
+ group: remote interface management
+ header: null
+ method: PATCH
+ request_body: '{ "Attributes": { "BootTypeOrder0": "DVDROMDrive", "BootTypeOrder1":
+ "HardDiskDrive", "BootTypeOrder2": "Others", "BootTypeOrder3":
+ "PXE", }}'
+ return_code_seq: '[''N/A'']'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
+- case_name: check boot order
+ case_sn: 41
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure''}]}'
+ expected_code: 200
+ expected_result: '{ "Attributes": { "BootTypeOrder0": "DVDROMDrive", "BootTypeOrder1":
+ "HardDiskDrive", "BootTypeOrder2": "Others", "BootTypeOrder3":
+ "PXE", }}'
+ final_rst: Failure
+ group: remote interface management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
+- case_name: configure new boot PXE order first
+ case_sn: 42
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure'', ''Attributes'': "Failure, expect value: {''BootTypeOrder0'': ''PXE'',
+ ''BootTypeOrder1'': ''HardDiskDrive'', ''BootTypeOrder2'': ''DVDROMDrive'', ''BootTypeOrder3'':
+ ''Others''}, return value: Can''t find the key Attributes in return value"}]}'
+ expected_code: 200
+ expected_result: '{ "Attributes": { "BootTypeOrder0": "PXE", "BootTypeOrder1":
+ "HardDiskDrive", "BootTypeOrder2": "DVDROMDrive", "BootTypeOrder3":
+ "Others", }}'
+ final_rst: Failure
+ group: remote interface management
+ header: null
+ method: PATCH
+ request_body: '{ "Attributes": { "BootTypeOrder0": "PXE", "BootTypeOrder1":
+ "HardDiskDrive", "BootTypeOrder2": "DVDROMDrive", "BootTypeOrder3":
+ "Others", }}'
+ return_code_seq: '[''N/A'']'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
+- case_name: check boot order PEX order first
+ case_sn: 43
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure''}]}'
+ expected_code: 200
+ expected_result: '{ "Attributes": { "BootTypeOrder0": "PXE", "BootTypeOrder1":
+ "HardDiskDrive", "BootTypeOrder2": "DVDROMDrive", "BootTypeOrder3":
+ "Others", }}'
+ final_rst: Failure
+ group: remote interface management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[]'
+ url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
+- case_name: check BMC Firmware version
+ case_sn: 44
+ details_result: '{''count'': ''Success'', ''info'': [{''return_code'': ''Success'',
+ ''FirmwareVersion'': ''Failure, expect value: 1.30.11P01 HDM V100R001B03D011SP01,
+ return value: 2.43''}]}'
+ expected_code: 200
+ expected_result: '{"count": 1, "FirmwareVersion": "1.30.11P01 HDM V100R001B03D011SP01"}'
+ final_rst: Failure
+ group: remote interface management
+ header: null
+ method: GET
+ request_body: null
+ return_code_seq: '[200]'
+ url: https://{bmc_ip}{manager_id}
+- case_name: change BMC account
+ case_sn: 45
+ details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
+ ''Failure'', ''UserName'': "Failure, expect value: CM_cc@1234, return value: Can''t
+ find the key UserName in return value", ''RoleId'': "Failure, expect value: Administrator,
+ return value: Can''t find the key RoleId in return value"}]}'
+ expected_code: 200
+ expected_result: '{"UserName": "CM_cc@1234","RoleId": "Administrator",}'
+ final_rst: Failure
+ group: remote interface management
+ header: null
+ method: PATCH
+ request_body: '{"UserName": "CM_cc@1234","Password": "1234@CM_cc","RoleId": "Administrator",}'
+ return_code_seq: '[''N/A'']'
+ url: https://{bmc_ip}/redfish/v1/AccountService/Accounts/3
+- case_name: configure BMC ip in static, ipv4
+ case_sn: 46
+ details_result: '{''count'': ''Success'', ''info'': [{''return_code'': ''Failure'',
+ ''IPv4Addresses'': "Failure, expect value: [{''Address'': ''192.168.66.120'',
+ ''AddressOrigin'': ''Static'', ''Gateway'': ''192.168.66.1'', ''SubnetMask'':
+ ''255.255.255.128''}], return value: Can''t find the key IPv4Addresses in return
+ value"}]}'
+ expected_code: 200
+ expected_result: '{"count": 1, "IPv4Addresses": [ { "Address":
+ "192.168.66.120", "AddressOrigin": "Static", "Gateway":
+ "192.168.66.1", "SubnetMask": "255.255.255.128" } ]}'
+ final_rst: Failure
+ group: remote interface management
+ header: null
+ method: PATCH
+ request_body: '{ "IPv4Addresses": [ { "Address": "192.168.66.120", "AddressOrigin":
+ "Static", "Gateway": "192.168.66.1", "SubnetMask": "255.255.255.128" } ]}'
+ return_code_seq: '[''N/A'']'
+ url: https://{bmc_ip}{manager_id}/EthernetInterfaces/eth1
+- case_name: configure BMC ip in DHCP, gateway and subnet mask ipv4
+ case_sn: 47
+ details_result: '{''count'': ''Success'', ''info'': [{''return_code'': ''Failure'',
+ ''IPv4Addresses'': "Failure, expect value: [{''Address'': ''192.168.66.120'',
+ ''AddressOrigin'': ''DHCP'', ''Gateway'': ''192.168.66.1'', ''SubnetMask'': ''255.255.255.128''}],
+ return value: Can''t find the key IPv4Addresses in return value"}]}'
+ expected_code: 200
+ expected_result: '{"count": 1, "IPv4Addresses": [ { "Address":
+ "192.168.66.120", "AddressOrigin": "DHCP", "Gateway": "192.168.66.1", "SubnetMask":
+ "255.255.255.128" } ]}'
+ final_rst: Failure
+ group: remote interface management
+ header: null
+ method: PATCH
+ request_body: '{ "IPv4Addresses": [ { "AddressOrigin": "DHCP" } ]}'
+ return_code_seq: '[''N/A'']'
+ url: https://{bmc_ip}{manager_id}/EthernetInterfaces/eth1
+- case_name: configure BMC ip in static, ipv4
+ case_sn: 48
+ details_result: '{''count'': ''Success'', ''info'': [{''return_code'': ''Failure'',
+ ''IPv4Addresses'': "Failure, expect value: [{''AddressOrigin'': ''DHCP''}], return
+ value: Can''t find the key IPv4Addresses in return value"}]}'
+ expected_code: 200
+ expected_result: '{"count": 1, "IPv4Addresses": [ { "AddressOrigin":
+ "DHCP", } ]}'
+ final_rst: Failure
+ group: remote interface management
+ header: null
+ method: PATCH
+ request_body: '{ "IPv4Addresses": [ { "AddressOrigin": "DHCP" } ]}'
+ return_code_seq: '[''N/A'']'
+ url: https://{bmc_ip}{manager_id}/EthernetInterfaces/eth1
+- case_name: configure BMC ip in static, ipv6
+ case_sn: 49
+ details_result: '{''count'': ''Success'', ''info'': [{''return_code'': ''Failure'',
+ ''IPv6Addresses'': "Failure, expect value: [{''Address'': ''N/A'', ''AddressOrigin'':
+ ''N/A'', ''PrefixLength'': 64}, {''Address'': ''2019::11'', ''AddressOrigin'':
+ ''Static'', ''PrefixLength'': 64}], return value: Can''t find the key IPv6Addresses
+ in return value"}]}'
+ expected_code: 200
+ expected_result: '{"count": 1, "IPv6Addresses": [ { "Address":
+ "N/A", "AddressOrigin": "N/A", "PrefixLength": 64 }, { "Address":
+ "2019::11", "AddressOrigin": "Static", "PrefixLength": 64 } ]}'
+ final_rst: Failure
+ group: remote interface management
+ header: null
+ method: PATCH
+ request_body: '{ "IPv6Addresses": [ { "Address": "2019::11", "AddressOrigin":
+ "Static", "PrefixLength": 64 } ]}'
+ return_code_seq: '[''N/A'']'
+ url: https://{bmc_ip}{manager_id}/EthernetInterfaces/eth1
diff --git a/hdv/redfish/docs/readme.md b/hdv/redfish/docs/readme.md
new file mode 100644
index 0000000..2113913
--- /dev/null
+++ b/hdv/redfish/docs/readme.md
@@ -0,0 +1,129 @@
+##############################################################################
+# Copyright (c) 2020 China Mobile Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+This is a prototype of a hardware validation implementation over the redfish interface for a certain hardware vendor,
+originally contributed by China Mobile.
+>>> Usage:
+usage: hdv.py [-h] [--version] [--config CONFIG] [--file_type FILE_TYPE]
+ [--case_yaml CASE_YAML] [--depends_yaml DEPENDS_YAML]
+ [--case_excel CASE_EXCEL]
+
+hdv tool by redfish, it works in two mode
+
+optional arguments:
+ -h, --help show this help message and exit
+ --version show tool version
+ --config CONFIG given global config.yaml file
+ --file_type FILE_TYPE
+ config file type, [yaml|excel]
+ --case_yaml CASE_YAML
+ case yaml file, uesd if file_type = yaml
+ --depends_yaml DEPENDS_YAML
+ depends yaml file,uesd if file_type = yaml
+ --case_excel CASE_EXCEL
+ excel case file used if file_type = excel
+example:
+#default case
+1>python .\hdv.py
+following config used
+ - ./conf/config.yaml
+ - file_type=excel
+ - ./conf/cases.xlsx
+# use file_type=yaml
+2>python .\hdv.py --file_type=yaml
+
+example1. default conf/config.yaml, file_type=excel, cases.xlsx used
+python .\hdv.py
+example2. use yaml file type config, default conf/cases.yaml conf/depends.yaml used
+python .\hdv.py --file_type=yaml
+example3. user input config file
+python .\hdv.py --file_type=yaml --case_yaml=./conf-new/cases.yaml --depends_yaml=./conf-new/depends.yaml
+
+>>> tools directory:
+
+./redfish
+├─conf # config directory
+├─docs # readme
+├─logs # hdv.log would be generated here.
+
+$ ls -lR .
+$ ls redfish/*.py
+redfish/__init__.py
+redfish/excel_2_yaml.py #tool script to convert excel cases.xlsx sheets content to yaml format cases.yaml and depends.yaml
+redfish/hdv_redfish.py #the code implementation by parsing config.yaml and cases.xlsx or cases.yaml and depends.yaml
+redfish/log_utils.py #log utils
+redfish/errors.py #error code definition for the tool during parse.
+redfish/hdv.py #hdv portal
+redfish/http_handler.py #http_handler
+redfish/yaml_utils.py #yaml utils for test.
+
+$ ls redfish/conf
+config.yaml #global config yaml where define BMC settings, static value, and some position definition in the cases.xlsx excel
+cases.xlsx #two sheet defined (cases and depend_id), input case file if file_type=excel, default way.
+ #sheet cases - define all test case redfish url, expected value, etc
+ #sheet dependent_id - define all dependent_id url which is used to get parent resource id for the url in the cases.
+cases.yaml #test cases yaml file,where the same set test case with cases.xlsx, it is used if file_type=yaml
+depends.yaml #depends.yaml where the same content with sheet dependent_id, it is used if file_type=yaml
+report.yaml #final test report, it is used if file_type=yaml
+
+$ ls redfish/docs
+readme.md #readme
+
+$ ls redfish/logs
+hdv.log # test log file
+
+>>> Principle
+The hdv tool gets the global config from conf/config.yaml, e.g bmc settings, and
+global variable definitions, and some excel column position used in case file_type=excel
+Users can select either yaml or excel as the configuration file type;
+the default type is excel at present. However, the principle is similar.
+
+If file_type is excel, it will parse two sheets of excel workbook, cases and dependent_id.
+The dependent_id sheet is used to define how to get the parents before checking a final redfish url,
+thinking about checking a port should get the adapter at first.
+The cases sheet is the test cases template, where the variable will be replaced
+by global static value from config yaml or dependent_id
+
+By running a final redfish url request, it will get response result from the test server.
+Then the tool will compare the response value with the expected value defined in the <expected_result> column of the cases sheet to decide the case status.
+
+test report of each case <details,case_status> will write back to the same excel in the last two columns.
+
+Meanwhile, yaml file_type is supported also, it processes similarly as excel, except
+- reading depends.yaml to get the dependent_id
+- reading cases.yaml to run the test case
+- report.yaml will be created as the final report.
+cases.xlsx will not be used anymore in yaml case.
+
+Besides, excel_2_yaml.py script can be used to convert the cases.xlsx to yaml file accordingly.
+If you want to update the cases content, you can update the excel at first, then convert by the script.
+
+>>> FAQ:
+1. how to customize expected result?
+you need to put a json format value in it; the hierarchy should be exactly the same as the actual returned value,
+as the comparing implementation relies on it.
+ => a simple example: '{"AssetTag": "CM_cc@1234"}'
+ => a complex example:
+'{ "count": 2, "Manufacturer": "Intel(R) Corporation", "MaxSpeedMHz":
+ 2300, "Model": "Intel(R) Xeon(R) Gold 5218N CPU @ 2.30GHz", "ProcessorArchitecture":
+ ["x86", "IA-64", "ARM", "MIPS", "OEM"], "Socket": [1, 2], "Status": { "Health":
+ "OK", "State": "Enabled" }, "TotalCores": 16, "TotalThreads":
+ 32}'
+
+in the above data, a specific "count" attribute defined to check components quantity returned, e.g How many cpus expected.
+generally it can be a subset attributes definition, comparing with actual return value
+also it can support list of all expected value for list of objects.
+example: "Socket:[1,2]", expecting return "Socket:1" and "Socket:2" from returned response
+
+>>>Perspective:
+- there are differences between vendors' implementations, or even between versions from the same vendor.
+- define more test case or update existing case in the cases.yaml and depends.yaml or cases.xlsx file to support much more checks.
+- more implementation could be contributed from community so that it can grow bigger to support more types and checkpoints test case.
+
+#https://gerrit.opnfv.org/gerrit/admin/repos/cirv
diff --git a/hdv/redfish/errors.py b/hdv/redfish/errors.py
new file mode 100644
index 0000000..e7ea8e8
--- /dev/null
+++ b/hdv/redfish/errors.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2020 China Mobile Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+"""
+ERROR CODE instruction
+"""
# Error-code catalogue: code -> human readable message.
# Codes are grouped by prefix (E1xx connection, E2xx options, E3xx resources,
# E4xx configuration, E5xx application, E6xx restful interface).
ERROR_CODE = {
    # E100XXX: Connection
    "E100001": "E100001:fail to get response from the url",
    "E100002": "E100002:unexpected request url",
    "E100003": "E100003:failed to setup connection",
    # E200XXX: options - tools arguments.
    "E200001": "E200001:unsupported input file_mode, \
should be one of [yaml,excel]",
    # E300XXX: resource issue - depended resource is not existing...
    "E300001": "E300001:invalid token",
    "E300002": "E300002:fail to get dependency parent id, Action: check if the \
resource support by server",
    "E300003": "E300003:fail to get expected id list for component_id, \
Action: check if the resource support by server",
    # E400XXX: configuration error
    "E400001": "E400001:fail to find configure file",
    "E400002": "E400002:parse config.yaml exception",
    "E400003": "E400003: key_list is null for key_flags",
    "E400004": "E400004: unexpected response body type",
    "E400005": "E400005: customized expected value format error, \
Action:check input expected value type with actual returned value type",
    "E400006": "E400006: unexpected expected value type, \
expected[str,list,dict]",
    "E400007": "E400007: unexpected expected value type while comparing",
    # E500XXX: application - find no value from cache
    "E500001": "E500001: fail find key from actual value, \
Action: check if the attribute support by server",
    # E600XXX: restful interface
    "E600001": "E600001: unsupported redfish api?",
    }

# Warning-code catalogue: non-fatal conditions logged during a test run.
WARN_CODE = {
    "W100001": "W100001: fail to the response from a request",
    "W100002": "W100002: unexpected type of return_value type",
    "W100003": "W100003: NoneType value",
}
diff --git a/hdv/redfish/excel_2_yaml.py b/hdv/redfish/excel_2_yaml.py
new file mode 100644
index 0000000..948ead3
--- /dev/null
+++ b/hdv/redfish/excel_2_yaml.py
@@ -0,0 +1,62 @@
+##############################################################################
+# Copyright (c) 2020 China Mobile Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+'''
+excel 2 yaml tools
+convert excel config to yaml format config: depends.yaml and cases.yaml.
+'''
+import os
+import yaml
+from openpyxl.reader.excel import load_workbook
+# pylint: disable=E0611
+from log_utils import LOGGER
+
+
def load_sheet(excel_file, sheet_index, start_col, end_col):
    '''
    Load one worksheet of the cases workbook into a list of row dicts.

    Row 1 supplies the keys; reading stops at the first row whose first
    column is empty.  String cells are stripped and embedded newlines
    removed; other cell types are kept as-is.

    :param excel_file: path to the xlsx workbook
    :param sheet_index: 0-based index of the sheet to read
    :param start_col: first column (1-based, inclusive)
    :param end_col: end column (exclusive)
    :return: list of dicts, or None when the file does not exist
    '''
    if not os.path.exists(excel_file):
        LOGGER.error("excel file not existing")
        return None
    workbook = load_workbook(excel_file)
    sheet = workbook[workbook.sheetnames[sheet_index]]
    # header row provides the dict keys for every data row
    header = [sheet.cell(row=1, column=col).value
              for col in range(start_col, end_col)]
    rows_list = []
    row = 2
    while sheet.cell(row=row, column=1).value:
        values = []
        for col in range(start_col, end_col):
            value = sheet.cell(row=row, column=col).value
            if isinstance(value, str):
                value = value.strip().replace('\n', '')
            values.append(value)
        rows_list.append(dict(zip(header, values)))
        row += 1

    LOGGER.info(rows_list)
    return rows_list
+
+
def create_yaml(id_dict, yaml_file):
    '''
    Dump the converted rows to a yaml file, starting the document with an
    explicit '---' marker.

    :param id_dict: data to dump (list of row dicts from load_sheet)
    :param yaml_file: destination file path
    '''
    with open(yaml_file, 'w') as out_file:
        yaml.dump(id_dict, out_file, explicit_start=True)
+
+
DEPEND_FILE_NAME = "./conf/depends.yaml"
LOGGER.info("create %s ", DEPEND_FILE_NAME)
create_yaml(load_sheet("./conf/cases.xlsx", 1, 1, 5), DEPEND_FILE_NAME)

CASE_FILE_NAME = "./conf/cases.yaml"
# log the cases conversion as well, matching the depends conversion above
LOGGER.info("create %s ", CASE_FILE_NAME)
create_yaml(load_sheet("./conf/cases.xlsx", 0, 1, 10), CASE_FILE_NAME)
diff --git a/hdv/redfish/hdv.py b/hdv/redfish/hdv.py
new file mode 100644
index 0000000..e06286e
--- /dev/null
+++ b/hdv/redfish/hdv.py
@@ -0,0 +1,60 @@
+##############################################################################
+# Copyright (c) 2020 China Mobile Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+'''
+hdv tools
+ all config files are put under conf/
+ config.yaml is the global configuration
+ additional config for supporting two modes
+ - excel: tools will parse the depend_id sheet and cases sheet and
+ execute test case and write report back to excel
+ - yaml: tools will parse depends.yaml and cases.yaml and execute test case
+ and write a report.yaml
+ theory:
+ either test case can be finished by one restful request,
+ or an additional request needed to get dependency parent resource.
+ e.g a case for checking port, should get networkadaptor_id before that.
+'''
+import argparse
+from hdv_redfish import run as run_case
+
+
def parse_args():
    '''
    Parse the command line arguments of the hdv tool.

    :return: the populated argparse namespace (config, file_type,
             case_yaml, depends_yaml, case_excel)
    '''
    parser = argparse.ArgumentParser(description="hdv tool by redfish, \
        check readme under ./docs")
    parser.add_argument('--version', action='version',
                        version='%(prog)s 0.1', help="show tool version")
    parser.add_argument('--config', type=str, default="./conf/config.yaml",
                        help="given global config.yaml file")
    parser.add_argument('--file_type', type=str, default="excel",
                        help="config file type, [yaml|excel],default is excel")
    # fixed typo "uesd" -> "used" in the two help strings below
    parser.add_argument('--case_yaml', type=str, default="./conf/cases.yaml",
                        help="case yaml file, used if file_type = yaml")
    parser.add_argument('--depends_yaml', type=str,
                        default="./conf/depends.yaml",
                        help="depends yaml file,used if file_type = yaml")
    parser.add_argument('--case_excel', type=str, default="./conf/cases.xlsx",
                        help="excel case file used if file_type = excel")
    args = parser.parse_args()
    return args
+
+
def main():
    '''
    Entry point: parse the command line arguments and run the test cases
    (run_case from hdv_redfish) with the selected config files.
    '''
    args = parse_args()
    run_case(args.config, args.case_excel, args.depends_yaml, args.case_yaml,
             args.file_type)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/hdv/redfish/hdv_redfish.py b/hdv/redfish/hdv_redfish.py
new file mode 100644
index 0000000..5fc44ca
--- /dev/null
+++ b/hdv/redfish/hdv_redfish.py
@@ -0,0 +1,676 @@
+##############################################################################
+# Copyright (c) 2020 China Mobile Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+'''
+an implementation of hardware delivery validation based on redfish interface.
+'''
+import time
+import os
+import re
+from re import DOTALL as DT
+import json
+import copy
+from ast import literal_eval
+import yaml
+from openpyxl.reader.excel import load_workbook
+from http_handler import UrllibHttpHandler, HEADERS
+# pylint: disable=E0611
+from log_utils import BASE_DIR, LOG_FILE, LOGGER
+from errors import ERROR_CODE, WARN_CODE
+
+LOGGER.info(BASE_DIR)
+
+ACCOUNT_INFO = {}
+WAIT_INTERVAL = 5
+
+
def parse_config(config_yaml):
    """
    Load the global settings from the config yaml (e.g. conf/config.yaml).

    :param config_yaml: path to the configuration yaml file
    :return: parsed configuration dict, or None when the file is missing
             or cannot be parsed
    """
    if not os.path.exists(config_yaml):
        # fail fast: the original logged the error but still tried to open
        LOGGER.error(" %s, %s", ERROR_CODE['E400001'], config_yaml)
        return None
    try:
        with open(config_yaml, 'r') as conf_file:
            return yaml.load(conf_file.read(), Loader=yaml.FullLoader)
    except (OSError, yaml.YAMLError) as err:
        # E400002 is defined for parse failures but malformed yaml was
        # previously left uncaught and crashed the tool
        LOGGER.error(err)
        LOGGER.error(u"%s", ERROR_CODE['E400002'])
        return None
+
+
def get_token(http_handler, url):
    """
    Request a redfish session token (X-Auth-Token header) from the BMC.
    Retries up to 3 times, waiting WAIT_INTERVAL seconds between attempts.

    :param http_handler: handler used to POST the ACCOUNT_INFO credentials
    :param url: session service url
    :return: the X-Auth-Token string, or None when all attempts fail
    """
    retry_num = 3
    while retry_num:
        retry_num -= 1
        res = http_handler.post(url, ACCOUNT_INFO)
        if res is None:
            LOGGER.error("%s, %s", WARN_CODE['W100001'], url)
            # only sleep when another attempt will follow; the original
            # also slept after the final failed attempt
            if retry_num:
                LOGGER.info("wait %s seconds to try again", WAIT_INTERVAL)
                time.sleep(WAIT_INTERVAL)
            continue
        data = res.info()
        if "X-Auth-Token" in data:
            return data.get("X-Auth-Token")
        # token header missing from the response; retry after a pause
        if retry_num:
            time.sleep(WAIT_INTERVAL)
    return None
+
+
def get_etag(http_handler, url):
    """
    Fetch the ETag header of a resource; the server requires it for
    subsequent PATCH requests.

    :param http_handler: handler used for the GET request
    :param url: resource url
    :return: the ETag string, or None when unavailable
    """
    res = http_handler.get(url)
    if res is None:
        return None
    headers = res.info()
    if headers is not None and "ETag" in headers:
        return headers.get("ETag")
    return None
+
+
def parse_data(exp_value, act_value):
    '''
    Recursively pair each expected leaf value with the corresponding
    actual value from the redfish response.

    :param exp_value: expected value (str/int/list/dict) from the case file
    :param act_value: actual value taken from the response body
    :return: case 1: both values are str or int ->
                 tuple (exp_value, act_value)
             case 2: list/dict type -> exp_value with every leaf replaced
                 by such a tuple
             logs an ERROR_CODE for unexpected type combinations.
    '''
    if isinstance(exp_value, (str, int)) and isinstance(act_value, (str, int)):
        return (exp_value, act_value)
    if isinstance(exp_value, list):
        if not isinstance(act_value, list):
            return (exp_value, act_value)
        for index, exp in enumerate(exp_value):
            # guard against the actual list being shorter than expected;
            # indexing blindly raised IndexError before.  A missing element
            # is paired with None and reported as Failure by compare_data.
            if index < len(act_value):
                exp_value[index] = parse_data(exp, act_value[index])
            else:
                exp_value[index] = (exp, None)

    elif isinstance(exp_value, dict):
        if isinstance(act_value, dict):
            for key, val in exp_value.items():
                if key in act_value:
                    exp_value[key] = parse_data(val, act_value[key])
                else:
                    LOGGER.error("%s,%s", ERROR_CODE['E500001'], key)
        else:
            LOGGER.error("%s,expected: %s , actual: %s",
                         ERROR_CODE['E400005'], exp_value, act_value)
    else:
        LOGGER.error("%s, expected type:%s, actual type %s",
                     ERROR_CODE['E400006'], type(exp_value), type(act_value))
    return exp_value
+
+
def compare_data(value, flag):
    '''
    Recursively walk the (expected, actual) pairs produced by parse_data
    and replace each pair with a "Success"/"Failure..." verdict string.

    :param value: a tuple (expected, actual), or a list/dict of such pairs
    :param flag: running failure counter, incremented on every mismatch
    :return: tuple of (verdict structure, updated flag)
    '''
    if isinstance(value, tuple):
        # NOTE(review): `value[1] is not None or value[1]` evaluates the
        # same as `value[1] is not None`; presumably `and` was intended to
        # also reject falsy actual values -- confirm before changing.
        if value[1] is not None or value[1]:
            if value[0] == 'N/A':
                # an expected value of 'N/A' means "don't care"
                return "Success", flag
            elif isinstance(value[0], (bool, int, str)):
                if value[0] == value[1]:
                    return "Success", flag
                else:
                    flag += 1
                    return "Failure, expect value: " + str(value[0]) + \
                        ", return value: " + str(value[1]), flag
            # non-scalar expected value: success when the actual value is a
            # member of the expected list, or the whole list is ['N/A']
            elif value[1] in value[0] or value[0] == ['N/A']:
                return "Success", flag
            else:
                flag += 1
                return "Failure, expect value: " + str(value[0]) + \
                    ", return value: " + str(value[1]), flag
        else:
            # actual value missing (None): count as a failure
            flag += 1
            return "Failure, expect value: " + str(value[0]) + \
                ", return value: " + str(value[1]), flag

    elif isinstance(value, list):
        # compare element-wise, writing verdicts back in place
        for elem in enumerate(value, start=0):
            index = elem[0]
            value[index], flag = compare_data(value[index], flag)
    elif isinstance(value, dict):
        for key, val in value.items():
            value[key], flag = compare_data(val, flag)
    else:
        # parse_data produced a type we cannot compare
        LOGGER.error("%s", ERROR_CODE['E400007'])
        flag += 1
    return value, flag
+
+
def get_component_ids_yaml(file):
    '''
    Load the dependent component id definitions from a yaml file
    (conf/depends.yaml).

    :param file: path of the yaml file
    :return: parsed content, or None when the file does not exist
    '''
    if not os.path.exists(file):
        LOGGER.info("%s, %s", ERROR_CODE['E400001'], file)
        return None
    # safe_load avoids arbitrary object construction from the yaml file,
    # and the context manager closes the handle the original code leaked
    with open(file, "r") as y_file:
        return yaml.safe_load(y_file)
+
+
def get_component_ids_excel(excel_file):
    '''
    Get the component_id settings from the second sheet of the excel file.
    The component_id is the parent id of the hardware resources on sheet1.

    :param excel_file: path of the cases workbook
    :return: list of row dicts keyed by the header row
    '''
    input_file = load_workbook(excel_file)
    input_ws = input_file[input_file.sheetnames[1]]
    cell_key = []
    id_info_list = []
    for i in range(1, 5):
        cell_key.append(input_ws.cell(row=1, column=i).value)
    row = 2
    while input_ws.cell(row=row, column=1).value:
        cell_value = []
        for i in range(1, 5):
            value = input_ws.cell(row=row, column=i).value
            # only strings need the newline cleanup; the previous
            # .encode().decode() chain raised AttributeError on non-str
            # cells (e.g. numeric ids), unlike load_sheet in excel_2_yaml
            if isinstance(value, str):
                value = value.replace('\n', '')
            cell_value.append(value)
        cell_dict = dict(zip(cell_key, cell_value))
        row += 1
        id_info_list.append(cell_dict)
    return id_info_list
+
+
def create_real_url(url_value, id_dict, config_file):
    '''
    Build the concrete request url(s) for a case: every {var} placeholder
    is replaced either by a static value from the global config, or by
    each id collected for a depended component (one url per instance).

    :param url_value: url template, e.g. https://{bmc_ip}/redfish/v1/...
    :param id_dict: component_id -> [object id urls] from get_depend_id
    :param config_file: global config dict holding static values
    :return: list of fully substituted urls (may be empty on failure)
    '''
    url_list = []
    # replaced: 0 = purely static url, 1 = expanded from id_dict,
    # 2 = unresolvable placeholder
    replaced = 0
    regexp = r'[^{]*{(?P<var>[a-zA-Z_]*)}'
    # pattern = re.compile(regexp, re.S)
    pattern = re.compile(regexp, DT)
    LOGGER.info("url_value %s", url_value)
    matches = list(pattern.finditer(url_value))
    for match in matches:
        value = match.groupdict()
        if value['var'] in config_file:
            # static substitution is applied in place on url_value
            url_value = url_value.replace('{' + str(value['var']) + '}',
                                          str(config_file[value['var']]))

        elif value['var'] in id_dict:
            replaced = 1
            instance_list = id_dict[value['var']]
            # NOTE(review): if a template contains several id_dict
            # placeholders, later ones are not substituted into the urls
            # appended here -- presumably templates use at most one;
            # confirm against the case files.
            for instance in instance_list:
                sgl_url = url_value.replace('{' + str(value['var']) + '}',
                                            str(instance))
                LOGGER.debug("replaced url value %s", sgl_url)
                url_list.append(sgl_url)
        else:
            replaced = 2
            LOGGER.error("%s for parameter %s",
                         ERROR_CODE['E300002'], value['var'])
    # combine single case with list case together.
    if replaced == 0:
        LOGGER.info("adding static url %s into list", url_value)
        url_list.append(url_value)
    return url_list
+
+
def execute_get_url(url, http_handler):
    """
    Run a GET request and wrap the response.

    :param url: request url
    :param http_handler: handler used for the GET request
    :return: dict {"return_code": <http code>, "return_value": <json body>},
             or None when no response was received
    """
    LOGGER.debug("execute url %s", url)
    rsp = http_handler.get(url)
    if rsp is None:
        LOGGER.error("return None for url %s", url)
        return None
    ret_dict = {
        "return_code": rsp.code,
        "return_value": json.loads(rsp.read()),
    }
    LOGGER.info("ret_dict is %s", ret_dict)
    LOGGER.debug("ret_dict type is %s", type(ret_dict))
    return ret_dict
+
+
def handle_depend_url(method, url_list, http_handler):
    '''
    Execute every url in url_list with the given method and collect the
    responses in a list.  Only GET is implemented for dependency lookups;
    other methods yield an empty list.
    '''
    if method == 'GET':
        return [execute_get_url(url_case, http_handler)
                for url_case in url_list]
    # POST / PATCH / DELETE dependencies are not needed yet
    return []
+
+
+def create_obj_id_list(key_flags, response_list):
+ '''
+ create object id list
+ '''
+ if response_list is None or response_list.__len__() == 0:
+ LOGGER.debug("response list is None")
+ return None
+ if key_flags is not None:
+ key_list = key_flags.split(':')
+ end_id_list = []
+ for response in response_list:
+ if response is None:
+ LOGGER.warning("response is None")
+ continue
+ return_value = response['return_value']
+ if len(key_list) == 1 and key_list[0] in return_value:
+ for i in return_value[key_list[0]]:
+ end_id_list.append(i['@odata.id'])
+ elif len(key_list) > 1:
+ for elem in enumerate(key_list, start=0):
+ index = elem[0]
+ if index == len(key_list) - 1:
+ for case in return_value[key_list[index]]:
+ end_id_list.append(case['@odata.id'])
+ else:
+ if isinstance(return_value, list):
+ return_value = return_value[0]
+ elif isinstance(return_value, dict):
+ return_value = return_value[key_list[index]]
+ else:
+ LOGGER.warning("%s, %s", WARN_CODE['W100002'],
+ type(return_value))
+
+ else:
+ LOGGER.error("%s %s", ERROR_CODE['E400003'], key_flags)
+ return end_id_list
+
+
def get_depend_id(config_file, http_handler, depend_ids):
    '''
    Build the id resource list for every depended component.

    :param config_file: global config dict used for url substitution
    :param http_handler: handler used to run the dependency requests
    :param depend_ids: list of dependency definitions (component_id,
        pro_value, url_value, key_flags)
    :return: dict mapping component_id -> [object id urls]
    '''
    id_dict = {}
    for entry in depend_ids:
        component_name = entry.get('component_id')
        LOGGER.info("parsing component %s", component_name)
        # resolve the template, run the request(s), then pull the ids out
        urls = create_real_url(entry.get('url_value'), id_dict, config_file)
        responses = handle_depend_url(entry.get('pro_value'), urls,
                                      http_handler)
        ids = create_obj_id_list(entry.get('key_flags'), responses)
        if not ids:
            LOGGER.error("%s,%s", ERROR_CODE['E300003'], component_name)
            continue
        id_dict.update({component_name: ids})
    LOGGER.debug("id_dict content is %s", id_dict)
    return id_dict
+
+
def read_row(input_ws, row, config_file):
    '''
    Read one test-case row from the cases worksheet.

    :param input_ws: the worksheet object
    :param row: 1-based row number to read
    :param config_file: config dict holding the column positions
    :return: tuple (method, url, request body, expected return code,
             expected return value, attribute name)
    '''
    def cell(col_key):
        # fetch the cell value at the column configured under col_key
        return input_ws.cell(row=row, column=config_file[col_key]).value

    pro_value = cell("pro_seq")
    url_value = cell("url_seq")
    req_body_value = cell("req_body_seq")
    expect_return_code = cell("expect_return_code_seq")
    expect_return_value = cell("expect_return_value_seq")
    attr_name = cell("attr_name")

    # request body and expected value are stored as python literals
    if req_body_value is not None:
        req_body_value = literal_eval(req_body_value)
    if expect_return_code is not None:
        expect_return_code = int(expect_return_code)
    if expect_return_value is not None:
        expect_return_value = literal_eval(expect_return_value)
    return (pro_value, url_value, req_body_value, expect_return_code,
            expect_return_value, attr_name)
+
+
def execute_post_url(body, handler, url):
    '''
    Run a POST request.

    :param body: request body
    :param handler: http handler used for the request
    :param url: request url
    :return: the response dict, or None when the handler returned anything
             other than a dict
    '''
    LOGGER.debug("execute url %s", url)
    rsp = handler.post(url, body)
    LOGGER.debug("post response %s", rsp)
    if isinstance(rsp, dict):
        return rsp
    LOGGER.error("%s,%s, expected type %s",
                 ERROR_CODE["E400004"], type(rsp), dict)
    return None
+
+
def execute_patch_url(body, http_handler, url):
    '''
    Run a PATCH request; the resource ETag is fetched first since the
    server expects it on modification requests.

    :param body: request body
    :param http_handler: http handler used for the requests
    :param url: request url
    :return: dict {"return_code", "return_value"}; when no response is
             received the code is "N/A" and the value "Failure"
    '''
    etag = get_etag(http_handler, url)
    LOGGER.info("etag %s", etag)
    rsp = http_handler.patch(url, body, etag)
    LOGGER.debug("patch response %s", rsp)
    LOGGER.debug("type response is %s", type(rsp))
    if rsp is None:
        LOGGER.error("%s %s", ERROR_CODE['E100001'], url)
        return {"return_code": "N/A", "return_value": "Failure"}
    return {"return_code": rsp.code,
            "return_value": json.loads(rsp.read())}
+
+
def handle_final_url(method, url_list, req_body=None, http_handler=None):
    '''execute the requested urls with the given HTTP method

    @param method: one of 'GET', 'POST', 'PATCH', 'DELETE'
    @param url_list: fully substituted urls to request
    @param req_body: payload for POST/PATCH requests
    @param http_handler: handler used to issue the requests
    @return list of responses, or None when POST is given several urls
    '''
    response_list = []
    if method == 'GET':
        # keep one entry per url, even when a request failed (None)
        for target in url_list:
            response_list.append(execute_get_url(target, http_handler))
    elif method == 'POST':
        # POST is only meaningful against a single resource
        if len(url_list) > 1:
            LOGGER.error(ERROR_CODE['E100002'])
            return None
        response_list.append(
            execute_post_url(req_body, http_handler, url_list[0]))
    elif method == 'PATCH':
        for target in url_list:
            LOGGER.info(target)
            patch_rsp = execute_patch_url(req_body, http_handler, target)
            # failed PATCH calls are dropped instead of recorded
            if patch_rsp is not None:
                response_list.append(patch_rsp)
    elif method == 'DELETE':
        # DELETE checking is not implemented yet
        pass
    LOGGER.info("response_list %s", response_list)
    return response_list
+
+
def check_component_cnt(expect_return_value, res_list, result):
    '''
    Check whether the number of returned components matches the
    expected "count" field, when one is present.

    @param expect_return_value: expected-result dict from the test case
    @param res_list: actual list of responses for the component
    @param result: result dict updated in place with a "count" verdict
    @return the updated result dict
    '''
    # idiomatic membership test instead of calling __contains__ directly
    if 'count' in expect_return_value:
        if expect_return_value['count'] == len(res_list):
            result.update({"count": "Success"})
        else:
            result.update({"count":
                           "Failure, the actual num is " + str(len(res_list))})
    else:
        # the case does not constrain the component count
        result.update({"count": "N/A for this case"})
    return result
+
+
def parse_test_result(expect_return_value, expect_return_code,
                      actual_result_list, final_result):
    '''
    @param expect_return_value: expected value set in input excel
    @param expect_return_code: expected return code
    @param actual_result_list: actual result run by each url list checking
    @param final_result: returned final result
    parsing the test final_result by comparing expected_value with
    real test final_result value.
    Returns (return_value_list, return_code_list, final_result, flag);
    flag counts mismatches, so 0 means the whole case succeeded.
    '''
    return_code_list = []
    return_value_list = []
    # flag accumulates failures: one per missing response entry, plus
    # whatever compare_data adds for value mismatches
    flag = 0
    # record the component-count verdict under final_result["count"]
    final_result = check_component_cnt(expect_return_value,
                                       actual_result_list, final_result)

    for each_result in actual_result_list:
        temp_result = {}
        if each_result is not None:
            LOGGER.debug("current result is %s,result_list is %s",
                         each_result, actual_result_list)
            return_code = each_result["return_code"]
            return_code_list.append(return_code)
            return_value = each_result["return_value"]
            # the HTTP status check is tracked separately from the body
            if return_code == expect_return_code:
                code_result = 'Success'
            else:
                code_result = 'Failure'
            temp_result.update({'return_code': code_result})
        else:
            # missing response: mark the entry failed and skip body checks
            LOGGER.warning("%s ,set failure", WARN_CODE['W100003'])
            temp_result.update({'return_code': 'Failure'})
            return_value_list.append(temp_result)
            flag += 1
            continue

        # parse the actual result according to the expected value hierarchy.
        # deep copy so parse_data cannot mutate the shared expected dict
        ex_value = copy.deepcopy(expect_return_value)
        exp_act_pairs = {}
        for key, value in ex_value.items():
            if key in return_value:
                exp_act_pairs[key] = parse_data(value, return_value[key])
            elif key == 'count':
                # 'count' was already handled by check_component_cnt above
                pass
            else:
                LOGGER.error("%s, %s", ERROR_CODE['E500001'], key)
                exp_act_pairs[key] = \
                    (value, "Can't find key {} in return value".format(key))
        LOGGER.debug("real_result:%s", exp_act_pairs)

        # comparing expected result with real result.
        if exp_act_pairs:
            for key, value in exp_act_pairs.items():
                temp_result[key], flag = compare_data(value, flag)
        return_value_list.append(temp_result)
    return return_value_list, return_code_list, final_result, flag
+
+
def write_result_2_excel(config_file, input_ws, row, flag, result):
    '''
    Write one case's outcome back into its worksheet row.

    @param config_file: config dict holding the result column indexes
    @param input_ws: worksheet to write into
    @param row: 1-based row index of the case
    @param flag: mismatch counter; 0 means the case succeeded
    @param result: detailed result dict (may be empty)
    @return the row index, unchanged
    '''
    detail = str(result) if result else 'N/A'
    input_ws.cell(row=row, column=config_file["detail_result"],
                  value=detail)
    verdict = "Success" if flag == 0 else "Failure"
    input_ws.cell(row=row, column=config_file["final_result"],
                  value=verdict)
    return row
+
+
def execute_final_url(config_file, depends_id, http_handler,
                      method, url, req_body):
    '''
    Resolve the url template against the dependency ids, then execute
    each resulting url with the requested method.

    @return the list of responses from handle_final_url
    '''
    resolved_urls = create_real_url(url, depends_id, config_file)
    return handle_final_url(method, resolved_urls, req_body, http_handler)
+
+
def run_test_case_yaml(config_file, case_file, depends_id, http_handler):
    '''run test case from cases.yaml

    @param config_file: parsed config dict
    @param case_file: path of the yaml file holding the cases
    @param depends_id: resolved component id mapping used to build urls
    @param http_handler: handler used to issue the requests
    Writes the per-case verdicts to ./conf/report.yaml.
    '''
    LOGGER.info("############### start perform test case #################")
    cases_result = []
    cases = read_yaml(case_file)
    if not cases:
        # read_yaml returns None for a missing file; iterating over it
        # would raise TypeError and abort the whole run
        LOGGER.error("%s %s", ERROR_CODE['E400001'], case_file)
        return
    for case in cases:
        method, url, req_body, expected_code, expected_value, tc_name \
            = case['method'], case['url'], case['request_body'], \
            case['expected_code'], case['expected_result'], case['case_name']

        # expected_result is stored as text; convert it to a python object
        expected_value = literal_eval(expected_value)
        flag = 0
        final_rst = {}
        rsp_list = execute_final_url(config_file, depends_id,
                                     http_handler, method, url, req_body)
        if rsp_list is not None and len(rsp_list) > 0:
            return_value_list, return_code_list, final_rst, flag = \
                parse_test_result(
                    expected_value, expected_code, rsp_list, final_rst)
            final_rst.update({'info': return_value_list})
            LOGGER.debug("return_code_list:%s", return_code_list)
            case['return_code_seq'] = str(return_code_list)
        else:
            LOGGER.error("%s", ERROR_CODE['E600001'])
            flag += 1
        # the case dict itself carries the verdict into the report
        case['final_rst'] = "Success" if flag == 0 else "Failure"
        case['details_result'] = \
            str(final_rst) if len(final_rst) > 0 else "N/A"
        cases_result.append(case)
        LOGGER.info("writing test final_rst for case %s", tc_name)

    write_result_2_yaml(cases_result)

    LOGGER.info("############### end perform test case ###################")
+
+
def read_yaml(file):
    '''read a yaml file

    @param file: path of the yaml file
    @return parsed content, or None when the file does not exist
    '''
    if not os.path.exists(file):
        LOGGER.info("%s %s", ERROR_CODE['E400001'], file)
        return None
    # safe_load restricts parsing to plain data types: yaml.load without
    # an explicit Loader can construct arbitrary objects and is
    # deprecated since PyYAML 5.1; the context manager also closes the
    # handle the original version leaked
    with open(file, "r") as yaml_fp:
        return yaml.safe_load(yaml_fp)
+
+
def write_result_2_yaml(result):
    '''
    write test result to new report.yaml

    @param result: list of case dicts including their verdicts
    '''
    LOGGER.info("writing to yaml file")
    # context manager closes the report file; the original leaked the handle
    with open("./conf/report.yaml", "w") as report_fp:
        yaml.safe_dump(result, report_fp, explicit_start=True)
+
+
def run_test_case_excel(config_file, case_file, depends_id, http_handler):
    '''
    perform the test case one by one,
    and write test final_result back to the excel.

    @param config_file: parsed config dict mapping logical names to
           1-based worksheet column indexes
    @param case_file: path of the excel workbook holding the cases
    @param depends_id: resolved component id mapping used to build urls
    @param http_handler: handler used to issue the requests
    '''
    LOGGER.info("############### start perform test case #################")
    input_file = load_workbook(case_file)
    input_ws = input_file[input_file.sheetnames[0]]

    # data rows start at 2 (row 1 holds the header); stop at first empty row
    row = 2
    while input_ws.cell(row=row, column=1).value:
        method, url, req_body, expected_code, expected_value, tc_name \
            = read_row(input_ws, row, config_file)

        LOGGER.info("run test case ##%s##", tc_name)
        # removed a leftover hard-coded breakpoint hook for the case
        # "configure BMC ip in static, ipv4" and a redundant rsp_list
        # initialization that was immediately overwritten
        flag = 0
        final_result = {}
        rsp_list = execute_final_url(config_file, depends_id, http_handler,
                                     method, url, req_body)
        if rsp_list is not None and len(rsp_list) > 0:
            return_value_list, return_code_list, final_result, flag = \
                parse_test_result(expected_value, expected_code,
                                  rsp_list, final_result)
            final_result.update({'info': return_value_list})
            LOGGER.debug("return_code_list:%s", return_code_list)
            input_ws.cell(row=row, column=config_file["return_code_seq"],
                          value=str(return_code_list))
        else:
            LOGGER.error("%s", ERROR_CODE['E600001'])
            flag += 1

        LOGGER.info("writing test final_result for row %s", row)
        row = write_result_2_excel(
            config_file, input_ws, row, flag, final_result)
        row += 1
    input_file.save(case_file)
    LOGGER.info("############### end perform test case ###################")
+
+
def run(conf_file, case_excel_file=None, depend_yaml_file=None,
        case_yaml_file=None, file_mode=None):
    '''
    @param conf_file: config.yaml
    @param case_excel_file: excel case file
    @param depend_yaml_file: depends yaml file used if file_mode=yaml
    @param case_yaml_file: case yaml file, used if file_mode=yaml
    @param file_mode: "excel" or "yaml"
    access function
    @return True on completion, None on token/file-mode failure
    Note: mutates the module-level ACCOUNT_INFO and HEADERS dicts, so
    the token must be fetched before any authenticated request is made.
    '''
    # parse config.yaml
    LOGGER.info("start engine ...")
    config_file = parse_config(conf_file)
    http_handler = UrllibHttpHandler()

    # get bmc info
    bmc_ip, bmc_user, bmc_pwd = \
        config_file["bmc_ip"], config_file["bmc_user"], config_file["bmc_pwd"]
    # credentials go into the shared ACCOUNT_INFO used by the session login
    ACCOUNT_INFO.update({"UserName": bmc_user})
    ACCOUNT_INFO.update({"Password": bmc_pwd})

    # open a Redfish session to obtain the auth token
    url = "https://{0}/redfish/v1/SessionService/Sessions".format(bmc_ip)
    x_auth_token = get_token(http_handler, url)
    LOGGER.info("x_auth_token: %s", x_auth_token)

    if x_auth_token is None:
        LOGGER.error("%s token is None", ERROR_CODE['E300001'])
        return None

    # every later request authenticates via this shared header
    HEADERS.update({"X-Auth-Token": x_auth_token})
    id_info_list = None
    if file_mode == "excel":
        id_info_list = get_component_ids_excel(case_excel_file)
    elif file_mode == "yaml":
        id_info_list = get_component_ids_yaml(depend_yaml_file)
    else:
        LOGGER.error("%s,%s", ERROR_CODE['E200001'], file_mode)
        return None

    # get dependent id
    depends_id = get_depend_id(config_file, http_handler, id_info_list)

    # read the test case sheet and perform test
    if file_mode == "excel":
        run_test_case_excel(config_file,
                            case_excel_file, depends_id, http_handler)
    elif file_mode == "yaml":
        run_test_case_yaml(config_file,
                           case_yaml_file, depends_id, http_handler)
    else:
        # unreachable in practice: invalid modes already returned above
        LOGGER.error("%s,%s", ERROR_CODE['E200001'], file_mode)
        return None

    LOGGER.info("done,checking the log %s", LOG_FILE)

    return True
diff --git a/hdv/redfish/http_handler.py b/hdv/redfish/http_handler.py
new file mode 100644
index 0000000..c1b0a13
--- /dev/null
+++ b/hdv/redfish/http_handler.py
@@ -0,0 +1,129 @@
+##############################################################################
+# Copyright (c) 2020 China Mobile Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+'''
+a common http_handler
+'''
+import urllib.request
+import json
+import ssl
+from http.client import HTTPException
+from urllib.error import HTTPError, URLError
+# pylint: disable=E0611
+from log_utils import LOGGER
+from errors import ERROR_CODE
+
# pylint: disable=W0212
# NOTE(review): this disables TLS certificate verification globally for
# the whole process - acceptable for lab BMCs with self-signed certs,
# but do not reuse this module against untrusted networks.
ssl._create_default_https_context = ssl._create_unverified_context

# default headers shared by every request; the caller adds X-Auth-Token
HEADERS = {
    'Connection': 'keep-alive',
    'User-Agent':
    'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 \
    (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36',
}

# urlopen timeout; urllib interprets this in seconds (3000s = 50min),
# which looks unintentionally large - TODO confirm intended unit
TIME_OUT = 3000
+
+
class UrllibHttpHandler:
    """
    HTTP handler based on urllib of python3.

    All requests share the module-level HEADERS and TIME_OUT values.
    A failed GET/POST/PATCH is logged and the method returns None
    instead of raising, so one bad request does not abort a test run.
    """

    def __init__(self):
        # shared default headers (X-Auth-Token is added by the caller)
        self.__header = HEADERS

    def get(self, url):
        """
        run the get request

        @param url: target url
        @return the urllib response object, or None on failure
        """
        res = None
        try:
            req = urllib.request.Request(url, headers=self.__header)
            res = urllib.request.urlopen(req, timeout=TIME_OUT)
        except HTTPException as http_exp:
            LOGGER.error(http_exp)
            LOGGER.error(u"%s %s", ERROR_CODE['E100001'], url)
        except HTTPError as http_err:
            LOGGER.error(http_err)
            LOGGER.error(u"%s %s", ERROR_CODE['E100001'], url)
            LOGGER.error(u"%s %s", ERROR_CODE['E600001'], url)
        except URLError as url_err:
            # consistency fix: post() already catches URLError; without
            # this, an unreachable host crashed the whole run on GET
            LOGGER.error(url_err)
            LOGGER.error(u"%s %s", ERROR_CODE['E100001'], url)
        return res

    def post(self, url, parameter=None):
        """
        run the post request, parameter must to encode to bytes

        @param url: target url
        @param parameter: JSON-serializable request payload
        @return the urllib response object, or None on failure
        """
        res = None
        try:
            data = json.dumps(parameter).encode(encoding="utf-8")
            LOGGER.debug("data is %s", data)
            req = urllib.request.Request(url, data=data, headers=self.__header)
            req.add_header("Content-Type", "application/json")
            res = urllib.request.urlopen(req, timeout=TIME_OUT)
        except HTTPException as http_exp:
            LOGGER.error(http_exp)
            LOGGER.error(u"%s %s", ERROR_CODE['E100001'], url)
        except TimeoutError as timeout_error:
            LOGGER.error(timeout_error)
            LOGGER.error(u"%s", ERROR_CODE['E100003'])
        except HTTPError as http_err:
            # HTTPError is a URLError subclass, so it must be caught first
            LOGGER.error(http_err)
            LOGGER.error(u"%s %s", ERROR_CODE['E100001'], url)
            LOGGER.error(u"%s %s", ERROR_CODE['E600001'], url)
        except URLError as url_err:
            LOGGER.error(url_err)
            LOGGER.error(u"%s %s", ERROR_CODE['E100001'], url)
        return res

    def put(self, url, parameter=None):
        """
        run the put request, parameter must to encode to bytes

        NOTE(review): unlike get()/post(), errors here propagate to the
        caller - confirm whether PUT is used anywhere before hardening.
        """
        data = json.dumps(parameter).encode(encoding="utf-8")
        LOGGER.debug("data is %s", data)
        req = urllib.request.Request(url, data=data, headers=self.__header)
        req.get_method = lambda: 'PUT'
        res = urllib.request.urlopen(req)
        return res

    def patch(self, url, parameter=None, etag=None):
        """
        run the patch request, parameter must to encode to bytes

        @param etag: If-Match value guarding against concurrent updates
        @return the urllib response object, or None on failure
        """
        data = json.dumps(parameter).encode(encoding="utf-8")
        LOGGER.debug("data is %s", data)
        req = urllib.request.Request(url, data=data, headers=self.__header)
        req.add_header("Content-Type", "application/json")
        req.add_header("If-Match", etag)
        req.get_method = lambda: 'PATCH'
        res = None
        try:
            res = urllib.request.urlopen(req, timeout=TIME_OUT)
        except HTTPException as http_exp:
            LOGGER.error(http_exp)
            LOGGER.error(u"%s %s", ERROR_CODE['E100001'], url)
        except HTTPError as http_err:
            LOGGER.error(http_err)
            LOGGER.error(u"%s %s", ERROR_CODE['E100001'], url)
            LOGGER.error(u"%s %s", ERROR_CODE['E600001'], url)
        except TypeError as type_err:
            LOGGER.error(type_err)
            LOGGER.error(u"%s %s", ERROR_CODE['E100001'], url)
        return res

    def delete(self, url):
        '''
        run the delete request,

        NOTE(review): no error handling - errors propagate to the caller.
        '''
        req = urllib.request.Request(url, headers=self.__header)
        req.get_method = lambda: 'DELETE'
        res = urllib.request.urlopen(req)
        return res
diff --git a/hdv/redfish/log_utils.py b/hdv/redfish/log_utils.py
new file mode 100644
index 0000000..996a1d1
--- /dev/null
+++ b/hdv/redfish/log_utils.py
@@ -0,0 +1,33 @@
+##############################################################################
+# Copyright (c) 2020 China Mobile Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+"""
+define the LOGGER settings
+"""
import logging
import os
import sys

# place the log file under <script dir>/logs/hdv.log
BASE_DIR = sys.path[0]
LOG_DIR = os.path.join(BASE_DIR, "logs")
LOG_FILE = os.path.join(LOG_DIR, "hdv.log")

# FileHandler raises FileNotFoundError if the target directory does not
# exist, so create it up front instead of crashing on first import
os.makedirs(LOG_DIR, exist_ok=True)

LOGGER = logging.getLogger("redfish")
LOGGER.setLevel(logging.DEBUG)

FORMATTER = logging.Formatter('%(asctime)s - %(filename)s[line:%(lineno)d] \
    - %(funcName)s - %(levelname)s: %(message)s')

# file handler: truncates the previous run's log (mode='w')
FILE = logging.FileHandler(filename=LOG_FILE, mode='w')
FILE.setLevel(logging.DEBUG)
FILE.setFormatter(FORMATTER)

# console handler mirrors everything to stderr
CONSOLE = logging.StreamHandler()
CONSOLE.setLevel(logging.DEBUG)
CONSOLE.setFormatter(FORMATTER)

LOGGER.addHandler(CONSOLE)
LOGGER.addHandler(FILE)
diff --git a/hdv/redfish/yaml_utils.py b/hdv/redfish/yaml_utils.py
new file mode 100644
index 0000000..438c150
--- /dev/null
+++ b/hdv/redfish/yaml_utils.py
@@ -0,0 +1,28 @@
+'''
+@author: cmcc
+'''
+import os
+import yaml
+# pylint: disable=E0611
+from log_utils import LOGGER
+
+
def read_yaml(file):
    '''read a yaml file

    @param file: path of the yaml file
    @return parsed content, or None when the file does not exist
    '''
    if not os.path.exists(file):
        LOGGER.info("%s not found", file)
        return None
    # safe_load avoids arbitrary object construction (yaml.load without a
    # Loader is unsafe and deprecated); the context manager closes the
    # handle the original version leaked
    with open(file, "r") as yaml_fp:
        return yaml.safe_load(yaml_fp)
+
+
def write_yaml(file, dict_data):
    '''write a yaml file

    @param file: destination path
    @param dict_data: data to dump
    '''
    # context manager closes the handle; the original leaked it
    with open(file, "w") as yaml_fp:
        yaml.safe_dump(dict_data, yaml_fp, explicit_start=True)
+
+
# manual smoke test only; guarded so that importing this module has no
# side effects (the original ran these file reads/writes at import time)
if __name__ == "__main__":
    print(read_yaml("./conf/depends.yaml"))
    print(read_yaml("./conf/cases.yaml"))
    write_yaml("./conf/report.yaml", read_yaml("./conf/cases.yaml"))
diff --git a/pylintrc b/pylintrc
new file mode 100644
index 0000000..4900d67
--- /dev/null
+++ b/pylintrc
@@ -0,0 +1,396 @@
+# Copyright 2015-2020 Intel Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+[MASTER]
+
+# Specify a configuration file.
+#rcfile=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Profiled execution.
+profile=no
+
+# Add files or directories to the blacklist. They should be base names, not
+# paths.
+ignore=CVS,docs,conf
+
+# Pickle collected data for later comparisons.
+persistent=yes
+
+# List of plugins (as comma separated values of python modules names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+# Use multiple processes to speed up Pylint.
+jobs=1
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension=no
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code
+extension-pkg-whitelist=
+
+# Allow optimization of some AST trees. This will activate a peephole AST
+# optimizer, which will apply various small optimizations. For instance, it can
+# be used to obtain the result of joining multiple strings with the addition
+# operator. Joining a lot of strings can lead to a maximum recursion error in
+# Pylint and this flag can prevent that. It has one side effect, the resulting
+# AST will be different than the one from reality.
+optimize-ast=no
+
+
+[MESSAGES CONTROL]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
+confidence=
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple time. See also the "--disable" option for examples.
+#enable=
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once).You can also use "--disable=all" to
+# disable everything first and then reenable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use"--disable=all --enable=classes
+# --disable=W"
+disable=E1602,E1603,E1601,E1606,E1607,E1604,E1605,E1608,W0401,W1604,W1605,W1606,W1607,W1601,W1602,W1603,W1622,W1623,W1620,W1621,W1608,W1609,W1624,W1625,W1618,W1626,W1627,I0021,I0020,W0704,R0903,W1613,W1638,W1611,W1610,W1617,W1616,W1615,W1614,W1630,W1619,W1632,W1635,W1634,W1637,W1636,W1639,W1612,W1628,W1633,W1629,I0011,W1640,R1705
+
+
+[REPORTS]
+
+# Set the output format. Available formats are text, parseable, colorized, msvs
+# (visual studio) and html. You can also give a reporter class, eg
+# mypackage.mymodule.MyReporterClass.
+output-format=text
+
+# Put messages in a separate file for each module / package specified on the
+# command line instead of printing them on stdout. Reports (if any) will be
+# written in a file name "pylint_global.[txt|html]".
+files-output=no
+
+# Tells whether to display a full report or only the messages
+reports=yes
+
+# Python expression which should return a note less than 10 (10 is the highest
+# note). You have access to the variables errors warning, statement which
+# respectively contain the number of errors / warnings messages and the total
+# number of statements analyzed. This is used by the global evaluation report
+# (RP0004).
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Add a comment according to your evaluation note. This is used by the global
+# evaluation report (RP0004).
+comment=no
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details
+#msg-template=
+
+
+[BASIC]
+
+# Required attributes for module, separated by a comma
+required-attributes=
+
+# List of builtins function names that should not be used, separated by a comma
+bad-functions=filter,apply,file
+
+# Good variable names which should always be accepted, separated by a comma
+good-names=i,j,k,ex,Run,_
+
+# Bad variable names which should always be refused, separated by a comma
+bad-names=foo,bar,baz,toto,tutu,tata
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=
+
+# Include a hint for the correct naming format with invalid-name
+include-naming-hint=yes
+
+# Regular expression matching correct inline iteration names
+inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
+
+# Naming hint for inline iteration names
+inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
+
+# Regular expression matching correct class attribute names
+class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{1,30}|(__.*__))$
+
+# Naming hint for class attribute names
+class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{1,30}|(__.*__))$
+
+# Regular expression matching correct argument names
+argument-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming hint for argument names
+argument-name-hint=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression matching correct variable names
+variable-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming hint for variable names
+variable-name-hint=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression matching correct module names
+module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
+
+# Naming hint for module names
+module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
+
+# Regular expression matching correct class names
+class-rgx=[A-Z_][a-zA-Z0-9]+$
+
+# Naming hint for class names
+class-name-hint=[A-Z_][a-zA-Z0-9]+$
+
+# Regular expression matching correct function names
+function-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming hint for function names
+function-name-hint=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression matching correct method names
+method-rgx=[a-z_][a-z0-9_]{2,35}$
+
+# Naming hint for method names
+method-name-hint=[a-z_][a-z0-9_]{2,35}$
+
+# Regular expression matching correct attribute names
+attr-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming hint for attribute names
+attr-name-hint=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression matching correct constant names
+const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
+
+# Naming hint for constant names
+const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+no-docstring-rgx=__.*__
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+docstring-min-length=-1
+
+
+[FORMAT]
+
+# Maximum number of characters on a single line.
+max-line-length=120
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+# List of optional constructs for which whitespace checking is disabled
+no-space-check=trailing-comma,dict-separator
+
+# Maximum number of lines in a module
+max-module-lines=1000
+
+# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
+# tab).
+indent-string=' '
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+
+[LOGGING]
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format
+logging-modules=logging
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=FIXME,XXX,TODO
+
+
+[SIMILARITIES]
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+# Ignore comments when computing similarities.
+ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+ignore-docstrings=yes
+
+# Ignore imports when computing similarities.
+ignore-imports=no
+
+
+[SPELLING]
+
+# Spelling dictionary name. Available dictionaries: none. To make it working
+# install python-enchant package.
+spelling-dict=
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=
+
+# A path to a file that contains private dictionary; one word per line.
+spelling-private-dict-file=
+
+# Tells whether to store unknown words to indicated private dictionary in
+# --spelling-private-dict-file option instead of raising a message.
+spelling-store-unknown-words=no
+
+
+[TYPECHECK]
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+ignore-mixin-members=yes
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis
+ignored-modules=
+
+# List of classes names for which member attributes should not be checked
+# (useful for classes with attributes dynamically set).
+ignored-classes=SQLObject
+
+# When zope mode is activated, add a predefined set of Zope acquired attributes
+# to generated-members.
+zope=no
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E0201 when accessed. Python regular
+# expressions are accepted.
+generated-members=REQUEST,acl_users,aq_parent
+
+
+[VARIABLES]
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# A regular expression matching the name of dummy variables (i.e. expectedly
+# not used).
+dummy-variables-rgx=_$|dummy
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid to define new builtins when possible.
+additional-builtins=
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,_cb
+
+
+[CLASSES]
+
+# List of interface methods to ignore, separated by a comma. This is used for
+# instance to not check methods defines in Zope's Interface base class.
+ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,__new__,setUp
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=mcs
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,_fields,_replace,_source,_make
+
+
+[DESIGN]
+
+# Maximum number of arguments for function / method
+max-args=6
+
+# Argument names that match this expression will be ignored. Default to name
+# with leading underscore
+ignored-argument-names=_.*
+
+# Maximum number of locals for function / method body
+max-locals=25
+
+# Maximum number of return / yield for function / method body
+max-returns=10
+
+# Maximum number of branch for function / method body
+max-branches=25
+
+# Maximum number of statements in function / method body
+max-statements=70
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=15
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+
+[IMPORTS]
+
+# Deprecated modules which should not be used, separated by a comma
+deprecated-modules=regsub,TERMIOS,Bastion,rexec
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled)
+import-graph=
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled)
+ext-import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled)
+int-import-graph=
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "Exception"
+overgeneral-exceptions=Exception
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..7d0c0ff
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,17 @@
+# The order of packages is significant, because pip processes them in the order
+# of appearance. Changing the order has an impact on the overall integration
+# process, which may cause wedges in the gate later.
+# Copyright (c) 2020 Spirent Communications
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+PyYAML # MIT
+urllib3 # MIT
+GitPython<2.1.12;python_version<'3.0' # BSD License (3 clause)
+GitPython;python_version>='3.0' # BSD License (3 clause)
+requests!=2.20.0 # Apache-2.0
+netaddr # BSD
+openpyxl
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..656396d
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,24 @@
+[metadata]
+name = cirv
+summary = Common Infrastructure Realization & Validation
+description-file =
+ README.md
+author = OPNFV
+author-email = opnfv-tech-discuss@lists.opnfv.org
+home-page = https://wiki.opnfv.org/display/functest
+classifier =
+ Intended Audience :: Information Technology
+ Intended Audience :: System Administrators
+ License :: OSI Approved :: Apache Software License
+ Operating System :: POSIX :: Linux
+ Programming Language :: Python
+ Programming Language :: Python :: 2
+ Programming Language :: Python :: 2.7
+ Programming Language :: Python :: 3
+ Programming Language :: Python :: 3.5
+
+[files]
+packages = tools
+
+[wheel]
+universal = 1
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..566d844
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools

# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
    import multiprocessing  # noqa
except ImportError:
    pass

# all packaging metadata is deferred to setup.cfg via pbr
setuptools.setup(
    setup_requires=['pbr>=2.0.0'],
    pbr=True)
diff --git a/test-requirements.txt b/test-requirements.txt
new file mode 100644
index 0000000..23c3b64
--- /dev/null
+++ b/test-requirements.txt
@@ -0,0 +1,8 @@
+# The order of packages is significant, because pip processes them in the order
+# of appearance. Changing the order has an impact on the overall integration
+# process, which may cause wedges in the gate later.
+flake8 # MIT
+yamllint
+bashate<1.0.0;python_version=='2.7' # Apache-2.0
+bashate;python_version>='3.6' # Apache-2.0
+pylint
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..74d0129
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,25 @@
+[tox]
+envlist = pep8,yamllint,pylint
+
+[testenv]
+deps =
+ -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/openstack/requirements/raw/branch/master/upper-constraints.txt}
+ -r{toxinidir}/requirements.txt
+ -r{toxinidir}/test-requirements.txt
+
+[testenv:pep8]
+basepython = python3
+commands = flake8
+
+[testenv:yamllint]
+basepython = python3
+files =
+ INFO.yaml
+ hdv
+commands =
+ yamllint -s {[testenv:yamllint]files} -c {toxinidir}/yamllintrc
+
+[testenv:pylint]
+basepython = python3
+commands =
+ pylint hdv