aboutsummaryrefslogtreecommitdiffstats
path: root/tools
diff options
context:
space:
mode:
Diffstat (limited to 'tools')
-rw-r--r--tools/CONTRIBUTING.md70
-rw-r--r--tools/README.md1
-rw-r--r--tools/__init__.py0
-rw-r--r--tools/hdv/__init__.py0
-rw-r--r--tools/hdv/redfish/__init__.py0
-rw-r--r--tools/hdv/redfish/conf/cases.xlsxbin17263 -> 0 bytes
-rw-r--r--tools/hdv/redfish/conf/cases.yaml517
-rw-r--r--tools/hdv/redfish/conf/config.yaml17
-rw-r--r--tools/hdv/redfish/conf/depends.yaml33
-rw-r--r--tools/hdv/redfish/conf/report.yaml832
-rw-r--r--tools/hdv/redfish/docs/readme.md129
-rw-r--r--tools/hdv/redfish/errors.py47
-rw-r--r--tools/hdv/redfish/excel_2_yaml.py62
-rw-r--r--tools/hdv/redfish/hdv.py60
-rw-r--r--tools/hdv/redfish/hdv_redfish.py676
-rw-r--r--tools/hdv/redfish/http_handler.py129
-rw-r--r--tools/hdv/redfish/log_utils.py33
-rw-r--r--tools/hdv/redfish/yaml_utils.py28
-rwxr-xr-xtools/sdv/NwLinksValid/__init__.py17
-rw-r--r--tools/sdv/NwLinksValid/nwlinksvalidator.py38
-rwxr-xr-xtools/sdv/SoftwarePostValid/__init__.py19
-rw-r--r--tools/sdv/SoftwarePostValid/swpostvalidator.py42
-rwxr-xr-xtools/sdv/SoftwarePreValid/__init__.py19
-rw-r--r--tools/sdv/SoftwarePreValid/airship.py267
-rw-r--r--tools/sdv/SoftwarePreValid/swprevalidator.py42
-rw-r--r--tools/sdv/__init__.py0
-rw-r--r--tools/sdv/conf/00_common.conf18
-rw-r--r--tools/sdv/conf/01_swprevalid.conf33
-rw-r--r--tools/sdv/conf/02_swpostvalid.conf4
-rw-r--r--tools/sdv/conf/03_nwlinksvalid.conf1
-rw-r--r--tools/sdv/conf/__init__.py265
-rw-r--r--tools/sdv/core/__init__.py19
-rw-r--r--tools/sdv/core/component_factory.py32
-rw-r--r--tools/sdv/core/loader/__init__.py18
-rw-r--r--tools/sdv/core/loader/loader.py129
-rw-r--r--tools/sdv/core/loader/loader_servant.py183
-rw-r--r--tools/sdv/docs/valid.rst28
-rwxr-xr-xtools/sdv/valid147
38 files changed, 0 insertions, 3955 deletions
diff --git a/tools/CONTRIBUTING.md b/tools/CONTRIBUTING.md
deleted file mode 100644
index 1aa6108..0000000
--- a/tools/CONTRIBUTING.md
+++ /dev/null
@@ -1,70 +0,0 @@
-<!---
-This work is licensed under a Creative Commons Attribution 4.0 International License.
-http://creativecommons.org/licenses/by/4.0
--->
-
-# General Coding Style
-
-## Code
-
-Abide by [PEP-8] for general code. Some particular points to note:
-
-* Wrap code at 79 characters.
-* Use only spaces - no tabs.
-* Use implicit string concatenation where possible. Don't use the escape
- character unless absolutely necessary.
-* Be liberal in your use of whitespace to group related statements together.
- However, don't leave a space after the docstring and the first statement.
-* Use single quotes for all string literals.
-
-## Documentation
-
-Follow [PEP-257] and the [Sphinx guidelines] for documentation. In particular:
-
-* Wrap docstrings at 72 characters.
-* Use double-quotes for all docstrings.
-* Write all inline comments in lower-case, except where using a name/initialism.
-* Document **all** library functions/classes completely. Tests, however, only need a test case docstring.
-
-To summarise the docstring conventions:
-
-```python
-def my_function(athing, stuff=5):
- """
- Summary line here in imperative tense.
-
- Longer description here...
-
- :param athing: Details about this paramter here
- :param stuff: Ditto
-
- :returns: None
- """
- pass # code here...
-```
-
-### Validation
-
-All code should be checked with the PyLint linter and PEP8 style guide checker.
-Pylint can be run like so:
-
-```bash
-pylint <file or directory>
-```
-
-Most PyLint errors should be resolved. You will need to do this manually.
-However, there are cases where they may not make sense (e.g. you **need** to
-pass `N` parameters to a function). In this case, disable the relevant
-case using an inline `disable` like so:
-
-```python
-# pylint: disable=[code]
-```
-
-On the other hand, all PEP8 errors should be resolved.
-
----
-
-[PEP-8]: http://legacy.python.org/dev/peps/pep-0008/
-[PEP-257]: http://legacy.python.org/dev/peps/pep-0257/
-[Sphinx guidelines]: https://pythonhosted.org/an_example_pypi_project/sphinx.html
diff --git a/tools/README.md b/tools/README.md
deleted file mode 100644
index 0bef330..0000000
--- a/tools/README.md
+++ /dev/null
@@ -1 +0,0 @@
-This folder contains various tool sets for CNTT RI-RC
diff --git a/tools/__init__.py b/tools/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tools/__init__.py
+++ /dev/null
diff --git a/tools/hdv/__init__.py b/tools/hdv/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tools/hdv/__init__.py
+++ /dev/null
diff --git a/tools/hdv/redfish/__init__.py b/tools/hdv/redfish/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tools/hdv/redfish/__init__.py
+++ /dev/null
diff --git a/tools/hdv/redfish/conf/cases.xlsx b/tools/hdv/redfish/conf/cases.xlsx
deleted file mode 100644
index e7fc61d..0000000
--- a/tools/hdv/redfish/conf/cases.xlsx
+++ /dev/null
Binary files differ
diff --git a/tools/hdv/redfish/conf/cases.yaml b/tools/hdv/redfish/conf/cases.yaml
deleted file mode 100644
index 5609708..0000000
--- a/tools/hdv/redfish/conf/cases.yaml
+++ /dev/null
@@ -1,517 +0,0 @@
----
-- case_name: set asset code
- case_sn: 1
- expected_code: 200
- expected_result: '{"AssetTag": "CM_cc@1234"}'
- group: asset managment
- header: null
- method: PATCH
- request_body: '{"AssetTag": "CM_cc@1234"}'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
-- case_name: get asset code
- case_sn: 2
- expected_code: 200
- expected_result: '{"AssetTag": "CM_cc@1234"}'
- group: asset managment
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
-- case_name: set host name
- case_sn: 3
- expected_code: 200
- expected_result: '{"HostName": "NFV-RPZJHZ-01B"}'
- group: asset managment
- header: null
- method: PATCH
- request_body: '{"HostName": "NFV-RPZJHZ-01B"}'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
-- case_name: check host name
- case_sn: 4
- expected_code: 200
- expected_result: '{"HostName": "NFV-RPZJHZ-01B"}'
- group: asset managment
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
-- case_name: check manufacturer
- case_sn: 5
- expected_code: 200
- expected_result: '{"Manufacturer": "New H3C Technologies Co., Ltd."}'
- group: asset managment
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
-- case_name: check model
- case_sn: 6
- expected_code: 200
- expected_result: '{"Model": "UniServer R4900 G3"}'
- group: asset managment
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
-- case_name: check serial number
- case_sn: 7
- expected_code: 200
- expected_result: '{"SerialNumber": "N/A"}'
- group: asset managment
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
-- case_name: check main board name
- case_sn: 8
- expected_code: 200
- expected_result: '{"Oem":{"Mainboard": {"BoardName": "RS33M2C9S"}}}'
- group: asset managment
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}
-- case_name: check main board serial number
- case_sn: 9
- expected_code: 200
- expected_result: '{"Oem": {"Mainboard": {"SerialNumber": "N/A"}}}'
- group: asset managment
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}
-- case_name: check BIOS version
- case_sn: 10
- expected_code: 200
- expected_result: '{"BiosVersion": "2.00.35P01 V100R001B02D035SP01"}'
- group: asset managment
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
-- case_name: check CPU amount
- case_sn: 11
- expected_code: 200
- expected_result: '{"Members@odata.count": 2}'
- group: compoment management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Processors
-- case_name: check CPU info
- case_sn: 12
- expected_code: 200
- expected_result: '{ "count": 2, "Manufacturer": "Intel(R) Corporation", "MaxSpeedMHz":
- 2300, "Model": "Intel(R) Xeon(R) Gold 5218N CPU @ 2.30GHz", "ProcessorArchitecture":
- ["x86", "IA-64", "ARM", "MIPS", "OEM"], "Socket": [1, 2], "Status": { "Health":
- "OK", "State": "Enabled" }, "TotalCores": 16, "TotalThreads":
- 32}'
- group: compoment management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}{cpu_id}
-- case_name: check memory mount
- case_sn: 13
- expected_code: 200
- expected_result: '{"Members@odata.count": 12}'
- group: compoment management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Memory
-- case_name: check memory information
- case_sn: 14
- expected_code: 200
- expected_result: '{ "count": 12, "BaseModuleType": "RDIMM", "CapacityMiB":
- 32768, "DeviceLocator": "N/A", "Manufacturer": ["Hynix Semiconductor", "Micron"], "MemoryDeviceType":
- "DDR4", "OperatingSpeedMhz": 2666, "PartNumber": ["HMA84GR7AFR4N-VK","36ASF4G72PZ-2G6D1"], "Status":
- { "Health": "OK", "State": "Enabled" }}'
- group: compoment management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}{memory_id}
-- case_name: check raid card amount
- case_sn: 15
- expected_code: 200
- expected_result: '{"Members@odata.count": 1}'
- group: compoment management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Storages
-- case_name: check raid card information
- case_sn: 16
- expected_code: 200
- expected_result: '{ "count": 1, "StorageControllers": [ { "FirmwareVersion":
- "2.62", "Manufacturer": "H3C", "Model": "N/A", "Status":
- { "Health": "OK", "State": "Enabled" } } ]}'
- group: compoment management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}{storage_id}
-- case_name: check harddisk information
- case_sn: 17
- expected_code: 200
- expected_result: '{ "count": 4, "CapacityBytes": [480102187008, 960193626112], "Location":
- { "Info": "N/A", "InfoFormat": "DeviceName" }, "Manufacturer":
- "ATA", "MediaType": "SSD", "Model": ["INTEL SSDSC2KB48", "INTEL SSDSC2KB96"], "Protocol":
- "SATA", "Status": { "Health": "OK", "State": "Enabled" }}'
- group: compoment management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}{drives_id}
-- case_name: check network interface adapter information
- case_sn: 18
- expected_code: 200
- expected_result: '{ "count": 3, "Manufacturer": "Mellanox", "Model": "NIC-620F-B2-25Gb-2P-1-X", "Name":
- ["PCIeSlot2", "PCIeSlot3", "PCIeSlot6"], "Oem": { "Public": { "CardModel":
- "2*25GE", "RootBDF": ["0000:17:00.0", "0000:17:02.0", "0000:AE:02.0"], } }, "Status":
- { "Health": "OK", "State": "Enabled" }}'
- group: compoment management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}{networkadapters_id}
-- case_name: check network interface adapter port information
- case_sn: 19
- expected_code: 200
- expected_result: '{ "count": 6, "AssociatedNetworkAddresses": [ "N/A" ], "Oem":
- { "Public": { "BDF": "N/A", "PortType": "OpticalPort" } }, "PhysicalPortNumber":
- ["1", "2"]}'
- group: compoment management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}{networkports_id}
-- case_name: check fans information
- case_sn: 20
- expected_code: 200
- expected_result: '{ "FanSummary": { "Count": 6 }, "Fans": [ { "MemberId":
- "N/A", "Oem": { "Public": { "SpeedRatio":
- "N/A" } }, "Status": { "Health":
- "OK", "State": "Enabled" } }, { "MemberId":
- "N/A", "Oem": { "Public": { "SpeedRatio":
- "N/A" } }, "Status": { "Health":
- "OK", "State": "Enabled" } },{ "MemberId":
- "N/A", "Oem": { "Public": { "SpeedRatio":
- "N/A" } }, "Status": { "Health":
- "OK", "State": "Enabled" } },{ "MemberId":
- "N/A", "Oem": { "Public": { "SpeedRatio":
- "N/A" } }, "Status": { "Health":
- "OK", "State": "Enabled" } },{ "MemberId":
- "N/A", "Oem": { "Public": { "SpeedRatio":
- "N/A" } }, "Status": { "Health":
- "OK", "State": "Enabled" } },{ "MemberId":
- "N/A", "Oem": { "Public": { "SpeedRatio":
- "N/A" } }, "Status": { "Health":
- "OK", "State": "Enabled" } } ],}'
- group: compoment management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}/Thermal
-- case_name: check power amount
- case_sn: 21
- expected_code: 200
- expected_result: '{ "DeviceMaxNum": { "PowerSupplyNum": 2},}'
- group: compoment management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}
-- case_name: check power detail info
- case_sn: 22
- expected_code: 200
- expected_result: '{ "PowerControl": [ { "PowerConsumedWatts":
- "N/A","Status":{ "Health": "OK", "State": "Enabled" } }, ], "PowerSupplies":
- [ { "LineInputVoltage": "N/A", "MemberId": "1", "PowerCapacityWatts":
- 800,"Status": { "Health": "OK", "State": "Enabled" } }, { "LineInputVoltage":
- "N/A", "MemberId": "2", "PowerCapacityWatts": 800,"Status":
- { "Health": "OK", "State": "Enabled" } } ],}'
- group: compoment management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}/Power
-- case_name: check logical dirve health status
- case_sn: 23
- expected_code: 200
- expected_result: '{ "count": 2, "Name": "N/A", "Status": { "Health":
- ["OK", "Critical"], "State": "Enabled" }}'
- group: compoment management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}{volume_id}
-- case_name: check server temperature air intake
- case_sn: 24
- expected_code: 200
- expected_result: '{ "Temperatures": [ { "Name": "INPUT_TEMP", "ReadingCelsius":
- "N/A", } ]}'
- group: sensor management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}/Thermal
-- case_name: check cpu temperature
- case_sn: 25
- expected_code: 200
- expected_result: '{ "Temperatures": [ { "Name": "INPUT_TEMP", "ReadingCelsius":
- "N/A", }, { "Name": "CPU1_TEMP", "ReadingCelsius":
- "N/A", }, { "Name": "CPU2_TEMP", "ReadingCelsius":
- "N/A", }, ]}'
- group: sensor management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}/Thermal
-- case_name: check server power state
- case_sn: 26
- expected_code: 200
- expected_result: '{"PowerState": "On"}'
- group: power management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
-- case_name: remote power on server
- case_sn: 27
- expected_code: 200
- expected_result: '{"error":{"@Message.ExtendedInfo": [{"Message": "Successfully
- Completed Request", "Severity":"OK"}]}}'
- group: power management
- header: null
- method: POST
- request_body: '{"ResetType": "On"}'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Actions/ComputerSystem.Reset
-- case_name: remote power off server
- case_sn: 28
- expected_code: 200
- expected_result: '{"error":{"@Message.ExtendedInfo": [{"Message": "Successfully
- Completed Request", "Severity":"OK"}]}}'
- group: power management
- header: null
- method: POST
- request_body: '{"ResetType": "GracefulShutdown"}'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Actions/ComputerSystem.Reset
-- case_name: remote reset server
- case_sn: 29
- expected_code: 200
- expected_result: '{"error":{"@Message.ExtendedInfo": [{"Message": "Successfully
- Completed Request", "Severity":"OK"}]}}'
- group: power management
- header: null
- method: POST
- request_body: '{"ResetType": "ForceRestart"}'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Actions/ComputerSystem.Reset
-- case_name: remote configure CPU in hyperthreading disabled
- case_sn: 30
- expected_code: 200
- expected_result: '{"Attributes": {"ProcessorHyperThreading": "Disabled"}}'
- group: remote configure
- header: null
- method: PATCH
- request_body: '{ "Attributes": { "ProcessorHyperThreading": "Disabled" }}'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
-- case_name: remote get CPU hyperthreading in disabled
- case_sn: 31
- expected_code: 200
- expected_result: '{"Attributes": {"ProcessorHyperThreading": "Disabled"}}'
- group: remote configure
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
-- case_name: remote configure CPU in hyperthreading enabled
- case_sn: 32
- expected_code: 200
- expected_result: '{"Attributes": {"ProcessorHyperThreading": "Enabled"}}'
- group: remote configure
- header: null
- method: PATCH
- request_body: '{ "Attributes": { "ProcessorHyperThreading": "Enabled" }}'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
-- case_name: remote get CPU hyperthreading in enabled
- case_sn: 33
- expected_code: 200
- expected_result: '{"Attributes": {"ProcessorHyperThreading": "Enabled"}}'
- group: remote configure
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
-- case_name: disable PXE mode
- case_sn: 34
- expected_code: 200
- expected_result: '{ "Attributes": { "IPv4PXESupport": "Disabled" }}'
- group: remote configure
- header: null
- method: PATCH
- request_body: '{ "Attributes": { "IPv4PXESupport": "Disabled" }}'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
-- case_name: check IPV4 PXE mode in disabled
- case_sn: 35
- expected_code: 200
- expected_result: '{ "Attributes": { "IPv4PXESupport": "Disabled" }}'
- group: remote configure
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
-- case_name: enable PXE mode
- case_sn: 36
- expected_code: 200
- expected_result: '{ "Attributes": { "IPv4PXESupport": "Enabled" }}'
- group: remote configure
- header: null
- method: PATCH
- request_body: '{ "Attributes": { "IPv4PXESupport": "Enabled" }}'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
-- case_name: check ipv4 PXE mode in enabled
- case_sn: 37
- expected_code: 200
- expected_result: '{ "Attributes": { "IPv4PXESupport": "Enabled" }}'
- group: remote configure
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
-- case_name: set boot type order
- case_sn: 38
- expected_code: 200
- expected_result: '{ "Attributes": { "BootTypeOrder0": "HardDiskDrive", "BootTypeOrder1":
- "DVDROMDrive", "BootTypeOrder2": "PXE", "BootTypeOrder3":
- "Others", }}'
- group: remote interface management
- header: null
- method: PATCH
- request_body: '{ "Attributes": { "BootTypeOrder0": "HardDiskDrive", "BootTypeOrder1":
- "DVDROMDrive", "BootTypeOrder2": "PXE", "BootTypeOrder3":
- "Others", }}'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
-- case_name: check boot order
- case_sn: 39
- expected_code: 200
- expected_result: '{ "Attributes": { "BootTypeOrder0": "HardDiskDrive", "BootTypeOrder1":
- "DVDROMDrive", "BootTypeOrder2": "PXE", "BootTypeOrder3":
- "Others", }}'
- group: remote interface management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
-- case_name: configure boot order
- case_sn: 40
- expected_code: 200
- expected_result: '{ "Attributes": { "BootTypeOrder0": "DVDROMDrive", "BootTypeOrder1":
- "HardDiskDrive", "BootTypeOrder2": "Others", "BootTypeOrder3":
- "PXE", }}'
- group: remote interface management
- header: null
- method: PATCH
- request_body: '{ "Attributes": { "BootTypeOrder0": "DVDROMDrive", "BootTypeOrder1":
- "HardDiskDrive", "BootTypeOrder2": "Others", "BootTypeOrder3":
- "PXE", }}'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
-- case_name: check boot order
- case_sn: 41
- expected_code: 200
- expected_result: '{ "Attributes": { "BootTypeOrder0": "DVDROMDrive", "BootTypeOrder1":
- "HardDiskDrive", "BootTypeOrder2": "Others", "BootTypeOrder3":
- "PXE", }}'
- group: remote interface management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
-- case_name: configure new boot PXE order first
- case_sn: 42
- expected_code: 200
- expected_result: '{ "Attributes": { "BootTypeOrder0": "PXE", "BootTypeOrder1":
- "HardDiskDrive", "BootTypeOrder2": "DVDROMDrive", "BootTypeOrder3":
- "Others", }}'
- group: remote interface management
- header: null
- method: PATCH
- request_body: '{ "Attributes": { "BootTypeOrder0": "PXE", "BootTypeOrder1":
- "HardDiskDrive", "BootTypeOrder2": "DVDROMDrive", "BootTypeOrder3":
- "Others", }}'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
-- case_name: check boot order PEX order first
- case_sn: 43
- expected_code: 200
- expected_result: '{ "Attributes": { "BootTypeOrder0": "PXE", "BootTypeOrder1":
- "HardDiskDrive", "BootTypeOrder2": "DVDROMDrive", "BootTypeOrder3":
- "Others", }}'
- group: remote interface management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
-- case_name: check BMC Firmware version
- case_sn: 44
- expected_code: 200
- expected_result: '{"count": 1, "FirmwareVersion": "1.30.11P01 HDM V100R001B03D011SP01"}'
- group: remote interface management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}{manager_id}
-- case_name: change BMC account
- case_sn: 45
- expected_code: 200
- expected_result: '{"UserName": "CM_cc@1234","RoleId": "Administrator",}'
- group: remote interface management
- header: null
- method: PATCH
- request_body: '{"UserName": "CM_cc@1234","Password": "1234@CM_cc","RoleId": "Administrator",}'
- url: https://{bmc_ip}/redfish/v1/AccountService/Accounts/3
-- case_name: configure BMC ip in static, ipv4
- case_sn: 46
- expected_code: 200
- expected_result: '{"count": 1, "IPv4Addresses": [ { "Address":
- "192.168.66.120", "AddressOrigin": "Static", "Gateway":
- "192.168.66.1", "SubnetMask": "255.255.255.128" } ]}'
- group: remote interface management
- header: null
- method: PATCH
- request_body: '{ "IPv4Addresses": [ { "Address": "192.168.66.120", "AddressOrigin":
- "Static", "Gateway": "192.168.66.1", "SubnetMask": "255.255.255.128" } ]}'
- url: https://{bmc_ip}{manager_id}/EthernetInterfaces/eth1
-- case_name: configure BMC ip in DHCP, gateway and subnet mask ipv4
- case_sn: 47
- expected_code: 200
- expected_result: '{"count": 1, "IPv4Addresses": [ { "Address":
- "192.168.66.120", "AddressOrigin": "DHCP", "Gateway": "192.168.66.1", "SubnetMask":
- "255.255.255.128" } ]}'
- group: remote interface management
- header: null
- method: PATCH
- request_body: '{ "IPv4Addresses": [ { "AddressOrigin": "DHCP" } ]}'
- url: https://{bmc_ip}{manager_id}/EthernetInterfaces/eth1
-- case_name: configure BMC ip in static, ipv4
- case_sn: 48
- expected_code: 200
- expected_result: '{"count": 1, "IPv4Addresses": [ { "AddressOrigin":
- "DHCP", } ]}'
- group: remote interface management
- header: null
- method: PATCH
- request_body: '{ "IPv4Addresses": [ { "AddressOrigin": "DHCP" } ]}'
- url: https://{bmc_ip}{manager_id}/EthernetInterfaces/eth1
-- case_name: configure BMC ip in static, ipv6
- case_sn: 49
- expected_code: 200
- expected_result: '{"count": 1, "IPv6Addresses": [ { "Address":
- "N/A", "AddressOrigin": "N/A", "PrefixLength": 64 }, { "Address":
- "2019::11", "AddressOrigin": "Static", "PrefixLength": 64 } ]}'
- group: remote interface management
- header: null
- method: PATCH
- request_body: '{ "IPv6Addresses": [ { "Address": "2019::11", "AddressOrigin":
- "Static", "PrefixLength": 64 } ]}'
- url: https://{bmc_ip}{manager_id}/EthernetInterfaces/eth1
diff --git a/tools/hdv/redfish/conf/config.yaml b/tools/hdv/redfish/conf/config.yaml
deleted file mode 100644
index b57b71c..0000000
--- a/tools/hdv/redfish/conf/config.yaml
+++ /dev/null
@@ -1,17 +0,0 @@
----
-bmc_ip: 172.29.160.22
-bmc_user: root
-bmc_pwd: Huawei12#$
-system_id: 1
-chassis_id: 1
-attr_name: 3
-pro_seq: 4
-url_seq: 5
-req_header_seq: 6
-req_body_seq: 7
-expect_return_code_seq: 8
-expect_return_value_seq: 9
-return_code_seq: 10
-return_value_seq: 11
-detail_result: 12
-final_result: 13
diff --git a/tools/hdv/redfish/conf/depends.yaml b/tools/hdv/redfish/conf/depends.yaml
deleted file mode 100644
index eecdcd4..0000000
--- a/tools/hdv/redfish/conf/depends.yaml
+++ /dev/null
@@ -1,33 +0,0 @@
----
-- component_id: cpu_id
- key_flags: Members
- pro_value: GET
- url_value: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Processors
-- component_id: memory_id
- key_flags: Members
- pro_value: GET
- url_value: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Memory
-- component_id: storage_id
- key_flags: Members
- pro_value: GET
- url_value: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Storages
-- component_id: drives_id
- key_flags: Drives
- pro_value: GET
- url_value: https://{bmc_ip}{storage_id}
-- component_id: networkadapters_id
- key_flags: Members
- pro_value: GET
- url_value: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}/NetworkAdapters
-- component_id: networkports_id
- key_flags: Controllers:Link:NetworkPorts
- pro_value: GET
- url_value: https://{bmc_ip}{networkadapters_id}
-- component_id: volume_id
- key_flags: Members
- pro_value: GET
- url_value: https://{bmc_ip}{storage_id}/Volumes
-- component_id: manager_id
- key_flags: Members
- pro_value: GET
- url_value: https://{bmc_ip}/redfish/v1/Managers
diff --git a/tools/hdv/redfish/conf/report.yaml b/tools/hdv/redfish/conf/report.yaml
deleted file mode 100644
index d396360..0000000
--- a/tools/hdv/redfish/conf/report.yaml
+++ /dev/null
@@ -1,832 +0,0 @@
----
-- case_name: set asset code
- case_sn: 1
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure'', ''AssetTag'': "Failure, expect value: CM_cc@1234, return value: Can''t
- find the key AssetTag in return value"}]}'
- expected_code: 200
- expected_result: '{"AssetTag": "CM_cc@1234"}'
- final_rst: Failure
- group: asset managment
- header: null
- method: PATCH
- request_body: '{"AssetTag": "CM_cc@1234"}'
- return_code_seq: '[''N/A'']'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
-- case_name: get asset code
- case_sn: 2
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Success'', ''AssetTag'': ''Success''}]}'
- expected_code: 200
- expected_result: '{"AssetTag": "CM_cc@1234"}'
- final_rst: Success
- group: asset managment
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
-- case_name: set host name
- case_sn: 3
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure'', ''HostName'': "Failure, expect value: NFV-RPZJHZ-01B, return value:
- Can''t find the key HostName in return value"}]}'
- expected_code: 200
- expected_result: '{"HostName": "NFV-RPZJHZ-01B"}'
- final_rst: Failure
- group: asset managment
- header: null
- method: PATCH
- request_body: '{"HostName": "NFV-RPZJHZ-01B"}'
- return_code_seq: '[''N/A'']'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
-- case_name: check host name
- case_sn: 4
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Success'', ''HostName'': "Failure, expect value: NFV-RPZJHZ-01B, return value:
- Can''t find the key HostName in return value"}]}'
- expected_code: 200
- expected_result: '{"HostName": "NFV-RPZJHZ-01B"}'
- final_rst: Failure
- group: asset managment
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
-- case_name: check manufacturer
- case_sn: 5
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Success'', ''Manufacturer'': ''Failure, expect value: New H3C Technologies Co.,
- Ltd., return value: Huawei''}]}'
- expected_code: 200
- expected_result: '{"Manufacturer": "New H3C Technologies Co., Ltd."}'
- final_rst: Failure
- group: asset managment
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
-- case_name: check model
- case_sn: 6
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Success'', ''Model'': ''Failure, expect value: UniServer R4900 G3, return value:
- RH2288H V3''}]}'
- expected_code: 200
- expected_result: '{"Model": "UniServer R4900 G3"}'
- final_rst: Failure
- group: asset managment
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
-- case_name: check serial number
- case_sn: 7
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Success'', ''SerialNumber'': ''Success''}]}'
- expected_code: 200
- expected_result: '{"SerialNumber": "N/A"}'
- final_rst: Success
- group: asset managment
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
-- case_name: check main board name
- case_sn: 8
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Success'', ''Oem'': "Failure, expect value: {''Mainboard'': {''BoardName'':
- ''RS33M2C9S''}}, return value: Can''t find the key Oem in return value"}]}'
- expected_code: 200
- expected_result: '{"Oem":{"Mainboard": {"BoardName": "RS33M2C9S"}}}'
- final_rst: Failure
- group: asset managment
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200]'
- url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}
-- case_name: check main board serial number
- case_sn: 9
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Success'', ''Oem'': "Failure, expect value: {''Mainboard'': {''SerialNumber'':
- ''N/A''}}, return value: Can''t find the key Oem in return value"}]}'
- expected_code: 200
- expected_result: '{"Oem": {"Mainboard": {"SerialNumber": "N/A"}}}'
- final_rst: Failure
- group: asset managment
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200]'
- url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}
-- case_name: check BIOS version
- case_sn: 10
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Success'', ''BiosVersion'': ''Failure, expect value: 2.00.35P01 V100R001B02D035SP01,
- return value: 3.63''}]}'
- expected_code: 200
- expected_result: '{"BiosVersion": "2.00.35P01 V100R001B02D035SP01"}'
- final_rst: Failure
- group: asset managment
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
-- case_name: check CPU amount
- case_sn: 11
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Success'', ''Members@odata.count'': ''Success''}]}'
- expected_code: 200
- expected_result: '{"Members@odata.count": 2}'
- final_rst: Success
- group: compoment management
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Processors
-- case_name: check CPU info
- case_sn: 12
- details_result: '{''count'': ''Success'', ''info'': [{''return_code'': ''Success'',
- ''Manufacturer'': ''Success'', ''MaxSpeedMHz'': ''Failure, expect value: 2300,
- return value: 3500'', ''Model'': ''Failure, expect value: Intel(R) Xeon(R) Gold
- 5218N CPU @ 2.30GHz, return value: Intel(R) Xeon(R) CPU E5-2620 v4 @ 2.10GHz'',
- ''ProcessorArchitecture'': ''Success'', ''Socket'': ''Failure, expect value: [1,
- 2], return value: 0'', ''Status'': {''Health'': ''Success'', ''State'': ''Success''},
- ''TotalCores'': ''Failure, expect value: 16, return value: 8'', ''TotalThreads'':
- ''Failure, expect value: 32, return value: 16''}, {''return_code'': ''Success'',
- ''Manufacturer'': ''Success'', ''MaxSpeedMHz'': ''Failure, expect value: 2300,
- return value: 3500'', ''Model'': ''Failure, expect value: Intel(R) Xeon(R) Gold
- 5218N CPU @ 2.30GHz, return value: Intel(R) Xeon(R) CPU E5-2620 v4 @ 2.10GHz'',
- ''ProcessorArchitecture'': ''Success'', ''Socket'': ''Success'', ''Status'': {''Health'':
- ''Success'', ''State'': ''Success''}, ''TotalCores'': ''Failure, expect value:
- 16, return value: 8'', ''TotalThreads'': ''Failure, expect value: 32, return value:
- 16''}]}'
- expected_code: 200
- expected_result: '{ "count": 2, "Manufacturer": "Intel(R) Corporation", "MaxSpeedMHz":
- 2300, "Model": "Intel(R) Xeon(R) Gold 5218N CPU @ 2.30GHz", "ProcessorArchitecture":
- ["x86", "IA-64", "ARM", "MIPS", "OEM"], "Socket": [1, 2], "Status": { "Health":
- "OK", "State": "Enabled" }, "TotalCores": 16, "TotalThreads":
- 32}'
- final_rst: Failure
- group: compoment management
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200, 200]'
- url: https://{bmc_ip}{cpu_id}
-- case_name: check memory mount
- case_sn: 13
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Success'', ''Members@odata.count'': ''Failure, expect value: 12, return value:
- 4''}]}'
- expected_code: 200
- expected_result: '{"Members@odata.count": 12}'
- final_rst: Failure
- group: compoment management
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Memory
-- case_name: check memory information
- case_sn: 14
- details_result: '{''count'': ''Failure, the actual num is 4'', ''info'': [{''return_code'':
- ''Success'', ''BaseModuleType'': "Failure, expect value: RDIMM, return value:
- Can''t find the key BaseModuleType in return value", ''CapacityMiB'': ''Success'',
- ''DeviceLocator'': ''Success'', ''Manufacturer'': ''Success'', ''MemoryDeviceType'':
- ''Success'', ''OperatingSpeedMhz'': ''Failure, expect value: 2666, return value:
- 2400'', ''PartNumber'': "Failure, expect value: [''HMA84GR7AFR4N-VK'', ''36ASF4G72PZ-2G6D1''],
- return value: Can''t find the key PartNumber in return value", ''Status'': {''Health'':
- ''Success'', ''State'': ''Success''}}, {''return_code'': ''Success'', ''BaseModuleType'':
- "Failure, expect value: RDIMM, return value: Can''t find the key BaseModuleType
- in return value", ''CapacityMiB'': ''Success'', ''DeviceLocator'': ''Success'',
- ''Manufacturer'': ''Success'', ''MemoryDeviceType'': ''Success'', ''OperatingSpeedMhz'':
- ''Failure, expect value: 2666, return value: 2400'', ''PartNumber'': "Failure,
- expect value: [''HMA84GR7AFR4N-VK'', ''36ASF4G72PZ-2G6D1''], return value: Can''t
- find the key PartNumber in return value", ''Status'': {''Health'': ''Success'',
- ''State'': ''Success''}}, {''return_code'': ''Success'', ''BaseModuleType'': "Failure,
- expect value: RDIMM, return value: Can''t find the key BaseModuleType in return
- value", ''CapacityMiB'': ''Success'', ''DeviceLocator'': ''Success'', ''Manufacturer'':
- ''Success'', ''MemoryDeviceType'': ''Success'', ''OperatingSpeedMhz'': ''Failure,
- expect value: 2666, return value: 2400'', ''PartNumber'': "Failure, expect value:
- [''HMA84GR7AFR4N-VK'', ''36ASF4G72PZ-2G6D1''], return value: Can''t find the key
- PartNumber in return value", ''Status'': {''Health'': ''Success'', ''State'':
- ''Success''}}, {''return_code'': ''Success'', ''BaseModuleType'': "Failure, expect
- value: RDIMM, return value: Can''t find the key BaseModuleType in return value",
- ''CapacityMiB'': ''Success'', ''DeviceLocator'': ''Success'', ''Manufacturer'':
- ''Success'', ''MemoryDeviceType'': ''Success'', ''OperatingSpeedMhz'': ''Failure,
- expect value: 2666, return value: 2400'', ''PartNumber'': "Failure, expect value:
- [''HMA84GR7AFR4N-VK'', ''36ASF4G72PZ-2G6D1''], return value: Can''t find the key
- PartNumber in return value", ''Status'': {''Health'': ''Success'', ''State'':
- ''Success''}}]}'
- expected_code: 200
- expected_result: '{ "count": 12, "BaseModuleType": "RDIMM", "CapacityMiB":
- 32768, "DeviceLocator": "N/A", "Manufacturer": ["Hynix Semiconductor", "Micron"], "MemoryDeviceType":
- "DDR4", "OperatingSpeedMhz": 2666, "PartNumber": ["HMA84GR7AFR4N-VK","36ASF4G72PZ-2G6D1"], "Status":
- { "Health": "OK", "State": "Enabled" }}'
- final_rst: Failure
- group: compoment management
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200, 200, 200, 200]'
- url: https://{bmc_ip}{memory_id}
-- case_name: check raid card amount
- case_sn: 15
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Success'', ''Members@odata.count'': ''Success''}]}'
- expected_code: 200
- expected_result: '{"Members@odata.count": 1}'
- final_rst: Success
- group: compoment management
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Storages
-- case_name: check raid card information
- case_sn: 16
- details_result: '{''count'': ''Success'', ''info'': [{''return_code'': ''Success'',
- ''StorageControllers'': [{''FirmwareVersion'': ''2.62'', ''Manufacturer'': ''H3C'',
- ''Model'': ''N/A'', ''Status'': {''Health'': ''Success'', ''State'': ''Success''}}]}]}'
- expected_code: 200
- expected_result: '{ "count": 1, "StorageControllers": [ { "FirmwareVersion":
- "2.62", "Manufacturer": "H3C", "Model": "N/A", "Status":
- { "Health": "OK", "State": "Enabled" } } ]}'
- final_rst: Failure
- group: compoment management
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200]'
- url: https://{bmc_ip}{storage_id}
-- case_name: check harddisk information
- case_sn: 17
- details_result: '{''count'': ''Failure, the actual num is 6'', ''info'': [{''return_code'':
- ''Success'', ''CapacityBytes'': ''Failure, expect value: [480102187008, 960193626112],
- return value: None'', ''Location'': {''Info'': ''N/A'', ''InfoFormat'': ''DeviceName''},
- ''Manufacturer'': ''ATA'', ''MediaType'': ''SSD'', ''Model'': "Failure, expect
- value: [''INTEL SSDSC2KB48'', ''INTEL SSDSC2KB96''], return value: None", ''Protocol'':
- ''Success'', ''Status'': {''Health'': ''Success'', ''State'': ''Enabled''}}, {''return_code'':
- ''Success'', ''CapacityBytes'': ''Failure, expect value: [480102187008, 960193626112],
- return value: None'', ''Location'': {''Info'': ''N/A'', ''InfoFormat'': ''DeviceName''},
- ''Manufacturer'': ''ATA'', ''MediaType'': ''SSD'', ''Model'': "Failure, expect
- value: [''INTEL SSDSC2KB48'', ''INTEL SSDSC2KB96''], return value: None", ''Protocol'':
- ''Success'', ''Status'': {''Health'': ''Success'', ''State'': ''Enabled''}}, {''return_code'':
- ''Success'', ''CapacityBytes'': ''Failure, expect value: [480102187008, 960193626112],
- return value: None'', ''Location'': {''Info'': ''N/A'', ''InfoFormat'': ''DeviceName''},
- ''Manufacturer'': ''ATA'', ''MediaType'': ''SSD'', ''Model'': "Failure, expect
- value: [''INTEL SSDSC2KB48'', ''INTEL SSDSC2KB96''], return value: None", ''Protocol'':
- ''Success'', ''Status'': {''Health'': ''Success'', ''State'': ''Enabled''}}, {''return_code'':
- ''Success'', ''CapacityBytes'': ''Failure, expect value: [480102187008, 960193626112],
- return value: None'', ''Location'': {''Info'': ''N/A'', ''InfoFormat'': ''DeviceName''},
- ''Manufacturer'': ''ATA'', ''MediaType'': ''SSD'', ''Model'': "Failure, expect
- value: [''INTEL SSDSC2KB48'', ''INTEL SSDSC2KB96''], return value: None", ''Protocol'':
- ''Success'', ''Status'': {''Health'': ''Success'', ''State'': ''Enabled''}}, {''return_code'':
- ''Success'', ''CapacityBytes'': ''Failure, expect value: [480102187008, 960193626112],
- return value: None'', ''Location'': {''Info'': ''N/A'', ''InfoFormat'': ''DeviceName''},
- ''Manufacturer'': ''ATA'', ''MediaType'': ''SSD'', ''Model'': "Failure, expect
- value: [''INTEL SSDSC2KB48'', ''INTEL SSDSC2KB96''], return value: None", ''Protocol'':
- ''Success'', ''Status'': {''Health'': ''Success'', ''State'': ''Enabled''}}, {''return_code'':
- ''Success'', ''CapacityBytes'': ''Failure, expect value: [480102187008, 960193626112],
- return value: None'', ''Location'': {''Info'': ''N/A'', ''InfoFormat'': ''DeviceName''},
- ''Manufacturer'': ''ATA'', ''MediaType'': ''SSD'', ''Model'': "Failure, expect
- value: [''INTEL SSDSC2KB48'', ''INTEL SSDSC2KB96''], return value: None", ''Protocol'':
- ''Success'', ''Status'': {''Health'': ''Success'', ''State'': ''Enabled''}}]}'
- expected_code: 200
- expected_result: '{ "count": 4, "CapacityBytes": [480102187008, 960193626112], "Location":
- { "Info": "N/A", "InfoFormat": "DeviceName" }, "Manufacturer":
- "ATA", "MediaType": "SSD", "Model": ["INTEL SSDSC2KB48", "INTEL SSDSC2KB96"], "Protocol":
- "SATA", "Status": { "Health": "OK", "State": "Enabled" }}'
- final_rst: Failure
- group: compoment management
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200, 200, 200, 200, 200, 200]'
- url: https://{bmc_ip}{drives_id}
-- case_name: check network interface adapter information
- case_sn: 18
- details_result: N/A
- expected_code: 200
- expected_result: '{ "count": 3, "Manufacturer": "Mellanox", "Model": "NIC-620F-B2-25Gb-2P-1-X", "Name":
- ["PCIeSlot2", "PCIeSlot3", "PCIeSlot6"], "Oem": { "Public": { "CardModel":
- "2*25GE", "RootBDF": ["0000:17:00.0", "0000:17:02.0", "0000:AE:02.0"], } }, "Status":
- { "Health": "OK", "State": "Enabled" }}'
- final_rst: Failure
- group: compoment management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}{networkadapters_id}
-- case_name: check network interface adapter port information
- case_sn: 19
- details_result: N/A
- expected_code: 200
- expected_result: '{ "count": 6, "AssociatedNetworkAddresses": [ "N/A" ], "Oem":
- { "Public": { "BDF": "N/A", "PortType": "OpticalPort" } }, "PhysicalPortNumber":
- ["1", "2"]}'
- final_rst: Failure
- group: compoment management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}{networkports_id}
-- case_name: check fans information
- case_sn: 20
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Success'', ''FanSummary'': "Failure, expect value: {''Count'': 6}, return value:
- Can''t find the key FanSummary in return value", ''Fans'': [{''MemberId'': ''Success'',
- ''Oem'': {''Public'': {''SpeedRatio'': ''N/A''}}, ''Status'': {''Health'': ''Success'',
- ''State'': ''Success''}}, {''MemberId'': ''Success'', ''Oem'': {''Public'': {''SpeedRatio'':
- ''N/A''}}, ''Status'': {''Health'': ''Success'', ''State'': ''Success''}}, {''MemberId'':
- ''Success'', ''Oem'': {''Public'': {''SpeedRatio'': ''N/A''}}, ''Status'': {''Health'':
- ''Success'', ''State'': ''Success''}}, {''MemberId'': ''Success'', ''Oem'': {''Public'':
- {''SpeedRatio'': ''N/A''}}, ''Status'': {''Health'': ''Success'', ''State'': ''Success''}},
- {''MemberId'': ''Success'', ''Oem'': {''Public'': {''SpeedRatio'': ''N/A''}},
- ''Status'': {''Health'': ''Success'', ''State'': ''Success''}}, {''MemberId'':
- ''Success'', ''Oem'': {''Public'': {''SpeedRatio'': ''N/A''}}, ''Status'': {''Health'':
- ''Success'', ''State'': ''Success''}}]}]}'
- expected_code: 200
- expected_result: '{ "FanSummary": { "Count": 6 }, "Fans": [ { "MemberId":
- "N/A", "Oem": { "Public": { "SpeedRatio":
- "N/A" } }, "Status": { "Health":
- "OK", "State": "Enabled" } }, { "MemberId":
- "N/A", "Oem": { "Public": { "SpeedRatio":
- "N/A" } }, "Status": { "Health":
- "OK", "State": "Enabled" } },{ "MemberId":
- "N/A", "Oem": { "Public": { "SpeedRatio":
- "N/A" } }, "Status": { "Health":
- "OK", "State": "Enabled" } },{ "MemberId":
- "N/A", "Oem": { "Public": { "SpeedRatio":
- "N/A" } }, "Status": { "Health":
- "OK", "State": "Enabled" } },{ "MemberId":
- "N/A", "Oem": { "Public": { "SpeedRatio":
- "N/A" } }, "Status": { "Health":
- "OK", "State": "Enabled" } },{ "MemberId":
- "N/A", "Oem": { "Public": { "SpeedRatio":
- "N/A" } }, "Status": { "Health":
- "OK", "State": "Enabled" } } ],}'
- final_rst: Failure
- group: compoment management
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200]'
- url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}/Thermal
-- case_name: check power amount
- case_sn: 21
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Success'', ''DeviceMaxNum'': "Failure, expect value: {''PowerSupplyNum'': 2},
- return value: Can''t find the key DeviceMaxNum in return value"}]}'
- expected_code: 200
- expected_result: '{ "DeviceMaxNum": { "PowerSupplyNum": 2},}'
- final_rst: Failure
- group: compoment management
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200]'
- url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}
-- case_name: check power detail info
- case_sn: 22
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Success'', ''PowerControl'': [{''PowerConsumedWatts'': ''Success'', ''Status'':
- {''Health'': ''OK'', ''State'': ''Enabled''}}], ''PowerSupplies'': [{''LineInputVoltage'':
- ''Success'', ''MemberId'': ''Failure, expect value: 1, return value: 0'', ''PowerCapacityWatts'':
- ''Failure, expect value: 800, return value: 460'', ''Status'': {''Health'': ''Success'',
- ''State'': ''Success''}}, {''LineInputVoltage'': ''Success'', ''MemberId'': ''Failure,
- expect value: 2, return value: 1'', ''PowerCapacityWatts'': ''Failure, expect
- value: 800, return value: 460'', ''Status'': {''Health'': ''Failure, expect value:
- OK, return value: Critical'', ''State'': ''Success''}}]}]}'
- expected_code: 200
- expected_result: '{ "PowerControl": [ { "PowerConsumedWatts":
- "N/A","Status":{ "Health": "OK", "State": "Enabled" } }, ], "PowerSupplies":
- [ { "LineInputVoltage": "N/A", "MemberId": "1", "PowerCapacityWatts":
- 800,"Status": { "Health": "OK", "State": "Enabled" } }, { "LineInputVoltage":
- "N/A", "MemberId": "2", "PowerCapacityWatts": 800,"Status":
- { "Health": "OK", "State": "Enabled" } } ],}'
- final_rst: Failure
- group: compoment management
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200]'
- url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}/Power
-- case_name: check logical dirve health status
- case_sn: 23
- details_result: N/A
- expected_code: 200
- expected_result: '{ "count": 2, "Name": "N/A", "Status": { "Health":
- ["OK", "Critical"], "State": "Enabled" }}'
- final_rst: Failure
- group: compoment management
- header: null
- method: GET
- request_body: null
- url: https://{bmc_ip}{volume_id}
-- case_name: check server temperature air intake
- case_sn: 24
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Success'', ''Temperatures'': [{''Name'': ''Failure, expect value: INPUT_TEMP,
- return value: Inlet Temp'', ''ReadingCelsius'': ''Success''}]}]}'
- expected_code: 200
- expected_result: '{ "Temperatures": [ { "Name": "INPUT_TEMP", "ReadingCelsius":
- "N/A", } ]}'
- final_rst: Failure
- group: sensor management
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200]'
- url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}/Thermal
-- case_name: check cpu temperature
- case_sn: 25
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Success'', ''Temperatures'': [{''Name'': ''Failure, expect value: INPUT_TEMP,
- return value: Inlet Temp'', ''ReadingCelsius'': ''Success''}, {''Name'': ''Failure,
- expect value: CPU1_TEMP, return value: Outlet Temp'', ''ReadingCelsius'': ''Success''},
- {''Name'': ''Failure, expect value: CPU2_TEMP, return value: PCH Temp'', ''ReadingCelsius'':
- ''N/A''}]}]}'
- expected_code: 200
- expected_result: '{ "Temperatures": [ { "Name": "INPUT_TEMP", "ReadingCelsius":
- "N/A", }, { "Name": "CPU1_TEMP", "ReadingCelsius":
- "N/A", }, { "Name": "CPU2_TEMP", "ReadingCelsius":
- "N/A", }, ]}'
- final_rst: Failure
- group: sensor management
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200]'
- url: https://{bmc_ip}/redfish/v1/Chassis/{chassis_id}/Thermal
-- case_name: check server power state
- case_sn: 26
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Success'', ''PowerState'': ''Failure, expect value: On, return value: Off''}]}'
- expected_code: 200
- expected_result: '{"PowerState": "On"}'
- final_rst: Failure
- group: power management
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}
-- case_name: remote power on server
- case_sn: 27
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure''}]}'
- expected_code: 200
- expected_result: '{"error":{"@Message.ExtendedInfo": [{"Message": "Successfully
- Completed Request", "Severity":"OK"}]}}'
- final_rst: Failure
- group: power management
- header: null
- method: POST
- request_body: '{"ResetType": "On"}'
- return_code_seq: '[]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Actions/ComputerSystem.Reset
-- case_name: remote power off server
- case_sn: 28
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure''}]}'
- expected_code: 200
- expected_result: '{"error":{"@Message.ExtendedInfo": [{"Message": "Successfully
- Completed Request", "Severity":"OK"}]}}'
- final_rst: Failure
- group: power management
- header: null
- method: POST
- request_body: '{"ResetType": "GracefulShutdown"}'
- return_code_seq: '[]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Actions/ComputerSystem.Reset
-- case_name: remote reset server
- case_sn: 29
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure''}]}'
- expected_code: 200
- expected_result: '{"error":{"@Message.ExtendedInfo": [{"Message": "Successfully
- Completed Request", "Severity":"OK"}]}}'
- final_rst: Failure
- group: power management
- header: null
- method: POST
- request_body: '{"ResetType": "ForceRestart"}'
- return_code_seq: '[]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Actions/ComputerSystem.Reset
-- case_name: remote configure CPU in hyperthreading disabled
- case_sn: 30
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure'', ''Attributes'': "Failure, expect value: {''ProcessorHyperThreading'':
- ''Disabled''}, return value: Can''t find the key Attributes in return value"}]}'
- expected_code: 200
- expected_result: '{"Attributes": {"ProcessorHyperThreading": "Disabled"}}'
- final_rst: Failure
- group: remote configure
- header: null
- method: PATCH
- request_body: '{ "Attributes": { "ProcessorHyperThreading": "Disabled" }}'
- return_code_seq: '[''N/A'']'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
-- case_name: remote get CPU hyperthreading in disabled
- case_sn: 31
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure''}]}'
- expected_code: 200
- expected_result: '{"Attributes": {"ProcessorHyperThreading": "Disabled"}}'
- final_rst: Failure
- group: remote configure
- header: null
- method: GET
- request_body: null
- return_code_seq: '[]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
-- case_name: remote configure CPU in hyperthreading enabled
- case_sn: 32
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure'', ''Attributes'': "Failure, expect value: {''ProcessorHyperThreading'':
- ''Enabled''}, return value: Can''t find the key Attributes in return value"}]}'
- expected_code: 200
- expected_result: '{"Attributes": {"ProcessorHyperThreading": "Enabled"}}'
- final_rst: Failure
- group: remote configure
- header: null
- method: PATCH
- request_body: '{ "Attributes": { "ProcessorHyperThreading": "Enabled" }}'
- return_code_seq: '[''N/A'']'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
-- case_name: remote get CPU hyperthreading in enabled
- case_sn: 33
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure''}]}'
- expected_code: 200
- expected_result: '{"Attributes": {"ProcessorHyperThreading": "Enabled"}}'
- final_rst: Failure
- group: remote configure
- header: null
- method: GET
- request_body: null
- return_code_seq: '[]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
-- case_name: disable PXE mode
- case_sn: 34
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure'', ''Attributes'': "Failure, expect value: {''IPv4PXESupport'': ''Disabled''},
- return value: Can''t find the key Attributes in return value"}]}'
- expected_code: 200
- expected_result: '{ "Attributes": { "IPv4PXESupport": "Disabled" }}'
- final_rst: Failure
- group: remote configure
- header: null
- method: PATCH
- request_body: '{ "Attributes": { "IPv4PXESupport": "Disabled" }}'
- return_code_seq: '[''N/A'']'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
-- case_name: check IPV4 PXE mode in disabled
- case_sn: 35
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure''}]}'
- expected_code: 200
- expected_result: '{ "Attributes": { "IPv4PXESupport": "Disabled" }}'
- final_rst: Failure
- group: remote configure
- header: null
- method: GET
- request_body: null
- return_code_seq: '[]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
-- case_name: enable PXE mode
- case_sn: 36
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure'', ''Attributes'': "Failure, expect value: {''IPv4PXESupport'': ''Enabled''},
- return value: Can''t find the key Attributes in return value"}]}'
- expected_code: 200
- expected_result: '{ "Attributes": { "IPv4PXESupport": "Enabled" }}'
- final_rst: Failure
- group: remote configure
- header: null
- method: PATCH
- request_body: '{ "Attributes": { "IPv4PXESupport": "Enabled" }}'
- return_code_seq: '[''N/A'']'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
-- case_name: check ipv4 PXE mode in enabled
- case_sn: 37
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure''}]}'
- expected_code: 200
- expected_result: '{ "Attributes": { "IPv4PXESupport": "Enabled" }}'
- final_rst: Failure
- group: remote configure
- header: null
- method: GET
- request_body: null
- return_code_seq: '[]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
-- case_name: set boot type order
- case_sn: 38
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure'', ''Attributes'': "Failure, expect value: {''BootTypeOrder0'': ''HardDiskDrive'',
- ''BootTypeOrder1'': ''DVDROMDrive'', ''BootTypeOrder2'': ''PXE'', ''BootTypeOrder3'':
- ''Others''}, return value: Can''t find the key Attributes in return value"}]}'
- expected_code: 200
- expected_result: '{ "Attributes": { "BootTypeOrder0": "HardDiskDrive", "BootTypeOrder1":
- "DVDROMDrive", "BootTypeOrder2": "PXE", "BootTypeOrder3":
- "Others", }}'
- final_rst: Failure
- group: remote interface management
- header: null
- method: PATCH
- request_body: '{ "Attributes": { "BootTypeOrder0": "HardDiskDrive", "BootTypeOrder1":
- "DVDROMDrive", "BootTypeOrder2": "PXE", "BootTypeOrder3":
- "Others", }}'
- return_code_seq: '[''N/A'']'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
-- case_name: check boot order
- case_sn: 39
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure''}]}'
- expected_code: 200
- expected_result: '{ "Attributes": { "BootTypeOrder0": "HardDiskDrive", "BootTypeOrder1":
- "DVDROMDrive", "BootTypeOrder2": "PXE", "BootTypeOrder3":
- "Others", }}'
- final_rst: Failure
- group: remote interface management
- header: null
- method: GET
- request_body: null
- return_code_seq: '[]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
-- case_name: configure boot order
- case_sn: 40
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure'', ''Attributes'': "Failure, expect value: {''BootTypeOrder0'': ''DVDROMDrive'',
- ''BootTypeOrder1'': ''HardDiskDrive'', ''BootTypeOrder2'': ''Others'', ''BootTypeOrder3'':
- ''PXE''}, return value: Can''t find the key Attributes in return value"}]}'
- expected_code: 200
- expected_result: '{ "Attributes": { "BootTypeOrder0": "DVDROMDrive", "BootTypeOrder1":
- "HardDiskDrive", "BootTypeOrder2": "Others", "BootTypeOrder3":
- "PXE", }}'
- final_rst: Failure
- group: remote interface management
- header: null
- method: PATCH
- request_body: '{ "Attributes": { "BootTypeOrder0": "DVDROMDrive", "BootTypeOrder1":
- "HardDiskDrive", "BootTypeOrder2": "Others", "BootTypeOrder3":
- "PXE", }}'
- return_code_seq: '[''N/A'']'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
-- case_name: check boot order
- case_sn: 41
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure''}]}'
- expected_code: 200
- expected_result: '{ "Attributes": { "BootTypeOrder0": "DVDROMDrive", "BootTypeOrder1":
- "HardDiskDrive", "BootTypeOrder2": "Others", "BootTypeOrder3":
- "PXE", }}'
- final_rst: Failure
- group: remote interface management
- header: null
- method: GET
- request_body: null
- return_code_seq: '[]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
-- case_name: configure new boot PXE order first
- case_sn: 42
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure'', ''Attributes'': "Failure, expect value: {''BootTypeOrder0'': ''PXE'',
- ''BootTypeOrder1'': ''HardDiskDrive'', ''BootTypeOrder2'': ''DVDROMDrive'', ''BootTypeOrder3'':
- ''Others''}, return value: Can''t find the key Attributes in return value"}]}'
- expected_code: 200
- expected_result: '{ "Attributes": { "BootTypeOrder0": "PXE", "BootTypeOrder1":
- "HardDiskDrive", "BootTypeOrder2": "DVDROMDrive", "BootTypeOrder3":
- "Others", }}'
- final_rst: Failure
- group: remote interface management
- header: null
- method: PATCH
- request_body: '{ "Attributes": { "BootTypeOrder0": "PXE", "BootTypeOrder1":
- "HardDiskDrive", "BootTypeOrder2": "DVDROMDrive", "BootTypeOrder3":
- "Others", }}'
- return_code_seq: '[''N/A'']'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios/Settings
-- case_name: check boot order PEX order first
- case_sn: 43
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure''}]}'
- expected_code: 200
- expected_result: '{ "Attributes": { "BootTypeOrder0": "PXE", "BootTypeOrder1":
- "HardDiskDrive", "BootTypeOrder2": "DVDROMDrive", "BootTypeOrder3":
- "Others", }}'
- final_rst: Failure
- group: remote interface management
- header: null
- method: GET
- request_body: null
- return_code_seq: '[]'
- url: https://{bmc_ip}/redfish/v1/Systems/{system_id}/Bios
-- case_name: check BMC Firmware version
- case_sn: 44
- details_result: '{''count'': ''Success'', ''info'': [{''return_code'': ''Success'',
- ''FirmwareVersion'': ''Failure, expect value: 1.30.11P01 HDM V100R001B03D011SP01,
- return value: 2.43''}]}'
- expected_code: 200
- expected_result: '{"count": 1, "FirmwareVersion": "1.30.11P01 HDM V100R001B03D011SP01"}'
- final_rst: Failure
- group: remote interface management
- header: null
- method: GET
- request_body: null
- return_code_seq: '[200]'
- url: https://{bmc_ip}{manager_id}
-- case_name: change BMC account
- case_sn: 45
- details_result: '{''count'': ''N/A for this case'', ''info'': [{''return_code'':
- ''Failure'', ''UserName'': "Failure, expect value: CM_cc@1234, return value: Can''t
- find the key UserName in return value", ''RoleId'': "Failure, expect value: Administrator,
- return value: Can''t find the key RoleId in return value"}]}'
- expected_code: 200
- expected_result: '{"UserName": "CM_cc@1234","RoleId": "Administrator",}'
- final_rst: Failure
- group: remote interface management
- header: null
- method: PATCH
- request_body: '{"UserName": "CM_cc@1234","Password": "1234@CM_cc","RoleId": "Administrator",}'
- return_code_seq: '[''N/A'']'
- url: https://{bmc_ip}/redfish/v1/AccountService/Accounts/3
-- case_name: configure BMC ip in static, ipv4
- case_sn: 46
- details_result: '{''count'': ''Success'', ''info'': [{''return_code'': ''Failure'',
- ''IPv4Addresses'': "Failure, expect value: [{''Address'': ''192.168.66.120'',
- ''AddressOrigin'': ''Static'', ''Gateway'': ''192.168.66.1'', ''SubnetMask'':
- ''255.255.255.128''}], return value: Can''t find the key IPv4Addresses in return
- value"}]}'
- expected_code: 200
- expected_result: '{"count": 1, "IPv4Addresses": [ { "Address":
- "192.168.66.120", "AddressOrigin": "Static", "Gateway":
- "192.168.66.1", "SubnetMask": "255.255.255.128" } ]}'
- final_rst: Failure
- group: remote interface management
- header: null
- method: PATCH
- request_body: '{ "IPv4Addresses": [ { "Address": "192.168.66.120", "AddressOrigin":
- "Static", "Gateway": "192.168.66.1", "SubnetMask": "255.255.255.128" } ]}'
- return_code_seq: '[''N/A'']'
- url: https://{bmc_ip}{manager_id}/EthernetInterfaces/eth1
-- case_name: configure BMC ip in DHCP, gateway and subnet mask ipv4
- case_sn: 47
- details_result: '{''count'': ''Success'', ''info'': [{''return_code'': ''Failure'',
- ''IPv4Addresses'': "Failure, expect value: [{''Address'': ''192.168.66.120'',
- ''AddressOrigin'': ''DHCP'', ''Gateway'': ''192.168.66.1'', ''SubnetMask'': ''255.255.255.128''}],
- return value: Can''t find the key IPv4Addresses in return value"}]}'
- expected_code: 200
- expected_result: '{"count": 1, "IPv4Addresses": [ { "Address":
- "192.168.66.120", "AddressOrigin": "DHCP", "Gateway": "192.168.66.1", "SubnetMask":
- "255.255.255.128" } ]}'
- final_rst: Failure
- group: remote interface management
- header: null
- method: PATCH
- request_body: '{ "IPv4Addresses": [ { "AddressOrigin": "DHCP" } ]}'
- return_code_seq: '[''N/A'']'
- url: https://{bmc_ip}{manager_id}/EthernetInterfaces/eth1
-- case_name: configure BMC ip in static, ipv4
- case_sn: 48
- details_result: '{''count'': ''Success'', ''info'': [{''return_code'': ''Failure'',
- ''IPv4Addresses'': "Failure, expect value: [{''AddressOrigin'': ''DHCP''}], return
- value: Can''t find the key IPv4Addresses in return value"}]}'
- expected_code: 200
- expected_result: '{"count": 1, "IPv4Addresses": [ { "AddressOrigin":
- "DHCP", } ]}'
- final_rst: Failure
- group: remote interface management
- header: null
- method: PATCH
- request_body: '{ "IPv4Addresses": [ { "AddressOrigin": "DHCP" } ]}'
- return_code_seq: '[''N/A'']'
- url: https://{bmc_ip}{manager_id}/EthernetInterfaces/eth1
-- case_name: configure BMC ip in static, ipv6
- case_sn: 49
- details_result: '{''count'': ''Success'', ''info'': [{''return_code'': ''Failure'',
- ''IPv6Addresses'': "Failure, expect value: [{''Address'': ''N/A'', ''AddressOrigin'':
- ''N/A'', ''PrefixLength'': 64}, {''Address'': ''2019::11'', ''AddressOrigin'':
- ''Static'', ''PrefixLength'': 64}], return value: Can''t find the key IPv6Addresses
- in return value"}]}'
- expected_code: 200
- expected_result: '{"count": 1, "IPv6Addresses": [ { "Address":
- "N/A", "AddressOrigin": "N/A", "PrefixLength": 64 }, { "Address":
- "2019::11", "AddressOrigin": "Static", "PrefixLength": 64 } ]}'
- final_rst: Failure
- group: remote interface management
- header: null
- method: PATCH
- request_body: '{ "IPv6Addresses": [ { "Address": "2019::11", "AddressOrigin":
- "Static", "PrefixLength": 64 } ]}'
- return_code_seq: '[''N/A'']'
- url: https://{bmc_ip}{manager_id}/EthernetInterfaces/eth1
diff --git a/tools/hdv/redfish/docs/readme.md b/tools/hdv/redfish/docs/readme.md
deleted file mode 100644
index 2113913..0000000
--- a/tools/hdv/redfish/docs/readme.md
+++ /dev/null
@@ -1,129 +0,0 @@
-##############################################################################
-# Copyright (c) 2020 China Mobile Co.,Ltd and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-This is a prototype of hardware validation implementation in redfish interface for a certain hardware vendor.
-which originally is contributed by China Mobile.
->>> Usage:
-usage: hdv.py [-h] [--version] [--config CONFIG] [--file_type FILE_TYPE]
- [--case_yaml CASE_YAML] [--depends_yaml DEPENDS_YAML]
- [--case_excel CASE_EXCEL]
-
-hdv tool by redfish, it works in two mode
-
-optional arguments:
- -h, --help show this help message and exit
- --version show tool version
- --config CONFIG given global config.yaml file
- --file_type FILE_TYPE
- config file type, [yaml|excel]
- --case_yaml CASE_YAML
- case yaml file, uesd if file_type = yaml
- --depends_yaml DEPENDS_YAML
- depends yaml file,uesd if file_type = yaml
- --case_excel CASE_EXCEL
- excel case file used if file_type = excel
-example:
-#default case
-1>python .\hdv.py
-following config used
- - ./conf/config.yaml
- - file_type=excel
- - ./conf/cases.xlsx
-# use file_type=yaml
-2>python .\hdv.py --file_type=yaml
-
-example1. default conf/config.yaml, file_type=excel, cases.xlsx used
-python .\hdv.py
-example2. use yaml file type config, default conf/cases.yaml conf/depends.yaml used
-python .\hdv.py --file_type=yaml
-example3. user input config file
-python .\hdv.py --file_type=yaml --case_yaml=./conf-new/cases.yaml --depends_yaml=./conf-new/depends.yaml
-
->>> tools directory:
-
-./redfish
-├─conf # config directory
-├─docs # readme
-├─logs # hdv.log would be generated here.
-
-$ ls -lR .
-$ ls redfish/*.py
-redfish/__init__.py
-redfish/excel_2_yaml.py #tool script to convert excel cases.xlsx sheets content to yaml format cases.yaml and depends.yaml
-redfish/hdv_redfish.py #the code implementation by parsing config.yaml and cases.xlsx or cases.yaml and depends.yaml
-redfish/log_utils.py #log utils
-redfish/errors.py #error code definition for the tool during parse.
-redfish/hdv.py #hdv portal
-redfish/http_handler.py #http_handler
-redfish/yaml_utils.py #yaml utils for test.
-
-$ ls redfish/conf
-config.yaml #global config yaml where define BMC settings, static value, and some position definition in the cases.xlsx excel
-cases.xlsx #two sheet defined (cases and depend_id), input case file if file_type=excel, default way.
- #sheet cases - define all test case redfish url, expected value, etc
- #sheet dependent_id - define all dependent_id url which is used to get parent resource id for the url in the cases.
-cases.yaml #test cases yaml file,where the same set test case with cases.xlsx, it is used if file_type=yaml
-depends.yaml #depends.yaml where the same content with sheet dependent_id, it is used if file_type=yaml
-report.yaml #final test report, it is used if file_type=yaml
-
-$ ls redfish/docs
-readme.md #readme
-
-$ ls redfish/logs
-hdv.log # test log file
-
->>> Principle
-The hdv tool gets the global config from conf/config.yaml, e.g bmc settings, and
-global variable definitions, and some excel column position used in case file_type=excel
-User can select eiter file_type yaml or excel as the configure file type,
-default type is excel at present. However the principle is similar.
-
-If file_type is excel, it will parse two sheets of excel workbook, cases and dependent_id.
-The dependent_id sheet is used to define how to get the parents before checking a final redfish url,
-thinking about checking a port should get the adapter at first.
-The cases sheet is the test cases template, where the variable will be replaced
-by global static value from config yaml or dependent_id
-
-By running a final redfish url request, it will get response result from the test server.
-Then tool will compare the response value with expected value defined in <expected_result> column of cases sheet to decide if the case status.
-
-test report of each case <details,case_status> will write back to the same excel in the last two columns.
-
-Meanwhile, yaml file_type is supported also, it processes similarly as excel, except
-- reading depends.yaml to get the dependent_id
-- reading cases.yaml to run the test case
-- report.yaml will be created as the final report.
-cases.xlsx will not be used anymore in yaml case.
-
-Besides, excel_2_yaml.py script can be used to convert the cases.xlsx to yaml file accordingly.
-If you want to update the cases content, you can update the excel at first, then convert by the script.
-
->>> FAQ:
-1. how to customize expected result?
-you need put a json format value in it, the hierachy should be exactly the same with actual returned value,
-as the comparing implementation relies on it.
- => a simple example: '{"AssetTag": "CM_cc@1234"}'
- => a complex example:
-'{ "count": 2, "Manufacturer": "Intel(R) Corporation", "MaxSpeedMHz":
- 2300, "Model": "Intel(R) Xeon(R) Gold 5218N CPU @ 2.30GHz", "ProcessorArchitecture":
- ["x86", "IA-64", "ARM", "MIPS", "OEM"], "Socket": [1, 2], "Status": { "Health":
- "OK", "State": "Enabled" }, "TotalCores": 16, "TotalThreads":
- 32}'
-
-in the above data, a specific "count" attribute defined to check components quantity returned, e.g How many cpus expected.
-generally it can be a subset attributes definition, comparing with actual return value
-also it can support list of all expected value for list of objects.
-example: "Socket:[1,2]", expecting return "Socket:1" and "Socket:2" from returned response
-
->>>Perspective:
-- there are differences between vendors's implementation, or even versions for the same vendor.
-- define more test case or update existing case in the cases.yaml and depends.yaml or cases.xlsx file to support much more checks.
-- more implementation could be contributed from community so that it can grow bigger to support more types and checkpoints test case.
-
-#https://gerrit.opnfv.org/gerrit/admin/repos/cirv
diff --git a/tools/hdv/redfish/errors.py b/tools/hdv/redfish/errors.py
deleted file mode 100644
index e7ea8e8..0000000
--- a/tools/hdv/redfish/errors.py
+++ /dev/null
@@ -1,47 +0,0 @@
-##############################################################################
-# Copyright (c) 2020 China Mobile Co.,Ltd and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-"""
-ERROR CODE instruction
-"""
-ERROR_CODE = {
- # E100XXX: Connection
- "E100001": "E100001:fail to get response from the url",
- "E100002": "E100002:unexpected request url",
- "E100003": "E100003:failed to setup connection",
- # E200XXX: options - tools arguments.
- "E200001": "E200001:unsupported input file_mode, \
- should be one of [yaml,excel]",
- # E300XXX: resource issue - depended resource is not existing...
- "E300001": "E300001:invalid token",
- "E300002": "E300002:fail to get dependency parent id, Action: check if the \
- resource support by server",
- "E300003": "E300003:fail to get expected id list for component_id, \
- Action: check if the resource support by server",
- # E400XXX: configuration error
- "E400001": "E400001:fail to find configure file",
- "E400002": "E400002:parse config.yaml exception",
- "E400003": "E400003: key_list is null for key_flags",
- "E400004": "E400004: unexpected response body type",
- "E400005": "E400005: customized expected value format error, \
- Action:check input expected value type with actual returned value type",
- "E400006": "E400006: unexpected expected value type, \
- expected[str,list,dict]",
- "E400007": "E400007: unexpected expected value type while comparing",
- # E500XXX: application - find no value from cache
- "E500001": "E500001: fail find key from actual value, \
- Action: check if the attribute support by server",
- # E600XXX: restful interface
- "E600001": "E600001: unsupported redfish api?",
- }
-
-WARN_CODE = {
- "W100001": "W100001: fail to the response from a request",
- "W100002": "W100002: unexpected type of return_value type",
- "W100003": "W100003: NoneType value",
-}
diff --git a/tools/hdv/redfish/excel_2_yaml.py b/tools/hdv/redfish/excel_2_yaml.py
deleted file mode 100644
index 948ead3..0000000
--- a/tools/hdv/redfish/excel_2_yaml.py
+++ /dev/null
@@ -1,62 +0,0 @@
-##############################################################################
-# Copyright (c) 2020 China Mobile Co.,Ltd and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-'''
-excel 2 yaml tools
-convert excel config to yaml format config: depends.yaml and cases.yaml.
-'''
-import os
-import yaml
-from openpyxl.reader.excel import load_workbook
-# pylint: disable=E0611
-from log_utils import LOGGER
-
-
-def load_sheet(excel_file, sheet_index, start_col, end_col):
- '''
- load sheet
- '''
- if not os.path.exists(excel_file):
- LOGGER.error("excel file not existing")
- return None
- input_file = load_workbook(excel_file)
- input_ws = input_file[input_file.sheetnames[sheet_index]]
- cell_key = []
- rows_list = []
- for i in range(start_col, end_col):
- cell_key.append(input_ws.cell(row=1, column=i).value)
- row = 2
- while input_ws.cell(row=row, column=1).value:
- cell_value = []
- for i in range(start_col, end_col):
- value = input_ws.cell(row=row, column=i).value
- if isinstance(value, str):
- value = value.strip().replace('\n', '')
- cell_value.append(value)
- cell_dict = dict(zip(cell_key, cell_value))
- row += 1
- rows_list.append(cell_dict)
-
- LOGGER.info(rows_list)
- return rows_list
-
-
-def create_yaml(id_dict, yaml_file):
- '''
- create yaml
- '''
- with open(yaml_file, 'w') as y_file:
- yaml.dump(id_dict, y_file, explicit_start=True)
-
-
-DEPEND_FILE_NAME = "./conf/depends.yaml"
-LOGGER.info("create %s ", DEPEND_FILE_NAME)
-create_yaml(load_sheet("./conf/cases.xlsx", 1, 1, 5), DEPEND_FILE_NAME)
-
-CASE_FILE_NAME = "./conf/cases.yaml"
-create_yaml(load_sheet("./conf/cases.xlsx", 0, 1, 10), CASE_FILE_NAME)
diff --git a/tools/hdv/redfish/hdv.py b/tools/hdv/redfish/hdv.py
deleted file mode 100644
index e06286e..0000000
--- a/tools/hdv/redfish/hdv.py
+++ /dev/null
@@ -1,60 +0,0 @@
-##############################################################################
-# Copyright (c) 2020 China Mobile Co.,Ltd and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-'''
-hdv tools
- all config files are put under conf/
- config.yaml is the global configuration
- additional config for supporting two modes
- - excel: tools will parse the depend_id sheet and cases sheet and
- execute test case and write report back to excel
- - yaml: tools will parse depends.yaml and cases.yaml and execute test case
- and write a report.yaml
- theory:
- either test case can be finished by one restful request,
- or an additional request needed to get dependency parent resource.
- e.g a case for checking port, should get networkadaptor_id before that.
-'''
-import argparse
-from hdv_redfish import run as run_case
-
-
-def parse_args():
- '''
- parse arguments
- '''
- parser = argparse.ArgumentParser(description="hdv tool by redfish, \
- check readme under ./docs")
- parser.add_argument('--version', action='version',
- version='%(prog)s 0.1', help="show tool version")
- parser.add_argument('--config', type=str, default="./conf/config.yaml",
- help="given global config.yaml file")
- parser.add_argument('--file_type', type=str, default="excel",
- help="config file type, [yaml|excel],default is excel")
- parser.add_argument('--case_yaml', type=str, default="./conf/cases.yaml",
- help="case yaml file, uesd if file_type = yaml")
- parser.add_argument('--depends_yaml', type=str,
- default="./conf/depends.yaml",
- help="depends yaml file,uesd if file_type = yaml")
- parser.add_argument('--case_excel', type=str, default="./conf/cases.xlsx",
- help="excel case file used if file_type = excel")
- args = parser.parse_args()
- return args
-
-
-def main():
- '''
- main function
- '''
- args = parse_args()
- run_case(args.config, args.case_excel, args.depends_yaml, args.case_yaml,
- args.file_type)
-
-
-if __name__ == "__main__":
- main()
diff --git a/tools/hdv/redfish/hdv_redfish.py b/tools/hdv/redfish/hdv_redfish.py
deleted file mode 100644
index 5fc44ca..0000000
--- a/tools/hdv/redfish/hdv_redfish.py
+++ /dev/null
@@ -1,676 +0,0 @@
-##############################################################################
-# Copyright (c) 2020 China Mobile Co.,Ltd and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-'''
-an implementation of hardware delivery validation based on redfish interface.
-'''
-import time
-import os
-import re
-from re import DOTALL as DT
-import json
-import copy
-from ast import literal_eval
-import yaml
-from openpyxl.reader.excel import load_workbook
-from http_handler import UrllibHttpHandler, HEADERS
-# pylint: disable=E0611
-from log_utils import BASE_DIR, LOG_FILE, LOGGER
-from errors import ERROR_CODE, WARN_CODE
-
-LOGGER.info(BASE_DIR)
-
-ACCOUNT_INFO = {}
-WAIT_INTERVAL = 5
-
-
-def parse_config(config_yaml):
- """
- parse setting from config.yaml
- :return:
- """
- try:
- if not os.path.exists(config_yaml):
- LOGGER.error(" %s, %s", ERROR_CODE['E400001'], config_yaml)
- with open(config_yaml, 'r') as conf_file:
- config = yaml.load(conf_file.read(), Loader=yaml.FullLoader)
- except FileNotFoundError as fnfe:
- LOGGER.error(fnfe)
- LOGGER.error(u"%s", ERROR_CODE['E400002'])
- return None
- else:
- return config
-
-
-def get_token(http_handler, url):
- """
- :return: x_auth_token
- """
- retry_num = 3
- x_auth_token = None
- while retry_num:
- retry_num -= 1
- res = http_handler.post(url, ACCOUNT_INFO)
- if res is None:
- LOGGER.error("%s, %s", WARN_CODE['W100001'], url)
- LOGGER.info("wait %s seconds to try again", WAIT_INTERVAL)
- time.sleep(WAIT_INTERVAL)
- continue
- data = res.info()
- if "X-Auth-Token" in data:
- x_auth_token = data.get("X-Auth-Token")
- return x_auth_token
- else:
- time.sleep(WAIT_INTERVAL)
- return None
-
-
-def get_etag(http_handler, url):
- """
- :return: ETag
- """
- etag = None
- res = http_handler.get(url)
- data = None
- if res is not None:
- data = res.info()
- if data is not None and "ETag" in data:
- etag = data.get("ETag")
- return etag
-
-
-def parse_data(exp_value, act_value):
- '''
- parse the expected value and actual value:
- @return: case 1: exp_value and actual value is str or int,
- then return tuple (exp_value,act_value)
- case 2: list,dict type, then return updated exp_value
- ERROR_CODE for unexpected case.
- '''
- if isinstance(exp_value, (str, int)) and isinstance(act_value, (str, int)):
- return (exp_value, act_value)
- if isinstance(exp_value, list):
- if not isinstance(act_value, list):
- return (exp_value, act_value)
- else:
- for exp in enumerate(exp_value, start=0):
- index = exp[0]
- exp_value[index] = parse_data(
- exp_value[index], act_value[index])
-
- elif isinstance(exp_value, dict):
- if isinstance(act_value, dict):
- for key, val in exp_value.items():
- if key in act_value:
- exp_value[key] = parse_data(val, act_value[key])
- else:
- LOGGER.error("%s,%s", ERROR_CODE['E500001'], key)
- else:
- LOGGER.error("%s,expected: %s , actual: %s",
- ERROR_CODE['E400005'], exp_value, act_value)
- else:
- LOGGER.error("%s, expected type:%s, actual type %s",
- ERROR_CODE['E400006'], type(exp_value), type(act_value))
- return exp_value
-
-
-def compare_data(value, flag):
- '''
- compare value content
- '''
- if isinstance(value, tuple):
- if value[1] is not None or value[1]:
- if value[0] == 'N/A':
- return "Success", flag
- elif isinstance(value[0], (bool, int, str)):
- if value[0] == value[1]:
- return "Success", flag
- else:
- flag += 1
- return "Failure, expect value: " + str(value[0]) + \
- ", return value: " + str(value[1]), flag
- elif value[1] in value[0] or value[0] == ['N/A']:
- return "Success", flag
- else:
- flag += 1
- return "Failure, expect value: " + str(value[0]) + \
- ", return value: " + str(value[1]), flag
- else:
- flag += 1
- return "Failure, expect value: " + str(value[0]) + \
- ", return value: " + str(value[1]), flag
-
- elif isinstance(value, list):
- for elem in enumerate(value, start=0):
- index = elem[0]
- value[index], flag = compare_data(value[index], flag)
- elif isinstance(value, dict):
- for key, val in value.items():
- value[key], flag = compare_data(val, flag)
- else:
- LOGGER.error("%s", ERROR_CODE['E400007'])
- flag += 1
- return value, flag
-
-
-def get_component_ids_yaml(file):
- '''
- get component ids from yaml file
- '''
- if not os.path.exists(file):
- LOGGER.info("%s, %s", ERROR_CODE['E400001'], file)
- return None
- return yaml.load(open(file, "r"))
-
-
-def get_component_ids_excel(excel_file):
- '''
- get the component_id settings from the excel sheet2
- the componnet_id is the parent id of the hardware resource of sheet1
- '''
- input_file = load_workbook(excel_file)
- input_ws = input_file[input_file.sheetnames[1]]
- cell_key = []
- id_info_list = []
- for i in range(1, 5):
- cell_key.append(input_ws.cell(row=1, column=i).value)
- row = 2
- while input_ws.cell(row=row, column=1).value:
- cell_value = []
- for i in range(1, 5):
-
- cell_value.append(input_ws.cell(row=row, column=i).value.
- encode("utf8").decode("utf8").replace('\n', ''))
- cell_dict = dict(zip(cell_key, cell_value))
- row += 1
- id_info_list.append(cell_dict)
- return id_info_list
-
-
-def create_real_url(url_value, id_dict, config_file):
- '''
- create the real url
- either a static url, or a replaced url by depended_id
- '''
- url_list = []
- replaced = 0
- regexp = r'[^{]*{(?P<var>[a-zA-Z_]*)}'
- # pattern = re.compile(regexp, re.S)
- pattern = re.compile(regexp, DT)
- LOGGER.info("url_value %s", url_value)
- matches = list(pattern.finditer(url_value))
- for match in matches:
- value = match.groupdict()
- if value['var'] in config_file:
- url_value = url_value.replace('{' + str(value['var']) + '}',
- str(config_file[value['var']]))
-
- elif value['var'] in id_dict:
- replaced = 1
- instance_list = id_dict[value['var']]
- for instance in instance_list:
- sgl_url = url_value.replace('{' + str(value['var']) + '}',
- str(instance))
- LOGGER.debug("replaced url value %s", sgl_url)
- url_list.append(sgl_url)
- else:
- replaced = 2
- LOGGER.error("%s for parameter %s",
- ERROR_CODE['E300002'], value['var'])
- # combine single case with list case together.
- if replaced == 0:
- LOGGER.info("adding static url %s into list", url_value)
- url_list.append(url_value)
- return url_list
-
-
-def execute_get_url(url, http_handler):
- """
- execute the url
- """
- LOGGER.debug("execute url %s", url)
- rsp = http_handler.get(url)
- if rsp is None:
- LOGGER.error("return None for url %s", url)
- return None
- ret_dict = {}
- ret_dict.update({"return_code": rsp.code})
- return_value = json.loads(rsp.read())
- ret_dict.update({"return_value": return_value})
- LOGGER.info("ret_dict is %s", ret_dict)
- LOGGER.debug("ret_dict type is %s", type(ret_dict))
- return ret_dict
-
-
-def handle_depend_url(method, url_list, http_handler):
- '''
- run request url in url_list and collect the response as list
- '''
- response_list = []
- if method == 'GET':
- for url_case in url_list:
- response = execute_get_url(url_case, http_handler)
- response_list.append(response)
- elif method == 'POST':
- pass
- elif method == 'PATCH':
- pass
- elif method == 'DELETE':
- pass
- return response_list
-
-
-def create_obj_id_list(key_flags, response_list):
- '''
- create object id list
- '''
- if response_list is None or response_list.__len__() == 0:
- LOGGER.debug("response list is None")
- return None
- if key_flags is not None:
- key_list = key_flags.split(':')
- end_id_list = []
- for response in response_list:
- if response is None:
- LOGGER.warning("response is None")
- continue
- return_value = response['return_value']
- if len(key_list) == 1 and key_list[0] in return_value:
- for i in return_value[key_list[0]]:
- end_id_list.append(i['@odata.id'])
- elif len(key_list) > 1:
- for elem in enumerate(key_list, start=0):
- index = elem[0]
- if index == len(key_list) - 1:
- for case in return_value[key_list[index]]:
- end_id_list.append(case['@odata.id'])
- else:
- if isinstance(return_value, list):
- return_value = return_value[0]
- elif isinstance(return_value, dict):
- return_value = return_value[key_list[index]]
- else:
- LOGGER.warning("%s, %s", WARN_CODE['W100002'],
- type(return_value))
-
- else:
- LOGGER.error("%s %s", ERROR_CODE['E400003'], key_flags)
- return end_id_list
-
-
-def get_depend_id(config_file, http_handler, depend_ids):
- '''
- @param mode: yaml or excel,default value "excel"
- parse the component id list
- build up the id resource for each component_id
- return: id_dict like {component_id:[obj_list]}
- '''
- id_dict = {}
- for case in depend_ids:
- component_name = case.get('component_id')
- LOGGER.info("parsing component %s", component_name)
- pro_value = case.get('pro_value')
- url_value = case.get('url_value')
- key_flags = case.get('key_flags')
- # url_list = []
- url_list = create_real_url(url_value, id_dict, config_file)
- # response_list = []
- response_list = handle_depend_url(pro_value, url_list, http_handler)
- # end_id_list = []
- end_id_list = create_obj_id_list(key_flags, response_list)
- if end_id_list is None or end_id_list.__len__() == 0:
- LOGGER.error("%s,%s", ERROR_CODE['E300003'], component_name)
- continue
- id_dict.update({component_name: end_id_list})
- LOGGER.debug("id_dict content is %s", id_dict)
- return id_dict
-
-
-def read_row(input_ws, row, config_file):
- '''
- read a row value
- '''
- pro_value = input_ws.cell(row=row, column=config_file["pro_seq"]).value
- url_value = input_ws.cell(row=row, column=config_file["url_seq"]).value
- req_body_value = input_ws.cell(
- row=row, column=config_file["req_body_seq"]).value
- expect_return_code = \
- input_ws.cell(
- row=row, column=config_file["expect_return_code_seq"]).value
- expect_return_value = \
- input_ws.cell(
- row=row, column=config_file["expect_return_value_seq"]).value
- attr_name = input_ws.cell(row=row, column=config_file["attr_name"]).value
-
- if req_body_value is not None:
- req_body_value = literal_eval(req_body_value)
- if expect_return_code is not None:
- expect_return_code = int(expect_return_code)
- if expect_return_value is not None:
- expect_return_value = literal_eval(expect_return_value)
- return pro_value, url_value, req_body_value, expect_return_code,\
- expect_return_value, attr_name
-
-
-def execute_post_url(body, handler, url):
- '''
- execute post url
- '''
- LOGGER.debug("execute url %s", url)
- rsp = handler.post(url, body)
- LOGGER.debug("post response %s", rsp)
- if not isinstance(rsp, dict):
- LOGGER.error("%s,%s, expected type %s",
- ERROR_CODE["E400004"], type(rsp), dict)
- return None
- return rsp
-
-
-def execute_patch_url(body, http_handler, url):
- '''
- execute patch url
- '''
- etag = get_etag(http_handler, url)
- LOGGER.info("etag %s", etag)
- rsp = http_handler.patch(url, body, etag)
- LOGGER.debug("patch response %s", rsp)
- LOGGER.debug("type response is %s", type(rsp))
- ret_dict = {}
- if rsp is None:
- LOGGER.error("%s %s", ERROR_CODE['E100001'], url)
- ret_dict.update({"return_code": "N/A"})
- ret_dict.update({"return_value": "Failure"})
- return ret_dict
- ret_dict.update({"return_code": rsp.code})
- return_value = json.loads(rsp.read())
- ret_dict.update({"return_value": return_value})
- return ret_dict
-
-
-def handle_final_url(method, url_list, req_body=None, http_handler=None):
- '''execute the requested url to get the response
- '''
- response_list = []
- if method == 'GET':
- for url_case in url_list:
- rsp = execute_get_url(url_case, http_handler)
- response_list.append(rsp)
- elif method == 'POST':
- if len(url_list) > 1:
- LOGGER.error(ERROR_CODE['E100002'])
- return None
- url_value = url_list[0]
- rsp = execute_post_url(req_body, http_handler, url_value)
- response_list.append(rsp)
- elif method == 'PATCH':
- for url_case in url_list:
- LOGGER.info(url_case)
- temp = execute_patch_url(req_body, http_handler, url_case)
- if temp is not None:
- response_list.append(temp)
- elif method == 'DELETE':
- pass
- LOGGER.info("response_list %s", response_list)
- return response_list
-
-
-def check_component_cnt(expect_return_value, res_list, result):
- '''
- #check if the component count meet the required.
- '''
- if expect_return_value.__contains__('count'):
- if expect_return_value['count'] == len(res_list):
- result.update({"count": "Success"})
- else:
- result.update({"count":
- "Failure, the actual num is " + str(len(res_list))})
- else:
- result.update({"count": "N/A for this case"})
- return result
-
-
def parse_test_result(expect_return_value, expect_return_code,
                      actual_result_list, final_result):
    '''
    Compare the actual responses against the expected value/code.

    @param expect_return_value: expected value set in the input file
    @param expect_return_code: expected HTTP return code
    @param actual_result_list: actual result run by each url list checking
    @param final_result: returned final result (updated in place)
    @return: (return_value_list, return_code_list, final_result, flag)
             where flag counts mismatches -- 0 means full success.
    parsing the test final_result by comparing expected_value with
    real test final_result value.
    '''
    return_code_list = []
    return_value_list = []
    # mismatch counter; also incremented by compare_data on failures
    flag = 0
    final_result = check_component_cnt(expect_return_value,
                                       actual_result_list, final_result)

    for each_result in actual_result_list:
        temp_result = {}
        if each_result is not None:
            LOGGER.debug("current result is %s,result_list is %s",
                         each_result, actual_result_list)
            return_code = each_result["return_code"]
            return_code_list.append(return_code)
            return_value = each_result["return_value"]
            if return_code == expect_return_code:
                code_result = 'Success'
            else:
                code_result = 'Failure'
            temp_result.update({'return_code': code_result})
        else:
            # a missing response counts as a failed url
            LOGGER.warning("%s ,set failure", WARN_CODE['W100003'])
            temp_result.update({'return_code': 'Failure'})
            return_value_list.append(temp_result)
            flag += 1
            continue

        # parse the actual result according to the expected value hierachy.
        # deep-copy so the caller's expectation dict is never mutated
        ex_value = copy.deepcopy(expect_return_value)
        exp_act_pairs = {}
        for key, value in ex_value.items():
            if key in return_value:
                exp_act_pairs[key] = parse_data(value, return_value[key])
            elif key == 'count':
                # 'count' was already handled by check_component_cnt above
                pass
            else:
                LOGGER.error("%s, %s", ERROR_CODE['E500001'], key)
                exp_act_pairs[key] = \
                    (value, "Can't find key {} in return value".format(key))
        LOGGER.debug("real_result:%s", exp_act_pairs)

        # comparing expected result with real result.
        if exp_act_pairs:
            for key, value in exp_act_pairs.items():
                temp_result[key], flag = compare_data(value, flag)
        return_value_list.append(temp_result)
    return return_value_list, return_code_list, final_result, flag
-
-
def write_result_2_excel(config_file, input_ws, row, flag, result):
    '''
    Persist one case's outcome into the worksheet row.

    Writes the detailed result dict (or 'N/A' when empty) plus the
    overall Success/Failure verdict, then returns the row number.
    '''
    detail = str(result) if result else str('N/A')
    input_ws.cell(row=row, column=config_file["detail_result"],
                  value=detail)
    # flag == 0 means no mismatch was recorded for this case
    verdict = "Success" if flag == 0 else "Failure"
    input_ws.cell(row=row, column=config_file["final_result"],
                  value=str(verdict))
    return row
-
-
def execute_final_url(config_file, depends_id, http_handler,
                      method, url, req_body):
    '''
    Expand the templated *url* with the resolved dependency ids, run
    every concrete url, and return the collected responses.
    '''
    real_urls = create_real_url(url, depends_id, config_file)
    return handle_final_url(method, real_urls, req_body, http_handler)
-
-
def run_test_case_yaml(config_file, case_file, depends_id, http_handler):
    '''Run every test case listed in cases.yaml.

    @param config_file: parsed config.yaml dict
    @param case_file: path of the yaml file holding the case list
    @param depends_id: resolved component-id map used to expand urls
    @param http_handler: handler used to issue the HTTP requests
    Results are appended onto each case dict and written out through
    write_result_2_yaml().
    '''
    LOGGER.info("############### start perform test case #################")
    cases_result = []
    cases = read_yaml(case_file)
    for case in cases:
        method, url, req_body, expected_code, expected_value, tc_name \
            = case['method'], case['url'], case['request_body'], \
            case['expected_code'], case['expected_result'], case['case_name']

        # expected_result is stored as a string; evaluate it into a literal
        expected_value = literal_eval(expected_value)
        flag = 0  # mismatch counter for this case
        final_rst = {}
        rsp_list = execute_final_url(config_file, depends_id,
                                     http_handler, method, url, req_body)
        if rsp_list is not None and len(rsp_list) > 0:
            return_value_list, return_code_list, final_rst, flag = \
                parse_test_result(
                    expected_value, expected_code, rsp_list, final_rst)
            final_rst.update({'info': return_value_list})
            LOGGER.debug("return_code_list:%s", return_code_list)
            case['return_code_seq'] = str(return_code_list)
        else:
            # no response at all counts as a failed case
            LOGGER.error("%s", ERROR_CODE['E600001'])
            flag += 1
        case['final_rst'] = "Success" if flag == 0 else "Failure"
        case['details_result'] = \
            str(final_rst) if len(final_rst) > 0 else "N/A"
        cases_result.append(case)
        LOGGER.info("writing test final_rst for case %s", tc_name)

    write_result_2_yaml(cases_result)

    LOGGER.info("############### end perform test case ###################")
-
-
def read_yaml(file):
    '''Read *file* and return its parsed YAML content.

    @param file: path of the yaml file to read
    @return: parsed content, or None when the file does not exist.
    '''
    if not os.path.exists(file):
        # a missing case/depends file is an error, not an info event
        LOGGER.error("%s %s", ERROR_CODE['E400001'], file)
        return None
    # context manager closes the handle; safe_load avoids arbitrary
    # object construction from untrusted YAML (yaml.load is unsafe and
    # emits a deprecation warning without an explicit Loader)
    with open(file, "r") as stream:
        return yaml.safe_load(stream)
-
-
def write_result_2_yaml(result):
    '''
    write test result to new report.yaml

    @param result: list of case dicts to dump
    '''
    LOGGER.info("writing to yaml file")
    # use a context manager so the report file handle is always closed
    with open("./conf/report.yaml", "w") as report:
        yaml.safe_dump(result, report, explicit_start=True)
-
-
def run_test_case_excel(config_file, case_file, depends_id, http_handler):
    '''
    perform the test case one by one,
    and write test final_result back to the excel.

    @param config_file: parsed config.yaml dict (column layout lives here)
    @param case_file: path of the xlsx workbook holding the cases
    @param depends_id: resolved component-id map used to expand urls
    @param http_handler: handler used to issue the HTTP requests
    '''
    LOGGER.info("############### start perform test case #################")
    input_file = load_workbook(case_file)
    input_ws = input_file[input_file.sheetnames[0]]

    # row 1 is the header; cases start at row 2 and end at the first
    # row whose first column is empty
    row = 2
    while input_ws.cell(row=row, column=1).value:
        method, url, req_body, expected_code, expected_value, tc_name \
            = read_row(input_ws, row, config_file)

        LOGGER.info("run test case ##%s##", tc_name)
        # NOTE(review): leftover debugging hook for one specific case
        # name -- consider removing
        if tc_name == "configure BMC ip in static, ipv4":
            LOGGER.debug("debug")
        flag = 0  # mismatch counter for this case
        final_result = {}
        rsp_list = []
        rsp_list = execute_final_url(config_file, depends_id, http_handler,
                                     method, url, req_body)
        if rsp_list is not None and len(rsp_list) > 0:
            return_value_list, return_code_list, final_result, flag = \
                parse_test_result(expected_value, expected_code,
                                  rsp_list, final_result)
            final_result.update({'info': return_value_list})
            LOGGER.debug("return_code_list:%s", return_code_list)
            input_ws.cell(row=row, column=config_file["return_code_seq"],
                          value=str(return_code_list))
        else:
            # no response at all counts as a failed case
            LOGGER.error("%s", ERROR_CODE['E600001'])
            flag += 1

        LOGGER.info("writing test final_result for row %s", row)
        row = write_result_2_excel(
            config_file, input_ws, row, flag, final_result)
        row += 1
    input_file.save(case_file)
    LOGGER.info("############### end perform test case ###################")
-
-
def run(conf_file, case_excel_file=None, depend_yaml_file=None,
        case_yaml_file=None, file_mode=None):
    '''
    @param conf_file: config.yaml
    @param case_excel_file: excel case file
    @param depend_yaml_file: depends yaml file used if file_mode=yaml
    @param case_yaml_file: case yaml file, used if file_mode=yaml
    @param file_mode: "excel" or "yaml"
    access function
    @return: True on completion, None on any fatal setup error
             (no token, unknown file_mode).
    '''
    # parse config.yaml
    LOGGER.info("start engine ...")
    config_file = parse_config(conf_file)
    http_handler = UrllibHttpHandler()

    # get bmc info
    bmc_ip, bmc_user, bmc_pwd = \
        config_file["bmc_ip"], config_file["bmc_user"], config_file["bmc_pwd"]
    ACCOUNT_INFO.update({"UserName": bmc_user})
    ACCOUNT_INFO.update({"Password": bmc_pwd})

    # open a Redfish session to obtain the auth token for all later calls
    url = "https://{0}/redfish/v1/SessionService/Sessions".format(bmc_ip)
    x_auth_token = get_token(http_handler, url)
    LOGGER.info("x_auth_token: %s", x_auth_token)

    if x_auth_token is None:
        LOGGER.error("%s token is None", ERROR_CODE['E300001'])
        return None

    # every subsequent request carries the session token
    HEADERS.update({"X-Auth-Token": x_auth_token})
    id_info_list = None
    if file_mode == "excel":
        id_info_list = get_component_ids_excel(case_excel_file)
    elif file_mode == "yaml":
        id_info_list = get_component_ids_yaml(depend_yaml_file)
    else:
        LOGGER.error("%s,%s", ERROR_CODE['E200001'], file_mode)
        return None

    # get dependent id
    depends_id = get_depend_id(config_file, http_handler, id_info_list)

    # read the test case sheet and perform test
    if file_mode == "excel":
        run_test_case_excel(config_file,
                            case_excel_file, depends_id, http_handler)
    elif file_mode == "yaml":
        run_test_case_yaml(config_file,
                           case_yaml_file, depends_id, http_handler)
    else:
        LOGGER.error("%s,%s", ERROR_CODE['E200001'], file_mode)
        return None

    LOGGER.info("done,checking the log %s", LOG_FILE)

    return True
diff --git a/tools/hdv/redfish/http_handler.py b/tools/hdv/redfish/http_handler.py
deleted file mode 100644
index c1b0a13..0000000
--- a/tools/hdv/redfish/http_handler.py
+++ /dev/null
@@ -1,129 +0,0 @@
-##############################################################################
-# Copyright (c) 2020 China Mobile Co.,Ltd and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-'''
-a common http_handler
-'''
-import urllib.request
-import json
-import ssl
-from http.client import HTTPException
-from urllib.error import HTTPError, URLError
-# pylint: disable=E0611
-from log_utils import LOGGER
-from errors import ERROR_CODE
-
-# pylint: disable=W0212
-ssl._create_default_https_context = ssl._create_unverified_context
-
# Default request headers shared by every request the handler sends.
# NOTE(review): the backslash continuation is inside the string literal,
# so the User-Agent value contains the embedded newline indentation.
HEADERS = {
    'Connection': 'keep-alive',
    'User-Agent':
    'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 \
    (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36',
}

# Socket timeout passed to urlopen, in seconds -- NOTE(review): 3000 s
# looks like it was meant to be milliseconds; confirm before changing.
TIME_OUT = 3000
-
-
class UrllibHttpHandler:
    """
    HTTP handler based on urllib.request (Python 3).

    GET/POST/PATCH log failures via the shared error codes and return
    None on error; PUT/DELETE currently let exceptions propagate.
    """

    def __init__(self):
        # one shared headers dict (module-level HEADERS) for all requests
        self.__header = HEADERS

    def get(self, url):
        """
        run the get request

        @return: the HTTP response object, or None on failure.
        """
        try:
            req = urllib.request.Request(url, headers=self.__header)
            res = urllib.request.urlopen(req, timeout=TIME_OUT)
        except HTTPException as http_exp:
            LOGGER.error(http_exp)
            LOGGER.error(u"%s %s", ERROR_CODE['E100001'], url)
        except HTTPError as http_err:
            LOGGER.error(http_err)
            LOGGER.error(u"%s %s", ERROR_CODE['E100001'], url)
            LOGGER.error(u"%s %s", ERROR_CODE['E600001'], url)
        except URLError as url_err:
            # consistency fix: post() already handled unreachable hosts;
            # without this clause a refused connection crashed the caller
            LOGGER.error(url_err)
            LOGGER.error(u"%s %s", ERROR_CODE['E100001'], url)
        else:
            return res
        return None

    def post(self, url, parameter=None):
        """
        run the post request; *parameter* is JSON-encoded into the body

        @return: the HTTP response object, or None on failure.
        """
        try:
            data = json.dumps(parameter).encode(encoding="utf-8")
            LOGGER.debug("data is %s", data)
            req = urllib.request.Request(url, data=data, headers=self.__header)
            req.add_header("Content-Type", "application/json")
            res = urllib.request.urlopen(req, timeout=TIME_OUT)
        except HTTPException as http_exp:
            LOGGER.error(http_exp)
            LOGGER.error(u"%s %s", ERROR_CODE['E100001'], url)
        except TimeoutError as timeout_error:
            LOGGER.error(timeout_error)
            LOGGER.error(u"%s", ERROR_CODE['E100003'])
        except HTTPError as http_err:
            LOGGER.error(http_err)
            LOGGER.error(u"%s %s", ERROR_CODE['E100001'], url)
            LOGGER.error(u"%s %s", ERROR_CODE['E600001'], url)
        except URLError as url_err:
            LOGGER.error(url_err)
            LOGGER.error(u"%s %s", ERROR_CODE['E100001'], url)
        else:
            return res
        return None

    def put(self, url, parameter=None):
        """
        run the put request; *parameter* is JSON-encoded into the body

        NOTE(review): unlike get/post/patch there is no error handling
        here, so urllib errors propagate to the caller.
        """
        data = json.dumps(parameter).encode(encoding="utf-8")
        LOGGER.debug("data is %s", data)
        req = urllib.request.Request(url, data=data, headers=self.__header)
        req.get_method = lambda: 'PUT'
        res = urllib.request.urlopen(req)
        return res

    def patch(self, url, parameter=None, etag=None):
        """
        run the patch request; *parameter* is JSON-encoded into the body
        and the request is guarded with an If-Match *etag* header

        @return: the HTTP response object, or None on failure.
        """
        data = json.dumps(parameter).encode(encoding="utf-8")
        LOGGER.debug("data is %s", data)
        req = urllib.request.Request(url, data=data, headers=self.__header)
        req.add_header("Content-Type", "application/json")
        req.add_header("If-Match", etag)
        req.get_method = lambda: 'PATCH'
        res = None
        try:
            res = urllib.request.urlopen(req, timeout=TIME_OUT)
        except HTTPException as http_exp:
            LOGGER.error(http_exp)
            LOGGER.error(u"%s %s", ERROR_CODE['E100001'], url)
        except HTTPError as http_err:
            LOGGER.error(http_err)
            LOGGER.error(u"%s %s", ERROR_CODE['E100001'], url)
            LOGGER.error(u"%s %s", ERROR_CODE['E600001'], url)
        except TypeError as type_err:
            LOGGER.error(type_err)
            LOGGER.error(u"%s %s", ERROR_CODE['E100001'], url)
        return res

    def delete(self, url):
        '''
        run the delete request

        NOTE(review): no error handling; urllib errors propagate.
        '''
        req = urllib.request.Request(url, headers=self.__header)
        req.get_method = lambda: 'DELETE'
        res = urllib.request.urlopen(req)
        return res
diff --git a/tools/hdv/redfish/log_utils.py b/tools/hdv/redfish/log_utils.py
deleted file mode 100644
index 996a1d1..0000000
--- a/tools/hdv/redfish/log_utils.py
+++ /dev/null
@@ -1,33 +0,0 @@
-##############################################################################
-# Copyright (c) 2020 China Mobile Co.,Ltd and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-"""
-define the LOGGER settings
-"""
import logging
import os
import sys

# Log file lives in a "logs" folder next to the entry-point script.
BASE_DIR = sys.path[0]
LOG_DIR = os.path.join(BASE_DIR, "logs")
# logging.FileHandler cannot create missing directories; without this
# the module crashed with FileNotFoundError when ./logs did not exist.
os.makedirs(LOG_DIR, exist_ok=True)
LOG_FILE = os.path.join(LOG_DIR, 'hdv.log')

LOGGER = logging.getLogger("redfish")
LOGGER.setLevel(logging.DEBUG)

# One shared format for both the console and the file handler.
FORMATTER = logging.Formatter('%(asctime)s - %(filename)s[line:%(lineno)d] \
    - %(funcName)s - %(levelname)s: %(message)s')

FILE = logging.FileHandler(filename=LOG_FILE, mode='w')
FILE.setLevel(logging.DEBUG)
FILE.setFormatter(FORMATTER)

CONSOLE = logging.StreamHandler()
CONSOLE.setLevel(logging.DEBUG)
CONSOLE.setFormatter(FORMATTER)

LOGGER.addHandler(CONSOLE)
LOGGER.addHandler(FILE)
diff --git a/tools/hdv/redfish/yaml_utils.py b/tools/hdv/redfish/yaml_utils.py
deleted file mode 100644
index 438c150..0000000
--- a/tools/hdv/redfish/yaml_utils.py
+++ /dev/null
@@ -1,28 +0,0 @@
-'''
-@author: cmcc
-'''
-import os
-import yaml
-# pylint: disable=E0611
-from log_utils import LOGGER
-
-
def read_yaml(file):
    '''read a yaml file

    @param file: path of the yaml file
    @return: parsed content, or None when the file does not exist.
    '''
    if not os.path.exists(file):
        LOGGER.info("%s not found", file)
        return None
    # context manager closes the handle; safe_load avoids arbitrary
    # object construction (plain yaml.load without a Loader is unsafe)
    with open(file, "r") as stream:
        return yaml.safe_load(stream)
-
-
def write_yaml(file, dict_data):
    '''write a yaml file

    @param file: destination path
    @param dict_data: data structure to dump
    '''
    # use a context manager so the file handle is always closed
    with open(file, "w") as stream:
        yaml.safe_dump(dict_data, stream, explicit_start=True)
-
-
# NOTE(review): these statements execute on *import* and touch ./conf/*
# relative to the current working directory -- they look like leftover
# smoke-test/debug code; consider guarding them with
# `if __name__ == "__main__":` so importing this module has no side effects.
print(read_yaml("./conf/depends.yaml"))
print(read_yaml("./conf/cases.yaml"))

write_yaml("./conf/report.yaml", read_yaml("./conf/cases.yaml"))
diff --git a/tools/sdv/NwLinksValid/__init__.py b/tools/sdv/NwLinksValid/__init__.py
deleted file mode 100755
index 99456db..0000000
--- a/tools/sdv/NwLinksValid/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Nework-Links Validator interface and helpers.
-"""
-
-# flake8: noqa
-from NwLinksValid.nwlinksvalidator import *
diff --git a/tools/sdv/NwLinksValid/nwlinksvalidator.py b/tools/sdv/NwLinksValid/nwlinksvalidator.py
deleted file mode 100644
index 5e06590..0000000
--- a/tools/sdv/NwLinksValid/nwlinksvalidator.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2020 Spirent Communications.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Abstract class for N/W Lnks Prevalidations.
-Implementors, please inherit from this class.
-"""
-
-
class INwLinksValidator():
    """Interface for network-link validators.

    Concrete validators must override both validate_* methods; calling
    them on this base class raises NotImplementedError.
    """

    def __init__(self):
        """Initialize the interface's default state."""
        # placeholder for implementation-specific defaults
        self._default_nwlinks_validation = None

    @property
    def validation_nwlinks_defaults(self):
        """Whether default link validation is enabled (always True)."""
        return True

    def validate_compute_node_links(self):
        """Validate the links of a compute node (abstract)."""
        raise NotImplementedError('Please call an implementation.')

    def validate_control_node_links(self):
        """Validate the links of a controller node (abstract)."""
        raise NotImplementedError('Please call an implementation.')
diff --git a/tools/sdv/SoftwarePostValid/__init__.py b/tools/sdv/SoftwarePostValid/__init__.py
deleted file mode 100755
index 0a964b6..0000000
--- a/tools/sdv/SoftwarePostValid/__init__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2015 Intel Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Sw Validator interface and helpers.
-"""
-
-# flake8: noqa
-from SoftwarePostValid.swpostvalidator import *
diff --git a/tools/sdv/SoftwarePostValid/swpostvalidator.py b/tools/sdv/SoftwarePostValid/swpostvalidator.py
deleted file mode 100644
index 4776123..0000000
--- a/tools/sdv/SoftwarePostValid/swpostvalidator.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2020 Spirent Communications.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Abstract class for Software Postvalidations.
-Implementors, please inherit from this class.
-"""
-
-
class ISwPostValidator():
    """Interface for software post-deployment validators.

    Concrete validators must override both validate_configuration_*
    methods; calling them on this base class raises NotImplementedError.
    """

    def __init__(self):
        """Initialize the interface's default state."""
        # placeholder for implementation-specific defaults
        self._default_swpost_validation = None

    @property
    def validation_swpost_defaults(self):
        """Whether default post-validation is enabled (always True)."""
        return True

    def validate_configuration_mandatory(self):
        """Validate the mandatory configuration items (abstract)."""
        raise NotImplementedError('Please call an implementation.')

    def validate_configuration_optional(self):
        """Validate the optional configuration items (abstract)."""
        raise NotImplementedError('Please call an implementation.')
diff --git a/tools/sdv/SoftwarePreValid/__init__.py b/tools/sdv/SoftwarePreValid/__init__.py
deleted file mode 100755
index 8307b66..0000000
--- a/tools/sdv/SoftwarePreValid/__init__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2020 Spirent Communications.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Sw Validator interface and helpers.
-"""
-
-# flake8: noqa
-from SoftwarePreValid.swprevalidator import *
diff --git a/tools/sdv/SoftwarePreValid/airship.py b/tools/sdv/SoftwarePreValid/airship.py
deleted file mode 100644
index bd93aa2..0000000
--- a/tools/sdv/SoftwarePreValid/airship.py
+++ /dev/null
@@ -1,267 +0,0 @@
-# Copyright 2020 Spirent Communications.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Airship implementation of Software Predeployment Validation
-"""
-
-import os
-import shutil
-from pathlib import Path
-import git
-import urllib3
-import yaml
-from conf import settings
-from SoftwarePreValid import swprevalidator
-
-
def check_link(link):
    """
    Function the check the availability of Hyperlinks

    Issues a HEAD request against *link* and returns True when the host
    answered, False on any urllib3-level failure.
    """
    timeout = urllib3.util.Timeout(connect=5.0, read=7.0)
    http = urllib3.PoolManager(timeout=timeout)
    try:
        http.request('HEAD', link)
    # MaxRetryError, RequestError, ConnectTimeoutError, PoolError all
    # derive from urllib3.exceptions.HTTPError, so one tuple covers the
    # six previously-duplicated except clauses; LocationValueError is a
    # ValueError subclass and stays listed explicitly.
    except (urllib3.exceptions.LocationValueError,
            urllib3.exceptions.HTTPError) as err:
        print(err.args)
        return False
    return True
-
-
class Airship(swprevalidator.ISwPreValidator):
    """
    Airship Sw Validation

    Clones the Airship site manifests (plus the matching treasuremap
    reference repo) and validates hyperlinks and the mandatory
    configuration values declared in the sdv settings.
    """
    def __init__(self):
        """ Airship class constructor """
        super().__init__()
        self.url = settings.getValue('AIRSHIP_MANIFEST_URL')
        self.branch = settings.getValue('AIRSHIP_MANIFEST_BRANCH')
        self.dl_path = settings.getValue('AIRSHIP_MANIFEST_DOWNLOAD_PATH')
        self.site_name = settings.getValue('AIRSHIP_MANIFEST_SITE_NAME')
        self.manifest = None
        # local clone targets for the two repositories
        self.dirpath = Path(self.dl_path, 'airship')
        self.tmdirpath = Path(self.dl_path, 'treasuremap')
        # hyperlinks harvested from the manifests (deduplicated)
        self.locations = []

    def clone_repo(self):
        """
        Cloning the repos

        Clones the airship manifests at the configured branch and the
        treasuremap repo at the configured version tag.
        """
        git.Repo.clone_from(self.url,
                            self.dirpath,
                            branch=self.branch)
        git.Repo.clone_from('https://github.com/airshipit/treasuremap',
                            self.tmdirpath,
                            branch=settings.getValue(
                                'AIRSHIP_TREASUREMAP_VERSION'))

    def cleanup_manifest(self):
        """
        Remove existing manifests
        """
        # Next Remove any manifest files, if it exists
        if self.dirpath.exists() and self.dirpath.is_dir():
            shutil.rmtree(self.dirpath)
        if self.tmdirpath.exists() and self.tmdirpath.is_dir():
            shutil.rmtree(self.tmdirpath)

    def manifest_exists_locally(self):
        """
        Check if manifests exists locally
        """
        if self.dirpath.exists() and self.dirpath.is_dir():
            return True
        return False

    def validate_hyperlinks(self):
        """
        Hyperlink Validation

        Re-clones the manifests, harvests links from versions.yaml and
        prints a VALID/INVALID verdict for each of them.
        """
        self.cleanup_manifest()
        # Next, clone the repo to the provided path.
        self.clone_repo()

        if self.dirpath.exists() and self.dirpath.is_dir():
            # Get the file(s) where links are defined.
            self.find_locations(
                os.path.join(self.dirpath, 'type',
                             'cntt', 'software',
                             'config', 'versions.yaml'))
            for location in self.locations:
                if check_link(location):
                    print("The Link: %s is VALID" % (location))
                else:
                    print("The Link: %s is INVALID" % (location))

    # pylint: disable=consider-using-enumerate
    def find_locations(self, yamlfile):
        """
        Find all the hyperlinks in the manifests

        'location:' entries for opendev repos are rewritten with the
        commit from the following 'reference:' line; docker image refs
        are rewritten into registry tag-lookup urls.
        """
        with open(yamlfile, 'r') as filep:
            lines = filep.readlines()
        for index in range(len(lines)):
            line = lines[index].strip()
            if line.startswith('location:'):
                link = line.split(":", 1)[1]
                if "opendev" in link:
                    # pair the location with its pinned git reference
                    if ((len(lines) > index+1) and
                            (lines[index+1].strip().startswith(
                                'reference:'))):
                        ref = lines[index+1].split(":", 1)[1]
                        link = link + '/commit/' + ref.strip()
                if link.strip() not in self.locations:
                    print(link)
                    self.locations.append(link.strip())
            if 'docker.' in line:
                link = line.split(":", 1)[1]
                link = link.replace('"', '')
                parts = link.split('/')
                # registry/repo/image:tag -> v1 registry tag-lookup url
                if len(parts) == 3:
                    link = ('https://index.' +
                            parts[0].strip() +
                            '/v1/repositories/' +
                            parts[1] + '/' + parts[2].split(':')[0] +
                            '/tags/' + parts[2].split(':')[-1])
                    if link.strip() not in self.locations:
                        print(link)
                        self.locations.append(link.strip())

    # pylint: disable=too-many-nested-blocks, too-many-boolean-expressions
    def validate_configuration_mandatory(self):
        """
        Configuration checking of mandatory parameters

        Checks, in order: OS flavor (maas.yaml), OS version (site host
        profiles), hugepages size/count (cntt hardware profiles) and
        the OpenStack version (treasuremap versions.yaml). Results are
        printed, not returned.
        """
        if not self.manifest_exists_locally():
            self.clone_repo()
        # We will perform validation one-by-one:
        # The Operating System Flavor
        os_done = False
        os_filename = os.path.join(self.tmdirpath,
                                   'global',
                                   'software',
                                   'charts',
                                   'ucp',
                                   'drydock',
                                   'maas.yaml')
        with open(os_filename, 'r') as osref:
            osfiles = yaml.load_all(osref, Loader=yaml.FullLoader)
            for osf in osfiles:
                if ('data' in osf and
                        'values' in osf['data'] and
                        'conf' in osf['data']['values'] and
                        'maas' in osf['data']['values']['conf'] and
                        'images' in osf['data']['values']['conf']['maas'] and
                        ('default_os' in
                         osf['data']['values']['conf']['maas']['images'])):
                    if (settings.getValue('OPERATING_SYSTEM') in
                            osf['data']['values']['conf']['maas']['images'][
                                'default_os']):
                        print('Operating System is VALID')
                        os_done = True
        if not os_done:
            print("Operating System is INVALID")

        filesdir = os.path.join(self.dirpath,
                                'site',
                                self.site_name,
                                'profiles',
                                'host')
        hostprofile = None
        os_ver_done = False
        if os.path.isdir(filesdir):
            for filename in os.listdir(filesdir):
                filename = os.path.join(filesdir, filename)
                with open(filename, 'r') as fileref:
                    hostprofile = yaml.load(fileref, Loader=yaml.FullLoader)
                if 'data' in hostprofile:
                    if 'platform' in hostprofile['data']:
                        if 'image' in hostprofile['data']['platform']:
                            if (hostprofile['data']['platform']['image'] in
                                    settings.getValue('OS_VERSION_NAME')):
                                print('Operating System Version is VALID')
                                os_ver_done = True
                                break
        if not os_ver_done:
            print("Operating System Version is INVALID")
        # Virtualization - Hugepages and CPU Isolation
        hugepages_size_done = False
        hugepages_count_done = False
        filesdir = os.path.join(self.dirpath,
                                'type',
                                'cntt',
                                'profiles',
                                'hardware')
        if os.path.isdir(filesdir):
            for filename in os.listdir(filesdir):
                filename = os.path.join(filesdir, filename)
                with open(filename, 'r') as fileref:
                    hwprofile = yaml.load(fileref, Loader=yaml.FullLoader)
                if ('data' in hwprofile and
                        'hugepages' in hwprofile['data'] and
                        'dpdk' in hwprofile['data']['hugepages']):
                    if ('size' in hwprofile['data']['hugepages']['dpdk'] and
                            (settings.getValue('HUGEPAGES_SIZE') in
                             hwprofile['data']['hugepages']['dpdk']['size'])):
                        print('Hugepages Size is VALID')
                    else:
                        print('Hugepages Size is INVALID')
                    hugepages_size_done = True
                    if ('count' in hwprofile['data']['hugepages']['dpdk'] and
                            (settings.getValue('HUGEPAGES_COUNT') ==
                             hwprofile['data']['hugepages']['dpdk']['count'])):
                        print('Hugepages COUNT is VALID')
                    else:
                        print('Hugepages COUNT is INVALID')
                    hugepages_count_done = True
                if hugepages_size_done and hugepages_count_done:
                    break

        # Virtual Switch - Switch and Configuration
        # Openstack-Version
        filename = os.path.join(self.tmdirpath,
                                'global',
                                'software',
                                'config',
                                'versions.yaml')
        if os.path.exists(filename):
            # NOTE(review): open() without a context manager leaks the
            # file handle here
            if settings.getValue('OPENSTACK_VERSION') in open(filename).read():
                print('Openstack Version is valid')
            else:
                print('Openstack version if INVALID')
        # Openstack Services
        # Bootstrap

    def validate_configuration_optional(self):
        """
        Validate Optional COnfigurations

        Not implemented yet; always returns False.
        """
        return False
diff --git a/tools/sdv/SoftwarePreValid/swprevalidator.py b/tools/sdv/SoftwarePreValid/swprevalidator.py
deleted file mode 100644
index bef141b..0000000
--- a/tools/sdv/SoftwarePreValid/swprevalidator.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2020 Spirent Communications.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Abstract class for Software Prevalidations.
-Implementors, please inherit from this class.
-"""
-
-
class ISwPreValidator():
    """Interface for software pre-deployment validators.

    Concrete validators must override the three validate_* methods;
    calling them on this base class raises NotImplementedError.
    """

    def __init__(self):
        """Initialize the interface's default state."""
        # placeholder for implementation-specific defaults
        self._default_swpre_validation = None

    @property
    def validation_swpre_defaults(self):
        """Whether default pre-validation is enabled (always True)."""
        return True

    def validate_hyperlinks(self):
        """Validate manifest hyperlinks (abstract)."""
        raise NotImplementedError('Please call an implementation.')

    def validate_configuration_mandatory(self):
        """Validate the mandatory configuration items (abstract)."""
        raise NotImplementedError('Please call an implementation.')

    def validate_configuration_optional(self):
        """Validate the optional configuration items (abstract)."""
        raise NotImplementedError('Please call an implementation.')
diff --git a/tools/sdv/__init__.py b/tools/sdv/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tools/sdv/__init__.py
+++ /dev/null
diff --git a/tools/sdv/conf/00_common.conf b/tools/sdv/conf/00_common.conf
deleted file mode 100644
index 5326ecb..0000000
--- a/tools/sdv/conf/00_common.conf
+++ /dev/null
@@ -1,18 +0,0 @@
import os

# default log output directory for all logs
LOG_DIR = '/tmp'

# default log for all "small" executables
LOG_FILE_DEFAULT = 'valid-overall.log'

# Root of the sdv tool tree (the directory above this conf/ package).
ROOT_DIR = os.path.normpath(os.path.join(
    os.path.dirname(os.path.realpath(__file__)), '../'))
# Per-validator module directories picked up by the loader.
SW_PRE_VALID_DIR = os.path.join(ROOT_DIR, 'SoftwarePreValid')
SW_POST_VALID_DIR = os.path.join(ROOT_DIR, 'SoftwarePostValid')
NW_LINKS_VALID_DIR = os.path.join(ROOT_DIR, 'NwLinksValid')

# Console log verbosity, one of:
# 'debug', 'info', 'warning', 'error', 'critical'
VERBOSITY = 'warning'

# Module names listed here are skipped when loading validators.
EXCLUDE_MODULES = ['']
diff --git a/tools/sdv/conf/01_swprevalid.conf b/tools/sdv/conf/01_swprevalid.conf
deleted file mode 100644
index 46043ce..0000000
--- a/tools/sdv/conf/01_swprevalid.conf
+++ /dev/null
@@ -1,33 +0,0 @@
-
-# Modify this value to any Installer projects that have
-# manifests (templates and configuration files)
-SW_PRE_VALIDATOR = 'Airship'
-
-# Mandatory Requirements [ Configuration Check ]
-OPENSTACK_VERSION = 'ocata'
-OPERATING_SYSTEM = 'centos'
-OS_VERSION_NAME = 'xenial'
-HUGEPAGES_SIZE = '1G'
-HUGEPAGES_COUNT = 32
-OPENSTACK_CUSTOM_SERVICE_LIST = ['barbican', 'sahara']
-VIRTUAL_SWITCH_COMPUTE = 'ovs'
-VIRTUAL_SWITCH_VERSION = '2.9.2'
-BOOTSTRAP_PROTOCOL = 'pxe'
-CPU_ISOLATION = '2-19,22-39'
-
-# Airship Specific configurations.
-AIRSHIP_MANIFEST_URL = 'https://gerrit.opnfv.org/gerrit/airship'
-AIRSHIP_MANIFEST_BRANCH = 'master'
-AIRSHIP_MANIFEST_DOWNLOAD_PATH = '/tmp'
-AIRSHIP_MANIFEST_SITE_NAME = 'intel-pod10'
-AIRSHIP_TREASUREMAP_VERSION = 'v1.7'
-
-
-# Optional Requirements [Could be Installer Specific ]
-OVS_DPDK_ARGUMENTS = 'test'
-OVERCLOUD_LOGGING_CLIENT = 'fluentd'
-OVERCLOUD_MONITORING_CLIENT = 'collectd'
-LMA_SERVER_MONITORING = 'prometheus'
-LMA_SERVER_LOGGING = 'efk'
-OPENSTACK_CONTAINER_ORCHESTRATION = 'kubernetes'
-AIRSHIP_MANIFEST_VERSION = 1.7
diff --git a/tools/sdv/conf/02_swpostvalid.conf b/tools/sdv/conf/02_swpostvalid.conf
deleted file mode 100644
index 1ed9279..0000000
--- a/tools/sdv/conf/02_swpostvalid.conf
+++ /dev/null
@@ -1,4 +0,0 @@
-
-# Modify this value to any Installer projects that have
-# manifests (templates and configuration files)
-SW_POST_VALIDATOR = 'Airship'
diff --git a/tools/sdv/conf/03_nwlinksvalid.conf b/tools/sdv/conf/03_nwlinksvalid.conf
deleted file mode 100644
index 6e83066..0000000
--- a/tools/sdv/conf/03_nwlinksvalid.conf
+++ /dev/null
@@ -1 +0,0 @@
-NW_LINKS_VALIDATOR = 'lldpd'
diff --git a/tools/sdv/conf/__init__.py b/tools/sdv/conf/__init__.py
deleted file mode 100644
index ef97aa7..0000000
--- a/tools/sdv/conf/__init__.py
+++ /dev/null
@@ -1,265 +0,0 @@
-# Copyright 2015-2017 Intel Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Settings and configuration handlers.
-
-Settings will be loaded from several .conf files
-and any user provided settings file.
-"""
-
-# pylint: disable=invalid-name
-
-import copy
-import os
-import re
-import logging
-import pprint
-
-_LOGGER = logging.getLogger(__name__)
-
-# regex to parse configuration macros from 04_vnf.conf
-# it will select all patterns starting with # sign
-# and returns macro parameters and step
-# examples of valid macros:
-# #VMINDEX
-# #MAC(AA:BB:CC:DD:EE:FF) or #MAC(AA:BB:CC:DD:EE:FF,2)
-# #IP(192.168.1.2) or #IP(192.168.1.2,2)
-# #EVAL(2*#VMINDEX)
-_PARSE_PATTERN = r'(#[A-Z]+)(\(([^(),]+)(,([0-9]+))?\))?'
-
-
-class Settings(object):
- """Holding class for settings.
- """
- def __init__(self):
- pass
-
- def _eval_param(self, param):
- # pylint: disable=invalid-name
- """ Helper function for expansion of references to 'valid' parameters
- """
- if isinstance(param, str):
- # evaluate every #PARAM reference inside parameter itself
- macros = re.findall(
- r'#PARAM\((([\w\-]+)(\[[\w\[\]\-\'\"]+\])*)\)',
- param)
- if macros:
- for macro in macros:
- # pylint: disable=eval-used
- try:
- tmp_val = str(
- eval("self.getValue('{}'){}".format(macro[1],
- macro[2])))
- param = param.replace('#PARAM({})'.format(macro[0]),
- tmp_val)
- # silently ignore that option required by
- # PARAM macro can't be evaluated;
- # It is possible, that referred parameter
- # will be constructed during runtime
- # and re-read later.
- except IndexError:
- pass
- except AttributeError:
- pass
- return param
- elif isinstance(param, (list, tuple)):
- tmp_list = []
- for item in param:
- tmp_list.append(self._eval_param(item))
- return tmp_list
- elif isinstance(param, dict):
- tmp_dict = {}
- for (key, value) in param.items():
- tmp_dict[key] = self._eval_param(value)
- return tmp_dict
- else:
- return param
-
- def getValue(self, attr):
- """Return a settings item value
- """
- if attr in self.__dict__:
- if attr == 'TEST_PARAMS':
- return getattr(self, attr)
- else:
- master_value = getattr(self, attr)
- return self._eval_param(master_value)
- else:
- raise AttributeError("%r object has no attribute %r" %
- (self.__class__, attr))
-
- def __setattr__(self, name, value):
- """Set a value
- """
- # skip non-settings. this should exclude built-ins amongst others
- if not name.isupper():
- return
-
- # we can assume all uppercase keys are valid settings
- super(Settings, self).__setattr__(name, value)
-
- def setValue(self, name, value):
- """Set a value
- """
- if name is not None and value is not None:
- super(Settings, self).__setattr__(name, value)
-
- def load_from_file(self, path):
- """Update ``settings`` with values found in module at ``path``.
- """
- import imp
-
- custom_settings = imp.load_source('custom_settings', path)
-
- for key in dir(custom_settings):
- if getattr(custom_settings, key) is not None:
- setattr(self, key, getattr(custom_settings, key))
-
- def load_from_dir(self, dir_path):
- """Update ``settings`` with contents of the .conf files at ``path``.
-
- Each file must be named Nfilename.conf, where N is a single or
- multi-digit decimal number. The files are loaded in ascending order of
- N - so if a configuration item exists in more that one file the setting
- in the file with the largest value of N takes precedence.
-
- :param dir_path: The full path to the dir from which to load the .conf
- files.
-
- :returns: None
- """
- regex = re.compile(
- "^(?P<digit_part>[0-9]+)(?P<alfa_part>[a-z]?)_.*.conf$")
-
- def get_prefix(filename):
- """
- Provide a suitable function for sort's key arg
- """
- match_object = regex.search(os.path.basename(filename))
- return [int(match_object.group('digit_part')),
- match_object.group('alfa_part')]
-
- # get full file path to all files & dirs in dir_path
- file_paths = os.listdir(dir_path)
- file_paths = [os.path.join(dir_path, x) for x in file_paths]
-
- # filter to get only those that are a files, with a leading
- # digit and end in '.conf'
- file_paths = [x for x in file_paths if os.path.isfile(x) and
- regex.search(os.path.basename(x))]
-
- # sort ascending on the leading digits and afla (e.g. 03_, 05a_)
- file_paths.sort(key=get_prefix)
-
- # load settings from each file in turn
- for filepath in file_paths:
- self.load_from_file(filepath)
-
- def load_from_dict(self, conf):
- """
- Update ``settings`` with values found in ``conf``.
-
- Unlike the other loaders, this is case insensitive.
- """
- for key in conf:
- if conf[key] is not None:
- if isinstance(conf[key], dict):
- # recursively update dict items, e.g. TEST_PARAMS
- setattr(self, key.upper(),
- merge_spec(getattr(self, key.upper()), conf[key]))
- else:
- setattr(self, key.upper(), conf[key])
-
- def restore_from_dict(self, conf):
- """
- Restore ``settings`` with values found in ``conf``.
-
- Method will drop all configuration options and restore their
- values from conf dictionary
- """
- self.__dict__.clear()
- tmp_conf = copy.deepcopy(conf)
- for key in tmp_conf:
- self.setValue(key, tmp_conf[key])
-
- def load_from_env(self):
- """
- Update ``settings`` with values found in the environment.
- """
- for key in os.environ:
- setattr(self, key, os.environ[key])
-
- def __str__(self):
- """Provide settings as a human-readable string.
-
- This can be useful for debug.
-
- Returns:
- A human-readable string.
- """
- tmp_dict = {}
- for key in self.__dict__:
- tmp_dict[key] = self.getValue(key)
-
- return pprint.pformat(tmp_dict)
-
- #
- # validation methods used by step driven testcases
- #
- def validate_getValue(self, result, attr):
- """Verifies, that correct value was returned
- """
- # getValue must be called to expand macros and apply
- # values from TEST_PARAM option
- assert result == self.getValue(attr)
- return True
-
- def validate_setValue(self, _dummy_result, name, value):
- """Verifies, that value was correctly set
- """
- assert value == self.__dict__[name]
- return True
-
-
-settings = Settings()
-
-
-def merge_spec(orig, new):
- """Merges ``new`` dict with ``orig`` dict, and returns orig.
-
- This takes into account nested dictionaries. Example:
-
- >>> old = {'foo': 1, 'bar': {'foo': 2, 'bar': 3}}
- >>> new = {'foo': 6, 'bar': {'foo': 7}}
- >>> merge_spec(old, new)
- {'foo': 6, 'bar': {'foo': 7, 'bar': 3}}
-
- You'll notice that ``bar.bar`` is not removed. This is the desired result.
- """
- for key in orig:
- if key not in new:
- continue
-
- # Not allowing derived dictionary types for now
- # pylint: disable=unidiomatic-typecheck
- if type(orig[key]) == dict:
- orig[key] = merge_spec(orig[key], new[key])
- else:
- orig[key] = new[key]
-
- for key in new:
- if key not in orig:
- orig[key] = new[key]
-
- return orig
diff --git a/tools/sdv/core/__init__.py b/tools/sdv/core/__init__.py
deleted file mode 100644
index 2441d38..0000000
--- a/tools/sdv/core/__init__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2015 Intel Corporation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Core structural interfaces and their implementations
-"""
-
-# flake8: noqa
-import core.component_factory
diff --git a/tools/sdv/core/component_factory.py b/tools/sdv/core/component_factory.py
deleted file mode 100644
index 396aa28..0000000
--- a/tools/sdv/core/component_factory.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2020 Spirent Communications.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Create Components.
-"""
-
-
-def create_swprevalidator(swprevalidator_class):
- """ Create Pre-Validators"""
- return swprevalidator_class()
-
-
-def create_swpostvalidator(swpostvalidator_class):
- """ Create Post-Validators"""
- return swpostvalidator_class()
-
-
-def create_linkvalidator(linkprevalidator_class):
- """ Create Link-Validators"""
- return linkprevalidator_class()
diff --git a/tools/sdv/core/loader/__init__.py b/tools/sdv/core/loader/__init__.py
deleted file mode 100644
index e86c48e..0000000
--- a/tools/sdv/core/loader/__init__.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Core: Loader Component.
-"""
-
-# flake8: noqa
-from .loader import Loader
diff --git a/tools/sdv/core/loader/loader.py b/tools/sdv/core/loader/loader.py
deleted file mode 100644
index c9f8e96..0000000
--- a/tools/sdv/core/loader/loader.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# Copyright 2020 Intel Corporation, Spirent Communications.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Abstract class for Software Prevalidations.
-Implementors, please inherit from this class.
-"""
-
-from conf import settings
-from core.loader.loader_servant import LoaderServant
-from SoftwarePreValid.swprevalidator import ISwPreValidator
-from SoftwarePostValid.swpostvalidator import ISwPostValidator
-from NwLinksValid.nwlinksvalidator import INwLinksValidator
-
-
-# pylint: disable=too-many-public-methods
-class Loader():
- """Loader class - main object context holder.
- """
- _swvalidator_loader = None
-
- def __init__(self):
- """Loader ctor - initialization method.
-
- All data is read from configuration each time Loader instance is
- created. It is up to creator to maintain object life cycle if this
- behavior is unwanted.
- """
- self._swprevalidator_loader = LoaderServant(
- settings.getValue('SW_PRE_VALID_DIR'),
- settings.getValue('SW_PRE_VALIDATOR'),
- ISwPreValidator)
- self._swpostvalidator_loader = LoaderServant(
- settings.getValue('SW_POST_VALID_DIR'),
- settings.getValue('SW_POST_VALIDATOR'),
- ISwPostValidator)
- self._nwlinksvalidator_loader = LoaderServant(
- settings.getValue('NW_LINKS_VALID_DIR'),
- settings.getValue('NW_LINKS_VALIDATOR'),
- INwLinksValidator)
-
- def get_swprevalidator(self):
- """ Returns a new instance configured Software Validator
- :return: ISwPreValidator implementation if available, None otherwise
- """
- return self._swprevalidator_loader.get_class()()
-
- def get_swprevalidator_class(self):
- """Returns type of currently configured Software Validator.
-
- :return: Type of ISwPreValidator implementation if available.
- None otherwise.
- """
- return self._swprevalidator_loader.get_class()
-
- def get_swprevalidators(self):
- """
- Get Prevalidators
- """
- return self._swprevalidator_loader.get_classes()
-
- def get_swprevalidators_printable(self):
- """
- Get Prevalidators for printing
- """
- return self._swprevalidator_loader.get_classes_printable()
-
- def get_swpostvalidator(self):
- """ Returns a new instance configured Software Validator
- :return: ISwPostValidator implementation if available, None otherwise
- """
- return self._swpostvalidator_loader.get_class()()
-
- def get_swpostvalidator_class(self):
- """Returns type of currently configured Software Validator.
-
- :return: Type of ISwPostValidator implementation if available.
- None otherwise.
- """
- return self._swpostvalidator_loader.get_class()
-
- def get_swpostvalidators(self):
- """
- Get Postvalidators
- """
- return self._swpostvalidator_loader.get_classes()
-
- def get_swpostvalidators_printable(self):
- """
- Get Postvalidators for printing
- """
- return self._swpostvalidator_loader.get_classes_printable()
-
- def get_nwlinksvalidator(self):
- """ Returns a new instance configured Nw-Links Validator
- :return: INwLinksValidator implementation if available, None otherwise
- """
- return self._nwlinksvalidator_loader.get_class()()
-
- def get_nwlinksvalidator_class(self):
- """Returns type of currently configured Nw-Links Validator.
-
- :return: Type of NwLinksValidator implementation if available.
- None otherwise.
- """
- return self._nwlinksvalidator_loader.get_class()
-
- def get_nwlinkvalidators(self):
- """
- Get Linkvalidators
- """
- return self._nwlinksvalidator_loader.get_classes()
-
- def get_nwlinkvalidators_printable(self):
- """
- Get Linkvalidators for printing
- """
- return self._nwlinksvalidator_loader.get_classes_printable()
diff --git a/tools/sdv/core/loader/loader_servant.py b/tools/sdv/core/loader/loader_servant.py
deleted file mode 100644
index 4e55c67..0000000
--- a/tools/sdv/core/loader/loader_servant.py
+++ /dev/null
@@ -1,183 +0,0 @@
-# Copyright 2020 Intel Corporation, Spirent Communications.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Loader Support Module.
-"""
-
-import os
-from os import sys
-import imp
-import fnmatch
-import logging
-from conf import settings
-
-
-class LoaderServant():
- """Class implements basic dynamic import operations.
- """
- _class_name = None
- _path = None
- _interface = None
-
- def __init__(self, path, class_name, interface):
- """LoaderServant constructor
-
- Intializes all data needed for import operations.
-
- Attributes:
- path: path to directory which contains implementations derived from
- interface.
- class_name: Class name which will be returned in get_class
- method, if such definition exists in directory
- represented by path,
- interface: interface type. Every object which doesn't
- implement this particular interface will be
- filtered out.
- """
- self._class_name = class_name
- self._path = path
- self._interface = interface
-
- def get_class(self):
- """Returns class type based on parameters passed in __init__.
-
- :return: Type of the found class.
- None if class hasn't been found
- """
-
- return self.load_module(path=self._path,
- interface=self._interface,
- class_name=self._class_name)
-
- def get_classes(self):
- """Returns all classes in path derived from interface
-
- :return: Dictionary with following data:
- - key: String representing class name,
- - value: Class type.
- """
- return self.load_modules(path=self._path,
- interface=self._interface)
-
- def get_classes_printable(self):
- """Returns all classes derived from _interface found in path
-
- :return: String - list of classes in printable format.
- """
-
- out = self.load_modules(path=self._path,
- interface=self._interface)
- results = []
-
- # sort modules to produce the same output everytime
- for (name, mod) in sorted(out.items()):
- desc = (mod.__doc__ or 'No description').strip().split('\n')[0]
- results.append((name, desc))
-
- header = 'Classes derived from: ' + self._interface.__name__
- output = [header + '\n' + '=' * len(header) + '\n']
-
- for (name, desc) in results:
- output.append('* %-18s%s' % ('%s:' % name, desc))
-
- output.append('')
-
- output.append('')
-
- return '\n'.join(output)
-
- @staticmethod
- def load_module(path, interface, class_name):
- """Imports everything from given path and returns class type
-
- This is based on following conditions:
- - Class is derived from interface,
- - Class type name matches class_name.
-
- :return: Type of the found class.
- None if class hasn't been found
- """
-
- results = LoaderServant.load_modules(
- path=path, interface=interface)
-
- if class_name in results:
- logging.info(
- "Class found: %s.", class_name)
- return results.get(class_name)
-
- return None
-
- @staticmethod
- def load_modules(path, interface):
- """Returns dictionary of class name/class type found in path
-
- This is based on following conditions:
- - classes found under path are derived from interface.
- - class is not interface itself.
-
- :return: Dictionary with following data:
- - key: String representing class name,
- - value: Class type.
- """
- result = {}
-
- for _, mod in LoaderServant._load_all_modules(path):
- # find all classes derived from given interface, but suppress
- # interface itself and any abstract class starting with iface name
- gens = dict((k, v) for (k, v) in list(mod.__dict__.items())
- if isinstance(v, type) and
- issubclass(v, interface) and
- not k.startswith(interface.__name__))
- if gens:
- for (genname, gen) in list(gens.items()):
- result[genname] = gen
- return result
-
- @staticmethod
- def _load_all_modules(path):
- """Load all modules from ``path`` directory.
-
- This is based on the design used by OFTest:
- https://github.com/floodlight/oftest/blob/master/oft
-
- :param path: Path to a folder of modules.
-
- :return: List of modules in a folder.
- """
- mods = []
-
- for root, _, filenames in os.walk(path):
- # Iterate over each python file
- for filename in fnmatch.filter(filenames, '[!.]*.py'):
- modname = os.path.splitext(os.path.basename(filename))[0]
-
- # skip module load if it is excluded by configuration
- if modname in settings.getValue('EXCLUDE_MODULES'):
- continue
-
- try:
- if modname in sys.modules:
- mod = sys.modules[modname]
- else:
- mod = imp.load_module(
- modname, *imp.find_module(modname, [root]))
- except ImportError:
- logging.error('Could not import file %s', filename)
- raise
-
- mods.append((modname, mod))
-
- return mods
diff --git a/tools/sdv/docs/valid.rst b/tools/sdv/docs/valid.rst
deleted file mode 100644
index 6aeb8a2..0000000
--- a/tools/sdv/docs/valid.rst
+++ /dev/null
@@ -1,28 +0,0 @@
-.. This work is licensed under a Creative Commons Attribution 4.0 International License.
-.. http://creativecommons.org/licenses/by/4.0
-.. (c) OPNFV, Intel Corporation, AT&T and others.
-
-CIRV Software Validation Tool
-=============================
-This tool is designed to perform Software Configuration Validation, which covers:
-
-1. Pre-Deployment (of VIM or Container Management Software) Validation of Software Configuration
-2. Post-Deployment (of VIM or Container Management Software) Validation of Software Configuration
-3. Network-Link Checking - Validating VLAN and IP configurations
-
-
-Installation
-************
-This tool does not have any installation. However, there are requirements in terms of Python packages, which can be installed using pip3. Refer to requirements.txt file for the package list.
-
-Usage
-*****
-Example Commands:
-
-1. To run all the validations: ./valid
-2. Help: ./valid --help
-3. Version Check: ./valid --version
-4. List Sofware Pre-Deployment validators: ./valid --list-swpredepv
-5. List Sofware Post-Deployment validators: ./valid --list-swpostdepv
-6. List all validations: ./valid --list-validations
-7. Run only single validation [WORK IN PROGRESS]
diff --git a/tools/sdv/valid b/tools/sdv/valid
deleted file mode 100755
index 1a9a252..0000000
--- a/tools/sdv/valid
+++ /dev/null
@@ -1,147 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright 2020 Spirent Communications
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""VALID main script.
-"""
-
-import logging
-import os
-import sys
-import argparse
-import time
-import datetime
-from conf import settings
-import core.component_factory as component_factory
-from core.loader import Loader
-
-VERBOSITY_LEVELS = {
- 'debug': logging.DEBUG,
- 'info': logging.INFO,
- 'warning': logging.WARNING,
- 'error': logging.ERROR,
- 'critical': logging.CRITICAL
-}
-
-_CURR_DIR = os.path.dirname(os.path.realpath(__file__))
-_LOGGER = logging.getLogger()
-
-def parse_arguments():
- """
- Parse command line arguments.
- """
- parser = argparse.ArgumentParser(prog=__file__, formatter_class=
- argparse.ArgumentDefaultsHelpFormatter)
- parser.add_argument('--version', action='version', version='%(prog)s 0.1')
- parser.add_argument('--list-validations', action='store_true',
- help='list all validations')
- parser.add_argument('--list-swpredepv', action='store_true',
- help='list all Software pre-dep validations and exit')
- parser.add_argument('--list-swpostdepv', action='store_true',
- help='list all Software post-dep validations and exit')
- parser.add_argument('--list-nwlinksv', action='store_true',
- help='list all Link validations and exit')
- parser.add_argument('exact_validation_name', nargs='*', help='Exact names of\
- validations to run. E.g "valid nwlinks"\
- runs only nwlink-validations.\
- To run all omit positional arguments')
- args = vars(parser.parse_args())
-
- return args
-
-
-def configure_logging(level):
- """Configure logging.
- """
- name, ext = os.path.splitext(settings.getValue('LOG_FILE_DEFAULT'))
- rename_default = "{name}_{uid}{ex}".format(name=name,
- uid=settings.getValue(
- 'LOG_TIMESTAMP'),
- ex=ext)
- log_file_default = os.path.join(
- settings.getValue('RESULTS_PATH'), rename_default)
- _LOGGER.setLevel(logging.DEBUG)
- stream_logger = logging.StreamHandler(sys.stdout)
- stream_logger.setLevel(VERBOSITY_LEVELS[level])
- stream_logger.setFormatter(logging.Formatter(
- '[%(levelname)-5s] %(asctime)s : (%(name)s) - %(message)s'))
- _LOGGER.addHandler(stream_logger)
- file_logger = logging.FileHandler(filename=log_file_default)
- file_logger.setLevel(logging.DEBUG)
- file_logger.setFormatter(logging.Formatter(
- '%(asctime)s : %(message)s'))
- _LOGGER.addHandler(file_logger)
-
-def handle_list_options(args):
- """ Process --list cli arguments if needed
-
- :param args: A dictionary with all CLI arguments
- """
- if args['list_swpredepv']:
- print(Loader().get_swprevalidators_printable())
- sys.exit(0)
-
- if args['list_swpostdepv']:
- print(Loader().get_swpostvalidators_printable())
- sys.exit(0)
-
- if args['list_nwlinksv']:
- print(Loader().get_nwlinkvalidators_printable())
- sys.exit(0)
-
-
-def main():
- """Main function.
- """
- args = parse_arguments()
-
- # define the timestamp to be used by logs and results
- date = datetime.datetime.fromtimestamp(time.time())
- timestamp = date.strftime('%Y-%m-%d_%H-%M-%S')
- settings.setValue('LOG_TIMESTAMP', timestamp)
-
-
- # configure settings
- settings.load_from_dir(os.path.join(_CURR_DIR, 'conf'))
-
- # if required, handle list-* operations
- handle_list_options(args)
-
- results_dir = "results_" + timestamp
- results_path = os.path.join(settings.getValue('LOG_DIR'), results_dir)
- settings.setValue('RESULTS_PATH', results_path)
- # create results directory
- if not os.path.exists(results_path):
- os.makedirs(results_path)
-
- configure_logging(settings.getValue('VERBOSITY'))
-
- loader = Loader()
- swprevalidators = loader.get_swprevalidators()
- if settings.getValue('SW_PRE_VALIDATOR') not in swprevalidators:
- _LOGGER.error('There are no swvalidators matching \'%s\' found in'
- ' \'%s\'. Exiting...', settings.getValue('SW_PRE_VALIDATOR'),
- settings.getValue('SW_PRE_VALID_DIR'))
- sys.exit(1)
- swv_pre_ctrl = component_factory.create_swprevalidator(
- loader.get_swprevalidator_class())
- # First validate hyperlinks
- swv_pre_ctrl.validate_hyperlinks()
- # Next validate mandatory configuration
- swv_pre_ctrl.validate_configuration_mandatory()
-
-
-if __name__ == "__main__":
- main()