author    Sridhar Rao <sridhar.rao@spirent.com>    2020-10-15 01:55:24 +0000
committer Gerrit Code Review <gerrit@opnfv.org>    2020-10-15 01:55:24 +0000
commit    f12039ae1fb9ed773ddf3d8000c6645e5900c48e (patch)
tree      53dff91c6d2bcbe8b0ba5511ea8e7a32f52dd993
parent    c5bd3737e608b81ef5d4361739d680d2fb3bb1cb (diff)
parent    b189119586d1d3cf176c31402daa4b34830ec48b (diff)
Merge "sdv-prevalidation: added src, mapping, documentation, Dockerfile, and server files"
-rw-r--r--  sdv/docker/sdvconfig/Dockerfile | 26
-rw-r--r--  sdv/docker/sdvconfig/cli_validation.py | 249
-rw-r--r--  sdv/docker/sdvconfig/extrapolation/__init__.py | 19
-rw-r--r--  sdv/docker/sdvconfig/extrapolation/extrapolation.py | 167
-rw-r--r--  sdv/docker/sdvconfig/manifest/__init__.py | 19
-rw-r--r--  sdv/docker/sdvconfig/manifest/manifest.py | 296
-rw-r--r--  sdv/docker/sdvconfig/mapping/TripleO/hardware-mapping.json | 104
-rw-r--r--  sdv/docker/sdvconfig/mapping/TripleO/info-mapping.json | 155
-rw-r--r--  sdv/docker/sdvconfig/mapping/TripleO/network-mapping.json | 110
-rw-r--r--  sdv/docker/sdvconfig/mapping/TripleO/platform-mapping.json | 59
-rw-r--r--  sdv/docker/sdvconfig/mapping/TripleO/software-mapping.json | 20
-rw-r--r--  sdv/docker/sdvconfig/mapping/TripleO/storage-mapping.json | 29
-rw-r--r--  sdv/docker/sdvconfig/mapping/airship/hardware-mapping.json | 107
-rw-r--r--  sdv/docker/sdvconfig/mapping/airship/info-mapping.json | 176
-rw-r--r--  sdv/docker/sdvconfig/mapping/airship/network-mapping.json | 110
-rw-r--r--  sdv/docker/sdvconfig/mapping/airship/platform-mapping.json | 59
-rw-r--r--  sdv/docker/sdvconfig/mapping/airship/software-mapping.json | 20
-rw-r--r--  sdv/docker/sdvconfig/mapping/airship/storage-mapping.json | 53
-rw-r--r--  sdv/docker/sdvconfig/mapping/template/hardware-mapping.json | 104
-rw-r--r--  sdv/docker/sdvconfig/mapping/template/info-mapping.json | 155
-rw-r--r--  sdv/docker/sdvconfig/mapping/template/network-mapping.json | 110
-rw-r--r--  sdv/docker/sdvconfig/mapping/template/platform-mapping.json | 59
-rw-r--r--  sdv/docker/sdvconfig/mapping/template/software-mapping.json | 20
-rw-r--r--  sdv/docker/sdvconfig/mapping/template/storage-mapping.json | 29
-rw-r--r--  sdv/docker/sdvconfig/requirements.txt | 1
-rw-r--r--  sdv/docker/sdvconfig/server.py | 177
-rw-r--r--  sdv/docker/sdvconfig/testapi/__init__.py | 19
-rw-r--r--  sdv/docker/sdvconfig/testapi/testapi.py | 77
-rw-r--r--  sdv/docker/sdvconfig/validation/__init__.py | 24
-rw-r--r--  sdv/docker/sdvconfig/validation/hardware.py | 214
-rw-r--r--  sdv/docker/sdvconfig/validation/info.py | 202
-rw-r--r--  sdv/docker/sdvconfig/validation/network.py | 242
-rw-r--r--  sdv/docker/sdvconfig/validation/platform.py | 93
-rw-r--r--  sdv/docker/sdvconfig/validation/software.py | 159
-rw-r--r--  sdv/docker/sdvconfig/validation/storage.py | 160
-rw-r--r--  sdv/docs/docker/sdvconfig/developer/devguide.rst | 309
-rw-r--r--  sdv/docs/docker/sdvconfig/developer/extrapolation-flow.png | bin 0 -> 48637 bytes
-rw-r--r--  sdv/docs/docker/sdvconfig/developer/extrapolation.png | bin 0 -> 6923 bytes
-rw-r--r--  sdv/docs/docker/sdvconfig/developer/validation-flow.png | bin 0 -> 73615 bytes
-rw-r--r--  sdv/docs/docker/sdvconfig/developer/validation.png | bin 0 -> 17281 bytes
-rw-r--r--  sdv/docs/docker/sdvconfig/user/configguide.rst | 83
-rw-r--r--  sdv/docs/docker/sdvconfig/user/userguide.rst | 42
42 files changed, 4057 insertions, 0 deletions
diff --git a/sdv/docker/sdvconfig/Dockerfile b/sdv/docker/sdvconfig/Dockerfile
new file mode 100644
index 0000000..d66ccc7
--- /dev/null
+++ b/sdv/docker/sdvconfig/Dockerfile
@@ -0,0 +1,26 @@
+FROM python:3.8-slim-buster
+
+# create folder sdvconfig
+RUN mkdir sdvconfig
+# change the workdir to the newly created directory
+WORKDIR /sdvconfig/
+
+# install from requirements.txt
+COPY requirements.txt /sdvconfig/requirements.txt
+RUN pip install -r requirements.txt
+RUN rm requirements.txt
+
+# copy all required files/folders
+COPY extrapolation/ /sdvconfig/extrapolation/
+COPY mapping/ /sdvconfig/mapping/
+COPY validation/ /sdvconfig/validation/
+COPY server.py /sdvconfig/
+COPY cli_validation.py /sdvconfig/
+COPY testapi/ /sdvconfig/testapi/
+COPY manifest /sdvconfig/manifest/
+
+# expose port for rest calls
+EXPOSE 8000
+
+# run the http server
+CMD [ "python", "server.py" ]
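For a quick local try-out of the Dockerfile above, the image can be built and run with standard Docker commands; the image tag below is only an assumed example, and port 8000 is the REST port exposed above:

    docker build -t sdvconfig .
    docker run -p 8000:8000 sdvconfig
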
diff --git a/sdv/docker/sdvconfig/cli_validation.py b/sdv/docker/sdvconfig/cli_validation.py
new file mode 100644
index 0000000..70e498e
--- /dev/null
+++ b/sdv/docker/sdvconfig/cli_validation.py
@@ -0,0 +1,249 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2020 Ashwin Nayak
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=too-many-instance-attributes, too-many-arguments
+
+""" validation code """
+
+import os
+import logging
+import argparse
+import sys
+import datetime
+import json
+import shutil
+import requests
+import yaml
+from git import Repo
+from manifest import Manifest
+from validation import HardwareValidation, InfoValidation, PlatformValidation
+from validation import SoftwareValidation, NetworkValidation, StorageValidation
+from testapi import PushResults
+
+GLOBAL_DIR = "https://github.com/airshipit/treasuremap.git"
+
+class Validate():
+ """ Validation class """
+ def __init__(self, inst_dir, mapping_file_type, pdf_fn, sitename):
+ # initialize internal values
+ self.correct = 0
+ self.wrong = 0
+ self.total = 0
+ self.json = None
+ self.logger = None
+ self.result = ""
+ self.pdf_fn = None
+ self.inst_dir = None
+ self.gsw = None
+ self.testapi_result = dict()
+
+ # initialize the logger
+ self.start_logger()
+
+ # check if it's a url or a directory
+ self.check_and_download(inst_dir, pdf_fn)
+ # initialize a few more values
+ self.site_sw_dir = os.path.join(self.inst_dir, 'site', sitename, 'software')
+ self.type_sw_dir = self.get_type_sw(self.inst_dir, sitename)
+
+ if self.json is None:
+ self.read_json(self.pdf_fn)
+
+ self.manifest = Manifest(inst_dir=os.path.join(self.inst_dir, 'site', sitename), \
+ mapping_file_dir=os.path.join("mapping", mapping_file_type), logger=self.logger)
+
+ def check_and_download(self, inst_dir, pdf_fn):
+ """check if the given arguments are url or directory location"""
+
+ # create a directory called /tmp
+ try:
+ os.mkdir('/tmp')
+ except OSError:
+ self.logger.exception("creation of directory failed")
+ raise
+
+ # check pdf_fn
+ if pdf_fn[:4] == "http":
+ # do a requests call and get value
+ req = requests.get(pdf_fn)
+ self.json = json.loads(req.text)
+ else:
+ self.pdf_fn = pdf_fn
+
+ if inst_dir[:4] == "http":
+
+ # clone the installer repo
+ try:
+ Repo.clone_from(inst_dir, os.path.join('/tmp', 'inst'))
+ self.inst_dir = os.path.join('/tmp', 'inst')
+ self.downloaded = True
+ except ConnectionError:
+ self.logger.exception("failed to download the inst git repo")
+ raise
+ else:
+ self.inst_dir = inst_dir
+
+ # download the global file
+ try:
+ Repo.clone_from(GLOBAL_DIR, os.path.join('/tmp', 'global'))
+ self.gsw = os.path.join('/tmp', 'global', 'global', 'software')
+ except ConnectionError:
+ self.logger.exception("failed to download the global git repo")
+ raise
+
+ def get_type_sw(self, inst_dir, sitename):
+ """ find the directory location of type sw in inst """
+ # read the site definitions present in site
+ temp = None
+ try:
+ with open(os.path.join(inst_dir, 'site', sitename, 'site-definition.yaml')) as yaml_file:
+ temp = yaml.load(yaml_file, Loader=yaml.FullLoader)
+ except IOError:
+ self.logger.exception("could not read the yaml file")
+ raise
+
+ return os.path.join(inst_dir, 'type', temp["data"]["site_type"])
+
+ def read_json(self, json_fn):
+ """ read json file """
+ try:
+ with open(os.path.join(json_fn)) as json_file:
+ self.json = json.loads(json_file.read())
+ except IOError:
+ self.logger.exception("Unable to read the pdf file")
+ self.logger.info("Exiting process")
+ sys.exit()
+
+ def start_logger(self):
+ """ starting logging process """
+ logging.basicConfig(filename='validation.log',
+ filemode='w',
+ format='%(asctime)s,%(msecs)d %(name)s %(levelname)s %(message)s',
+ datefmt='%H:%M:%S',
+ level=logging.DEBUG)
+
+ self.logger = logging.getLogger('validation')
+ self.logger.info("Starting validation program")
+
+ def validate(self):
+ """ description about validation """
+ # validate info
+ correct, wrong, total, result = InfoValidation(
+ self.json, self.manifest, self.logger).get_values()
+ self.correct += correct
+ self.wrong += wrong
+ self.total += total
+ string = (
+ "The number of correct :{} wrong:{} and total:{} in info profile\n\n".format(
+ self.correct,
+ self.wrong,
+ self.total))
+ self.result += result + string
+
+ # iterate through the roles: have a class for each of the roles
+ for _, value in enumerate(self.json["roles"]):
+ role = value["name"]
+ # print(role,value["hardware_profile"])
+ correct, wrong, total, result = HardwareValidation(
+ self.json, value["hardware_profile"], self.manifest, self.logger).get_values()
+ self.correct += correct
+ self.wrong += wrong
+ self.total += total
+ string = (
+ "The number of correct :{} wrong:{} and total:{} in hardware profile\n\n".format(
+ correct, wrong, total))
+ self.result += result + string
+
+ correct, wrong, total, result = StorageValidation(
+ role, self.json, value["storage_mapping"], self.manifest, self.logger).get_values()
+ self.correct += correct
+ self.wrong += wrong
+ self.total += total
+ string = (
+ "The number of correct :{} wrong:{} and total:{} in storage profile\n\n".format(
+ correct, wrong, total))
+ self.result += result + string
+
+ correct, wrong, total, result = SoftwareValidation(role, self.json, \
+ value["sw_set_name"], self.manifest, self.gsw, self.type_sw_dir, \
+ self.site_sw_dir, self.logger).get_values()
+ self.correct += correct
+ self.wrong += wrong
+ self.total += total
+ string = (
+ "The number of correct :{} wrong:{} and total:{} in software profile\n\n".format(
+ correct, wrong, total))
+ self.result += result + string
+
+ correct, wrong, total, result = PlatformValidation(
+ role, self.json, value["platform_profile"], self.manifest, self.logger).get_values()
+ self.correct += correct
+ self.wrong += wrong
+ self.total += total
+ string = (
+ "The number of correct :{} wrong:{} and total:{} in platform profile\n\n".format(
+ correct, wrong, total))
+ self.result += result + string
+
+ correct, wrong, total, result = NetworkValidation(role, self.json, \
+ value["interface_mapping"], self.manifest, self.logger).get_values()
+ self.correct += correct
+ self.wrong += wrong
+ self.total += total
+ string = (
+ "The number of correct :{} wrong:{} and total:{} in network profile\n\n".format(
+ correct, wrong, total))
+ self.result += result + string
+
+ self.testapi_result["timestamp"] = datetime.datetime.now()
+ self.testapi_result["correct"] = self.correct
+ self.testapi_result["wrong"] = self.wrong
+ self.testapi_result["total"] = self.total
+
+ # print the final report
+ self.logger.info("Validation complete!")
+ # push results to opnfv testapi
+ PushResults(self.testapi_result, self.logger)
+
+ # clean up /tmp
+ shutil.rmtree('/tmp')
+
+ return self.result
+
+
+if __name__ == "__main__":
+ # Initiate the parser
+ PARSER = argparse.ArgumentParser(description="validation program")
+
+ # Add long and short argument for test
+ # parser.add_argument("--test", help="test the code", action="store_true")
+
+ # Add long argument for the installer directory
+ PARSER.add_argument("--inst_dir", help="get installer dir")
+
+ # Add long argument for the installer type
+ PARSER.add_argument("--inst_type", help="get installer type")
+
+ # Add long and short argument for pdf file
+ PARSER.add_argument("--pdf", help="get pdf")
+
+ # Add long and short argument for sitename
+ PARSER.add_argument("--sitename", help="get sitename")
+
+ # Read arguments from the command line
+ ARGS = PARSER.parse_args()
+
+ print(Validate(ARGS.inst_dir, ARGS.inst_type, ARGS.pdf, ARGS.sitename).validate())
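For reference, a hedged invocation sketch for the CLI entry point above. The flags match the argparse definitions (--inst_dir, --inst_type, --pdf, --sitename); the concrete values are hypothetical, and --inst_dir / --pdf may also be http(s) URLs, in which case check_and_download() clones or downloads them:

    python cli_validation.py --inst_dir /tmp/inst --inst_type airship --pdf /tmp/pdf.json --sitename intel-pod10
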
diff --git a/sdv/docker/sdvconfig/extrapolation/__init__.py b/sdv/docker/sdvconfig/extrapolation/__init__.py
new file mode 100644
index 0000000..96f13a0
--- /dev/null
+++ b/sdv/docker/sdvconfig/extrapolation/__init__.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2020 Ashwin Nayak
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""" extrapolation package """
+
+from .extrapolation import Extrapolate
diff --git a/sdv/docker/sdvconfig/extrapolation/extrapolation.py b/sdv/docker/sdvconfig/extrapolation/extrapolation.py
new file mode 100644
index 0000000..848ef79
--- /dev/null
+++ b/sdv/docker/sdvconfig/extrapolation/extrapolation.py
@@ -0,0 +1,167 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2020 Ashwin Nayak
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""" program to perform extrapolation """
+
+import os
+import json
+import logging
+import argparse
+import requests
+from netaddr import IPNetwork
+
+class Extrapolate():
+ """Perform extrapolation"""
+ def __init__(self, pdf_fn, store_at):
+ # store external values
+ self.store_at = store_at
+ # initialize internal values
+ self.pdf = None
+ self.ip_list = []
+ # initialization functions
+ self.start_logger()
+ self.download_pdf(pdf_fn)
+ if self.pdf is None:
+ self.read_pdf(pdf_fn)
+
+ self.get_ip(self.get_ipmi())
+ self.extrapolate()
+
+ def download_pdf(self, pdf_fn):
+ """ download pdf """
+ # check pdf_fn
+ if pdf_fn[:4] == "http":
+ # do a requests call and get value
+ try:
+ req = requests.get(pdf_fn)
+ self.pdf = json.loads(req.text)
+ except ConnectionError:
+ self.logger.exception("error downloading pdf")
+ raise
+
+ def start_logger(self):
+ """ starting logging process """
+ logging.basicConfig(filename='extrapolation.log',
+ filemode='w',
+ format='%(asctime)s,%(msecs)d %(name)s %(levelname)s %(message)s',
+ datefmt='%H:%M:%S',
+ level=logging.INFO)
+
+ self.logger = logging.getLogger('extrapolation')
+ self.logger.info("Starting extrapolation program")
+
+ def read_pdf(self, json_fn):
+ """ read platform descritpion file """
+ try:
+ with open(os.path.join(json_fn)) as json_file:
+ self.logger.debug(f"Reading the pdf file:{json_fn}")
+ self.pdf = json.loads(json_file.read())
+ except IOError:
+ self.logger.exception("Error while reading the pdf file")
+ raise
+
+ def save_pdf(self):
+ """ save the pdf file """
+ try:
+ with open(os.path.join(self.store_at, 'pdf_new.json'), 'w', encoding='utf-8')\
+ as json_file:
+ self.logger.debug(f"Saving the extrapolated pdf file named: pd_new.json at:{self.store_at}")
+ json.dump(self.pdf, json_file, indent=2)
+ except IOError:
+ self.logger.exception("Could not save the logger file")
+
+ def get_ipmi(self):
+ """ get ipmi cidr ip """
+ for val in self.pdf["networks"]:
+ if val["name"] == "ipmi" or val["name"] == "ilo":
+ return val["cidr"]
+ return "192.168.10.0/24"
+
+ def get_ip(self, value):
+ """ get list of valid ip's"""
+ self.logger.debug(f"getting list of ip's from {value}")
+
+ try:
+ for _ip in IPNetwork(value):
+ if str(_ip).split('.')[-1] != '0' and str(_ip).split('.')[-1] != '255':
+ self.ip_list.append(str(_ip))
+ except Exception:
+ self.logger.exception("error with the ip module")
+ raise
+
+ def get_ilo_info(self, count):
+ """get ipmi info """
+ temp = dict()
+ if count > len(self.ip_list):
+ self.logger.error("No ip's avaialble!")
+ elif not self.pdf["extrapolation_info"]["ip_increment"].isdigit():
+ self.logger.error("ip increment value is not an integer")
+ else:
+ temp["ip"] = self.ip_list[count * int(self.pdf["extrapolation_info"]["ip_increment"])]
+ temp["user"] = self.pdf["extrapolation_info"]["ilo_user"]
+ temp["password"] = self.pdf["management_info"]["city"]\
+ + self.pdf["management_info"]["area_name"]\
+ + self.pdf["management_info"]["room_id"]+str(count + 1)
+ self.logger.debug(f"ipmi info:{temp}")
+ return temp
+
+ def extrapolate(self):
+ """ Perform Extrapolation """
+ self.logger.info("starting extrapolation")
+
+ list_servers = []
+
+ # get ipmi info
+ count = 0
+
+ for val in self.pdf["roles"]:
+ num_servers = int(val["count"]) # Number of servers in the particular role.
+ role = val["name"]
+
+ for idx in range(num_servers):
+ temp = dict()
+ temp["role_name"] = role
+ temp["device_name"] = str(role) + str(idx + 1)
+ temp["az_name"] = "default"
+ temp["ha_name"] = "default"
+
+ temp["ilo_info"] = self.get_ilo_info(count)
+ count += 1
+
+ list_servers.append(temp)
+
+ # save the pdf file
+ self.pdf["servers"] = list_servers
+ self.save_pdf()
+
+ self.logger.info("Extrapolation completed!")
+
+if __name__ == "__main__":
+ # the main block is for testing purposes
+ # Initiate the parser
+ PARSER = argparse.ArgumentParser(description="Extrapolation program")
+
+ # Add long argument for pdf file
+ PARSER.add_argument("--file", help="get pdf file name")
+
+ # Add long argument for store_at
+ PARSER.add_argument("--store_at", help="store the file at")
+
+ # Read arguments from the command line
+ ARGS = PARSER.parse_args()
+
+ # run the code
+ Extrapolate(ARGS.file, ARGS.store_at)
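Similarly, a hedged invocation sketch for the extrapolation script above, using the --file and --store_at flags it defines; the PDF file name and output directory are hypothetical:

    python extrapolation.py --file pdf.json --store_at /tmp
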
diff --git a/sdv/docker/sdvconfig/manifest/__init__.py b/sdv/docker/sdvconfig/manifest/__init__.py
new file mode 100644
index 0000000..e5bf546
--- /dev/null
+++ b/sdv/docker/sdvconfig/manifest/__init__.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2020 Ashwin Nayak
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""" manifest package """
+
+from .manifest import Manifest
diff --git a/sdv/docker/sdvconfig/manifest/manifest.py b/sdv/docker/sdvconfig/manifest/manifest.py
new file mode 100644
index 0000000..c9dd5b9
--- /dev/null
+++ b/sdv/docker/sdvconfig/manifest/manifest.py
@@ -0,0 +1,296 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2020 Ashwin Nayak
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=too-many-branches, too-many-statements
+
+""" manifest code """
+
+import os
+import json
+import yaml
+
+CWD = os.getcwd()
+
+
+class Manifest():
+ """ All about manifest """
+
+ def __init__(self, inst_dir, mapping_file_dir, logger):
+ self.yaml = dict()
+ self.mapping = dict()
+ self.vals = []
+ self.saver = dict()
+ self.logger = logger
+
+ self.auto_generate_mapping(mapping_file_dir, inst_dir)
+
+ def read_yaml(self, yaml_dir):
+ """ read yaml file """
+ yaml_files = [pos_json for pos_json in os.listdir(yaml_dir) if pos_json.endswith('.yaml')]
+ temp = []
+
+ for yaml_fn in yaml_files:
+ try:
+ self.logger.debug("the yaml fn:%s", yaml_fn)
+ with open(os.path.join(yaml_dir, yaml_fn)) as yaml_file:
+ temp_yaml = list(yaml.load_all(yaml_file, Loader=yaml.FullLoader))
+ temp.append(temp_yaml)
+ except FileNotFoundError:
+ self.logger.exception("could not read the manifest files")
+ raise
+
+ self.logger.debug("temp variable size in read_yaml:%s", len(temp))
+ if len(temp) == 0:
+ self.logger.exception("could not read the manifest files")
+ raise FileNotFoundError("could not read the manifest file")
+
+ return temp
+
+ def get_host_profile_mapping(self, inst_dir):
+ """ get host profile mapping """
+ # first read the nodes.yaml file from baremetal directory
+ temp_yaml = self.read_yaml(os.path.join(inst_dir, "baremetal"))
+ host_profile_mapping = dict()
+
+ for vals in temp_yaml:
+ for val in vals:
+ self.logger.info("val:%s", val)
+ val = val["data"]
+ host_profile_mapping[val["host_profile"]] = []
+
+ for role in val["metadata"]["tags"]:
+ if role not in host_profile_mapping[val["host_profile"]]:
+ host_profile_mapping[val["host_profile"]].append(role)
+ self.logger.info("host profile mapping values:%s", host_profile_mapping)
+
+ return host_profile_mapping
+
+ def read_mapping(self, mapping_file):
+ """ read corresponding mapping file """
+ try:
+ with open(mapping_file) as map_file:
+ temp = json.load(map_file)
+ return temp
+ except FileNotFoundError:
+ self.logger.exception("could not read the mapping file")
+ raise
+
+ def auto_generate_mapping(self, mapping_file_dir, inst_dir):
+ """ generate mapping """
+ # first check if the required files exist in the site
+ if not os.path.exists(os.path.join(inst_dir, "baremetal")):
+ self.logger.crititcal("baremetal file does not exist")
+ raise FileNotFoundError("baremetal file does not exist")
+
+ if not os.path.exists(os.path.join(inst_dir, "profiles")):
+ self.logger.crititcal("profiles file does not exist")
+ raise FileNotFoundError("profiles file does not exist")
+
+ # get host profile mapping
+ host_profile_mapping = self.get_host_profile_mapping(inst_dir)
+
+ # first check for hardware profile
+ # read the hardware mapping file, set the manifest context as hardware-intel-pod10
+
+ temp = self.read_yaml(os.path.join(inst_dir, "profiles", "hardware"))
+ for val in temp:
+ self.yaml[val[0]["metadata"]["name"]] = val
+ context = "global"
+ manifest_context = val[0]["metadata"]["name"]
+
+ temp_mapping = self.read_mapping(os.path.join(
+ mapping_file_dir, "hardware-mapping.json"))
+
+ for key in temp_mapping.keys():
+ new_key = context + "-" + key
+ self.mapping[new_key] = dict()
+ self.mapping[new_key]["manifest_key"] = temp_mapping[key]["manifest_key"]
+ self.mapping[new_key]["manifest_context"] = manifest_context
+
+ # platform profile
+ temp = self.read_yaml(os.path.join(inst_dir, "profiles", "host"))
+ # self.logger.info("host manifest file output(platform):%s", temp)
+ for val in temp:
+ try:
+ self.yaml[val[0]["metadata"]["name"]]
+ except KeyError:
+ self.yaml[val[0]["metadata"]["name"]] = val
+
+ for key in host_profile_mapping:
+ if key == val[0]["metadata"]["name"]:
+ for role in host_profile_mapping[key]:
+ context = role
+ manifest_context = val[0]["metadata"]["name"]
+
+ temp_mapping = self.read_mapping(os.path.join(
+ mapping_file_dir, "platform-mapping.json"))
+
+ for key_2 in temp_mapping.keys():
+ new_key = context + "-" + key_2
+ self.mapping[new_key] = dict()
+ self.mapping[new_key]["manifest_key"] = \
+ temp_mapping[key_2]["manifest_key"]
+ self.mapping[new_key]["manifest_context"] = manifest_context
+
+ # storage profile
+ temp = self.read_yaml(os.path.join(inst_dir, "profiles", "host"))
+ for val in temp:
+ try:
+ self.yaml[val[0]["metadata"]["name"]]
+ except KeyError:
+ self.yaml[val[0]["metadata"]["name"]] = val
+
+ for key in host_profile_mapping:
+ if key == val[0]["metadata"]["name"]:
+ for role in host_profile_mapping[key]:
+ context = role
+ manifest_context = val[0]["metadata"]["name"]
+
+ temp_mapping = self.read_mapping(os.path.join(
+ mapping_file_dir, "storage-mapping.json"))
+
+ for key2 in temp_mapping.keys():
+ new_key = context + "-" + key2
+ self.mapping[new_key] = dict()
+ self.mapping[new_key]["manifest_key"] = \
+ temp_mapping[key2]["manifest_key"]
+ self.mapping[new_key]["manifest_context"] = manifest_context
+
+ # network profile
+ temp = self.read_yaml(os.path.join(inst_dir, "networks", "physical"))
+ for vals in temp:
+ for val in vals:
+
+ if val["metadata"]["name"] not in self.yaml.keys():
+ self.yaml[val["metadata"]["name"]] = []
+ self.yaml[val["metadata"]["name"]].append(val)
+ else:
+ self.yaml[val["metadata"]["name"]].append(val)
+
+ context = val["metadata"]["name"]
+ manifest_context = val["metadata"]["name"]
+
+ temp_mapping = self.read_mapping(os.path.join(
+ mapping_file_dir, "network-mapping.json"))
+
+ for key in temp_mapping.keys():
+ new_key = context + "-" + key
+ self.mapping[new_key] = dict()
+ self.mapping[new_key]["manifest_key"] = \
+ temp_mapping[key]["manifest_key"]
+ self.mapping[new_key]["manifest_context"] = manifest_context
+
+ # info profile
+ self.logger.debug("yaml file:%s", self.yaml)
+ temp = self.read_yaml(os.path.join(inst_dir, "profiles", "host"))
+ for val in temp:
+ try:
+ self.yaml[val[0]["metadata"]["name"]]
+ except KeyError:
+ self.yaml[val[0]["metadata"]["name"]] = val
+
+ for key in host_profile_mapping:
+ if key == val[0]["metadata"]["name"]:
+ for role in host_profile_mapping[key]:
+ context = role
+ manifest_context = val[0]["metadata"]["name"]
+
+ temp_mapping = self.read_mapping(
+ os.path.join(mapping_file_dir, "info-mapping.json"))
+
+ for key2 in temp_mapping.keys():
+ new_key = context + "-" + key2
+ self.mapping[new_key] = dict()
+ self.mapping[new_key]["manifest_key"] = \
+ temp_mapping[key2]["manifest_key"]
+ self.mapping[new_key]["manifest_context"] = manifest_context
+
+ self.logger.debug("the autogenrated mapping:%s", self.mapping)
+ self.logger.info("Completed autogenration of mapping")
+
+ def find_vals(self, key, temp_json):
+ """ insert all matching json key-vals in array """
+ # self.logger.info("temp_json value:%s", temp_json)
+ for k, value in temp_json.items():
+ if k == key:
+ if isinstance(value, list):
+ for val in value:
+ self.vals.append(str(val))
+ else:
+ self.vals.append(str(value))
+
+ if isinstance(value, dict):
+ found = self.find_vals(key, value)
+ if found:
+ return True
+
+ if isinstance(value, list):
+ for _, val in enumerate(value):
+ if isinstance(val, str):
+ continue
+ found = self.find_vals(key, val)
+ if found:
+ return True
+ return False
+
+ def find_val(self, role, context, skey):
+ """ find val in manifest """
+
+ # 1. find corresponding manifest context & key
+ # code here
+ key = role + "-" + context + "-" + skey
+ man_con = dict()
+ man_key = dict()
+ self.vals = []
+
+ try:
+ return self.saver[key]
+ except KeyError:
+ # log that the key does not exist in the saver dict.
+ self.logger.info("key: %s doesnt exist in the saved keys, searching manifest")
+
+ try:
+ man_con = self.mapping[key]["manifest_context"]
+ man_key = self.mapping[key]["manifest_key"]
+ except KeyError:
+ self.logger.error("could not find corresponding mapping for key:%s", key)
+ return self.vals
+
+ if man_con == '':
+ self.saver[key] = []
+ return []
+
+ # 2. find values corresponding to the key( by recursing through shortened dict )
+ # code here
+ temp = self.yaml[man_con]
+ # print(man_key,temp)
+ if isinstance(temp, list):
+ temp_json = dict()
+ temp_json[man_con] = temp
+ self.find_vals(man_key, temp_json)
+ else:
+ self.find_vals(man_key, temp)
+
+ if self.vals == []:
+ self.logger.debug(
+ "found nothing for man_con:%s and man_key:%s and key:%s",
+ man_con,
+ man_key,
+ key)
+
+ # 3. return the value
+ self.saver[key] = self.vals
+ return self.vals
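A minimal sketch of how the Manifest class above is meant to be driven, assuming a hypothetical Airship site checkout under /tmp/inst; find_val() composes its lookup key as role + "-" + context + "-" + skey, and the hardware keys are generated under the "global" context:

    import logging
    from manifest import Manifest

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger("validation")

    # inst_dir points at an (assumed) site directory; use the airship mapping files
    MAN = Manifest(inst_dir="/tmp/inst/site/intel-pod10",
                   mapping_file_dir="mapping/airship",
                   logger=LOG)

    # resolves "global-bios_profile-bios_version" via hardware-mapping.json and
    # returns every matching value found in the hardware profile manifests
    print(MAN.find_val("global", "bios_profile", "bios_version"))
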
diff --git a/sdv/docker/sdvconfig/mapping/TripleO/hardware-mapping.json b/sdv/docker/sdvconfig/mapping/TripleO/hardware-mapping.json
new file mode 100644
index 0000000..f060291
--- /dev/null
+++ b/sdv/docker/sdvconfig/mapping/TripleO/hardware-mapping.json
@@ -0,0 +1,104 @@
+{
+ "bios_profile-bios_version":{
+ "manifest_key":""
+ },
+ "bios_profile-bios_mode":{
+ "manifest_key":""
+ },
+ "bios_profile-bootstrap_proto":{
+ "manifest_key":""
+ },
+ "bios_profile-bios_setting":{
+ "manifest_key":""
+ },
+ "bios_profile-hyperthreading_enabled":{
+ "manifest_key":""
+ },
+ "processor_profiles-numa_id":{
+ "manifest_key":""
+ },
+ "processor_profiles-cpus":{
+ "manifest_key":""
+ },
+ "processor_profiles-cpu_cflags":{
+ "manifest_key":""
+ },
+ "processor_profiles-speed":{
+ "manifest_key":""
+ },
+ "processor_profiles-cache_size":{
+ "manifest_key":""
+ },
+ "processor_profiles-model":{
+ "manifest_key":""
+ },
+ "processor_profiles-architecture":{
+ "manifest_key":""
+ },
+ "disk_profiles-address":{
+ "manifest_key":""
+ },
+ "disk_profiles-alias":{
+ "manifest_key":""
+ },
+ "disk_profiles-dev_type":{
+ "manifest_key":""
+ },
+ "disk_profiles-vendor":{
+ "manifest_key":""
+ },
+ "disk_profiles-size":{
+ "manifest_key":""
+ },
+ "disk_profiles-model":{
+ "manifest_key":""
+ },
+ "disk_profiles-logical_name":{
+ "manifest_key":""
+ },
+ "disk_profiles-rotation":{
+ "manifest_key":""
+ },
+ "disk_profiles-bus":{
+ "manifest_key":""
+ },
+ "nic_profiles-address":{
+ "manifest_key":""
+ },
+ "nic_profiles-dev_type":{
+ "manifest_key":""
+ },
+ "nic_profiles-bus":{
+ "manifest_key":""
+ },
+ "nic_profiles-sriov_capable":{
+ "manifest_key":""
+ },
+ "nic_profiles-numa_id":{
+ "manifest_key":""
+ },
+ "hardware_profiles-maufacturer":{
+ "manifest_key":""
+ },
+ "hardware_profiles-model":{
+ "manifest_key":""
+ },
+ "hardware_profiles-generation":{
+ "manifest_key":""
+ },
+ "hardware_profiles-bios_profile":{
+ "manifest_key":""
+ },
+ "hardware_profiles-processor_profile":{
+ "manifest_key":""
+ },
+ "hardware_profiles-memory":{
+ "manifest_key":""
+ },
+ "hardware_profiles-disk_profile":{
+ "manifest_key":""
+ },
+ "hardware_profiles-nics_profile":{
+ "manifest_key":""
+ }
+} \ No newline at end of file
diff --git a/sdv/docker/sdvconfig/mapping/TripleO/info-mapping.json b/sdv/docker/sdvconfig/mapping/TripleO/info-mapping.json
new file mode 100644
index 0000000..c3633f3
--- /dev/null
+++ b/sdv/docker/sdvconfig/mapping/TripleO/info-mapping.json
@@ -0,0 +1,155 @@
+{
+ "management_info-owner":{
+ "manifest_key":""
+ },
+ "management_info-area_name":{
+ "manifest_key":""
+ },
+ "management_info-area_center_name":{
+ "manifest_key":""
+ },
+ "management_info-room_id":{
+ "manifest_key":""
+ },
+ "management_info-city":{
+ "manifest_key":""
+ },
+ "management_info-resource_pool_name":{
+ "manifest_key":""
+ },
+ "ntp_info-primary_ip":{
+ "manifest_key":""
+ },
+ "ntp_info-primary_zone":{
+ "manifest_key":""
+ },
+ "ntp_info-secondary_ip":{
+ "manifest_key":""
+ },
+ "ntp_info-secondary_zone":{
+ "manifest_key":""
+ },
+ "syslog_info-server_ip":{
+ "manifest_key":""
+ },
+ "syslog_info-transport":{
+ "manifest_key":""
+ },
+ "dns_info-domain":{
+ "manifest_key":""
+ },
+ "dns_info.servers-name":{
+ "manifest_key":""
+ },
+ "proxy_info-address":{
+ "manifest_key":""
+ },
+ "proxy_info-port":{
+ "manifest_key":""
+ },
+ "proxy_info-user":{
+ "manifest_key":""
+ },
+ "proxy_info-password":{
+ "manifest_key":""
+ },
+ "vim_info-vim_id":{
+ "manifest_key":""
+ },
+ "vim_info-vendor":{
+ "manifest_key":""
+ },
+ "vim_info-version":{
+ "manifest_key":""
+ },
+ "vim_info-installer":{
+ "manifest_key":""
+ },
+ "vim_info-deployment_style":{
+ "manifest_key":""
+ },
+ "vim_info-container_orchestrator":{
+ "manifest_key":""
+ },
+ "vim_info-storage_type":{
+ "manifest_key":""
+ },
+ "deployment_info-introspection":{
+ "manifest_key":""
+ },
+ "deployment_info-deployment_type":{
+ "manifest_key":""
+ },
+ "deployment_info-installer_used":{
+ "manifest_key":""
+ },
+ "deployment_info-workload_vnf":{
+ "manifest_key":""
+ },
+ "deployment_info-workload_cnf":{
+ "manifest_key":""
+ },
+ "deployment_info-sdn_er":{
+ "manifest_key":""
+ },
+ "deployment_info-sdn_er_version":{
+ "manifest_key":""
+ },
+ "deployment_info-sdn_er_nbapps":{
+ "manifest_key":""
+ },
+ "deployment_info-vnfm":{
+ "manifest_key":""
+ },
+ "deployment_info-vnfm_version":{
+ "manifest_key":""
+ },
+ "deployment_info-data_plane_used":{
+ "manifest_key":""
+ },
+ "deployment_info-external_storage_cluster":{
+ "manifest_key":""
+ },
+ "deployment_info-cpu_allocation_ratio":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info-type":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info-name":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info-username":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info-password":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info-key":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info.host_ips-ip":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info.pools-key":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info.pools-value":{
+ "manifest_key":""
+ },
+ "jumphost_info-ip":{
+ "manifest_key":""
+ },
+ "jumphost_info-name":{
+ "manifest_key":""
+ },
+ "rack_info-rack_id":{
+ "manifest_key":""
+ },
+ "rack_info.rack_details-rack_name":{
+ "manifest_key":""
+ },
+ "rack_info.rack_details-rack_description":{
+ "manifest_key":""
+ }
+} \ No newline at end of file
diff --git a/sdv/docker/sdvconfig/mapping/TripleO/network-mapping.json b/sdv/docker/sdvconfig/mapping/TripleO/network-mapping.json
new file mode 100644
index 0000000..f8edd27
--- /dev/null
+++ b/sdv/docker/sdvconfig/mapping/TripleO/network-mapping.json
@@ -0,0 +1,110 @@
+{
+ "network_link-name":{
+ "manifest_key":""
+ },
+ "network_link-bonding_mode":{
+ "manifest_key":""
+ },
+ "network_link-mtu":{
+ "manifest_key":""
+ },
+ "network_link-linkspeed":{
+ "manifest_key":""
+ },
+ "network_link-trunking_mode":{
+ "manifest_key":""
+ },
+ "network_link-trunking_default_nw":{
+ "manifest_key":""
+ },
+ "network_link.metadata-key":{
+ "manifest_key":""
+ },
+ "network_link.metadata-value":{
+ "manifest_key":""
+ },
+ "network_link.members-name":{
+ "manifest_key":""
+ },
+ "network_link.members-type":{
+ "manifest_key":""
+ },
+ "network_link-vid":{
+ "manifest_key":""
+ },
+ "network_link-vf_count":{
+ "manifest_key":""
+ },
+ "networks-name":{
+ "manifest_key":""
+ },
+ "networks.vips-name":{
+ "manifest_key":""
+ },
+ "networks.vips-ip":{
+ "manifest_key":""
+ },
+ "networks-tunnel_type":{
+ "manifest_key":""
+ },
+ "networks-tunnel_id":{
+ "manifest_key":""
+ },
+ "networks-tunnel_id_range":{
+ "manifest_key":""
+ },
+ "networks-mtu":{
+ "manifest_key":""
+ },
+ "networks-routedomain":{
+ "manifest_key":""
+ },
+ "networks-cidr":{
+ "manifest_key":""
+ },
+ "networks-dns":{
+ "manifest_key":""
+ },
+ "networks.routes-subnet":{
+ "manifest_key":""
+ },
+ "networks.routes-gateway":{
+ "manifest_key":""
+ },
+ "networks.routes-metric":{
+ "manifest_key":""
+ },
+ "networks.routes-routedomain":{
+ "manifest_key":""
+ },
+ "networks.allocation_pool-type":{
+ "manifest_key":""
+ },
+ "networks.allocation_pool-start":{
+ "manifest_key":""
+ },
+ "networks.allocation_pool-end":{
+ "manifest_key":""
+ },
+ "networks-v6_cidr":{
+ "manifest_key":""
+ },
+ "networks.v6_allocation_pool-type":{
+ "manifest_key":""
+ },
+ "networks.v6_allocation_pool-start":{
+ "manifest_key":""
+ },
+ "networks.v6_allocation_pool-end":{
+ "manifest_key":""
+ },
+ "networks.physical_networks-name":{
+ "manifest_key":""
+ },
+ "networks.physical_networks-cidr":{
+ "manifest_key":""
+ },
+ "networks.physical_networks-type":{
+ "manifest_key":""
+ }
+} \ No newline at end of file
diff --git a/sdv/docker/sdvconfig/mapping/TripleO/platform-mapping.json b/sdv/docker/sdvconfig/mapping/TripleO/platform-mapping.json
new file mode 100644
index 0000000..1628833
--- /dev/null
+++ b/sdv/docker/sdvconfig/mapping/TripleO/platform-mapping.json
@@ -0,0 +1,59 @@
+{
+ "platform_profiles-os":{
+ "manifest_key":""
+ },
+ "platform_profiles-rt_kvm":{
+ "manifest_key":""
+ },
+ "platform_profiles-kernel_version":{
+ "manifest_key":""
+ },
+ "platform_profiles-kernel_parameters":{
+ "manifest_key":""
+ },
+ "platform_profiles-isolated_cpus":{
+ "manifest_key":""
+ },
+ "platform_profiles-vnf_cores":{
+ "manifest_key":""
+ },
+ "platform_profiles-os_reserverd_cores":{
+ "manifest_key":""
+ },
+ "platform_profiles-hugepage_count":{
+ "manifest_key":""
+ },
+ "platform_profiles-hugepage_size":{
+ "manifest_key":""
+ },
+ "platform_profiles-iommu":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_daemon_cores":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_type":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_uio_driver":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_mem_channels":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_socket_memory":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_pmd_cores":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_dpdk_lcores":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_dpdk_rxqs":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_options":{
+ "manifest_key":""
+ }
+} \ No newline at end of file
diff --git a/sdv/docker/sdvconfig/mapping/TripleO/software-mapping.json b/sdv/docker/sdvconfig/mapping/TripleO/software-mapping.json
new file mode 100644
index 0000000..704b00e
--- /dev/null
+++ b/sdv/docker/sdvconfig/mapping/TripleO/software-mapping.json
@@ -0,0 +1,20 @@
+{
+ "control-infra_sw_profiles-name":{
+ "manifest_key":""
+ },
+ "control-infra_sw_profiles-version":{
+ "manifest_key":""
+ },
+ "control-openstack_sw_profiles-name":{
+ "manifest_key":""
+ },
+ "control-openstack_sw_profiles-version":{
+ "manifest_key":""
+ },
+ "control-undercloud_sw_profiles-name":{
+ "manifest_key":""
+ },
+ "control-undercloud_sw_profiles-version":{
+ "manifest_key":""
+ }
+} \ No newline at end of file
diff --git a/sdv/docker/sdvconfig/mapping/TripleO/storage-mapping.json b/sdv/docker/sdvconfig/mapping/TripleO/storage-mapping.json
new file mode 100644
index 0000000..ab7acef
--- /dev/null
+++ b/sdv/docker/sdvconfig/mapping/TripleO/storage-mapping.json
@@ -0,0 +1,29 @@
+{
+ "storage_profile-bootdrive":{
+ "manifest_key":""
+ },
+ "storage_profile.bd_partitions-name":{
+ "manifest_key":""
+ },
+ "storage_profile.bd_partitions-size":{
+ "manifest_key":""
+ },
+ "storage_profile.bd_partitions-bootable":{
+ "manifest_key":""
+ },
+ "storage_profile.bd_partitions.filesystem-mountpoint":{
+ "manifest_key":""
+ },
+ "storage_profile.bd_partitions.filesystem-fstype":{
+ "manifest_key":""
+ },
+ "storage_profile.bd_partitions.filesystem-mount_options":{
+ "manifest_key":""
+ },
+ "storage_profile.data_devices-name":{
+ "manifest_key":""
+ },
+ "storage_profile.journal_devices-name":{
+ "manifest_key":""
+ }
+} \ No newline at end of file
diff --git a/sdv/docker/sdvconfig/mapping/airship/hardware-mapping.json b/sdv/docker/sdvconfig/mapping/airship/hardware-mapping.json
new file mode 100644
index 0000000..e083347
--- /dev/null
+++ b/sdv/docker/sdvconfig/mapping/airship/hardware-mapping.json
@@ -0,0 +1,107 @@
+{
+ "bios_profile-bios_version":{
+ "manifest_key":"bios_version"
+ },
+ "bios_profile-bios_mode":{
+ "manifest_key":"boot_mode"
+ },
+ "bios_profile-bootstrap_proto":{
+ "manifest_key":"bootstrap_protocol"
+ },
+ "bios_profile-bios_setting":{
+ "manifest_key":""
+ },
+ "bios_profile-hyperthreading_enabled":{
+ "manifest_key":""
+ },
+ "processor_profiles-numa_id":{
+ "manifest_key":""
+ },
+ "processor_profiles-cpus":{
+ "manifest_key":""
+ },
+ "processor_profiles-cpu_cflags":{
+ "manifest_key":""
+ },
+ "processor_profiles-speed":{
+ "manifest_key":""
+ },
+ "processor_profiles-cache_size":{
+ "manifest_key":""
+ },
+ "processor_profiles-model":{
+ "manifest_key":""
+ },
+ "processor_profiles-architecture":{
+ "manifest_key":""
+ },
+ "disks_profiles-address":{
+ "manifest_key":"address"
+ },
+ "disks_profiles-alias":{
+ "manifest_key":""
+ },
+ "disks_profiles-dev_type":{
+ "manifest_key":"dev_type"
+ },
+ "disks_profiles-vendor":{
+ "manifest_key":""
+ },
+ "disks_profiles-size":{
+ "manifest_key":""
+ },
+ "disks_profiles-model":{
+ "manifest_key":""
+ },
+ "disks_profiles-logical_name":{
+ "manifest_key":""
+ },
+ "disks_profiles-rotation":{
+ "manifest_key":""
+ },
+ "disks_profiles-bus":{
+ "manifest_key":""
+ },
+ "nic_profiles-address":{
+ "manifest_key":"address"
+ },
+ "nic_profiles-dev_type":{
+ "manifest_key":"dev_type"
+ },
+ "nic_profiles-bus":{
+ "manifest_key":"bus_type"
+ },
+ "nic_profiles-sriov_capable":{
+ "manifest_key":""
+ },
+ "nic_profiles-numa_id":{
+ "manifest_key":""
+ },
+ "hardware_profiles-maufacturer":{
+ "manifest_key":""
+ },
+ "hardware_profiles-model":{
+ "manifest_key":""
+ },
+ "hardware_profiles-generation":{
+ "manifest_key":"generation"
+ },
+ "hardware_profiles-bios_profile":{
+ "manifest_key":""
+ },
+ "hardware_profiles-processor_profile":{
+ "manifest_key":""
+ },
+ "hardware_profiles-memory":{
+ "manifest_key":"memory"
+ },
+ "hardware_profiles-manufacturer":{
+ "manifest_key":"vendor"
+ },
+ "hardware_profiles-disk_profile":{
+ "manifest_key":""
+ },
+ "hardware_profiles-nics_profile":{
+ "manifest_key":""
+ }
+} \ No newline at end of file
diff --git a/sdv/docker/sdvconfig/mapping/airship/info-mapping.json b/sdv/docker/sdvconfig/mapping/airship/info-mapping.json
new file mode 100644
index 0000000..5a2121a
--- /dev/null
+++ b/sdv/docker/sdvconfig/mapping/airship/info-mapping.json
@@ -0,0 +1,176 @@
+{
+ "management_info-owner":{
+ "manifest_key":""
+ },
+ "management_info-area_name":{
+ "manifest_key":""
+ },
+ "management_info-area_center_name":{
+ "manifest_key":""
+ },
+ "management_info-room_id":{
+ "manifest_key":""
+ },
+ "management_info-city":{
+ "manifest_key":""
+ },
+ "management_info-resource_pool_name":{
+ "manifest_key":""
+ },
+ "ntp_info-primary_ip":{
+ "manifest_key":""
+ },
+ "ntp_info-primary_zone":{
+ "manifest_key":""
+ },
+ "ntp_info-secondary_ip":{
+ "manifest_key":""
+ },
+ "ntp_info-secondary_zone":{
+ "manifest_key":""
+ },
+ "syslog_info-server_ip":{
+ "manifest_key":""
+ },
+ "syslog_info-transport":{
+ "manifest_key":""
+ },
+ "dns_info-domain":{
+ "manifest_key":""
+ },
+ "dns_info.servers-name":{
+ "manifest_key":""
+ },
+ "proxy_info-address":{
+ "manifest_key":""
+ },
+ "proxy_info-port":{
+ "manifest_key":""
+ },
+ "proxy_info-user":{
+ "manifest_key":""
+ },
+ "proxy_info-password":{
+ "manifest_key":""
+ },
+ "vim_info-vim_name":{
+ "manifest_key":""
+ },
+ "vim_info-vim_id":{
+ "manifest_key":""
+ },
+ "vim_info-vendor":{
+ "manifest_key":""
+ },
+ "vim_info-version":{
+ "manifest_key":""
+ },
+ "vim_info-installer":{
+ "manifest_key":""
+ },
+ "vim_info-deployment_style":{
+ "manifest_key":""
+ },
+ "vim_info-container_orchestrator":{
+ "manifest_key":""
+ },
+ "vim_info-storage_type":{
+ "manifest_key":""
+ },
+ "deployment_info-introspection":{
+ "manifest_key":""
+ },
+ "deployment_info-deployment_type":{
+ "manifest_key":""
+ },
+ "deployment_info-installer_used":{
+ "manifest_key":""
+ },
+ "deployment_info-workload_vnf":{
+ "manifest_key":""
+ },
+ "deployment_info-workload_cnf":{
+ "manifest_key":""
+ },
+ "deployment_info-sdn_er":{
+ "manifest_key":""
+ },
+ "deployment_info-sdn_er_version":{
+ "manifest_key":""
+ },
+ "deployment_info-sdn_er_nbapps":{
+ "manifest_key":""
+ },
+ "deployment_info-vnfm":{
+ "manifest_key":""
+ },
+ "deployment_info-vnfm_version":{
+ "manifest_key":""
+ },
+ "deployment_info-data_plane_used":{
+ "manifest_key":""
+ },
+ "deployment_info-external_storage_cluster":{
+ "manifest_key":""
+ },
+ "deployment_info-cpu_allocation_ratio":{
+ "manifest_key":""
+ },
+ "deployment_info-high_availability":{
+ "manifest_key":""
+ },
+ "deployment_info-sdn_controller":{
+ "manifest_key":""
+ },
+ "deployment_info-sdn_controller_version":{
+ "manifest_key":""
+ },
+ "deployment_info-sdn_controller_nbapps":{
+ "manifest_key":""
+ },
+ "deployment_info-ironic_deploy_interface":{
+ "manifest_key":""
+ },
+ "deployment_info-bl_str_connect_method":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info-type":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info-name":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info-username":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info-password":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info-key":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info.host_ips-ip":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info.pools-key":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info.pools-value":{
+ "manifest_key":""
+ },
+ "jumphost_info-ip":{
+ "manifest_key":""
+ },
+ "jumphost_info-name":{
+ "manifest_key":""
+ },
+ "rack_info-rack_id":{
+ "manifest_key":""
+ },
+ "rack_info.rack_details-rack_name":{
+ "manifest_key":""
+ },
+ "rack_info.rack_details-rack_description":{
+ "manifest_key":""
+ }
+} \ No newline at end of file
diff --git a/sdv/docker/sdvconfig/mapping/airship/network-mapping.json b/sdv/docker/sdvconfig/mapping/airship/network-mapping.json
new file mode 100644
index 0000000..71e6660
--- /dev/null
+++ b/sdv/docker/sdvconfig/mapping/airship/network-mapping.json
@@ -0,0 +1,110 @@
+{
+ "network_link-name":{
+ "manifest_key":"name"
+ },
+ "network_link-bonding_mode":{
+ "manifest_key":"mode"
+ },
+ "network_link-mtu":{
+ "manifest_key":"mtu"
+ },
+ "network_link-linkspeed":{
+ "manifest_key":"linkspeed"
+ },
+ "network_link-trunking_mode":{
+ "manifest_key":"mode"
+ },
+ "network_link-trunking_default_nw":{
+ "manifest_key":"default_network"
+ },
+ "network_link.metadata-key":{
+ "manifest_key":""
+ },
+ "network_link.metadata-value":{
+ "manifest_key":""
+ },
+ "network_link.members-name":{
+ "manifest_key":""
+ },
+ "network_link.members-type":{
+ "manifest_key":""
+ },
+ "network_link-vid":{
+ "manifest_key":""
+ },
+ "network_link-vf_count":{
+ "manifest_key":""
+ },
+ "networks-name":{
+ "manifest_key":"name"
+ },
+ "networks.vips-name":{
+ "manifest_key":""
+ },
+ "networks.vips-ip":{
+ "manifest_key":""
+ },
+ "networks-tunnel_type":{
+ "manifest_key":""
+ },
+ "networks-tunnel_id":{
+ "manifest_key":""
+ },
+ "networks-tunnel_id_range":{
+ "manifest_key":""
+ },
+ "networks-mtu":{
+ "manifest_key":"mtu"
+ },
+ "networks-routedomain":{
+ "manifest_key":""
+ },
+ "networks-cidr":{
+ "manifest_key":"cidr"
+ },
+ "networks-dns":{
+ "manifest_key":""
+ },
+ "networks.routes-subnet":{
+ "manifest_key":"subnet"
+ },
+ "networks.routes-gateway":{
+ "manifest_key":"gateway"
+ },
+ "networks.routes-metric":{
+ "manifest_key":"metric"
+ },
+ "networks.routes-routedomain":{
+ "manifest_key":""
+ },
+ "networks.allocation_pools-type":{
+ "manifest_key":"type"
+ },
+ "networks.allocation_pools-start":{
+ "manifest_key":"start"
+ },
+ "networks.allocation_pools-end":{
+ "manifest_key":"end"
+ },
+ "networks-v6_cidr":{
+ "manifest_key":"cidr"
+ },
+ "networks.v6_allocation_pools-type":{
+ "manifest_key":""
+ },
+ "networks.v6_allocation_pools-start":{
+ "manifest_key":""
+ },
+ "networks.v6_allocation_pools-end":{
+ "manifest_key":""
+ },
+ "networks.physical_networks-name":{
+ "manifest_key":""
+ },
+ "networks.physical_networks-cidr":{
+ "manifest_key":""
+ },
+ "networks.physical_networks-type":{
+ "manifest_key":""
+ }
+} \ No newline at end of file
diff --git a/sdv/docker/sdvconfig/mapping/airship/platform-mapping.json b/sdv/docker/sdvconfig/mapping/airship/platform-mapping.json
new file mode 100644
index 0000000..c7ae374
--- /dev/null
+++ b/sdv/docker/sdvconfig/mapping/airship/platform-mapping.json
@@ -0,0 +1,59 @@
+{
+ "platform_profiles-os":{
+ "manifest_key":"image"
+ },
+ "platform_profiles-rt_kvm":{
+ "manifest_key":""
+ },
+ "platform_profiles-kernel_version":{
+ "manifest_key":"kernel_package"
+ },
+ "platform_profiles-kernel_parameters":{
+ "manifest_key":""
+ },
+ "platform_profiles-isolated_cpus":{
+ "manifest_key":""
+ },
+ "platform_profiles-vnf_cores":{
+ "manifest_key":"kvm"
+ },
+ "platform_profiles-os_reserverd_cores":{
+ "manifest_key":""
+ },
+ "platform_profiles-hugepage_count":{
+ "manifest_key":"count"
+ },
+ "platform_profiles-hugepage_size":{
+ "manifest_key":"size"
+ },
+ "platform_profiles-iommu":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_daemon_cores":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_type":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_uio_driver":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_mem_channels":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_socket_memory":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_pmd_cores":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_dpdk_lcores":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_dpdk_rxqs":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_options":{
+ "manifest_key":""
+ }
+} \ No newline at end of file
diff --git a/sdv/docker/sdvconfig/mapping/airship/software-mapping.json b/sdv/docker/sdvconfig/mapping/airship/software-mapping.json
new file mode 100644
index 0000000..21d56a4
--- /dev/null
+++ b/sdv/docker/sdvconfig/mapping/airship/software-mapping.json
@@ -0,0 +1,20 @@
+{
+ "infra_sw_profiles-name":{
+ "manifest_key":""
+ },
+ "infra_sw_profiles-version":{
+ "manifest_key":""
+ },
+ "openstack_sw_profiles-name":{
+ "manifest_key":""
+ },
+ "openstack_sw_profiles-version":{
+ "manifest_key":""
+ },
+ "undercloud_sw_profiles-name":{
+ "manifest_key":""
+ },
+ "undercloud_sw_profiles-version":{
+ "manifest_key":""
+ }
+} \ No newline at end of file
diff --git a/sdv/docker/sdvconfig/mapping/airship/storage-mapping.json b/sdv/docker/sdvconfig/mapping/airship/storage-mapping.json
new file mode 100644
index 0000000..16f737c
--- /dev/null
+++ b/sdv/docker/sdvconfig/mapping/airship/storage-mapping.json
@@ -0,0 +1,53 @@
+{
+ "storage_profile-bootdrive":{
+ "manifest_key":"bootdrive"
+ },
+ "storage_profile.bd_partitions-name":{
+ "manifest_key":"name"
+ },
+ "storage_profile.bd_partitions-size":{
+ "manifest_key":"size"
+ },
+ "storage_profile.bd_partitions-bootable":{
+ "manifest_key":"bootable"
+ },
+ "storage_profile.bd_partitions.filesystem-mountpoint":{
+ "manifest_key":"mountpoint"
+ },
+ "storage_profile.bd_partitions.filesystem-fstype":{
+ "manifest_key":"fstype"
+ },
+ "storage_profile.bd_partitions.filesystem-mount_options":{
+ "manifest_key":"mount_options"
+ },
+ "storage_profile.devices-name":{
+ "manifest_key":"name"
+ },
+ "storage_profile.devices-size":{
+ "manifest_key":"size"
+ },
+ "storage_profile.devices-mountpoint":{
+ "manifest_key":"mountpoint"
+ },
+ "storage_profile.devices-fstype":{
+ "manifest_key":"fstype"
+ },
+ "storage_profile.data_devices-name":{
+ "manifest_key":"name"
+ },
+ "storage_profile.data_devices-size":{
+ "manifest_key":"size"
+ },
+ "storage_profile.data_devices-mountpoint":{
+ "manifest_key":"mountpoint"
+ },
+ "storage_profile.data_devices-fstype":{
+ "manifest_key":"fstype"
+ },
+ "storage_profile.devices-mount_options":{
+ "manifest_key":"mount_options"
+ },
+ "storage_profile.journal_devices-name":{
+ "manifest_key":"name"
+ }
+} \ No newline at end of file
diff --git a/sdv/docker/sdvconfig/mapping/template/hardware-mapping.json b/sdv/docker/sdvconfig/mapping/template/hardware-mapping.json
new file mode 100644
index 0000000..f060291
--- /dev/null
+++ b/sdv/docker/sdvconfig/mapping/template/hardware-mapping.json
@@ -0,0 +1,104 @@
+{
+ "bios_profile-bios_version":{
+ "manifest_key":""
+ },
+ "bios_profile-bios_mode":{
+ "manifest_key":""
+ },
+ "bios_profile-bootstrap_proto":{
+ "manifest_key":""
+ },
+ "bios_profile-bios_setting":{
+ "manifest_key":""
+ },
+ "bios_profile-hyperthreading_enabled":{
+ "manifest_key":""
+ },
+ "processor_profiles-numa_id":{
+ "manifest_key":""
+ },
+ "processor_profiles-cpus":{
+ "manifest_key":""
+ },
+ "processor_profiles-cpu_cflags":{
+ "manifest_key":""
+ },
+ "processor_profiles-speed":{
+ "manifest_key":""
+ },
+ "processor_profiles-cache_size":{
+ "manifest_key":""
+ },
+ "processor_profiles-model":{
+ "manifest_key":""
+ },
+ "processor_profiles-architecture":{
+ "manifest_key":""
+ },
+ "disk_profiles-address":{
+ "manifest_key":""
+ },
+ "disk_profiles-alias":{
+ "manifest_key":""
+ },
+ "disk_profiles-dev_type":{
+ "manifest_key":""
+ },
+ "disk_profiles-vendor":{
+ "manifest_key":""
+ },
+ "disk_profiles-size":{
+ "manifest_key":""
+ },
+ "disk_profiles-model":{
+ "manifest_key":""
+ },
+ "disk_profiles-logical_name":{
+ "manifest_key":""
+ },
+ "disk_profiles-rotation":{
+ "manifest_key":""
+ },
+ "disk_profiles-bus":{
+ "manifest_key":""
+ },
+ "nic_profiles-address":{
+ "manifest_key":""
+ },
+ "nic_profiles-dev_type":{
+ "manifest_key":""
+ },
+ "nic_profiles-bus":{
+ "manifest_key":""
+ },
+ "nic_profiles-sriov_capable":{
+ "manifest_key":""
+ },
+ "nic_profiles-numa_id":{
+ "manifest_key":""
+ },
+ "hardware_profiles-maufacturer":{
+ "manifest_key":""
+ },
+ "hardware_profiles-model":{
+ "manifest_key":""
+ },
+ "hardware_profiles-generation":{
+ "manifest_key":""
+ },
+ "hardware_profiles-bios_profile":{
+ "manifest_key":""
+ },
+ "hardware_profiles-processor_profile":{
+ "manifest_key":""
+ },
+ "hardware_profiles-memory":{
+ "manifest_key":""
+ },
+ "hardware_profiles-disk_profile":{
+ "manifest_key":""
+ },
+ "hardware_profiles-nics_profile":{
+ "manifest_key":""
+ }
+} \ No newline at end of file
diff --git a/sdv/docker/sdvconfig/mapping/template/info-mapping.json b/sdv/docker/sdvconfig/mapping/template/info-mapping.json
new file mode 100644
index 0000000..c3633f3
--- /dev/null
+++ b/sdv/docker/sdvconfig/mapping/template/info-mapping.json
@@ -0,0 +1,155 @@
+{
+ "management_info-owner":{
+ "manifest_key":""
+ },
+ "management_info-area_name":{
+ "manifest_key":""
+ },
+ "management_info-area_center_name":{
+ "manifest_key":""
+ },
+ "management_info-room_id":{
+ "manifest_key":""
+ },
+ "management_info-city":{
+ "manifest_key":""
+ },
+ "management_info-resource_pool_name":{
+ "manifest_key":""
+ },
+ "ntp_info-primary_ip":{
+ "manifest_key":""
+ },
+ "ntp_info-primary_zone":{
+ "manifest_key":""
+ },
+ "ntp_info-secondary_ip":{
+ "manifest_key":""
+ },
+ "ntp_info-secondary_zone":{
+ "manifest_key":""
+ },
+ "syslog_info-server_ip":{
+ "manifest_key":""
+ },
+ "syslog_info-transport":{
+ "manifest_key":""
+ },
+ "dns_info-domain":{
+ "manifest_key":""
+ },
+ "dns_info.servers-name":{
+ "manifest_key":""
+ },
+ "proxy_info-address":{
+ "manifest_key":""
+ },
+ "proxy_info-port":{
+ "manifest_key":""
+ },
+ "proxy_info-user":{
+ "manifest_key":""
+ },
+ "proxy_info-password":{
+ "manifest_key":""
+ },
+ "vim_info-vim_id":{
+ "manifest_key":""
+ },
+ "vim_info-vendor":{
+ "manifest_key":""
+ },
+ "vim_info-version":{
+ "manifest_key":""
+ },
+ "vim_info-installer":{
+ "manifest_key":""
+ },
+ "vim_info-deployment_style":{
+ "manifest_key":""
+ },
+ "vim_info-container_orchestrator":{
+ "manifest_key":""
+ },
+ "vim_info-storage_type":{
+ "manifest_key":""
+ },
+ "deployment_info-introspection":{
+ "manifest_key":""
+ },
+ "deployment_info-deployment_type":{
+ "manifest_key":""
+ },
+ "deployment_info-installer_used":{
+ "manifest_key":""
+ },
+ "deployment_info-workload_vnf":{
+ "manifest_key":""
+ },
+ "deployment_info-workload_cnf":{
+ "manifest_key":""
+ },
+ "deployment_info-sdn_er":{
+ "manifest_key":""
+ },
+ "deployment_info-sdn_er_version":{
+ "manifest_key":""
+ },
+ "deployment_info-sdn_er_nbapps":{
+ "manifest_key":""
+ },
+ "deployment_info-vnfm":{
+ "manifest_key":""
+ },
+ "deployment_info-vnfm_version":{
+ "manifest_key":""
+ },
+ "deployment_info-data_plane_used":{
+ "manifest_key":""
+ },
+ "deployment_info-external_storage_cluster":{
+ "manifest_key":""
+ },
+ "deployment_info-cpu_allocation_ratio":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info-type":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info-name":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info-username":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info-password":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info-key":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info.host_ips-ip":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info.pools-key":{
+ "manifest_key":""
+ },
+ "deployment_info.es_cluster_info.pools-value":{
+ "manifest_key":""
+ },
+ "jumphost_info-ip":{
+ "manifest_key":""
+ },
+ "jumphost_info-name":{
+ "manifest_key":""
+ },
+ "rack_info-rack_id":{
+ "manifest_key":""
+ },
+ "rack_info.rack_details-rack_name":{
+ "manifest_key":""
+ },
+ "rack_info.rack_details-rack_description":{
+ "manifest_key":""
+ }
+} \ No newline at end of file
diff --git a/sdv/docker/sdvconfig/mapping/template/network-mapping.json b/sdv/docker/sdvconfig/mapping/template/network-mapping.json
new file mode 100644
index 0000000..f8edd27
--- /dev/null
+++ b/sdv/docker/sdvconfig/mapping/template/network-mapping.json
@@ -0,0 +1,110 @@
+{
+ "network_link-name":{
+ "manifest_key":""
+ },
+ "network_link-bonding_mode":{
+ "manifest_key":""
+ },
+ "network_link-mtu":{
+ "manifest_key":""
+ },
+ "network_link-linkspeed":{
+ "manifest_key":""
+ },
+ "network_link-trunking_mode":{
+ "manifest_key":""
+ },
+ "network_link-trunking_default_nw":{
+ "manifest_key":""
+ },
+ "network_link.metadata-key":{
+ "manifest_key":""
+ },
+ "network_link.metadata-value":{
+ "manifest_key":""
+ },
+ "network_link.members-name":{
+ "manifest_key":""
+ },
+ "network_link.members-type":{
+ "manifest_key":""
+ },
+ "network_link-vid":{
+ "manifest_key":""
+ },
+ "network_link-vf_count":{
+ "manifest_key":""
+ },
+ "networks-name":{
+ "manifest_key":""
+ },
+ "networks.vips-name":{
+ "manifest_key":""
+ },
+ "networks.vips-ip":{
+ "manifest_key":""
+ },
+ "networks-tunnel_type":{
+ "manifest_key":""
+ },
+ "networks-tunnel_id":{
+ "manifest_key":""
+ },
+ "networks-tunnel_id_range":{
+ "manifest_key":""
+ },
+ "networks-mtu":{
+ "manifest_key":""
+ },
+ "networks-routedomain":{
+ "manifest_key":""
+ },
+ "networks-cidr":{
+ "manifest_key":""
+ },
+ "networks-dns":{
+ "manifest_key":""
+ },
+ "networks.routes-subnet":{
+ "manifest_key":""
+ },
+ "networks.routes-gateway":{
+ "manifest_key":""
+ },
+ "networks.routes-metric":{
+ "manifest_key":""
+ },
+ "networks.routes-routedomain":{
+ "manifest_key":""
+ },
+ "networks.allocation_pool-type":{
+ "manifest_key":""
+ },
+ "networks.allocation_pool-start":{
+ "manifest_key":""
+ },
+ "networks.allocation_pool-end":{
+ "manifest_key":""
+ },
+ "networks-v6_cidr":{
+ "manifest_key":""
+ },
+ "networks.v6_allocation_pool-type":{
+ "manifest_key":""
+ },
+ "networks.v6_allocation_pool-start":{
+ "manifest_key":""
+ },
+ "networks.v6_allocation_pool-end":{
+ "manifest_key":""
+ },
+ "networks.physical_networks-name":{
+ "manifest_key":""
+ },
+ "networks.physical_networks-cidr":{
+ "manifest_key":""
+ },
+ "networks.physical_networks-type":{
+ "manifest_key":""
+ }
+} \ No newline at end of file
diff --git a/sdv/docker/sdvconfig/mapping/template/platform-mapping.json b/sdv/docker/sdvconfig/mapping/template/platform-mapping.json
new file mode 100644
index 0000000..1628833
--- /dev/null
+++ b/sdv/docker/sdvconfig/mapping/template/platform-mapping.json
@@ -0,0 +1,59 @@
+{
+ "platform_profiles-os":{
+ "manifest_key":""
+ },
+ "platform_profiles-rt_kvm":{
+ "manifest_key":""
+ },
+ "platform_profiles-kernel_version":{
+ "manifest_key":""
+ },
+ "platform_profiles-kernel_parameters":{
+ "manifest_key":""
+ },
+ "platform_profiles-isolated_cpus":{
+ "manifest_key":""
+ },
+ "platform_profiles-vnf_cores":{
+ "manifest_key":""
+ },
+ "platform_profiles-os_reserverd_cores":{
+ "manifest_key":""
+ },
+ "platform_profiles-hugepage_count":{
+ "manifest_key":""
+ },
+ "platform_profiles-hugepage_size":{
+ "manifest_key":""
+ },
+ "platform_profiles-iommu":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_daemon_cores":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_type":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_uio_driver":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_mem_channels":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_socket_memory":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_pmd_cores":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_dpdk_lcores":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_dpdk_rxqs":{
+ "manifest_key":""
+ },
+ "platform_profiles-vswitch_options":{
+ "manifest_key":""
+ }
+} \ No newline at end of file
diff --git a/sdv/docker/sdvconfig/mapping/template/software-mapping.json b/sdv/docker/sdvconfig/mapping/template/software-mapping.json
new file mode 100644
index 0000000..704b00e
--- /dev/null
+++ b/sdv/docker/sdvconfig/mapping/template/software-mapping.json
@@ -0,0 +1,20 @@
+{
+ "control-infra_sw_profiles-name":{
+ "manifest_key":""
+ },
+ "control-infra_sw_profiles-version":{
+ "manifest_key":""
+ },
+ "control-openstack_sw_profiles-name":{
+ "manifest_key":""
+ },
+ "control-openstack_sw_profiles-version":{
+ "manifest_key":""
+ },
+ "control-undercloud_sw_profiles-name":{
+ "manifest_key":""
+ },
+ "control-undercloud_sw_profiles-version":{
+ "manifest_key":""
+ }
+} \ No newline at end of file
diff --git a/sdv/docker/sdvconfig/mapping/template/storage-mapping.json b/sdv/docker/sdvconfig/mapping/template/storage-mapping.json
new file mode 100644
index 0000000..ab7acef
--- /dev/null
+++ b/sdv/docker/sdvconfig/mapping/template/storage-mapping.json
@@ -0,0 +1,29 @@
+{
+ "storage_profile-bootdrive":{
+ "manifest_key":""
+ },
+ "storage_profile.bd_partitions-name":{
+ "manifest_key":""
+ },
+ "storage_profile.bd_partitions-size":{
+ "manifest_key":""
+ },
+ "storage_profile.bd_partitions-bootable":{
+ "manifest_key":""
+ },
+ "storage_profile.bd_partitions.filesystem-mountpoint":{
+ "manifest_key":""
+ },
+ "storage_profile.bd_partitions.filesystem-fstype":{
+ "manifest_key":""
+ },
+ "storage_profile.bd_partitions.filesystem-mount_options":{
+ "manifest_key":""
+ },
+ "storage_profile.data_devices-name":{
+ "manifest_key":""
+ },
+ "storage_profile.journal_devices-name":{
+ "manifest_key":""
+ }
+} \ No newline at end of file
diff --git a/sdv/docker/sdvconfig/requirements.txt b/sdv/docker/sdvconfig/requirements.txt
new file mode 100644
index 0000000..c3368df
--- /dev/null
+++ b/sdv/docker/sdvconfig/requirements.txt
@@ -0,0 +1 @@
+tornado
diff --git a/sdv/docker/sdvconfig/server.py b/sdv/docker/sdvconfig/server.py
new file mode 100644
index 0000000..86aae60
--- /dev/null
+++ b/sdv/docker/sdvconfig/server.py
@@ -0,0 +1,177 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2020 Ashwin Nayak
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=abstract-method, too-many-statements
+
+""" http server code """
+
+import os
+import json
+import logging
+from tornado.web import Application, RequestHandler
+import tornado.concurrent
+import tornado.httpserver
+import tornado.ioloop
+import tornado.gen
+import tornado.options
+import tornado.web
+import tornado.log
+
+from cli_validation import Validate
+from extrapolation import Extrapolate
+
+
+class ValidateJson(RequestHandler):
+ "rest api class for validation "
+
+ def set_default_headers(self):
+ """ set default headers"""
+ self.set_header('Content-Type', 'application/json')
+
+ def post(self):
+ """ consume post request """
+ failures = 0
+
+ # decode the body
+ data = json.loads(self.request.body.decode())
+
+ # check for keys
+ try:
+ data["pdf"]
+ except KeyError:
+ APP_LOG.error("pdf key does not exist")
+ self.write("provide pdf key\n ")
+ failures += 1
+
+ try:
+ data["inst_dir"]
+ except KeyError:
+ APP_LOG.error("inst_dir key does not exist")
+ self.write("provide inst_dir key\n")
+ failures += 1
+
+ try:
+ data["inst_type"]
+ except KeyError:
+ APP_LOG.error("inst_type key does not exist")
+ self.write("provide inst_type key\n")
+ failures += 1
+
+ try:
+ data["sitename"]
+ except KeyError:
+ APP_LOG.error("sitename key does not exist")
+ self.write("provide sitename key\n")
+ failures += 1
+
+ if failures == 0:
+ pdf, inst_dir, inst_type, sitename = \
+ data["pdf"], data["inst_dir"], data["inst_type"], data["sitename"]
+
+ if inst_type not in ["airship", "tripleo"]:
+ APP_LOG.error("only airship and tripleo are supported")
+ self.write("only airship and tripleo are supported, for now.\n")
+ failures += 1
+
+ if failures == 0:
+ result = Validate(inst_dir, inst_type, pdf, sitename).validate()
+ self.write(result)
+
+
+class ExtrapolateJson(RequestHandler):
+ """rest api class for extrapolation"""
+
+ def set_default_headers(self):
+ """ set default header"""
+ self.set_header('Content-Type', 'application/json')
+
+ def post(self):
+ """consume post request"""
+ failures = 0
+
+ data = json.loads(self.request.body.decode())
+
+ # check for keys
+ try:
+ data["pdf_fn"]
+ except KeyError:
+ APP_LOG.error("pdf_fn key does not exist")
+ self.write("provide pdf key\n ")
+ failures += 1
+
+ try:
+ data["store_at"]
+ except KeyError:
+ APP_LOG.error("store-at key does not exist")
+ self.write("provide store_at key\n ")
+ failures += 1
+
+ if failures == 0:
+ pdf_fd = data["pdf_fn"]
+ store_at = data["store_at"]
+
+ # check if the paths are relative or not
+ if not os.path.isabs(store_at):
+ APP_LOG.critical("path provided for store_at is not an absolute path")
+ self.write("provide absolute path for store_at\n ")
+ failures += 1
+
+ if failures == 0:
+ try:
+ Extrapolate(pdf_fd, store_at)
+ self.write({"message": "success! New pdf file:pd_new.json"})
+ except ValueError as error:
+ self.write({"message": "failure:" + str(error)})
+
+
+def make_app():
+ """url mapping to class """
+ urls = [
+ ("/validate", ValidateJson),
+ ("/extrapolate", ExtrapolateJson)
+ ]
+ return Application(urls, debug=True)
+
+
+if __name__ == '__main__':
+ # app config
+ APP = make_app()
+
+ # Cli Config
+ tornado.options.define("port", default=8000, help="run on the given port", type=int)
+ tornado.options.parse_command_line()
+
+ # Server Config
+ HTTP_SERVER = tornado.httpserver.HTTPServer(APP)
+ HTTP_SERVER.listen(tornado.options.options.port)
+
+ # Tornado's event loop handles it from here
+ print("Starting Tornado server.....")
+
+ # Logging
+ LOG_FILE_FILENAME = "tornado.log"
+ HANDLER = logging.FileHandler(LOG_FILE_FILENAME)
+ APP_LOG = logging.getLogger("tornado.general")
+ APP_LOG.level = logging.DEBUG
+ tornado.log.enable_pretty_logging()
+ APP_LOG.addHandler(HANDLER)
+
+    # Start Loop
+    tornado.ioloop.IOLoop.current().start()
diff --git a/sdv/docker/sdvconfig/testapi/__init__.py b/sdv/docker/sdvconfig/testapi/__init__.py
new file mode 100644
index 0000000..acd5868
--- /dev/null
+++ b/sdv/docker/sdvconfig/testapi/__init__.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2020 Ashwin Nayak
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""" test-api package """
+
+from .testapi import PushResults
diff --git a/sdv/docker/sdvconfig/testapi/testapi.py b/sdv/docker/sdvconfig/testapi/testapi.py
new file mode 100644
index 0000000..5546ee9
--- /dev/null
+++ b/sdv/docker/sdvconfig/testapi/testapi.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2020 Ashwin Nayak
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""" testapi class to push results to opnfv testapi """
+
+from datetime import datetime as dt
+import requests
+
+OPNFV_URL = "http://testresults.opnfv.org/test/api/v1"
+POD_NAME = 'intel-pod10'
+INSTALLER = 'Airship'
+BUILD_TAG = "none"
+PKG_LIST = 'package-list.mk'
+START_TIME = dt.now().strftime('%Y-%m-%d %H:%M:%S')
+STOP_TIME = dt.now().strftime('%Y-%m-%d %H:%M:%S')
+TC_NAME = 'SDV_config_basic'
+VERSION = '1.0'
+CRITERIA = 'PASS'
+
+
+class PushResults():
+ """ Push results to opnfv test api """
+
+ def __init__(self, results, logger):
+ """ constructor """
+ # store external values
+ self.results = results
+ self.logger = logger
+ # initialize internal values
+ self.push_vals = dict()
+ # call functions
+ self.generate_response()
+ self.push_results()
+
+ def generate_response(self):
+ """ generate json output to be pushed """
+ # Build body
+ body = {
+ "project_name": "sdv",
+ "scenario": "none",
+ "start_date": START_TIME,
+ "stop_date": STOP_TIME,
+ "case_name": TC_NAME,
+ "pod_name": POD_NAME,
+ "installer": INSTALLER,
+ "version": VERSION,
+ "build_tag": BUILD_TAG,
+ "criteria": CRITERIA,
+ "details": self.results
+ }
+ self.logger.debug("The generated json response to be pushed:%s", body)
+ # store this value in the class variable
+ self.push_vals = body
+
+ def push_results(self):
+ """ push results to testapi """
+ url = OPNFV_URL + "/results"
+
+ try:
+ response = requests.post(url, json=self.push_vals)
+ self.logger.info("testapi push response:%s", response)
+        except requests.exceptions.ConnectionError:
+ self.logger.exception("error while pushing results to testapi")
+ self.logger.error("failed to push results")
diff --git a/sdv/docker/sdvconfig/validation/__init__.py b/sdv/docker/sdvconfig/validation/__init__.py
new file mode 100644
index 0000000..7554676
--- /dev/null
+++ b/sdv/docker/sdvconfig/validation/__init__.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2020 Ashwin Nayak
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""" validation package """
+
+from .hardware import HardwareValidation
+from .info import InfoValidation
+from .network import NetworkValidation
+from .platform import PlatformValidation
+from .software import SoftwareValidation
+from .storage import StorageValidation
diff --git a/sdv/docker/sdvconfig/validation/hardware.py b/sdv/docker/sdvconfig/validation/hardware.py
new file mode 100644
index 0000000..b01d871
--- /dev/null
+++ b/sdv/docker/sdvconfig/validation/hardware.py
@@ -0,0 +1,214 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2020 Ashwin Nayak
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=too-many-instance-attributes
+
+""" program which validates hardware profile """
+
+
+class HardwareValidation():
+ """ perform hardware validation """
+
+ def __init__(self, json, value, manifest, logger):
+ # store external values
+ self.role = "global"
+ self.json = json
+ self.value = value
+ self.logger = logger
+ # initialize internal values
+ self.right = 0
+ self.wrong = 0
+ self.total = 0
+ self.result = ""
+ # initialization functions
+ self.manifest = manifest
+ self.validate_hardware()
+
+ def get_values(self):
+ """ return set of right wrong and total """
+ return self.right, self.wrong, self.total, self.result
+
+ def comparison(self, key, profile, pdf_val, man_val):
+ """ do comparison and print results"""
+ self.total += 1
+ self.logger.debug("key:%s, profile:%s, pdf_val:%s, man_val:%s, role:%s",
+ key, profile, pdf_val, man_val, self.role)
+
+ if pdf_val == "":
+ self.result += ("No value exists for pdf-key:{} of profile:{} and role:{}\n"\
+ .format(key, profile, self.role))
+ elif man_val == []:
+ self.result += ("No value exists for manifest-key:{} of profile:{} and role:{}\n"\
+ .format(key, profile, self.role))
+ elif str(pdf_val) not in man_val:
+ self.result += (
+ "The pdf and manifest values do not match for key:{} profile:{} role:{}\n".format(
+ key, profile, self.role))
+ self.result += ("the pdf val:{} and manifest val:{}\n".format(pdf_val, man_val))
+ self.wrong += 1
+ else:
+ self.result += (
+ "The pdf and manifest values do match for key:{} profile:{} role:{}\n".format(
+ key, profile, self.role))
+ self.right += 1
+
+ def validate_bios_profile(self, value):
+ """ validate bios profile """
+ val = ""
+ profile = 'bios_profile'
+ keys = [
+ 'bios_version',
+ 'bios_mode',
+ 'bootstrap_proto',
+ 'hyperthreading_enabled',
+ 'bios_setting']
+
+ self.logger.info("Starting with the validation of bios profile name:%s", value)
+
+ for key in self.json[profile]:
+ if key["profile_name"] == value:
+ val = key
+ break
+
+ if val == "":
+ self.logger.error("Not able to find bios profile name: %s", value)
+ else:
+ for key in keys:
+ try:
+ temp1 = val[key]
+ temp2 = self.manifest.find_val(self.role, profile, key)
+ self.comparison(key, profile, temp1, temp2)
+ except KeyError:
+ self.logger.error("Not able to find key: %s in bios profile: %s", key, value)
+
+ self.logger.info("Completed with the validation of bios profile name:%s", value)
+
+ def validate_processor_profile(self, value):
+ """ validate processor profile """
+ val = ""
+ profile = 'processor_profiles'
+ keys = ['speed', 'model', 'architecture']
+
+ self.logger.info("Starting with the validation of processor profile:%s", value)
+
+ for key in self.json[profile]:
+ if key["profile_name"] == self.value:
+ val = key
+ break
+
+ if val == "":
+ self.logger.error("Not able to find processor profile name: %s", value)
+ else:
+ val = val["profile_info"]
+ for key in keys:
+ try:
+ temp1 = val[key]
+ temp2 = self.manifest.find_val(self.role, profile, key)
+ self.comparison(key, profile, temp1, temp2)
+ except KeyError:
+ self.logger.error(
+ "Not able to find key: %s in processor profile: %s", key, value)
+ self.logger.info("Completed with the validation of processor profile:%s", value)
+
+ def validate_disks_profile(self, value):
+ """ validate disks profile """
+ val = ""
+ profile = 'disks_profiles'
+ keys = ['address', 'dev_type', 'rotation', 'bus']
+
+ self.logger.info("Starting with the validation of disks profile:%s", value)
+
+ for key in self.json[profile]:
+ if key["profile_name"] == self.value:
+ val = key
+ break
+
+ if val == "":
+ self.logger.error("Not able to find disk profile name: %s", value)
+ else:
+ val = val["profile_info"]
+ for vals in val:
+ for key in keys:
+ try:
+ temp1 = vals[key]
+ temp2 = self.manifest.find_val(self.role, profile, key)
+ self.comparison(key, profile, temp1, temp2)
+ except KeyError:
+ self.logger.error(
+ "Not able to find key: %s in disk profile: %s", key, value)
+ self.logger.info("Completed with the validation of disks profile:%s", value)
+
+ def validate_nic_profile(self, value):
+ """ validate nic profile """
+ val = ""
+ profile = 'nic_profiles'
+ keys = ['address', 'dev_type', 'bus', 'sriov_capable', 'numa_id']
+
+ self.logger.info("Starting with the validation of nic profile:%s", value)
+
+ for key in self.json[profile]:
+ if key["profile_name"] == self.value:
+ val = key
+ break
+
+ if val == "":
+ self.logger.error("Not able to find nic profile name: %s", value)
+ else:
+ val = val["profile_info"]
+
+ for vals in val:
+ for key in keys:
+ try:
+ temp1 = vals[key]
+ temp2 = self.manifest.find_val(self.role, profile, key)
+ self.comparison(key, profile, temp1, temp2)
+ except KeyError:
+ self.logger.error("Not able to find key: %s in nic profile: %s", key, value)
+ self.logger.info("Completed with the validation of nic profile:%s", value)
+
+ def validate_hardware(self):
+ """ validate hardware """
+ # find hardware profile with given key
+ val = ""
+ profile = 'hardware_profiles'
+ keys = ['manufacturer', 'model', 'generation', 'memory']
+
+ self.logger.info("Starting with the validation of hardware profile:%s", self.value)
+
+ for key in self.json[profile]:
+ if key["profile_name"] == self.value:
+ val = key
+ break
+
+ if val == "":
+ self.logger.error("Not able to find hardware profile name: %s", self.value)
+ else:
+ val = val["profile_info"]
+
+ for key in keys:
+ try:
+ temp1 = val[key]
+ temp2 = self.manifest.find_val(self.role, profile, key)
+ self.comparison(key, profile, temp1, temp2)
+ except KeyError:
+ self.logger.error(
+ "Not able to find key: %s in hardware profile: %s", key, self.value)
+ self.logger.info("Completed with the validation of hardware profile:%s", self.value)
+
+ self.validate_bios_profile(val["bios_profile"])
+ self.validate_processor_profile(val["processor_profile"])
+ self.validate_disks_profile(val["disks_profile"])
+ self.validate_nic_profile(val["nics_profile"])
diff --git a/sdv/docker/sdvconfig/validation/info.py b/sdv/docker/sdvconfig/validation/info.py
new file mode 100644
index 0000000..2d2b498
--- /dev/null
+++ b/sdv/docker/sdvconfig/validation/info.py
@@ -0,0 +1,202 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2020 Ashwin Nayak
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=too-many-instance-attributes, too-many-statements
+
+""" validate general info of pdf """
+
+
+class InfoValidation():
+ """ perform hardware validation """
+
+ def __init__(self, json, manifest, logger):
+ # store external state
+ self.role = 'masters'
+ self.json = json
+ self.logger = logger
+ self.manifest = manifest
+ # initialize internal state
+ self.right = 0
+ self.wrong = 0
+ self.total = 0
+ self.result = ""
+ # initialization function
+ self.validate()
+
+ def get_values(self):
+ """ return set of right wrong and total """
+ return self.right, self.wrong, self.total, self.result
+
+ def comparison(self, key, profile, pdf_val, man_val):
+ """ do comparison and print results"""
+ self.total += 1
+ self.logger.debug("key:%s, profile:%s, pdf_val:%s, man_val:%s, role:%s",
+ key, profile, pdf_val, man_val, self.role)
+
+ if pdf_val == "":
+ self.result += ("No value exists for pdf-key:{} of profile:{} and role:{}\n"\
+ .format(key, profile, self.role))
+ elif man_val == []:
+ self.result += ("No value exists for manifest-key:{} of profile:{} and role:{}\n"\
+ .format(key, profile, self.role))
+ elif str(pdf_val) not in man_val:
+ self.result += (
+ "The pdf and manifest values do not match for key:{} profile:{} role:{}\n".format(
+ key, profile, self.role))
+ self.result += ("the pdf val:{} and manifest val:{}\n".format(pdf_val, man_val))
+ self.wrong += 1
+ else:
+ self.result += (
+ "The pdf and manifest values do match for key:{} profile:{} role:{}\n".format(
+ key, profile, self.role))
+ self.right += 1
+
+ def validate(self):
+ """ validate all infos """
+ self.logger.info("starting info validation")
+ val = ""
+ profile = 'management_info'
+ keys = ['owner', 'area_name', 'area_center_name', 'room_id', 'city', 'resource_pool_name']
+
+ val = self.json[profile]
+
+ for key in keys:
+ temp1 = val[key]
+ temp2 = self.manifest.find_val(self.role, profile, key)
+ self.comparison(key, profile, temp1, temp2)
+
+ val = ""
+ profile = 'ntp_info'
+ keys = ['primary_ip', 'primary_zone', 'secondary_ip', 'secondary_zone']
+
+ val = self.json[profile]
+
+ for key in keys:
+ temp1 = val[key]
+ temp2 = self.manifest.find_val(self.role, profile, key)
+ self.comparison(key, profile, temp1, temp2)
+
+ val = ""
+ profile = 'syslog_info'
+ keys = ["server_ip", "transport"]
+
+ val = self.json[profile]
+
+ for key in keys:
+ temp1 = val[key]
+ temp2 = self.manifest.find_val(self.role, profile, key)
+ self.comparison(key, profile, temp1, temp2)
+
+ # val = ""
+ # profile = 'ldap_info'
+ # keys = ["base_url", "url", "auth_path", "common_name", "subdomain", "domain"]
+
+ # val = self.json[profile]
+ # for key in keys:
+ # temp1 = val[key]
+ # temp2 = self.manifest.find_val(self.role, profile, key)
+ # self.comparison(key, profile, temp1, temp2)
+
+ val = ""
+ profile = 'proxy_info'
+ keys = ["address", "port", "user", "password"]
+
+ val = self.json[profile]
+ for key in keys:
+ temp1 = val[key]
+ temp2 = self.manifest.find_val(self.role, profile, key)
+ self.comparison(key, profile, temp1, temp2)
+
+ val = ""
+ profile = 'vim_info'
+ keys = [
+ "vim_name",
+ "vim_id",
+ "vendor",
+ "version",
+ "installer",
+ "deployment_style",
+ "container_orchestrator",
+ "storage_type"]
+
+ val = self.json[profile]
+
+ for key in keys:
+ temp1 = val[key]
+ temp2 = self.manifest.find_val(self.role, profile, key)
+ self.comparison(key, profile, temp1, temp2)
+
+ val = ""
+ profile = 'deployment_info'
+ keys = [
+ "high_availability",
+ "introspection",
+ "deployment_type",
+ "installer_used",
+ "workload_vnf",
+ "workload_cnf",
+ "sdn_controller",
+ "sdn_controller_version",
+ "sdn_controller_nbapps",
+ "vnfm",
+ "vnfm_version",
+ "data_plane_used",
+ "ironic_deploy_interface",
+ "external_storage_cluster",
+ "bl_str_connect_method",
+ "cpu_allocation_ratio"]
+
+ val = self.json[profile]
+
+ for key in keys:
+ temp1 = val[key]
+ temp2 = self.manifest.find_val(self.role, profile, key)
+ self.comparison(key, profile, temp1, temp2)
+
+ val = ""
+ profile = 'jumphost_info'
+ keys = ["ip", "name"]
+
+ val = self.json[profile]
+
+ for key in keys:
+ temp1 = val[key]
+ temp2 = self.manifest.find_val(self.role, profile, key)
+ self.comparison(key, profile, temp1, temp2)
+
+ # val = ""
+ # profile = 'rack_info.rack_details'
+ # keys = ["rack_name","rack_description", "raack_az"]
+
+ # val = self.json["rack_info"]["rack_split"]
+
+ # for key in keys:
+ # temp1 = val[key]
+ # temp2 = self.manifest.find_val(self.role, profile, key)
+ # self.comparison(key, profile, temp1, temp2)
+
+ # val = ""
+ # profile = 'storage_cluster_info'
+ # keys = ["name", "cluster_type", "cluster_id", "auth_type", "username", "password", \
+ # "certificate_location", "client_key", "public_cidr", "cluster_cidr"]
+
+ # val = self.json[profile]
+
+ # for key in keys:
+ # temp1 = val[key]
+ # temp2 = self.manifest.find_val(self.role, profile, key)
+ # self.comparison(key, profile, temp1, temp2)
+ self.logger.info("completing info validation")
diff --git a/sdv/docker/sdvconfig/validation/network.py b/sdv/docker/sdvconfig/validation/network.py
new file mode 100644
index 0000000..35c6b5d
--- /dev/null
+++ b/sdv/docker/sdvconfig/validation/network.py
@@ -0,0 +1,242 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2020 Ashwin Nayak
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=too-many-instance-attributes, too-many-arguments, too-many-branches
+
+""" program which validates network profile """
+
+
+class NetworkValidation():
+ """ perform network validation """
+
+ def __init__(self, role, json, value, manifest, logger):
+ # store external state
+ self.role = role
+ self.json = json
+ self.value = value
+ self.logger = logger
+ self.manifest = manifest
+ # initialize internal state
+ self.right = 0
+ self.wrong = 0
+ self.total = 0
+ self.result = ""
+ # initialization functions
+ self.validate_network()
+
+ def get_values(self):
+ """ return set of right wrong and total """
+ return self.right, self.wrong, self.total, self.result
+
+ def comparison(self, key, profile, pdf_val, man_val, role):
+ """ do comparison and print results"""
+ self.total += 1
+ self.logger.debug("key:%s, profile:%s, pdf_val:%s, man_val:%s, role:%s",
+ key, profile, pdf_val, man_val, role)
+
+ if pdf_val == "":
+ self.result += ("No value exists for pdf-key:{} of profile:{} and role:{}\n"\
+ .format(key, profile, role))
+ elif man_val == []:
+ self.result += ("No value exists for manifest-key:{} of profile:{} and role:{}\n"\
+ .format(key, profile, role))
+ elif str(pdf_val) not in man_val:
+ self.result += (
+ "The pdf and manifest values do not match for key:{} profile:{} role:{}\n".format(
+ key, profile, role))
+ self.result += ("the pdf val:{} and manifest val:{}\n".format(pdf_val, man_val))
+ self.wrong += 1
+ else:
+ self.result += (
+ "The pdf and manifest values do match for key:{} profile:{} role:{}\n".format(
+ key, profile, role))
+ self.right += 1
+
+ def validate_networks(self, value):
+ """ validate network link """
+ self.logger.info("Starting with the validation of networks profile:%s", value)
+ val = ""
+ profile = 'networks'
+ keys = [
+ 'name',
+ 'tunnel_type',
+ 'tunnel_id',
+ 'tunnel_id_range',
+ 'mtu',
+ 'routedomain',
+ 'cidr',
+ 'dns',
+ 'v6_cidr']
+
+ for key in self.json[profile]:
+ if key["name"] == value:
+ val = key
+ break
+
+ if val == "":
+ self.logger.error("Not able to find network profile name: %s", value)
+ else:
+ for key in keys:
+ try:
+ temp1 = val[key]
+ temp2 = self.manifest.find_val(value, profile, key)
+ self.comparison(key, profile, temp1, temp2, value)
+ except KeyError:
+ self.logger.error("Not able to find key: %s in network profile: %s", key, value)
+
+ keys = ["name", "ip"]
+
+ for item in val["vips"]:
+ for key in keys:
+ try:
+ temp1 = item[key]
+ temp2 = self.manifest.find_val(value, profile + '.vips', key)
+ self.comparison(key, profile, temp1, temp2, value)
+ except KeyError:
+ self.logger.error(
+ "Not able to find key: %s in network.vips profile: %s", key, value)
+
+ keys = ["subnet", "gateway", "metric", "routedomain"]
+
+ for item in val["routes"]:
+ for key in keys:
+ try:
+ temp1 = item[key]
+ temp2 = self.manifest.find_val(value, profile + '.routes', key)
+ self.comparison(key, profile, temp1, temp2, value)
+ except KeyError:
+ self.logger.error(
+ "Not able to find key: %s in network.routes profile: %s", key, value)
+
+ keys = ["type", "start", "end"]
+
+ for item in val["allocation_pools"]:
+ for key in keys:
+ try:
+ temp1 = item[key]
+ temp2 = self.manifest.find_val(value, profile + '.allocation_pools', key)
+ self.comparison(key, profile, temp1, temp2, value)
+ except KeyError:
+ self.logger.error(
+ "Not able to find key: %s in network.allocation_pools profile: %s"\
+ , key, value)
+
+ keys = ["type", "start", "end"]
+
+ for item in val["v6_allocation_pools"]:
+ for key in keys:
+ try:
+ temp1 = item[key]
+ temp2 = self.manifest.find_val(value, profile + '.v6_allocation_pools', key)
+ self.comparison(key, profile, temp1, temp2, value)
+ except KeyError:
+ self.logger.error(
+ "Not able to find key: %s in network.v6_allocation_pools profile: %s"\
+ , key, value)
+ self.logger.info("Completed with the validation of networks profile:%s", value)
+
+ def validate_network_link(self, value):
+ """ validate network link """
+ self.logger.info("Starting with the validation of network link:%s", value)
+ val = ""
+ profile = 'network_link'
+ keys = [
+ 'name',
+ 'type',
+ 'bonding_mode',
+ 'mtu',
+ 'linkspeed',
+ 'trunking_mode',
+ 'trunking_default_nw',
+ 'vid',
+ 'vf_count']
+
+ for key in self.json[profile]:
+ if key["name"] == value:
+ val = key
+ break
+
+ if val == "":
+ self.logger.error("Not able to find network link name: %s", value)
+ else:
+ val = val["profile_info"]
+
+ for key in keys:
+ try:
+ temp1 = val[key]
+ temp2 = self.manifest.find_val(value, profile, key)
+ self.comparison(key, profile, temp1, temp2, value)
+ except KeyError:
+ self.logger.error(
+ "Not able to find key: %s in network link profile: %s", key, value)
+
+ keys = ["key", "value"]
+
+ for key in keys:
+ try:
+ temp1 = val["metadata"][key]
+ temp2 = self.manifest.find_val(value, profile + '.metadata', key)
+ self.comparison(key, profile, temp1, temp2, value)
+ except KeyError:
+ self.logger.error(
+ "Not able to find key: %s in network link.metadata profile: %s", key, value)
+
+ keys = ["name", "type"]
+
+ for key in keys:
+ try:
+ temp1 = val["members"][key]
+ temp2 = self.manifest.find_val(value, profile + '.members', key)
+ self.comparison(key, profile, temp1, temp2, value)
+ except KeyError:
+ self.logger.error(
+ "Not able to find key: %s in network link.metadata profile: %s", key, value)
+ self.logger.info("completed with the validation of network link:%s", value)
+
+ def validate_network(self):
+ """ validate network """
+ # find in interface mapping profile with given key
+ val = ""
+ profile = 'interface_mapping_profiles'
+
+ self.logger.info("Starting with the validation of interface mapping profile:%s", self.value)
+
+ for key in self.json[profile]:
+ if key["profile_name"] == self.value:
+ val = key
+ break
+
+ if val == "":
+ self.logger.error("Not able to find interface profile name: %s", self.value)
+ else:
+ val = val["profile_data"]
+
+ for item in val:
+ try:
+ self.validate_network_link(item["interface_name"])
+ self.logger.info("networks:%s", item["networks"])
+ for smaller_item in item["networks"]:
+ self.validate_networks(smaller_item["name"])
+ except KeyError:
+ self.logger.exception(
+ "Not able to find key in interface mapping profile profile:%s", self.value)
+ self.logger.info(
+ "Completed with the validation of interface mapping profile:%s",
+ self.value)
diff --git a/sdv/docker/sdvconfig/validation/platform.py b/sdv/docker/sdvconfig/validation/platform.py
new file mode 100644
index 0000000..9453505
--- /dev/null
+++ b/sdv/docker/sdvconfig/validation/platform.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2020 Ashwin Nayak
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=too-many-instance-attributes, too-many-arguments
+
+""" program which validates platform profile """
+
+
+class PlatformValidation():
+ """ perform hardware validation """
+
+ def __init__(self, role, json, value, manifest, logger):
+ # store external values
+ self.role = role
+ self.json = json
+ self.value = value
+ self.logger = logger
+ self.manifest = manifest
+        # initialize internal values
+ self.right = 0
+ self.wrong = 0
+ self.total = 0
+ self.result = ""
+ # initialization functions
+ self.validate()
+
+ def get_values(self):
+ """ return set of right wrong and total """
+ return self.right, self.wrong, self.total, self.result
+
+ def comparison(self, key, profile, pdf_val, man_val):
+ """ do comparison and print results"""
+ self.total += 1
+ self.logger.debug("key:%s, profile:%s, pdf_val:%s, man_val:%s, role:%s",
+ key, profile, pdf_val, man_val, self.role)
+
+ if pdf_val == "":
+ self.result += ("No value exists for pdf-key:{} of profile:{} and role:{}\n"\
+ .format(key, profile, self.role))
+ elif man_val == []:
+ self.result += ("No value exists for manifest-key:{} of profile:{} and role:{}\n"\
+ .format(key, profile, self.role))
+ elif str(pdf_val) not in man_val:
+ self.result += (
+ "The pdf and manifest values do not match for key:{} profile:{} role:{}\n".format(
+ key, profile, self.role))
+ self.result += ("the pdf val:{} and manifest val:{}\n".format(pdf_val, man_val))
+ self.wrong += 1
+ else:
+ self.result += (
+ "The pdf and manifest values do match for key:{} profile:{} role:{}\n".format(
+ key, profile, self.role))
+ self.right += 1
+
+ def validate(self):
+ """ validate platform profile """
+ val = ""
+ profile = 'platform_profiles'
+ keys = ['os', 'rt_kvm', 'kernel_version', 'kernel_parameters', 'isolated_cpus',
+ 'vnf_cores',
+ 'iommu', 'vswitch_daemon_cores', 'vswitch_type', 'vswitch_uio_driver',
+ 'vswitch_mem_channels', 'vswitch_socket_memory', 'vswitch_pmd_cores',
+ 'vswitch_dpdk_lcores', 'vswitch_dpdk_rxqs', 'vswitch_options']
+
+ for key in self.json[profile]:
+ if key["profile_name"] == self.value:
+ val = key
+ break
+
+ if val == "":
+ self.logger.error("Not able to find platform profile name: %s", self.value)
+ else:
+ for key in keys:
+ try:
+ temp1 = val[key]
+ temp2 = self.manifest.find_val(self.role, profile, key)
+ self.comparison(key, profile, temp1, temp2)
+ except KeyError:
+ self.logger.error(
+ "Not able to find key: %s in platform profile: %s", key, self.value)
diff --git a/sdv/docker/sdvconfig/validation/software.py b/sdv/docker/sdvconfig/validation/software.py
new file mode 100644
index 0000000..9603a22
--- /dev/null
+++ b/sdv/docker/sdvconfig/validation/software.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2020 Ashwin Nayak
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=too-many-instance-attributes, too-many-arguments
+
+""" program which validates software profile """
+
+import os
+
+
+class SoftwareValidation():
+ """ perform hardware validation """
+
+ def __init__(self, role, json, value, manifest, global_dir, type_dir, man_dir, logger):
+ # store external values
+ self.role = role
+ self.json = json
+ self.logger = logger
+ self.manifest = manifest
+ # initialize internal values
+ self.right = 0
+ self.wrong = 0
+ self.total = 0
+ self.result = ""
+ self.software_list = []
+        # initialization functions
+ self.get_software_list(global_dir, type_dir, man_dir)
+ self.validate(value)
+
+ def get_software_list(self, global_dir, type_dir, man_dir):
+ """ get a list of all softwares by checking the global softwares,
+ type and site specific softwares used """
+ dirs = [global_dir, type_dir, man_dir]
+
+ try:
+ for direc in dirs:
+ for dirpath, dirnames, filenames in os.walk(direc):
+ for filename in [f for f in filenames if f.endswith(".yaml")]:
+ temp = filename.split('.')[0]
+ temp = temp.split('-')[0]
+ self.software_list.append(temp)
+ except FileNotFoundError:
+ self.logger.exception(" error in accessing dirs ")
+ raise
+
+ def get_values(self):
+ """ return set of right wrong and total """
+ return self.right, self.wrong, self.total, self.result
+
+ def comparison(self, key, profile, pdf_val, man_val):
+ """ do comparison and print results"""
+ self.total += 1
+ self.logger.debug("key:%s, profile:%s, pdf_val:%s, man_val:%s, role:%s",
+ key, profile, pdf_val, man_val, self.role)
+
+ if pdf_val == "":
+ self.result += ("No value exists for pdf-key:{} of profile:{} and role:{}\n".
+ format(key, profile, self.role))
+ elif man_val == []:
+ self.result += ("No value exists for manifest-key:{} of profile:{} and role:{}\n".
+ format(key, profile, self.role))
+ elif str(pdf_val) not in man_val:
+ self.result += (
+ "The pdf and manifest values do not match for key:{} profile:{} role:{}\n".format(
+ key, profile, self.role))
+ self.result += ("the pdf val:{} and manifest val:{}\n".format(pdf_val, man_val))
+ self.wrong += 1
+ else:
+ self.result += (
+ "The pdf and manifest values do match for key:{} profile:{} role:{}\n".format(
+ key, profile, self.role))
+ self.right += 1
+
+ def validate(self, value):
+ """ validate software profile """
+ self.logger.info("started with the validation of software set:%s", value)
+ val = ""
+ profile = 'software_set'
+ # keys = ["none"]
+
+ for key in self.json[profile]:
+ if key["set_name"] == value:
+ val = key
+ break
+ self.logger.info("completed with the validation of software set:%s", value)
+
+ self.validate_undercloud(val["undercloud_profile"])
+ self.validate_infrastructure(val["infrasw_profile"])
+ self.validate_openstack(val["openstack_profile"])
+
+ def validate_undercloud(self, value):
+ """ validate undercloud sw """
+ self.logger.info("started with the validation of undercloud:%s", value)
+ val = ""
+ profile = 'undercloud_sw_profiles'
+ keys = ["name", "version"]
+
+ for key in self.json[profile]:
+ if key["profile_name"] == value:
+ val = key
+ break
+
+ for val in val["sw_list"]:
+ for _, key in enumerate(keys):
+ temp1 = val[key]
+ temp2 = self.software_list
+ self.comparison(key, profile, temp1, temp2)
+ self.logger.info("completed with the validation of undercloud:%s", value)
+
+ def validate_infrastructure(self, value):
+ """ validate infra sw """
+ self.logger.info("started with the validation of infra_sw:%s", value)
+ val = ""
+ profile = 'infra_sw_profiles'
+ keys = ["name", "version"]
+
+ for key in self.json[profile]:
+ if key["profile_name"] == value:
+ val = key
+ break
+
+ for val in val["sw_list"]:
+ for _, key in enumerate(keys):
+ temp1 = val[key]
+ temp2 = self.software_list
+ self.comparison(key, profile, temp1, temp2)
+ self.logger.info("completed with the validation of infra_sw:%s", value)
+
+ def validate_openstack(self, value):
+ """ validate openstack sw """
+ self.logger.info("started with the validation of opensatck_sw:%s", value)
+ val = ""
+ profile = 'openstack_sw_profiles'
+ keys = ["name", "version"]
+
+ for key in self.json[profile]:
+ if key["profile_name"] == value:
+ val = key
+ break
+
+ for val in val["sw_list"]:
+ for _, key in enumerate(keys):
+ temp1 = val[key]
+ temp2 = self.software_list
+ self.comparison(key, profile, temp1, temp2)
+ self.logger.info("completed with the validation of openstack_sw:%s", value)
diff --git a/sdv/docker/sdvconfig/validation/storage.py b/sdv/docker/sdvconfig/validation/storage.py
new file mode 100644
index 0000000..8b26351
--- /dev/null
+++ b/sdv/docker/sdvconfig/validation/storage.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2020 Ashwin Nayak
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: disable=too-many-instance-attributes, too-many-arguments, too-many-branches, too-many-statements
+
+""" program which validates storage profile """
+
+
+class StorageValidation():
+ """ perform hardware validation """
+
+ def __init__(self, role, json, value, manifest, logger):
+ # saving external state
+ self.role = role
+ self.json = json
+ self.value = value
+ self.logger = logger
+ self.manifest = manifest
+        # initializing internal state
+ self.right = 0
+ self.wrong = 0
+ self.total = 0
+ self.result = ""
+ # initialization function
+ self.validate()
+
+ def get_values(self):
+ """ return set of right wrong and total """
+ return self.right, self.wrong, self.total, self.result
+
+ def comparison(self, key, profile, pdf_val, man_val):
+ """ do comparison and print results"""
+ self.total += 1
+ self.logger.debug("key:%s, profile:%s, pdf_val:%s, man_val:%s, role:%s",
+ key, profile, pdf_val, man_val, self.role)
+
+ if pdf_val == "":
+ self.result += ("No value exists for pdf-key:{} of profile:{} and role:{}\n"\
+ .format(key, profile, self.role))
+ elif man_val == []:
+ self.result += ("No value exists for manifest-key:{} of profile:{} and role:{}\n"\
+ .format(key, profile, self.role))
+ elif str(pdf_val) not in man_val:
+ self.result += (
+ "The pdf and manifest values do not match for key:{} profile:{} role:{}\n".format(
+ key, profile, self.role))
+ self.result += ("the pdf val:{} and manifest val:{}\n".format(pdf_val, man_val))
+ self.wrong += 1
+ else:
+ self.result += (
+ "The pdf and manifest values do match for key:{} profile:{} role:{}\n".format(
+ key, profile, self.role))
+ self.right += 1
+
+ def validate(self):
+ """ validate storage profile """
+ val = ""
+ profile = 'storage_profile'
+ keys = ['bootdrive']
+
+ self.logger.info("Starting with the validation of storage profile: %s", self.value)
+
+ for key in self.json[profile]:
+ if key["name"] == self.value:
+ val = key
+ break
+
+ if val == "":
+ self.logger.error("Not able to find storage profile name: %s", self.value)
+ else:
+ for key in keys:
+ try:
+ temp1 = val[key]
+ temp2 = self.manifest.find_val(self.role, profile, key)
+ self.comparison(key, profile, temp1, temp2)
+ except KeyError:
+ self.logger.error(
+ "Not able to find key: %s in storage profile: %s", key, self.value)
+
+ # redefining keys for bd_partitions
+ keys = ["name", "size", "bootable"]
+ for valx in val["bd_partitions"]:
+ for _, key in enumerate(keys):
+ try:
+ temp1 = valx[key]
+ temp2 = self.manifest.find_val(self.role, profile + '.bd_partitions', key)
+ self.comparison(key, profile, temp1, temp2)
+ except KeyError:
+ self.logger.error(
+ "Not able to find key: %s in storage profile-bd_partitions: %s"\
+ , key, self.value)
+
+        # redefining keys for bd_partitions.filesystem
+ keys = ["mountpoint", "fstype", "mount_options"]
+ for valx in val["bd_partitions"]:
+ for _, key in enumerate(keys):
+ try:
+ temp1 = valx["filesystem"][key]
+ temp2 = self.manifest.find_val(
+ self.role, profile + '.bd_partitions.filesystem', key)
+ self.comparison(key, profile, temp1, temp2)
+ except KeyError:
+ self.logger.error(
+ "Not able to find key: %s in storage profile-filesystem: %s"\
+ , key, self.value)
+
+ # redefining keys for data_devices
+ keys_1 = ["name", "size"]
+ keys_2 = ["mountpoint", "fstype", "mount_options"]
+
+ for val1 in val["data_devices"]:
+ for val2 in val1["partitions"]:
+ for _, key in enumerate(keys_1):
+ try:
+ temp1 = val2[key]
+ temp2 = self.manifest.find_val(
+ self.role, profile + '.data_devices', key)
+ self.comparison(key, profile, temp1, temp2)
+ except KeyError:
+ self.logger.error(
+ "Not able to find key: %s in storage profile-data_devices: %s"\
+ , key, self.value)
+ for _, key in enumerate(keys_2):
+ try:
+ temp1 = val2["filesystem"][key]
+ temp2 = self.manifest.find_val(
+ self.role, profile + '.data_devices', key)
+ self.comparison(key, profile, temp1, temp2)
+ except KeyError:
+ self.logger.error(
+ "Not able to find key: %s in storage profile-data_devices: %s"\
+ , key, self.value)
+
+ # redefining keys for journal_devices
+ keys = ["name"]
+ for valx in val["journal_devices"]:
+ for _, key in enumerate(keys):
+ try:
+ temp1 = valx[key]
+ temp2 = self.manifest.find_val(self.role, profile + '.journal_devices', key)
+ self.comparison(key, profile, temp1, temp2)
+ except KeyError:
+ self.logger.error(
+ "Not able to find key: %s in storage profile-journal_devices: %s"\
+ , key, self.value)
+
+ self.logger.info("completed with the validation of storage profile: %s", self.value)
diff --git a/sdv/docs/docker/sdvconfig/developer/devguide.rst b/sdv/docs/docker/sdvconfig/developer/devguide.rst
new file mode 100644
index 0000000..7c1fc15
--- /dev/null
+++ b/sdv/docs/docker/sdvconfig/developer/devguide.rst
@@ -0,0 +1,309 @@
+=========
+SDVConfig
+=========
+Welcome to the SDVConfig Developer Guide!
+
+Who should use this guide?
+
+If you want to extend SDVConfig by creating a module, adding functionality to an existing module, or expanding test coverage, this guide is for you. We’ve included detailed information for developers on how to test and document modules, as well as the prerequisites for getting your module accepted into the main SDV repository.
+
+Table of Contents
+^^^^^^^^^^^^^^^^^
+- Description of the project
+- Software architecture of the project
+- Software technologies used
+- Setting up your local environment
+- Flow of the project
+- Project Structure
+- Code walkthrough of core functionalities
+
+Description of the project
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+Cloud software validation is an upcoming project consisting of many microservices which together form a single software validation solution. This documentation focuses on one of these microservices, namely SDV: Pre-Deployment Software Validation.
+
+PDF (POD Descriptor File) is a standard, cloud-agnostic descriptor file meant to be used by Vendors/DC admins to describe the target Cloud Environment/Lab. One of the objectives of PDF is to provide interoperability between various Cloud-infrastructure and Vendors. This project aims to develop the PDF file further, add more details, and develop some tooling around the PDF to make it easier to consume by the end-user. The final process involves validating the PDF against installer manifests. The target installers are Airship and TripleO.
+
+In summary, the goals of the project are:
+
+- Develop the PDF file further, add more details and develop some tooling around the PDF to make it easier to consume by the end-user.
+- Validate the existing PDF with installers.
+
+Software architecture of the project
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+There are two modules in SDVConfig: extrapolation, which is tooling around the PDF, and validation, which validates the existing PDF against installer manifests.
+
+The software architecture of the extrapolation module is as follows.
+
+.. image:: extrapolation.png
+
+The software architecture of the validation module is as follows.
+
+.. image:: validation.png
+
+Software technologies used
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+The software technologies used in the project are:
+
+- Tornado module for creating REST APIs
+- json module for handling JSON data
+- yaml module for handling YAML data
+- requests module for pushing data to TestAPI
+
+Setting up your local environment
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Use Python Virtual Environment Manager.
+
+.. code-block:: bash
+
+    python3 -m pip install --user virtualenv
+    python3 -m venv env
+    source env/bin/activate
+
+Install the required packages from requirements.txt.
+
+.. code-block:: bash
+
+    pip install -r requirements.txt
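+
+Once the packages are installed, the Tornado server in server.py can be started and exercised over its REST endpoints. The snippet below is a minimal sketch, assuming the server is running locally on the default port 8000 (``python server.py --port=8000``); the file paths and site name are placeholder examples only, not values shipped with the project.
+
+.. code:: python
+
+    import requests
+
+    # placeholder example values; point these at your own PDF and manifest directory
+    payload = {
+        "pdf": "/tmp/pdf.json",                 # POD Descriptor File
+        "inst_dir": "/tmp/airship-manifests",   # installer manifest directory
+        "inst_type": "airship",                 # "airship" or "tripleo" are supported
+        "sitename": "intel-pod10"
+    }
+
+    # the /validate endpoint returns the validation report as text
+    response = requests.post("http://localhost:8000/validate", json=payload)
+    print(response.text)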
+
+Flow of the project
+^^^^^^^^^^^^^^^^^^^
+The flow of the two modules is shown in the detailed diagrams below.
+
+The flow diagram of the extrapolation module is as follows.
+
+.. image:: extrapolation-flow.png
+
+The flow diagram of the validation module is as follows.
+
+.. image:: validation-flow.png
+
+Project Structure
+^^^^^^^^^^^^^^^^^
+The project structure is as follows.
+
+.. code-block:: bash
+
+ sdvconfig
+ ├── cli_validation.py
+ ├── Dockerfile
+ ├── extrapolation
+ │ ├── extrapolation.py
+ │ └── __init__.py
+ ├── manifest
+ │ ├── __init__.py
+ │ └── manifest.py
+ ├── mapping
+ │ ├── airship
+ │ │ ├── hardware-mapping.json
+ │ │ ├── info-mapping.json
+ │ │ ├── network-mapping.json
+ │ │ ├── platform-mapping.json
+ │ │ ├── software-mapping.json
+ │ │ └── storage-mapping.json
+ │ ├── template
+ │ │ ├── hardware-mapping.json
+ │ │ ├── info-mapping.json
+ │ │ ├── network-mapping.json
+ │ │ ├── platform-mapping.json
+ │ │ ├── software-mapping.json
+ │ │ └── storage-mapping.json
+ │ └── TripleO
+ │ ├── hardware-mapping.json
+ │ ├── info-mapping.json
+ │ ├── network-mapping.json
+ │ ├── platform-mapping.json
+ │ ├── software-mapping.json
+ │ └── storage-mapping.json
+ ├── README.md
+ ├── requirements.txt
+ ├── server.py
+ ├── testapi
+ │ ├── __init__.py
+ │ └── testapi.py
+ └── validation
+ ├── hardware.py
+ ├── info.py
+ ├── __init__.py
+ ├── network.py
+ ├── platform.py
+ ├── software.py
+ └── storage.py
+
+
+Code walkthrough of core functionalities
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Extrapolation
+"""""""""""""
+The core code of extrapolation is as shown below.
+
+.. code:: python
+
+ def extrapolate(self):
+ """ Perform Extrapolation """
+
+ list_servers = []
+
+ # get ipmi info
+ count = 0
+
+ for val in self.pdf["roles"]:
+ num_servers = int(val["count"]) # Number of servers in the particular role.
+ role = val["name"]
+
+ for idx in range(num_servers):
+ temp = dict()
+ temp["role_name"] = role
+ temp["device_name"] = str(role) + str(idx + 1)
+ temp["az_name"] = "default"
+ temp["ha_name"] = "default"
+
+ temp["ilo_info"] = self.get_ilo_info(count)
+ count += 1
+
+ list_servers.append(temp)
+
+In the code above, we iterate through the list of roles and generate the list of servers. The IPMI IP values are picked from networks/ipmi/cidr in the PDF and are used in the extrapolation process.
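+
+For illustration, the sketch below shows how the IPMI addresses could be derived from such a CIDR. This is only an assumed helper using the standard ipaddress module, not the exact code in extrapolation.py.
+
+.. code:: python
+
+    import ipaddress
+
+    def ilo_ips_from_cidr(cidr, count):
+        """ return the first `count` host addresses of the ipmi cidr """
+        hosts = ipaddress.ip_network(cidr).hosts()
+        return [str(next(hosts)) for _ in range(count)]
+
+    # e.g. ilo_ips_from_cidr("10.10.10.0/24", 3)
+    # -> ['10.10.10.1', '10.10.10.2', '10.10.10.3']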
+
+Validation
+""""""""""
+The core code of validation is as shown below.
+
+.. code:: python
+
+ def validate(self):
+ """ description about validation """
+ # validate info
+ correct, wrong, total, result = InfoValidation(
+ self.json, self.manifest, self.logger).get_values()
+ self.correct += correct
+ self.wrong += wrong
+ self.total += total
+ string = (
+ "The number of correct :{} wrong:{} and total:{} in info profile\n\n".format(
+ self.correct,
+ self.wrong,
+ self.total))
+ self.result += result + string
+
+ # iterate through the roles: have a class for each for each of the roles
+ for _, value in enumerate(self.json["roles"]):
+ role = value["name"]
+ # print(role,value["hardware_profile"])
+ correct, wrong, total, result = HardwareValidation(
+ self.json, value["hardware_profile"], self.manifest, self.logger).get_values()
+ self.correct += correct
+ self.wrong += wrong
+ self.total += total
+ string = (
+ "The number of correct :{} wrong:{} and total:{} in hardware profile\n\n".format(
+ correct, wrong, total))
+ self.result += result + string
+
+ correct, wrong, total, result = StorageValidation(
+ role, self.json, value["storage_mapping"], self.manifest, self.logger).get_values()
+ self.correct += correct
+ self.wrong += wrong
+ self.total += total
+ string = (
+ "The number of correct :{} wrong:{} and total:{} in storage profile\n\n".format(
+ correct, wrong, total))
+ self.result += result + string
+
+ correct, wrong, total, result = SoftwareValidation(role, self.json, \
+ value["sw_set_name"], self.manifest, self.global_sw_dir, self.type_sw_dir, \
+ self.site_sw_dir, self.logger).get_values()
+ self.correct += correct
+ self.wrong += wrong
+ self.total += total
+ string = (
+ "The number of correct :{} wrong:{} and total:{} in software profile\n\n".format(
+ correct, wrong, total))
+ self.result += result + string
+
+ correct, wrong, total, result = PlatformValidation(
+ role, self.json, value["platform_profile"], self.manifest, self.logger).get_values()
+ self.correct += correct
+ self.wrong += wrong
+ self.total += total
+ string = (
+ "The number of correct :{} wrong:{} and total:{} in platform profile\n\n".format(
+ correct, wrong, total))
+ self.result += result + string
+
+ correct, wrong, total, result = NetworkValidation(role, self.json, \
+ value["interface_mapping"], self.manifest, self.logger).get_values()
+ self.correct += correct
+ self.wrong += wrong
+ self.total += total
+ string = (
+ "The number of correct :{} wrong:{} and total:{} in network profile\n\n".format(
+ correct, wrong, total))
+ self.result += result + string
+
+ self.testapi_result["timestamp"] = datetime.datetime.now()
+ self.testapi_result["correct"] = self.correct
+ self.testapi_result["wrong"] = self.wrong
+ self.testapi_result["total"] = self.total
+
+ # print the final report
+ self.logger.info("Validation complete!")
+ # push results to opnfv testapi
+ PushResults(self.testapi_result, self.logger)
+
+ return self.result
+
+As a sample of the individual validation modules, the hardware validation code is shown below.
+
+.. code:: python
+
+ def validate_profile(self, value):
+     """ validate profile """
+     val = ""
+     profile = 'profile'
+     keys = [
+         'bios_version',
+         'bios_mode',
+         'bootstrap_proto',
+         'hyperthreading_enabled',
+         'bios_setting']
+
+     for key in self.json[profile]:
+         if key["profile_name"] == value:
+             val = key
+             break
+
+     if val == "":
+         self.logger.error("Not able to find bios profile name: %s", value)
+     else:
+         for key in keys:
+             try:
+                 temp1 = val[key]
+                 temp2 = self.manifest.find_val(self.role, profile, key)
+                 self.comparison(key, profile, temp1, temp2)
+             except KeyError:
+                 self.logger.error("Not able to find key: %s in profile: %s", key, value)
+
+     self.logger.info("Completed with the validation of profile name:%s", value)
+
+The core recursive code used to find the values for a key in the manifest files is as follows.
+
+.. code:: python
+
+ def find_vals(self, key, temp_json):
+     """ insert all matching json key-vals in array """
+     # self.logger.info("temp_json value:%s", temp_json)
+     for k, value in temp_json.items():
+         if k == key:
+             if isinstance(value, list):
+                 for val in value:
+                     self.vals.append(str(val))
+             else:
+                 self.vals.append(str(value))
+
+         if isinstance(value, dict):
+             found = self.find_vals(key, value)
+             if found:
+                 return True
+
+         if isinstance(value, list):
+             for _, val in enumerate(value):
+                 if isinstance(val, str):
+                     continue
+                 found = self.find_vals(key, val)
+                 if found:
+                     return True
+     return False
+
+The code first iterates through all the profiles; for each profile it takes each key, looks up the corresponding mapped key in the mapping files, and checks whether that key exists in the installer manifest and whether its value matches.
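+
+The sketch below illustrates this mapping-and-compare idea in isolation. The function name, data shapes and sample values are invented for illustration and are not the actual implementation.
+
+.. code:: python
+
+ def compare_profile(mapping, pdf_profile, manifest_values):
+     """ Illustrative sketch: count matches between PDF keys and manifest values """
+     correct, wrong = 0, 0
+     for pdf_key, pdf_val in pdf_profile.items():
+         manifest_key = mapping.get(pdf_key)      # key name used in the installer manifest
+         if manifest_key is None:
+             continue                             # key not covered by this mapping file
+         if str(pdf_val) in manifest_values.get(manifest_key, []):
+             correct += 1
+         else:
+             wrong += 1
+     return correct, wrong
+
+ # tiny made-up example
+ mapping = {"bios_version": "bios_version", "bios_mode": "boot_mode"}
+ pdf_profile = {"bios_version": "SE5C620", "bios_mode": "uefi"}
+ manifest_values = {"bios_version": ["SE5C620"], "boot_mode": ["legacy"]}
+ print(compare_profile(mapping, pdf_profile, manifest_values))   # -> (1, 1)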
diff --git a/sdv/docs/docker/sdvconfig/developer/extrapolation-flow.png b/sdv/docs/docker/sdvconfig/developer/extrapolation-flow.png
new file mode 100644
index 0000000..5b220af
--- /dev/null
+++ b/sdv/docs/docker/sdvconfig/developer/extrapolation-flow.png
Binary files differ
diff --git a/sdv/docs/docker/sdvconfig/developer/extrapolation.png b/sdv/docs/docker/sdvconfig/developer/extrapolation.png
new file mode 100644
index 0000000..1a0f777
--- /dev/null
+++ b/sdv/docs/docker/sdvconfig/developer/extrapolation.png
Binary files differ
diff --git a/sdv/docs/docker/sdvconfig/developer/validation-flow.png b/sdv/docs/docker/sdvconfig/developer/validation-flow.png
new file mode 100644
index 0000000..de4853e
--- /dev/null
+++ b/sdv/docs/docker/sdvconfig/developer/validation-flow.png
Binary files differ
diff --git a/sdv/docs/docker/sdvconfig/developer/validation.png b/sdv/docs/docker/sdvconfig/developer/validation.png
new file mode 100644
index 0000000..f6d2dc3
--- /dev/null
+++ b/sdv/docs/docker/sdvconfig/developer/validation.png
Binary files differ
diff --git a/sdv/docs/docker/sdvconfig/user/configguide.rst b/sdv/docs/docker/sdvconfig/user/configguide.rst
new file mode 100644
index 0000000..d8bb3c2
--- /dev/null
+++ b/sdv/docs/docker/sdvconfig/user/configguide.rst
@@ -0,0 +1,83 @@
+=========
+SDVConfig
+=========
+Welcome to the SDVConfig configuration guide!
+
+Who should use this guide?
+
+If you are searching for a way to run the sdvconfig code and don't know how, this guide is for you.
+
+There are currently two ways of running the code: through the command line and through docker-http.
+
+Commandline
+^^^^^^^^^^^
+The required configuration is as follows.
+
+Use a Python virtual environment:
+
+.. code:: bash
+
+ python3 -m pip install --user virtualenv
+ python3 -m venv env
+ source env/bin/activate
+
+Install the required packages from requirements.txt:
+
+.. code:: bash
+
+ pip install -r requirements.txt
+
+Please refer to the user guide for how to run the code on the command line.
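+
+As a quick reference (the user guide describes each option in detail), a command-line validation run follows the pattern below; all values are placeholders.
+
+.. code:: bash
+
+ python cli_validation.py --inst_dir=path/to/mani_dir --inst_type=type --pdf=path/to/pdf --sitename=sitename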
+
+docker-http
+^^^^^^^^^^^
+Make sure you have docker installed before proceeding any further.
+
+The Dockerfile contents are as follows.
+
+.. code:: bash
+
+ FROM python:3.8-slim-buster
+
+ # create folder sdvconfig
+ RUN mkdir sdvconfig
+ # change the workdir to the newly created file
+ WORKDIR /sdvconfig/
+
+ # install from requirements.txt
+ COPY requirements.txt /sdvconfig/requirements.txt
+ RUN pip install -r requirements.txt
+ RUN rm requirements.txt
+
+ # copy all required files/folders
+ COPY extrapolation/ /sdvconfig/extrapolation/
+ COPY mapping/ /sdvconfig/mapping/
+ COPY validation/ /sdvconfig/validation/
+ COPY server.py /sdvconfig/
+ COPY cli_validation.py /sdvconfig/
+ COPY testapi/ /sdvconfig/testapi/
+ COPY manifest /sdvconfig/manifest/
+
+ # expose port for rest calls
+ EXPOSE 8000
+
+ # run the http server
+ CMD [ "python", "server.py" ]
+
+Build the docker image with the following command, run from the directory containing the Dockerfile.
+
+.. code:: bash
+
+ docker build --tag <user>/sdvconfig:<version> .
+
+You will see Docker step through each instruction in the Dockerfile, building up the image as it goes. If successful, the build ends with the message ``Successfully tagged <user>/sdvconfig:<version>``.
+
+Finally, we can run the image as a container with the following command.
+
+.. code:: bash
+
+ docker run -v /path/to/folder:/path/to/folder --publish 8000:8000 --detach --name config <user>/sdvconfig:<version>
+
+There are a few common flags here:
+
+- --publish asks Docker to forward traffic arriving on the host's port 8000 to the container's port 8000. Containers have their own private set of ports, so to reach one from the network you have to forward traffic to it in this way; otherwise, firewall rules will prevent all network traffic from reaching the container, as a default security posture.
+- --detach asks Docker to run this container in the background.
+- --name specifies a name with which you can refer to your container in subsequent commands, in this case config.
+
+Finally, we attach a volume from the host to the container with ``-v``, so we can feed in files such as the PDF and manifests to the docker-http module and have the results persisted in this volume; see the example below.
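+
+For example, assuming the PDF and installer manifests are kept under /home/user/sdv-data on the host (a hypothetical path), the container could be started as shown below. Mounting the same path inside the container keeps the absolute file paths used in the HTTP requests valid on both sides.
+
+.. code:: bash
+
+ docker run -v /home/user/sdv-data:/home/user/sdv-data --publish 8000:8000 --detach --name config <user>/sdvconfig:<version>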
+
+Please refer to the user guide for details of the HTTP requests.
+
diff --git a/sdv/docs/docker/sdvconfig/user/userguide.rst b/sdv/docs/docker/sdvconfig/user/userguide.rst
new file mode 100644
index 0000000..f38303d
--- /dev/null
+++ b/sdv/docs/docker/sdvconfig/user/userguide.rst
@@ -0,0 +1,42 @@
+=========
+SDVConfig
+=========
+Welcome to the SDVConfig user guide!
+
+Who should use this guide?
+
+If you are searching for a way to run the sdvconfig code and don't know how, this guide is for you.
+
+Currently there are two functionalities: extrapolation and validation.
+
+To make an extrapolation POST request, use the following command.
+
+.. code:: bash
+
+ curl --header "Content-Type: application/json" --request POST --data '{"pdf_fn":"<>", "store_at":"<>"}' http://localhost:8000/extrapolate
+
+To run this on the command line, use the following command.
+
+.. code:: bash
+
+ python extrapolation.py --pdf_fn="path/to/pdf_fn" --store-at="path/to/storage"
+
+The pdf_fn key expects an absolute file path to the PDF or a raw GitHub file URL.
+The store_at key expects an absolute file path at which the newly generated PDF should be stored.
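+
+For example, with hypothetical paths, an extrapolation request could look like this.
+
+.. code:: bash
+
+ curl --header "Content-Type: application/json" --request POST --data '{"pdf_fn":"/sdv/data/pod.json", "store_at":"/sdv/data/pod_full.json"}' http://localhost:8000/extrapolate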
+
+To make a validation POST request, use the following command.
+
+.. code:: bash
+
+ curl --header "Content-Type: application/json" --request POST --data '{"pdf_file":"<>", "inst_dir":"<>", "inst_type":"<>", "sitename":"<>"}' http://localhost:8000/validate
+
+To run this on the command line, use the following command.
+
+.. code:: bash
+
+ python cli_validation.py --inst_dir=path/to/mani_dir --inst_type=type --pdf=path/to/pdf --sitename=sitename
+
+The pdf_file key expects an absolute file path to the PDF or a raw GitHub file URL.
+The inst_dir key expects an absolute file path to the installer manifest directory or a GitHub clone URL.
+The inst_type key expects the installer type as a string ("airship", "tripleo", etc.).
+The sitename key expects the site name, for example intel-pod10 or intel-pod15.
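+
+For example, with hypothetical paths and values, a validation request could look like this.
+
+.. code:: bash
+
+ curl --header "Content-Type: application/json" --request POST --data '{"pdf_file":"/sdv/data/pod.json", "inst_dir":"/sdv/manifests/airship", "inst_type":"airship", "sitename":"intel-pod10"}' http://localhost:8000/validate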