From e0a327b8e02c9202732752cbb14ca4900c49bc52 Mon Sep 17 00:00:00 2001 From: opensource-tnbt Date: Mon, 30 Nov 2020 23:30:03 +0530 Subject: DOCS: Update the Documentation. This patch updates the documentation for Jerma. Signed-off-by: Sridhar K. N. Rao Change-Id: Ic0d61a74b1c8d31c8facaa16837894fc816ac739 --- docs/index.rst | 10 +- docs/pdf/userguide.rst | 29 ++ docs/release/configguide/index.rst | 6 - docs/release/release-notes/release-notes.rst | 18 +- .../docker/sdvconfig/developer/devguide.rst | 309 --------------------- .../sdvconfig/developer/extrapolation-flow.png | Bin 48637 -> 0 bytes .../docker/sdvconfig/developer/extrapolation.png | Bin 6923 -> 0 bytes .../docker/sdvconfig/developer/validation-flow.png | Bin 73615 -> 0 bytes .../docker/sdvconfig/developer/validation.png | Bin 17281 -> 0 bytes .../docker/sdvconfig/user/configguide.rst | 83 ------ .../userguide/docker/sdvconfig/user/userguide.rst | 42 --- .../docker/state/development/design/index.rst | 0 .../docker/state/development/overview/index.rst | 0 .../overview/result-api-architecture.png | Bin 24756 -> 0 bytes .../state/development/overview/result_api.rst | 178 ------------ .../docker/state/development/overview/settings.rst | 38 --- .../docker/state/release/configguide/index.rst | 0 .../docker/state/release/userguide/index.rst | 0 docs/release/userguide/docker/urls/userguide.rst | 24 -- docs/release/userguide/index.rst | 6 - docs/release/userguide/pdf/user/userguide.rst | 29 -- docs/sdvconfig/developer/devguide.rst | 309 +++++++++++++++++++++ docs/sdvconfig/developer/extrapolation-flow.png | Bin 0 -> 48637 bytes docs/sdvconfig/developer/extrapolation.png | Bin 0 -> 6923 bytes docs/sdvconfig/developer/validation-flow.png | Bin 0 -> 73615 bytes docs/sdvconfig/developer/validation.png | Bin 0 -> 17281 bytes docs/sdvconfig/user/configguide.rst | 83 ++++++ docs/sdvconfig/user/userguide.rst | 42 +++ docs/state/development/design/index.rst | 0 docs/state/development/overview/index.rst | 0 .../overview/result-api-architecture.png | Bin 0 -> 24756 bytes docs/state/development/overview/result_api.rst | 178 ++++++++++++ docs/state/development/overview/settings.rst | 38 +++ docs/urls/userguide.rst | 24 ++ tox.ini | 34 +-- 35 files changed, 735 insertions(+), 745 deletions(-) create mode 100644 docs/pdf/userguide.rst delete mode 100644 docs/release/configguide/index.rst delete mode 100644 docs/release/userguide/docker/sdvconfig/developer/devguide.rst delete mode 100644 docs/release/userguide/docker/sdvconfig/developer/extrapolation-flow.png delete mode 100644 docs/release/userguide/docker/sdvconfig/developer/extrapolation.png delete mode 100644 docs/release/userguide/docker/sdvconfig/developer/validation-flow.png delete mode 100644 docs/release/userguide/docker/sdvconfig/developer/validation.png delete mode 100644 docs/release/userguide/docker/sdvconfig/user/configguide.rst delete mode 100644 docs/release/userguide/docker/sdvconfig/user/userguide.rst delete mode 100644 docs/release/userguide/docker/state/development/design/index.rst delete mode 100644 docs/release/userguide/docker/state/development/overview/index.rst delete mode 100644 docs/release/userguide/docker/state/development/overview/result-api-architecture.png delete mode 100644 docs/release/userguide/docker/state/development/overview/result_api.rst delete mode 100644 docs/release/userguide/docker/state/development/overview/settings.rst delete mode 100644 docs/release/userguide/docker/state/release/configguide/index.rst delete mode 100644 
docs/release/userguide/docker/state/release/userguide/index.rst delete mode 100644 docs/release/userguide/docker/urls/userguide.rst delete mode 100644 docs/release/userguide/index.rst delete mode 100644 docs/release/userguide/pdf/user/userguide.rst create mode 100644 docs/sdvconfig/developer/devguide.rst create mode 100644 docs/sdvconfig/developer/extrapolation-flow.png create mode 100644 docs/sdvconfig/developer/extrapolation.png create mode 100644 docs/sdvconfig/developer/validation-flow.png create mode 100644 docs/sdvconfig/developer/validation.png create mode 100644 docs/sdvconfig/user/configguide.rst create mode 100644 docs/sdvconfig/user/userguide.rst create mode 100644 docs/state/development/design/index.rst create mode 100644 docs/state/development/overview/index.rst create mode 100644 docs/state/development/overview/result-api-architecture.png create mode 100644 docs/state/development/overview/result_api.rst create mode 100644 docs/state/development/overview/settings.rst create mode 100644 docs/urls/userguide.rst diff --git a/docs/index.rst b/docs/index.rst index 08fb3bd..5c7f35b 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -12,6 +12,10 @@ OPNFV CIRV-SDV :maxdepth: 3 release/release-notes/index - release/userguide/results/index - release/configguide/index - development/index + pdf/userguide + sdvconfig/developer/devguide + sdvconfig/user/configguide + sdvconfig/user/userguide + state/development/design/index + state/development/overview/settings + state/development/overview/result_api diff --git a/docs/pdf/userguide.rst b/docs/pdf/userguide.rst new file mode 100644 index 0000000..2394888 --- /dev/null +++ b/docs/pdf/userguide.rst @@ -0,0 +1,29 @@ +******************************************* +Platform Descriptor File (PDF) : User Guide +******************************************* + +Using the PDF template +###################### +Please follow these steps. + +* Go to sdv/pdf/template folder. +* Make a copy of it using site name :: + + + cp pdf_template.json .json. + +* Fill all the values. + +* Use the .json with sdvconfig tool to extrapolate the pdf. Refer to sdvconfig documentation. +* PDF is ready to use. + +Using GUI to create platform-description +######################################## +Please follow these steps. + +* Go to sdv/pdf/site folder. +* Open index.html with any browser. +* Follow the guidelines and fill all the values. +* Click Submit and Download. File named sitepdf.json will be downloaded. +* Use the sitepdf.json with sdvconfig tool to extrapolate the pdf. Refer to sdvconfig documentation. +* PDF is ready to use. diff --git a/docs/release/configguide/index.rst b/docs/release/configguide/index.rst deleted file mode 100644 index 73ef396..0000000 --- a/docs/release/configguide/index.rst +++ /dev/null @@ -1,6 +0,0 @@ -.. This work is licensed under a Creative Commons Attribution 4.0 International License. -.. http://creativecommons.org/licenses/by/4.0 -.. (c) OPNFV, Spirent Communications, University of Delhi and others. - -CIRV-SDV Configuration Guide -============================ diff --git a/docs/release/release-notes/release-notes.rst b/docs/release/release-notes/release-notes.rst index 1257fb5..97b28db 100644 --- a/docs/release/release-notes/release-notes.rst +++ b/docs/release/release-notes/release-notes.rst @@ -2,11 +2,17 @@ .. http://creativecommons.org/licenses/by/4.0 .. (c) OPNFV, Spirent Communications, University of Delhi and others. 
-OPNFV Jerma Release -==================== +OPNFV CIRV-SDV Jerma Release +============================ -* Platform Definition File Templates and Tools. -* Resource Modelling. -* Pre-Deployment Validation -* Post-Deployment Validation +Platform Definition file + * Version-1 Template + * GUI Tool to create PDF. + +Tools: + + * Resource Modelling. + * URL Validation - Airship. + * Pre-Deployment Validation: Airship v1.7. + * Post-Deployment Validation: Airship v1.7 diff --git a/docs/release/userguide/docker/sdvconfig/developer/devguide.rst b/docs/release/userguide/docker/sdvconfig/developer/devguide.rst deleted file mode 100644 index 7c1fc15..0000000 --- a/docs/release/userguide/docker/sdvconfig/developer/devguide.rst +++ /dev/null @@ -1,309 +0,0 @@ -========= -SDVConfig -========= -Welcome to the SDVConfig Developer Guide! - -Who should use this guide? - -If you want to extend SDVConfig by using a creating a module, adding functionality to an existing module, or expanding test coverage, this guide is for you. We’ve included detailed information for developers on how to test and document modules, as well as the prerequisites for getting your module be accepted into the main SDV repository. - -Table of Contents -^^^^^^^^^^^^^^^^^ -- Description of the project -- Software architecture of the project -- Software technologies uses -- Setting up your local environment -- Flow of the project -- Project Structure -- Code walkthrough of core functionalities - -Description of the project -^^^^^^^^^^^^^^^^^^^^^^^^^^ -Cloud software validation is an upcoming project consisting of many microservices which all together form a single software validation solution. This documentation will be focused on one of the microservice namely SDV: Pre-Deployment Software Validation. - -PDF(POD Descriptor File) is a standard, cloud-agnostic descriptor file meant to be used by Vendors/DC admins to describe the target Cloud Environment/Lab. One of the objectives of PDF is to provide interoperability between various Cloud-infrastructure and Vendors. My work at this internship aims to develop this PDF file further, add more details and develop some toolings around PDF to make it easier to consume by the end-user. The final process will involve validating PDF against installer manifests. The target installers being airship and TripleO. - -In summary, the goals of the project are: -- Develop the PDF file further, add more details and develop some tooling around PDF to make it easier to consume by the end-user. -- Validate the existing PDF with installers. - -Software architecture of the project -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -There were two modules in SDVConfig, one is the extrapolation, which is a tooling around the PDF and validation, which validates the existing pdf with installers. - -The software architecture of extrapolation module is as follows. -.. image:: extrapolation.png - -The software architecture of validation module is as follows. -.. image:: validation.png - -Software technologies used -^^^^^^^^^^^^^^^^^^^^^^^^^^ -The software technologies used in the project are -- Tornado module for creating rest-apis -- json module for handling json data -- yaml module for handling yaml data -- requests module for pushing data to testapi - -Setting up your local environment -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Use Python Virtual Environment Manager. - -``` -python3 -m pip install --user virtualenv -python3 -m venv env -source env/bin/activate -``` - -Install the required packages from requirements.txt. 
- -``` -pip install -r requirements.txt -``` - -Flow of the project -^^^^^^^^^^^^^^^^^^^ -The flow of the two modules is shown in this detailed picture as shown below. - -The flow diagram of the extrapolation module is as follows. - -.. image:: extrapolation-flow.png - -The flow diagram of the validation module is as follows. - -.. image:: validation-flow.png - -Project Structure -^^^^^^^^^^^^^^^^^ -The project structure is as follows. - -.. code-block:: bash - - sdvconfig - ├── cli_validation.py - ├── Dockerfile - ├── extrapolation - │ ├── extrapolation.py - │ └── __init__.py - ├── manifest - │ ├── __init__.py - │ └── manifest.py - ├── mapping - │ ├── airship - │ │ ├── hardware-mapping.json - │ │ ├── info-mapping.json - │ │ ├── network-mapping.json - │ │ ├── platform-mapping.json - │ │ ├── software-mapping.json - │ │ └── storage-mapping.json - │ ├── template - │ │ ├── hardware-mapping.json - │ │ ├── info-mapping.json - │ │ ├── network-mapping.json - │ │ ├── platform-mapping.json - │ │ ├── software-mapping.json - │ │ └── storage-mapping.json - │ └── TripleO - │ ├── hardware-mapping.json - │ ├── info-mapping.json - │ ├── network-mapping.json - │ ├── platform-mapping.json - │ ├── software-mapping.json - │ └── storage-mapping.json - ├── README.md - ├── requirements.txt - ├── server.py - ├── testapi - │ ├── __init__.py - │ └── testapi.py - └── validation - ├── hardware.py - ├── info.py - ├── __init__.py - ├── network.py - ├── platform.py - ├── software.py - └── storage.py - - -Code walkthrough of core functionalities -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Extrapolation -""""""""""""" -The core code of extrapolation is as shown below. - -.. code:: python - def extrapolate(self): - """ Perform Extrapolation """ - - list_servers = [] - - # get ipmi info - count = 0 - - for val in self.pdf["roles"]: - num_servers = int(val["count"]) # Number of servers in the particular role. - role = val["name"] - - for idx in range(num_servers): - temp = dict() - temp["role_name"] = role - temp["device_name"] = str(role) + str(idx + 1) - temp["az_name"] = "default" - temp["ha_name"] = "default" - - temp["ilo_info"] = self.get_ilo_info(count) - count += 1 - - list_servers.append(temp) - -We iterate through list of roles and generate list of servers with the following code. The IP values are picked from networks/ipmi/cidr from the pdf and is used in the extrapolation process. - -Validation -"""""""""" -The core code of validation is as shown below. - -.. 
code:: python - def validate(self): - """ description about validation """ - # validate info - correct, wrong, total, result = InfoValidation( - self.json, self.manifest, self.logger).get_values() - self.correct += correct - self.wrong += wrong - self.total += total - string = ( - "The number of correct :{} wrong:{} and total:{} in info profile\n\n".format( - self.correct, - self.wrong, - self.total)) - self.result += result + string - - # iterate through the roles: have a class for each for each of the roles - for _, value in enumerate(self.json["roles"]): - role = value["name"] - # print(role,value["hardware_profile"]) - correct, wrong, total, result = HardwareValidation( - self.json, value["hardware_profile"], self.manifest, self.logger).get_values() - self.correct += correct - self.wrong += wrong - self.total += total - string = ( - "The number of correct :{} wrong:{} and total:{} in hardware profile\n\n".format( - correct, wrong, total)) - self.result += result + string - - correct, wrong, total, result = StorageValidation( - role, self.json, value["storage_mapping"], self.manifest, self.logger).get_values() - self.correct += correct - self.wrong += wrong - self.total += total - string = ( - "The number of correct :{} wrong:{} and total:{} in storage profile\n\n".format( - correct, wrong, total)) - self.result += result + string - - correct, wrong, total, result = SoftwareValidation(role, self.json, \ - value["sw_set_name"], self.manifest, self.global_sw_dir, self.type_sw_dir, \ - self.site_sw_dir, self.logger).get_values() - self.correct += correct - self.wrong += wrong - self.total += total - string = ( - "The number of correct :{} wrong:{} and total:{} in software profile\n\n".format( - correct, wrong, total)) - self.result += result + string - - correct, wrong, total, result = PlatformValidation( - role, self.json, value["platform_profile"], self.manifest, self.logger).get_values() - self.correct += correct - self.wrong += wrong - self.total += total - string = ( - "The number of correct :{} wrong:{} and total:{} in platform profile\n\n".format( - correct, wrong, total)) - self.result += result + string - - correct, wrong, total, result = NetworkValidation(role, self.json, \ - value["interface_mapping"], self.manifest, self.logger).get_values() - self.correct += correct - self.wrong += wrong - self.total += total - string = ( - "The number of correct :{} wrong:{} and total:{} in network profile\n\n".format( - correct, wrong, total)) - self.result += result + string - - self.testapi_result["timestamp"] = datetime.datetime.now() - self.testapi_result["correct"] = self.correct - self.testapi_result["wrong"] = self.wrong - self.testapi_result["total"] = self.total - - # print the final report - self.logger.info("Validation complete!") - # push results to opnfv testapi - PushResults(self.testapi_result, self.logger) - - return self.result - -and one sample validation file, say hardware validation code is as follow. - -.. 
code:: python - def validate_profile(self, value): - """ validate profile """ - val = "" - profile = 'profile' - keys = [ - 'bios_version', - 'bios_mode', - 'bootstrap_proto', - 'hyperthreading_enabled', - 'bios_setting'] - - for key in self.json[profile]: - if key["profile_name"] == value: - val = key - break - - if val == "": - self.logger.error("Not able to find bios profile name: %s", value) - else: - for key in keys: - try: - temp1 = val[key] - temp2 = self.manifest.find_val(self.role, profile, key) - self.comparison(key, profile, temp1, temp2) - except KeyError: - self.logger.error("Not able to find key: %s in profile: %s", key, value) - - self.logger.info("Completed with the validation of profile name:%s", value) - -and the core recursive code which is used to find keys in the manifest files is as follows. - -.. code::python - def find_vals(self, key, temp_json): - """ insert all matching json key-vals in array """ - # self.logger.info("temp_json value:%s", temp_json) - for k, value in temp_json.items(): - if k == key: - if isinstance(value, list): - for val in value: - self.vals.append(str(val)) - else: - self.vals.append(str(value)) - - if isinstance(value, dict): - found = self.find_vals(key, value) - if found: - return True - - if isinstance(value, list): - for _, val in enumerate(value): - if isinstance(val, str): - continue - found = self.find_vals(key, val) - if found: - return True - return False - -The code first iterates through all the profiles, and for each profile it checks with each key, gets its corresponding mapped value from the mapping files and checks whether the key exists in the installer manifest or not. \ No newline at end of file diff --git a/docs/release/userguide/docker/sdvconfig/developer/extrapolation-flow.png b/docs/release/userguide/docker/sdvconfig/developer/extrapolation-flow.png deleted file mode 100644 index 5b220af..0000000 Binary files a/docs/release/userguide/docker/sdvconfig/developer/extrapolation-flow.png and /dev/null differ diff --git a/docs/release/userguide/docker/sdvconfig/developer/extrapolation.png b/docs/release/userguide/docker/sdvconfig/developer/extrapolation.png deleted file mode 100644 index 1a0f777..0000000 Binary files a/docs/release/userguide/docker/sdvconfig/developer/extrapolation.png and /dev/null differ diff --git a/docs/release/userguide/docker/sdvconfig/developer/validation-flow.png b/docs/release/userguide/docker/sdvconfig/developer/validation-flow.png deleted file mode 100644 index de4853e..0000000 Binary files a/docs/release/userguide/docker/sdvconfig/developer/validation-flow.png and /dev/null differ diff --git a/docs/release/userguide/docker/sdvconfig/developer/validation.png b/docs/release/userguide/docker/sdvconfig/developer/validation.png deleted file mode 100644 index f6d2dc3..0000000 Binary files a/docs/release/userguide/docker/sdvconfig/developer/validation.png and /dev/null differ diff --git a/docs/release/userguide/docker/sdvconfig/user/configguide.rst b/docs/release/userguide/docker/sdvconfig/user/configguide.rst deleted file mode 100644 index d8bb3c2..0000000 --- a/docs/release/userguide/docker/sdvconfig/user/configguide.rst +++ /dev/null @@ -1,83 +0,0 @@ -========= -SDVConfig -========= -Welcome to the SDVConfig config Guide! - -Who should use this guide? - -If you are searching for a way to run the sdvconfig code and don't know how, this guide is for you. - -There currently exists two ways of running the code, they are through commandline and through docker-http. 
- -Commandline -^^^^^^^^^^^ -The configuration required are as follows. - -Use Python Virtual Environment Manager -``` -python3 -m pip install --user virtualenv -python3 -m venv env -source env/bin/activate -``` -Install the required packages from requirements.txt - -``` -pip install -r requirements.txt -``` -Please refer the user guide on how to run the code on commandline. - -docker-http -^^^^^^^^^^^ -Make sure you have docker installed before proceeding any further. - -The Dockerfile contents are as follows. - -.. code:: bash - FROM python:3.8-slim-buster - - # create folder sdvconfig - RUN mkdir sdvconfig - # change the workdir to the newly created file - WORKDIR /sdvconfig/ - - # install from requirements.txt - COPY requirements.txt /sdvconfig/requirements.txt - RUN pip install -r requirements.txt - RUN rm requirements.txt - - # copy all required files/folders - COPY extrapolation/ /sdvconfig/extrapolation/ - COPY mapping/ /sdvconfig/mapping/ - COPY validation/ /sdvconfig/validation/ - COPY server.py /sdvconfig/ - COPY cli_validation.py /sdvconfig/ - COPY testapi/ sdvconfig/testapi/ - COPY manifest /sdvconfig/manifest/ - - # expose port for rest calls - EXPOSE 8000 - - # run the http server - CMD [ "python", "server.py" ] - -Build the docker image with the following command. - -``` -docker build --tag /sdvconfig: -``` -You’ll see Docker step through each instruction in your Dockerfile, building up your image as it goes. If successful, the build process should end with a message Successfully tagged /sdvconfig:. - -Finally we can run the image as a container with the follwing command. - -``` -docker run -v /path/to/folder:/path/to/folder --publish 8000:8000 --detach --name config /sdvconfig: -``` - -There are a couple of common flags here: -- --publish asks Docker to forward traffic incoming on the host’s port 8000 to the container’s port 8080. Containers have their own private set of ports, so if you want to reach one from the network, you have to forward traffic to it in this way. Otherwise, firewall rules will prevent all network traffic from reaching your container, as a default security posture. -- --detach asks Docker to run this container in the background. -- --name specifies a name with which you can refer to your container in subsequent commands, in this case config. -Finally we attach a volume from the localhost to the container so we can feed in files such as pdf, manifests to docker-http module and get the results persisted in this volume . This is done with ``` -v ```. - -Please refer the user guide regarding the http requests. - diff --git a/docs/release/userguide/docker/sdvconfig/user/userguide.rst b/docs/release/userguide/docker/sdvconfig/user/userguide.rst deleted file mode 100644 index f38303d..0000000 --- a/docs/release/userguide/docker/sdvconfig/user/userguide.rst +++ /dev/null @@ -1,42 +0,0 @@ -========= -SDVConfig -========= -Welcome to the SDVConfig user Guide! - -Who should use this guide? - -If you are searching for a way to run the sdvconfig code and don't know how, this guide is for you. - -Currently there exists two functionalities, extrapolation and validation. - -To do a extrapolate POST request, use following command. 
- -``` -curl --header "Content-Type: application/json" --request POST --data '{"pdf_fn":"<>", "store_at":"<>"}' http://localhost:8000/extrapolate -``` - -To run this on commandline, use the following command - -``` -python extrapolation.py --pdf_fn="path/to/pdf_fn" --store-at="path/to/storage" -``` - -The pdf_fn key expects absolute filepath to pdf or a raw github file url. -the store_at key expects absolute filepath to which the new generated pdf should be stored at. - -To do a validation POST request, use following command - -``` -curl --header "Content-Type: application/json" --request POST --data '{"pdf_file":"<>", "inst_dir":"<>", "inst_type":"<>", "sitename":"<>"}' http://localhost:8000/validate -``` - -To run this on commandline, use the following command. - -``` -python cli_validation.py --inst_dir=path/to/mani_dir --inst_type=type --pdf=path/to/pdf --sitename=sitename -``` - -The pdf_file key expects absolute filepath to pdf or a raw github file url. -The inst_dir key expects absolute filepath to installer directory or a github clone url. -The inst_type key expects installer type string ("airship", "tripleo", etc.) -sitename: intel-pod10, intel-pod15 etc. diff --git a/docs/release/userguide/docker/state/development/design/index.rst b/docs/release/userguide/docker/state/development/design/index.rst deleted file mode 100644 index e69de29..0000000 diff --git a/docs/release/userguide/docker/state/development/overview/index.rst b/docs/release/userguide/docker/state/development/overview/index.rst deleted file mode 100644 index e69de29..0000000 diff --git a/docs/release/userguide/docker/state/development/overview/result-api-architecture.png b/docs/release/userguide/docker/state/development/overview/result-api-architecture.png deleted file mode 100644 index e464187..0000000 Binary files a/docs/release/userguide/docker/state/development/overview/result-api-architecture.png and /dev/null differ diff --git a/docs/release/userguide/docker/state/development/overview/result_api.rst b/docs/release/userguide/docker/state/development/overview/result_api.rst deleted file mode 100644 index 12fd62c..0000000 --- a/docs/release/userguide/docker/state/development/overview/result_api.rst +++ /dev/null @@ -1,178 +0,0 @@ -Result API -========== - -Table of contents -^^^^^^^^^^^^^^^^^ - -- `Description <#Description>`__ -- `Architecture <#Architecture>`__ -- `Managing configuration <#Managing%20configuration>`__ -- `How to Use <#How%20to%20Use>`__ - ----------------------- - -Description -~~~~~~~~~~~ - -Result API is very simple write-only API for storing results of any size -to multiple storage endpoints. - -Results API focuses on only sending test results to required -destinations so that they can be later viewed by user but do not -implements read functionality as this is not required while running -tests - -Usage is very simple: -''''''''''''''''''''' - -.. code:: python - - from result_api import result_api as api - from result_api import Local - - # First register storage enpoints - endpoint = Local() - api.register_storage(endpoint) - - # Now, use anywhere in program - api.store(data) - -Note: In above example we used only one storage endpoints so, whenever -``api`` will make call to ``store()`` method it will be used. But if -register multiple storage endpoints then whenever ``api`` will make call -to ``store()`` method all endpoints will be called to store data. This -package doesn't allows to selectively make calls to different storage -endpoint for storing. 
Instead follows **one api -> many endpoint (one to -many)** design. - ----------------------- - -Architecture -~~~~~~~~~~~~ - -|img| - -*ResultsAPI exports data to various storage endpoints like harddisk, -SQL, Mongo, etc. exposed by StorageAPI in write-only mode* - ----------------------- - -Managing configuration -~~~~~~~~~~~~~~~~~~~~~~ - -Result API uses `Conf: Program Settings handler <../conf/readme.md>`__ -to manage all it's settings. So, user can change any settings of -ResultAPI as they need. ``Conf`` allows to change settings in two ways -either read from file ``settings.load_from_file(path/to/file)`` or set -inside program ``settings.setValue(key, Value)`` - -Configurations available: - -+-----------------+---------------------+-------------------------------------+ -| Storage | setting | optional | -+=================+=====================+=====================================+ -| Local Storage | results\_path | Yes, defaults to ``/tmp/local/`` | -+-----------------+---------------------+-------------------------------------+ -| Local Storage | results\_filename | Yes, defaults to ``results.json`` | -+-----------------+---------------------+-------------------------------------+ - ----------------------- - -How to Use -~~~~~~~~~~ - -For using ResultAPI successfully the following steps are required - -#. **Import result\_api instance** from ResultAPI package as this - instance will be commonly used across our program. -#. **Import required StorageAPI** -#. **Set Storage settings** or load settings from file -#. **Create Storage Endpoints from StorageAPI** -#. **Register Storage Endpoints with result\_api** - -Now, result\_api is ready to use. We can send values to all registered -storage endpoints by making a simple call to ```store()`` -method <#store()%20method>`__ - -store() method -'''''''''''''' - -``result_api`` has ``store(dict)`` method for storing data. - -.. code:: python - - # data must be a dict - data = { 'key1': "value1", 'key2': 5, 'dumykeytext': "dummy string value"} - result_api.store(data) - -rfile -''''' - -rfile stands for result file. When you have extremely big value for a -key you would like to tell your storage endpoint to store them in -separate file and refer them in actual key. For storing value of type -file in result\_api use rfile construct. - -.. code:: python - - data = { 'name': 'example', 'myfile': rfile('Text to store in this file')} - result_api.store(data) - # Respective StorageAPI will evaluate "data" for all rfile values and store their text in some separate file/storage-object and put there refernece in "data" - -Use Local Storage with Result API -''''''''''''''''''''''''''''''''' - -.. code:: python - - from result_api import result_api as api - from result_api import Local - from result_api import rfile - - def main(): - # Update settings required for Local storage - settings.setValue('results_path', '/tmp/myresults/') - settings.setValue('results_filename', 'results.json') - - # Register Local() to result_api, this will load above settings automatically - api.register_storage(Local()) - - data = { 'testcase': "RA1.24", 'value': 'Pass', 'logs': rfile('These are logs')} - - # Now, store any data - api.store(data) - -Register Storage Endpoint -''''''''''''''''''''''''' - -.. 
code:: python - - from result_api.storage.mystorageapi import MyStorageAPI - - # Set required settings values for storage endpoint - settings.setValue('mysetting1', 'value') - settings.setValue('mysetting2', 'value2') - - #Now Register StorageAPI to ResultAPI - endpoint = MyStorageAPI() - api.register_storage(endpoint) - -Changing settings -''''''''''''''''' - -.. code:: python - - # Set values in a yaml file and load it - settings.load_from_file('result.yml') - - # Or set in program - settings.setValue('key', 'value') - - # Note above steps will only change settings values but will not impact any previously registered storage endpoints - # To use endpoints with new value, register new endpoints - endpoint = MyStorageAPI() - api.register_storage(endpoint) - - # And do not forget to unregister old endpoint as they have old settings - api.unregister_storage(old_endpoint) - -.. |img| image:: result-api-architecture.png diff --git a/docs/release/userguide/docker/state/development/overview/settings.rst b/docs/release/userguide/docker/state/development/overview/settings.rst deleted file mode 100644 index c70f21e..0000000 --- a/docs/release/userguide/docker/state/development/overview/settings.rst +++ /dev/null @@ -1,38 +0,0 @@ -Program Settings handler ------------------------- - -``from tools.conf import settings`` - -Settings will be loaded from several ``.yaml`` or ``.yml`` files and any -user provided settings files. - -So, that user can use these settings values across program. - -This utility loads settings from yaml files in form of key and value -where key is always ``string`` while value can be of type python: - -- ``int`` e.g. 5, 45, 1234 -- ``str`` e.g. hello, world -- ``float`` e.g. 34.56, 12.7 -- ``list`` e.g. [ ‘month’ , ‘is’, 45 ] -- ``dict`` e.g. {‘program’: ‘sdv’, ‘language’: ‘python’} -- ``bool`` e.g. True, False - -keys are case-insensitive -^^^^^^^^^^^^^^^^^^^^^^^^^ - -The utility is case-insensitive to keys used as it automatically -converts all keys to lower case. - -E.g. ``Program : sdv``, ``program : sdv``, ``PrOgRam : sdv`` all are -same. - -- ``settings.load_from_file(path/to/file)`` -- ``settings.load_from_env()`` -- ``settings.load_from_dir(directory/to/search/yamls)`` - -``settings.load_from_dir()`` reads all yaml files in given directory and -all it’s sub-directory recursively in ascending order, hence if a -configuration item exists in more than one file, then the setting in the -file that occurs in the last read file will have high precedence and -overwrite previous values. . \ No newline at end of file diff --git a/docs/release/userguide/docker/state/release/configguide/index.rst b/docs/release/userguide/docker/state/release/configguide/index.rst deleted file mode 100644 index e69de29..0000000 diff --git a/docs/release/userguide/docker/state/release/userguide/index.rst b/docs/release/userguide/docker/state/release/userguide/index.rst deleted file mode 100644 index e69de29..0000000 diff --git a/docs/release/userguide/docker/urls/userguide.rst b/docs/release/userguide/docker/urls/userguide.rst deleted file mode 100644 index 15d0724..0000000 --- a/docs/release/userguide/docker/urls/userguide.rst +++ /dev/null @@ -1,24 +0,0 @@ -******************************************************** -CIRV-SDV: Validating the URLs in the Installer Manifests -******************************************************** - -Supported Installer Manifest: Airship. - -Building and starting the container: -* Go to folder sdv/docker/sdvurls -* Build the container with 'docker build' command. 
Consider naming/tagging the container properly. -* Run the container using docker run. The container creates a report under /tmp folder. Hence, consider mapping a volume to '/tmp' folder to get the report. - - -Interacting with the container -############################## -Inputs: - -* Installer Used. Keyword: "installer". Example Value: "airship". This is mandatory -* Link to the installer manifests. Keyword: "link". Example Value: "https://gerrit.opnfv.org/gerrit/airship". This is mandatory -* Version (For Airship, this refers to Treasuremap Version). Keyword: "version". Example Value: "v1.7". This is mandatory only for Airship. -* Name of the site. Keyword: "name". Example Value: "intel-pod10". This is mandatory only for Airship - -Assuming the container is running locally, the example command would be:: - - curl --header "Content-Type: application/json" --redata '{"installer":"airship", "link":"https://gerrit.opnfv.org/gerrit/airship", "version":"v1.7", "name":"intel-pod10"}' http://localhost:8989/airship diff --git a/docs/release/userguide/index.rst b/docs/release/userguide/index.rst deleted file mode 100644 index 6e36402..0000000 --- a/docs/release/userguide/index.rst +++ /dev/null @@ -1,6 +0,0 @@ -.. This work is licensed under a Creative Commons Attribution 4.0 International License. -.. http://creativecommons.org/licenses/by/4.0 -.. (c) OPNFV, Spirent Communications, University of Delhi and others. - -CIRV-SDV User Guide -=================== diff --git a/docs/release/userguide/pdf/user/userguide.rst b/docs/release/userguide/pdf/user/userguide.rst deleted file mode 100644 index 2394888..0000000 --- a/docs/release/userguide/pdf/user/userguide.rst +++ /dev/null @@ -1,29 +0,0 @@ -******************************************* -Platform Descriptor File (PDF) : User Guide -******************************************* - -Using the PDF template -###################### -Please follow these steps. - -* Go to sdv/pdf/template folder. -* Make a copy of it using site name :: - - - cp pdf_template.json .json. - -* Fill all the values. - -* Use the .json with sdvconfig tool to extrapolate the pdf. Refer to sdvconfig documentation. -* PDF is ready to use. - -Using GUI to create platform-description -######################################## -Please follow these steps. - -* Go to sdv/pdf/site folder. -* Open index.html with any browser. -* Follow the guidelines and fill all the values. -* Click Submit and Download. File named sitepdf.json will be downloaded. -* Use the sitepdf.json with sdvconfig tool to extrapolate the pdf. Refer to sdvconfig documentation. -* PDF is ready to use. diff --git a/docs/sdvconfig/developer/devguide.rst b/docs/sdvconfig/developer/devguide.rst new file mode 100644 index 0000000..ec61c46 --- /dev/null +++ b/docs/sdvconfig/developer/devguide.rst @@ -0,0 +1,309 @@ +========================= +SDVConfig Developer Guide +========================= +Welcome to the SDVConfig Developer Guide! + +Who should use this guide? + +If you want to extend SDVConfig by using a creating a module, adding functionality to an existing module, or expanding test coverage, this guide is for you. We’ve included detailed information for developers on how to test and document modules, as well as the prerequisites for getting your module be accepted into the main SDV repository. 
+
+Contents
+^^^^^^^^
+- Description of the project
+- Software architecture of the project
+- Software technologies used
+- Setting up your local environment
+- Flow of the project
+- Project Structure
+- Code walkthrough of core functionalities
+
+Description of the project
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+Cloud software validation is an upcoming project consisting of many microservices which together form a single software validation solution. This documentation focuses on one of these microservices, namely SDV: Pre-Deployment Software Validation.
+
+The PDF (POD Descriptor File) is a standard, cloud-agnostic descriptor file meant to be used by vendors/DC admins to describe the target cloud environment/lab. One of the objectives of the PDF is to provide interoperability between various cloud infrastructures and vendors. This work aims to develop the PDF file further, add more details, and develop some tooling around the PDF to make it easier to consume by the end-user. The final step involves validating the PDF against installer manifests; the target installers are Airship and TripleO.
+
+In summary, the goals of the project are:
+
+- Develop the PDF file further, add more details and develop some tooling around the PDF to make it easier to consume by the end-user.
+- Validate the existing PDF with installers.
+
+Software architecture of the project
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+There are two modules in SDVConfig: extrapolation, which is tooling around the PDF, and validation, which validates an existing PDF against installer manifests.
+
+The software architecture of the extrapolation module is as follows.
+
+.. image:: extrapolation.png
+
+The software architecture of the validation module is as follows.
+
+.. image:: validation.png
+
+Software technologies used
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+The software technologies used in the project are:
+
+- the Tornado module for creating REST APIs
+- the json module for handling JSON data
+- the yaml module for handling YAML data
+- the requests module for pushing data to TestAPI
+
+Setting up your local environment
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Use the Python virtual environment manager::
+
+    python3 -m pip install --user virtualenv
+    python3 -m venv env
+    source env/bin/activate
+
+Install the required packages from requirements.txt::
+
+    pip install -r requirements.txt
+
+Flow of the project
+^^^^^^^^^^^^^^^^^^^
+The flow of the two modules is shown in the diagrams below.
+
+The flow diagram of the extrapolation module is as follows.
+
+.. image:: extrapolation-flow.png
+
+The flow diagram of the validation module is as follows.
+
+.. image:: validation-flow.png
+
+Project Structure
+^^^^^^^^^^^^^^^^^
+The project structure is as follows.
+
+..
code-block:: bash + + sdvconfig + ├── cli_validation.py + ├── Dockerfile + ├── extrapolation + │ ├── extrapolation.py + │ └── __init__.py + ├── manifest + │ ├── __init__.py + │ └── manifest.py + ├── mapping + │ ├── airship + │ │ ├── hardware-mapping.json + │ │ ├── info-mapping.json + │ │ ├── network-mapping.json + │ │ ├── platform-mapping.json + │ │ ├── software-mapping.json + │ │ └── storage-mapping.json + │ ├── template + │ │ ├── hardware-mapping.json + │ │ ├── info-mapping.json + │ │ ├── network-mapping.json + │ │ ├── platform-mapping.json + │ │ ├── software-mapping.json + │ │ └── storage-mapping.json + │ └── TripleO + │ ├── hardware-mapping.json + │ ├── info-mapping.json + │ ├── network-mapping.json + │ ├── platform-mapping.json + │ ├── software-mapping.json + │ └── storage-mapping.json + ├── README.md + ├── requirements.txt + ├── server.py + ├── testapi + │ ├── __init__.py + │ └── testapi.py + └── validation + ├── hardware.py + ├── info.py + ├── __init__.py + ├── network.py + ├── platform.py + ├── software.py + └── storage.py + + +Code walkthrough of core functionalities +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Extrapolation +""""""""""""" +The core code of extrapolation is as shown below. + +.. code:: python + def extrapolate(self): + """ Perform Extrapolation """ + + list_servers = [] + + # get ipmi info + count = 0 + + for val in self.pdf["roles"]: + num_servers = int(val["count"]) # Number of servers in the particular role. + role = val["name"] + + for idx in range(num_servers): + temp = dict() + temp["role_name"] = role + temp["device_name"] = str(role) + str(idx + 1) + temp["az_name"] = "default" + temp["ha_name"] = "default" + + temp["ilo_info"] = self.get_ilo_info(count) + count += 1 + + list_servers.append(temp) + +We iterate through list of roles and generate list of servers with the following code. The IP values are picked from networks/ipmi/cidr from the pdf and is used in the extrapolation process. + +Validation +"""""""""" +The core code of validation is as shown below. + +.. 
code:: python + def validate(self): + """ description about validation """ + # validate info + correct, wrong, total, result = InfoValidation( + self.json, self.manifest, self.logger).get_values() + self.correct += correct + self.wrong += wrong + self.total += total + string = ( + "The number of correct :{} wrong:{} and total:{} in info profile\n\n".format( + self.correct, + self.wrong, + self.total)) + self.result += result + string + + # iterate through the roles: have a class for each for each of the roles + for _, value in enumerate(self.json["roles"]): + role = value["name"] + # print(role,value["hardware_profile"]) + correct, wrong, total, result = HardwareValidation( + self.json, value["hardware_profile"], self.manifest, self.logger).get_values() + self.correct += correct + self.wrong += wrong + self.total += total + string = ( + "The number of correct :{} wrong:{} and total:{} in hardware profile\n\n".format( + correct, wrong, total)) + self.result += result + string + + correct, wrong, total, result = StorageValidation( + role, self.json, value["storage_mapping"], self.manifest, self.logger).get_values() + self.correct += correct + self.wrong += wrong + self.total += total + string = ( + "The number of correct :{} wrong:{} and total:{} in storage profile\n\n".format( + correct, wrong, total)) + self.result += result + string + + correct, wrong, total, result = SoftwareValidation(role, self.json, \ + value["sw_set_name"], self.manifest, self.global_sw_dir, self.type_sw_dir, \ + self.site_sw_dir, self.logger).get_values() + self.correct += correct + self.wrong += wrong + self.total += total + string = ( + "The number of correct :{} wrong:{} and total:{} in software profile\n\n".format( + correct, wrong, total)) + self.result += result + string + + correct, wrong, total, result = PlatformValidation( + role, self.json, value["platform_profile"], self.manifest, self.logger).get_values() + self.correct += correct + self.wrong += wrong + self.total += total + string = ( + "The number of correct :{} wrong:{} and total:{} in platform profile\n\n".format( + correct, wrong, total)) + self.result += result + string + + correct, wrong, total, result = NetworkValidation(role, self.json, \ + value["interface_mapping"], self.manifest, self.logger).get_values() + self.correct += correct + self.wrong += wrong + self.total += total + string = ( + "The number of correct :{} wrong:{} and total:{} in network profile\n\n".format( + correct, wrong, total)) + self.result += result + string + + self.testapi_result["timestamp"] = datetime.datetime.now() + self.testapi_result["correct"] = self.correct + self.testapi_result["wrong"] = self.wrong + self.testapi_result["total"] = self.total + + # print the final report + self.logger.info("Validation complete!") + # push results to opnfv testapi + PushResults(self.testapi_result, self.logger) + + return self.result + +and one sample validation file, say hardware validation code is as follow. + +.. 
code:: python + def validate_profile(self, value): + """ validate profile """ + val = "" + profile = 'profile' + keys = [ + 'bios_version', + 'bios_mode', + 'bootstrap_proto', + 'hyperthreading_enabled', + 'bios_setting'] + + for key in self.json[profile]: + if key["profile_name"] == value: + val = key + break + + if val == "": + self.logger.error("Not able to find bios profile name: %s", value) + else: + for key in keys: + try: + temp1 = val[key] + temp2 = self.manifest.find_val(self.role, profile, key) + self.comparison(key, profile, temp1, temp2) + except KeyError: + self.logger.error("Not able to find key: %s in profile: %s", key, value) + + self.logger.info("Completed with the validation of profile name:%s", value) + +and the core recursive code which is used to find keys in the manifest files is as follows. + +.. code::python + def find_vals(self, key, temp_json): + """ insert all matching json key-vals in array """ + # self.logger.info("temp_json value:%s", temp_json) + for k, value in temp_json.items(): + if k == key: + if isinstance(value, list): + for val in value: + self.vals.append(str(val)) + else: + self.vals.append(str(value)) + + if isinstance(value, dict): + found = self.find_vals(key, value) + if found: + return True + + if isinstance(value, list): + for _, val in enumerate(value): + if isinstance(val, str): + continue + found = self.find_vals(key, val) + if found: + return True + return False + +The code first iterates through all the profiles, and for each profile it checks with each key, gets its corresponding mapped value from the mapping files and checks whether the key exists in the installer manifest or not. diff --git a/docs/sdvconfig/developer/extrapolation-flow.png b/docs/sdvconfig/developer/extrapolation-flow.png new file mode 100644 index 0000000..5b220af Binary files /dev/null and b/docs/sdvconfig/developer/extrapolation-flow.png differ diff --git a/docs/sdvconfig/developer/extrapolation.png b/docs/sdvconfig/developer/extrapolation.png new file mode 100644 index 0000000..1a0f777 Binary files /dev/null and b/docs/sdvconfig/developer/extrapolation.png differ diff --git a/docs/sdvconfig/developer/validation-flow.png b/docs/sdvconfig/developer/validation-flow.png new file mode 100644 index 0000000..de4853e Binary files /dev/null and b/docs/sdvconfig/developer/validation-flow.png differ diff --git a/docs/sdvconfig/developer/validation.png b/docs/sdvconfig/developer/validation.png new file mode 100644 index 0000000..f6d2dc3 Binary files /dev/null and b/docs/sdvconfig/developer/validation.png differ diff --git a/docs/sdvconfig/user/configguide.rst b/docs/sdvconfig/user/configguide.rst new file mode 100644 index 0000000..424f08c --- /dev/null +++ b/docs/sdvconfig/user/configguide.rst @@ -0,0 +1,83 @@ +====================== +SDVConfig Config Guide +====================== +Welcome to the SDVConfig config Guide! + +Who should use this guide? + +If you are searching for a way to run the sdvconfig code and don't know how, this guide is for you. + +There currently exists two ways of running the code, they are through commandline and through docker-http. + +Commandline +^^^^^^^^^^^ +The configuration required are as follows. + +Use Python Virtual Environment Manager +``` +python3 -m pip install --user virtualenv +python3 -m venv env +source env/bin/activate +``` +Install the required packages from requirements.txt + +``` +pip install -r requirements.txt +``` +Please refer the user guide on how to run the code on commandline. 
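+
+For illustration, the complete commandline setup can be summarised by the
+sketch below, assuming the commands are run from the sdvconfig directory;
+the manifest directory, pdf path and site name are hypothetical placeholders,
+and the individual flags are described in the user guide::
+
+    python3 -m venv env
+    source env/bin/activate
+    pip install -r requirements.txt
+    python cli_validation.py --inst_dir=/path/to/manifests --inst_type=airship \
+        --pdf=/path/to/site_pdf.json --sitename=intel-pod10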
+
+docker-http
+^^^^^^^^^^^
+Make sure you have Docker installed before proceeding any further.
+
+The Dockerfile contents are as follows.
+
+.. code:: bash
+
+    FROM python:3.8-slim-buster
+
+    # create folder sdvconfig
+    RUN mkdir sdvconfig
+    # change the workdir to the newly created folder
+    WORKDIR /sdvconfig/
+
+    # install from requirements.txt
+    COPY requirements.txt /sdvconfig/requirements.txt
+    RUN pip install -r requirements.txt
+    RUN rm requirements.txt
+
+    # copy all required files/folders
+    COPY extrapolation/ /sdvconfig/extrapolation/
+    COPY mapping/ /sdvconfig/mapping/
+    COPY validation/ /sdvconfig/validation/
+    COPY server.py /sdvconfig/
+    COPY cli_validation.py /sdvconfig/
+    COPY testapi/ /sdvconfig/testapi/
+    COPY manifest /sdvconfig/manifest/
+
+    # expose port for rest calls
+    EXPOSE 8000
+
+    # run the http server
+    CMD [ "python", "server.py" ]
+
+Build the docker image with the following command::
+
+    docker build --tag /sdvconfig:
+
+You’ll see Docker step through each instruction in your Dockerfile, building up your image as it goes. If successful, the build process should end with a message Successfully tagged /sdvconfig:.
+
+Finally, we can run the image as a container with the following command::
+
+    docker run -v /path/to/folder:/path/to/folder --publish 8000:8000 --detach --name config /sdvconfig:
+
+There are a couple of common flags here:
+
+- --publish asks Docker to forward traffic incoming on the host’s port 8000 to the container’s port 8000. Containers have their own private set of ports, so if you want to reach one from the network, you have to forward traffic to it in this way. Otherwise, firewall rules will prevent all network traffic from reaching your container, as a default security posture.
+- --detach asks Docker to run this container in the background.
+- --name specifies a name with which you can refer to your container in subsequent commands, in this case config.
+
+Finally, we attach a volume from the localhost to the container so we can feed in files such as the pdf and manifests to the docker-http module and have the results persisted in this volume. This is done with the ``-v`` flag.
+
+Please refer to the user guide for details of the HTTP requests.
+
diff --git a/docs/sdvconfig/user/userguide.rst b/docs/sdvconfig/user/userguide.rst
new file mode 100644
index 0000000..917ea8d
--- /dev/null
+++ b/docs/sdvconfig/user/userguide.rst
@@ -0,0 +1,42 @@
+====================
+SDVConfig User Guide
+====================
+Welcome to the SDVConfig User Guide!
+
+Who should use this guide?
+
+If you are searching for a way to run the sdvconfig code and don't know how, this guide is for you.
+
+Currently there are two functionalities: extrapolation and validation.
+
+To make an extrapolation POST request, use the following command::
+
+    curl --header "Content-Type: application/json" --request POST --data '{"pdf_fn":"<>", "store_at":"<>"}' http://localhost:8000/extrapolate
+
+To run this on the commandline, use the following command::
+
+    python extrapolation.py --pdf_fn="path/to/pdf_fn" --store-at="path/to/storage"
+
+The pdf_fn key expects an absolute filepath to the pdf or a raw github file url.
+The store_at key expects the absolute filepath where the newly generated pdf should be stored.
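+
+For illustration, a filled-in extrapolate request might look like the sketch
+below, assuming the server is reachable on localhost:8000; the two file paths
+are hypothetical placeholders::
+
+    curl --header "Content-Type: application/json" --request POST \
+         --data '{"pdf_fn":"/path/to/site_pdf.json", "store_at":"/path/to/extrapolated_pdf.json"}' \
+         http://localhost:8000/extrapolate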
+
+To make a validation POST request, use the following command::
+
+    curl --header "Content-Type: application/json" --request POST --data '{"pdf_file":"<>", "inst_dir":"<>", "inst_type":"<>", "sitename":"<>"}' http://localhost:8000/validate
+
+To run this on the commandline, use the following command::
+
+    python cli_validation.py --inst_dir=path/to/mani_dir --inst_type=type --pdf=path/to/pdf --sitename=sitename
+
+The pdf_file key expects an absolute filepath to the pdf or a raw github file url.
+The inst_dir key expects an absolute filepath to the installer directory or a github clone url.
+The inst_type key expects an installer type string ("airship", "tripleo", etc.).
+The sitename key expects the site name, e.g. intel-pod10, intel-pod15.
diff --git a/docs/state/development/design/index.rst b/docs/state/development/design/index.rst
new file mode 100644
index 0000000..e69de29
diff --git a/docs/state/development/overview/index.rst b/docs/state/development/overview/index.rst
new file mode 100644
index 0000000..e69de29
diff --git a/docs/state/development/overview/result-api-architecture.png b/docs/state/development/overview/result-api-architecture.png
new file mode 100644
index 0000000..e464187
Binary files /dev/null and b/docs/state/development/overview/result-api-architecture.png differ
diff --git a/docs/state/development/overview/result_api.rst b/docs/state/development/overview/result_api.rst
new file mode 100644
index 0000000..6412c7d
--- /dev/null
+++ b/docs/state/development/overview/result_api.rst
@@ -0,0 +1,178 @@
+SDVState: Result API
+====================
+
+Table of contents
+^^^^^^^^^^^^^^^^^
+
+- `Description <#Description>`__
+- `Architecture <#Architecture>`__
+- `Managing configuration <#Managing%20configuration>`__
+- `How to Use <#How%20to%20Use>`__
+
+----------------------
+
+Description
+~~~~~~~~~~~
+
+Result API is a very simple, write-only API for storing results of any size
+to multiple storage endpoints.
+
+Result API focuses only on sending test results to the required
+destinations so that they can be viewed later by the user; it does not
+implement read functionality, as this is not required while running
+tests.
+
+Usage is very simple:
+'''''''''''''''''''''
+
+.. code:: python
+
+    from result_api import result_api as api
+    from result_api import Local
+
+    # First register storage endpoints
+    endpoint = Local()
+    api.register_storage(endpoint)
+
+    # Now, use anywhere in program
+    api.store(data)
+
+Note: In the above example we registered only one storage endpoint, so every
+call ``api`` makes to the ``store()`` method uses that endpoint. If you
+register multiple storage endpoints, then every call to ``store()`` stores
+the data to all of them. This package does not allow calls to be made
+selectively to individual storage endpoints; instead it follows a
+**one api -> many endpoints (one to many)** design.
+
+----------------------
+
+Architecture
+~~~~~~~~~~~~
+
+|img|
+
+*Result API exports data to various storage endpoints such as hard disk,
+SQL, Mongo, etc., exposed by a StorageAPI in write-only mode*
+
+----------------------
+
+Managing configuration
+~~~~~~~~~~~~~~~~~~~~~~
+
+Result API uses `Conf: Program Settings handler <../conf/readme.md>`__
+to manage all its settings, so users can change any ResultAPI settings
+as they need.
``Conf`` allows to change settings in two ways +either read from file ``settings.load_from_file(path/to/file)`` or set +inside program ``settings.setValue(key, Value)`` + +Configurations available: + ++-----------------+---------------------+-------------------------------------+ +| Storage | setting | optional | ++=================+=====================+=====================================+ +| Local Storage | results\_path | Yes, defaults to ``/tmp/local/`` | ++-----------------+---------------------+-------------------------------------+ +| Local Storage | results\_filename | Yes, defaults to ``results.json`` | ++-----------------+---------------------+-------------------------------------+ + +---------------------- + +How to Use +~~~~~~~~~~ + +For using ResultAPI successfully the following steps are required + +#. **Import result\_api instance** from ResultAPI package as this + instance will be commonly used across our program. +#. **Import required StorageAPI** +#. **Set Storage settings** or load settings from file +#. **Create Storage Endpoints from StorageAPI** +#. **Register Storage Endpoints with result\_api** + +Now, result\_api is ready to use. We can send values to all registered +storage endpoints by making a simple call to ```store()`` +method <#store()%20method>`__ + +store() method +'''''''''''''' + +``result_api`` has ``store(dict)`` method for storing data. + +.. code:: python + + # data must be a dict + data = { 'key1': "value1", 'key2': 5, 'dumykeytext': "dummy string value"} + result_api.store(data) + +rfile +''''' + +rfile stands for result file. When you have extremely big value for a +key you would like to tell your storage endpoint to store them in +separate file and refer them in actual key. For storing value of type +file in result\_api use rfile construct. + +.. code:: python + + data = { 'name': 'example', 'myfile': rfile('Text to store in this file')} + result_api.store(data) + # Respective StorageAPI will evaluate "data" for all rfile values and store their text in some separate file/storage-object and put there refernece in "data" + +Use Local Storage with Result API +''''''''''''''''''''''''''''''''' + +.. code:: python + + from result_api import result_api as api + from result_api import Local + from result_api import rfile + + def main(): + # Update settings required for Local storage + settings.setValue('results_path', '/tmp/myresults/') + settings.setValue('results_filename', 'results.json') + + # Register Local() to result_api, this will load above settings automatically + api.register_storage(Local()) + + data = { 'testcase': "RA1.24", 'value': 'Pass', 'logs': rfile('These are logs')} + + # Now, store any data + api.store(data) + +Register Storage Endpoint +''''''''''''''''''''''''' + +.. code:: python + + from result_api.storage.mystorageapi import MyStorageAPI + + # Set required settings values for storage endpoint + settings.setValue('mysetting1', 'value') + settings.setValue('mysetting2', 'value2') + + #Now Register StorageAPI to ResultAPI + endpoint = MyStorageAPI() + api.register_storage(endpoint) + +Changing settings +''''''''''''''''' + +.. 
code:: python + + # Set values in a yaml file and load it + settings.load_from_file('result.yml') + + # Or set in program + settings.setValue('key', 'value') + + # Note above steps will only change settings values but will not impact any previously registered storage endpoints + # To use endpoints with new value, register new endpoints + endpoint = MyStorageAPI() + api.register_storage(endpoint) + + # And do not forget to unregister old endpoint as they have old settings + api.unregister_storage(old_endpoint) + +.. |img| image:: result-api-architecture.png diff --git a/docs/state/development/overview/settings.rst b/docs/state/development/overview/settings.rst new file mode 100644 index 0000000..29068c5 --- /dev/null +++ b/docs/state/development/overview/settings.rst @@ -0,0 +1,38 @@ +SDVState: Program Settings handler +---------------------------------- + +``from tools.conf import settings`` + +Settings will be loaded from several ``.yaml`` or ``.yml`` files and any +user provided settings files. + +So, that user can use these settings values across program. + +This utility loads settings from yaml files in form of key and value +where key is always ``string`` while value can be of type python: + +- ``int`` e.g. 5, 45, 1234 +- ``str`` e.g. hello, world +- ``float`` e.g. 34.56, 12.7 +- ``list`` e.g. [ ‘month’ , ‘is’, 45 ] +- ``dict`` e.g. {‘program’: ‘sdv’, ‘language’: ‘python’} +- ``bool`` e.g. True, False + +keys are case-insensitive +^^^^^^^^^^^^^^^^^^^^^^^^^ + +The utility is case-insensitive to keys used as it automatically +converts all keys to lower case. + +E.g. ``Program : sdv``, ``program : sdv``, ``PrOgRam : sdv`` all are +same. + +- ``settings.load_from_file(path/to/file)`` +- ``settings.load_from_env()`` +- ``settings.load_from_dir(directory/to/search/yamls)`` + +``settings.load_from_dir()`` reads all yaml files in given directory and +all it’s sub-directory recursively in ascending order, hence if a +configuration item exists in more than one file, then the setting in the +file that occurs in the last read file will have high precedence and +overwrite previous values. . diff --git a/docs/urls/userguide.rst b/docs/urls/userguide.rst new file mode 100644 index 0000000..15d0724 --- /dev/null +++ b/docs/urls/userguide.rst @@ -0,0 +1,24 @@ +******************************************************** +CIRV-SDV: Validating the URLs in the Installer Manifests +******************************************************** + +Supported Installer Manifest: Airship. + +Building and starting the container: +* Go to folder sdv/docker/sdvurls +* Build the container with 'docker build' command. Consider naming/tagging the container properly. +* Run the container using docker run. The container creates a report under /tmp folder. Hence, consider mapping a volume to '/tmp' folder to get the report. + + +Interacting with the container +############################## +Inputs: + +* Installer Used. Keyword: "installer". Example Value: "airship". This is mandatory +* Link to the installer manifests. Keyword: "link". Example Value: "https://gerrit.opnfv.org/gerrit/airship". This is mandatory +* Version (For Airship, this refers to Treasuremap Version). Keyword: "version". Example Value: "v1.7". This is mandatory only for Airship. +* Name of the site. Keyword: "name". Example Value: "intel-pod10". 
This is mandatory only for Airship.
+
+Assuming the container is running locally, the example command would be::
+
+    curl --header "Content-Type: application/json" --request POST --data '{"installer":"airship", "link":"https://gerrit.opnfv.org/gerrit/airship", "version":"v1.7", "name":"intel-pod10"}' http://localhost:8989/airship
diff --git a/tox.ini b/tox.ini
index 5fafde1..069297a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,25 +1,17 @@
 [tox]
-envlist = pep8,yamllint,pylint
+minversion = 1.6
+envlist =
+    docs,
+    docs-linkcheck
+skipsdist = true
 
-[testenv]
-deps =
-    -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/openstack/requirements/raw/branch/master/upper-constraints.txt}
-    -r{toxinidir}/requirements.txt
-    -r{toxinidir}/test-requirements.txt
-
-[testenv:pep8]
-basepython = python3
-commands = flake8
-
-[testenv:yamllint]
-basepython = python3
-files =
-    INFO.yaml
-    sdv
+[testenv:docs]
+deps = -rdocs/requirements.txt
 commands =
-    yamllint -s {[testenv:yamllint]files} -c {toxinidir}/yamllintrc
+    sphinx-build -b html -n -d {envtmpdir}/doctrees ./docs/ {toxinidir}/docs/_build/html
+    echo "Generated docs available in {toxinidir}/docs/_build/html"
+whitelist_externals = echo
 
-[testenv:pylint]
-basepython = python3
-commands =
-    pylint sdv
+[testenv:docs-linkcheck]
+deps = -rdocs/requirements.txt
+commands = sphinx-build -b linkcheck -d {envtmpdir}/doctrees ./docs/ {toxinidir}/docs/_build/linkcheck
--
cgit 1.2.3-korg