-rw-r--r--   docs/testing/user/userguide/cli_reference.rst   26
-rw-r--r--   docs/testing/user/userguide/testing_guide.rst    24
-rwxr-xr-x   dovetail/run.py                                   8
-rw-r--r--   dovetail/test_runner.py                           2
-rw-r--r--   etc/conf/cmd_config.yml                           4
-rw-r--r--   etc/conf/functest_config.yml                      3
6 files changed, 55 insertions, 12 deletions
diff --git a/docs/testing/user/userguide/cli_reference.rst b/docs/testing/user/userguide/cli_reference.rst
index 7fca7137..9377c2a6 100644
--- a/docs/testing/user/userguide/cli_reference.rst
+++ b/docs/testing/user/userguide/cli_reference.rst
@@ -83,6 +83,10 @@ Commands List
| dovetail run --vnf_tag | -v <vnftest_docker_image_tag> | Specify vnftest's docker image tag, default is beijing.0 |
| | |
+------------------------------------------------------------------------+---------------------------------------------------------------------------------------------------+
+| dovetail run --deploy-scenario <deploy_scenario_name> | Specify the deploy scenario; a scenario name containing 'ovs' enables hugepage flavors (e.g. os-nosdn-ovs-ha) |
+| | |
++------------------------------------------------------------------------+---------------------------------------------------------------------------------------------------+
+
Commands Examples
=================
@@ -191,23 +195,25 @@ Dovetail Run Commands
Dovetail compliance test entry!
Options:
- -b, --bott_tag TEXT Overwrite tag for bottlenecks docker container (e.g. cvp.0.4.0)
- -f, --func_tag TEXT Overwrite tag for functest docker container (e.g. cvp.0.5.0)
- -y, --yard_tag TEXT Overwrite tag for yardstick docker container (e.g. danube.3.2)
- --testarea TEXT compliance testarea within testsuite
- --offline run in offline method, which means not to update the docker upstream images, functest, yardstick, etc.
- -r, --report TEXT push results to DB (e.g. --report http://192.168.135.2:8000/api/v1/results)
- --testsuite TEXT compliance testsuite.
- -d, --debug Flag for showing debug log on screen.
- -h, --help Show this message and exit.
+ -b, --bott_tag TEXT Overwrite tag for bottlenecks docker container (e.g. cvp.0.4.0)
+ -f, --func_tag TEXT Overwrite tag for functest docker container (e.g. cvp.0.5.0)
+ -y, --yard_tag TEXT Overwrite tag for yardstick docker container (e.g. danube.3.2)
+ --deploy-scenario TEXT Specify the DEPLOY_SCENARIO which will be used as input by each test case
+ --testarea TEXT compliance testarea within testsuite
+ --offline run in offline method, which means not to update the docker upstream images, functest, yardstick, etc.
+ -r, --report TEXT push results to DB (e.g. --report http://192.168.135.2:8000/api/v1/results)
+ --testsuite TEXT compliance testsuite.
+ -d, --debug Flag for showing debug log on screen.
+ -h, --help Show this message and exit.
.. code-block:: bash
- root@1f230e719e44:~/dovetail/dovetail# dovetail run --testsuite proposed_tests --testarea vping --offline -r http://192.168.135.2:8000/api/v1/results
+ root@1f230e719e44:~/dovetail/dovetail# dovetail run --testsuite proposed_tests --testarea vping --offline -r http://192.168.135.2:8000/api/v1/results --deploy-scenario os-nosdn-ovs-ha
2017-10-12 14:57:51,278 - run - INFO - ================================================
2017-10-12 14:57:51,278 - run - INFO - Dovetail compliance: proposed_tests!
2017-10-12 14:57:51,278 - run - INFO - ================================================
2017-10-12 14:57:51,278 - run - INFO - Build tag: daily-master-b80bca76-af5d-11e7-879a-0242ac110002
+ 2017-10-12 14:57:51,278 - run - INFO - DEPLOY_SCENARIO : os-nosdn-ovs-ha
2017-10-12 14:57:51,336 - run - WARNING - There is no hosts file /home/jenkins/opnfv/slave_root/workspace/dovetail-compass-huawei-pod7-proposed_tests-danube/cvp/pre_config/hosts.yaml, may be some issues with domain name resolution.
2017-10-12 14:57:51,517 - run - INFO - >>[testcase]: dovetail.vping.tc001
2017-10-12 14:58:21,325 - run - INFO - Results have been pushed to database and stored with local file /home/dovetail/results/results.json.
diff --git a/docs/testing/user/userguide/testing_guide.rst b/docs/testing/user/userguide/testing_guide.rst
index 17c5431f..a979d9d6 100644
--- a/docs/testing/user/userguide/testing_guide.rst
+++ b/docs/testing/user/userguide/testing_guide.rst
@@ -610,6 +610,30 @@ and its intended usage, refer to
.. code-block:: bash
+ $ dovetail run --testcase dovetail.tempest.osinterop --deploy-scenario os-nosdn-ovs-ha
+
+By default, during test case execution the respective feature decides which
+flavor to use for each test scenario under its umbrella.
+In addition, a mechanism is implemented that sets the extra specs of the
+flavors used by the executed test scenarios to hugepages instead of the
+default option.
+This happens when the name of the scenario contains the substring "ovs".
+In that case, the flavor used by the running test case has 'hugepage'
+characteristics.
+
+Taking the above into consideration, and given that the DEPLOY_SCENARIO
+environment variable is not otherwise used by the Dovetail framework (its
+initial value is 'unknown'), it is passed as input to the features
+responsible for test case execution with a value containing the substring
+"ovs" (e.g. os-nosdn-ovs-ha).
+
+Notes for users:
+  - if the system under test uses DPDK, run with --deploy-scenario <xx-yy-ovs-zz>
+    (e.g. os-nosdn-ovs-ha)
+  - this is an experimental feature
+
+.. code-block:: bash
+
$ dovetail run --no-api-validation
By default, results are stored in local files on the Test Host at ``$DOVETAIL_HOME/results``.
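
The hugepage behaviour described in the added documentation above depends only on the scenario name containing "ovs". As a rough standalone Python sketch (not part of this patch; the helper name is made up, while hw:mem_page_size is the standard OpenStack flavor extra spec for hugepage-backed guest memory), the decision a feature project makes could look like this:

    # Illustration only, not part of this patch: derive flavor extra specs from
    # DEPLOY_SCENARIO. 'hw:mem_page_size' is the standard OpenStack extra spec
    # for requesting hugepage-backed guest memory; the helper name is made up.
    import os


    def flavor_extra_specs(deploy_scenario=None):
        """Return hugepage extra specs when the scenario name contains 'ovs'."""
        scenario = deploy_scenario or os.getenv('DEPLOY_SCENARIO', 'unknown')
        if 'ovs' in scenario:
            return {'hw:mem_page_size': 'large'}
        return {}


    print(flavor_extra_specs('os-nosdn-ovs-ha'))      # {'hw:mem_page_size': 'large'}
    print(flavor_extra_specs('os-odl-nofeature-ha'))  # {}
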
diff --git a/dovetail/run.py b/dovetail/run.py
index d32650bc..4f643758 100755
--- a/dovetail/run.py
+++ b/dovetail/run.py
@@ -32,7 +32,6 @@ from utils.dovetail_config import DovetailConfig as dt_cfg
import utils.dovetail_logger as dt_logger
import utils.dovetail_utils as dt_utils
-
EXIT_RUN_FAILED = 2
@@ -203,6 +202,12 @@ def env_init(logger):
    dt_utils.source_env(openrc)


+def update_deploy_scenario(logger, **kwargs):
+    if 'deploy_scenario' in kwargs and kwargs['deploy_scenario'] is not None:
+        os.environ['DEPLOY_SCENARIO'] = kwargs['deploy_scenario']
+        logger.info("DEPLOY_SCENARIO : %s", os.environ['DEPLOY_SCENARIO'])
+
+
def check_hosts_file(logger):
    hosts_file = os.path.join(dt_cfg.dovetail_config['config_dir'],
                              'hosts.yaml')
@@ -279,6 +284,7 @@ def main(*args, **kwargs):
    logger.info('================================================')
    logger.info('Build tag: {}'.format(dt_cfg.dovetail_config['build_tag']))
    parse_cli(logger, **kwargs)
+    update_deploy_scenario(logger, **kwargs)
    env_init(logger)
    copy_userconfig_files(logger)
    copy_patch_files(logger)
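
For readers skimming the diff, a minimal standalone sketch of what the new update_deploy_scenario() helper does (the logging setup and the example kwargs are illustrative only): when --deploy-scenario is given, its value is exported as the DEPLOY_SCENARIO environment variable for the rest of the run.

    # Standalone sketch of the helper added above; the logging setup and the
    # example kwargs are illustrative only.
    import logging
    import os

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger('run')


    def update_deploy_scenario(logger, **kwargs):
        # same logic as the new function in dovetail/run.py
        if 'deploy_scenario' in kwargs and kwargs['deploy_scenario'] is not None:
            os.environ['DEPLOY_SCENARIO'] = kwargs['deploy_scenario']
            logger.info("DEPLOY_SCENARIO : %s", os.environ['DEPLOY_SCENARIO'])


    # kwargs as the CLI layer would pass them for '--deploy-scenario os-nosdn-ovs-ha'
    update_deploy_scenario(logger, deploy_scenario='os-nosdn-ovs-ha')
    assert os.environ['DEPLOY_SCENARIO'] == 'os-nosdn-ovs-ha'
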
diff --git a/dovetail/test_runner.py b/dovetail/test_runner.py
index 40e55283..c49182b8 100644
--- a/dovetail/test_runner.py
+++ b/dovetail/test_runner.py
@@ -151,6 +151,8 @@ class DockerRunner(object):
        config_item['validate_testcase'] = testcase.validate_testcase()
        config_item['testcase'] = testcase.name()
        config_item['os_insecure'] = os.getenv("OS_INSECURE")
+        if 'DEPLOY_SCENARIO' in os.environ:
+            config_item['deploy_scenario'] = os.environ['DEPLOY_SCENARIO']
        return config_item

    def _update_config(self, testcase):
diff --git a/etc/conf/cmd_config.yml b/etc/conf/cmd_config.yml
index 05456583..3bfce46e 100644
--- a/etc/conf/cmd_config.yml
+++ b/etc/conf/cmd_config.yml
@@ -93,3 +93,7 @@ cli:
          - '-n'
        is_flag: 'True'
        help: 'Keep all Containers created for debuging.'
+      deployscenario:
+        flags:
+          - '--deploy-scenario'
+        help: 'Specify the DEPLOY_SCENARIO which will be used as input by each test case'
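
Dovetail builds its command line from cmd_config.yml. As a rough, hypothetical click equivalent of the new 'deployscenario' entry (only the flag and help text come from the file above; the command wrapper and defaults are assumptions), the option would look like this:

    # Rough, hypothetical click equivalent of the new 'deployscenario' entry;
    # the flag and help text mirror cmd_config.yml, everything else is assumed.
    import click


    @click.command()
    @click.option('--deploy-scenario', 'deploy_scenario', default=None,
                  help='Specify the DEPLOY_SCENARIO which will be used as input '
                       'by each test case')
    def run(deploy_scenario):
        """Dovetail compliance test entry!"""
        click.echo('deploy_scenario = {}'.format(deploy_scenario))


    if __name__ == '__main__':
        run()
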
diff --git a/etc/conf/functest_config.yml b/etc/conf/functest_config.yml
index 1e7cfeb3..b9453e17 100644
--- a/etc/conf/functest_config.yml
+++ b/etc/conf/functest_config.yml
@@ -1,6 +1,7 @@
---
{% set validate_testcase = validate_testcase or '' %}
+{% set deploy_scenario = deploy_scenario or 'unknown' %}
{% set os_insecure = os_insecure or 'False' %}
{% set os_verify = '' %}
{% if os_insecure == 'True' %}
@@ -11,7 +12,7 @@ functest:
  image_name: opnfv/functest-smoke
  docker_tag: fraser
  opts: '-id --privileged=true'
-  envs: '{{os_verify}} -e INSTALLER_TYPE=unknown -e DEPLOY_SCENARIO=unknown -e NODE_NAME=unknown
+  envs: '{{os_verify}} -e INSTALLER_TYPE=unknown -e DEPLOY_SCENARIO={{deploy_scenario}} -e NODE_NAME=unknown
    -e TEST_DB_URL=file:///home/opnfv/functest/results/functest_results.txt'
  config:
    dir: '/home/opnfv/userconfig'
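
Putting the pieces together: test_runner.py only adds deploy_scenario to the template context when DEPLOY_SCENARIO is set, and the new {% set %} line above falls back to 'unknown' otherwise. A minimal sketch, assuming the config file is rendered with Jinja2 and using a trimmed-down stand-in for the template:

    # Minimal sketch, assuming the config is rendered with Jinja2; TEMPLATE is a
    # trimmed-down stand-in for etc/conf/functest_config.yml.
    import os

    from jinja2 import Template

    TEMPLATE = (
        "{% set deploy_scenario = deploy_scenario or 'unknown' %}"
        "envs: '-e INSTALLER_TYPE=unknown -e DEPLOY_SCENARIO={{ deploy_scenario }}'"
    )

    config_item = {}
    if 'DEPLOY_SCENARIO' in os.environ:   # mirrors the check added in test_runner.py
        config_item['deploy_scenario'] = os.environ['DEPLOY_SCENARIO']

    print(Template(TEMPLATE).render(**config_item))
    # With DEPLOY_SCENARIO=os-nosdn-ovs-ha the rendered envs contain
    # '-e DEPLOY_SCENARIO=os-nosdn-ovs-ha'; otherwise the 'unknown' default is kept.
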