aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--INFO.yaml85
-rw-r--r--Pipfile1
-rw-r--r--Pipfile.lock521
-rw-r--r--contrib/nettest/Dockerfile47
-rw-r--r--contrib/nettest/README.md (renamed from legacy/api/__init__.py)0
-rw-r--r--contrib/nettest/nettest/heat_2stcv.yaml170
-rw-r--r--contrib/nettest/nettest/nettest.py157
-rw-r--r--contrib/nettest/nettest/requirements.txt9
-rw-r--r--contrib/nettest/nettest/rest_server.py351
-rw-r--r--contrib/nettest/nettest/rfc2544test.py596
-rw-r--r--contrib/nettest/nettest/start.sh (renamed from legacy/tests/helper/suite.yaml)9
-rw-r--r--contrib/nettest/nettest/stcv_stack.py165
-rw-r--r--contrib/nettest_client/nettest_client.py191
-rw-r--r--docker/Dockerfile2
-rw-r--r--docker/Dockerfile.local2
-rw-r--r--docs/conf.py9
-rw-r--r--docs/index.rst10
-rwxr-xr-xdocs/proposal/NetworPerformanceIndicator.ipynb235
-rw-r--r--docs/proposal/network-performance-indicator.rst78
-rw-r--r--docs/release/release-notes/fraser.rst97
-rw-r--r--docs/release/release-notes/index.rst1
-rw-r--r--docs/testing/developer/devguide/index.rst1
-rw-r--r--docs/testing/developer/devguide/web.rst100
-rw-r--r--docs/testing/user/configguide/configuration.rst5
-rw-r--r--docs/testing/user/configguide/index.rst1
-rw-r--r--docs/testing/user/configguide/web.rst74
-rw-r--r--docs/testing/user/userguide/compute.rst35
-rw-r--r--docs/testing/user/userguide/index.rst3
-rw-r--r--docs/testing/user/userguide/network.rst114
-rw-r--r--docs/testing/user/userguide/network_testcase_description.rst90
-rw-r--r--docs/testing/user/userguide/storage.rst19
-rw-r--r--docs/testing/user/userguide/web.rst70
-rw-r--r--legacy/api/cmd/__init__.py0
-rw-r--r--legacy/api/cmd/server.py31
-rw-r--r--legacy/api/handler/__init__.py0
-rw-r--r--legacy/api/handler/db.py98
-rw-r--r--legacy/api/handler/job_handler.py174
-rw-r--r--legacy/api/handler/result_handler.py58
-rw-r--r--legacy/api/model/__init__.py0
-rw-r--r--legacy/api/model/job_model.py33
-rw-r--r--legacy/api/router/__init__.py0
-rw-r--r--legacy/api/router/mapper.py15
-rw-r--r--legacy/api/router/mapper.py.orig19
-rw-r--r--legacy/cli/helper.py14
-rw-r--r--legacy/docker/README.md11
-rw-r--r--legacy/docker/cleanup_qtip_image.sh24
-rw-r--r--legacy/docker/prepare_qtip_image.sh49
-rwxr-xr-xlegacy/docker/push_db.sh3
-rwxr-xr-xlegacy/docker/run_qtip.sh39
-rw-r--r--legacy/docs/_testcase_description.rst46
-rw-r--r--legacy/tests/__init__.py0
-rw-r--r--legacy/tests/api/__init__.py0
-rw-r--r--legacy/tests/api/test_server.py131
-rw-r--r--legacy/tests/create_zones_test.py118
-rw-r--r--legacy/tests/functional/__init__.py0
-rw-r--r--legacy/tests/functional/yaml_schema_test.py24
-rw-r--r--legacy/tests/helper/perftest.yaml13
-rw-r--r--legacy/tests/spawn_vm_test.py64
-rw-r--r--legacy/utils/__init__.py0
-rw-r--r--legacy/utils/create_zones.py86
-rw-r--r--legacy/utils/dashboard/__init__.py0
-rw-r--r--legacy/utils/dashboard/pushtoDB.py82
-rw-r--r--legacy/utils/report/__init__.py0
-rw-r--r--legacy/utils/report/get_indices.py16
-rw-r--r--legacy/utils/report/get_results.py58
-rw-r--r--legacy/utils/report/qtip_graph.py38
-rw-r--r--legacy/utils/report/qtip_report.py117
-rw-r--r--legacy/utils/spawn_vm.py206
-rw-r--r--legacy/utils/transform/__init__.py0
-rw-r--r--legacy/utils/transform/fio_transform.py37
-rw-r--r--legacy/utils/transform/iperf_transform.py35
-rw-r--r--opt/infra/roles/user/files/Taseer - taseer94@gmail.com/taseer.authorized_keys1
-rw-r--r--opt/infra/roles/user/files/Taseer - taseer94@gmail.com/taseer_gpg_key51
-rw-r--r--opt/infra/roles/user/files/taseer.authorized_keys1
-rw-r--r--qtip/ansible_library/modules/nettest.py83
-rw-r--r--qtip/cli/commands/cmd_project.py14
-rw-r--r--qtip/scripts/quickstart.sh9
-rw-r--r--resources/ansible_roles/ceph-info/tasks/main.yml6
-rw-r--r--resources/ansible_roles/openstack/defaults/main.yml10
-rw-r--r--resources/ansible_roles/openstack/tasks/main.yml99
-rw-r--r--resources/ansible_roles/openstack/templates/heat_template.yml70
-rw-r--r--resources/ansible_roles/openstack/templates/hosts.j217
-rw-r--r--resources/ansible_roles/openstack/templates/ssh.cfg.j211
-rw-r--r--resources/ansible_roles/opnfv-testapi/tasks/main.yml2
-rw-r--r--resources/ansible_roles/opnfv-testapi/tasks/report.yml2
-rw-r--r--resources/ansible_roles/qtip-generator/defaults/main.yml4
-rw-r--r--resources/ansible_roles/qtip-generator/files/compute/group_vars/all.yml4
-rw-r--r--resources/ansible_roles/qtip-generator/files/compute/heat_template.yml (renamed from legacy/config/SampleHeat.yaml)53
-rw-r--r--resources/ansible_roles/qtip-generator/files/compute/host_vars/localhost.yml6
-rw-r--r--resources/ansible_roles/qtip-generator/files/compute/run.yml16
-rw-r--r--resources/ansible_roles/qtip-generator/files/compute/setup.yml15
-rw-r--r--resources/ansible_roles/qtip-generator/files/compute/teardown.yml4
-rw-r--r--resources/ansible_roles/qtip-generator/files/doctor/group_vars/all.yml9
-rw-r--r--resources/ansible_roles/qtip/tasks/generate-inventory.yml2
-rw-r--r--resources/ansible_roles/qtip/tasks/install-deps.yml2
-rw-r--r--resources/ansible_roles/qtip/tasks/main.yml2
-rw-r--r--resources/ansible_roles/qtip/tasks/setup-node.yml2
-rw-r--r--tests/ci/compute/docker-compose.yaml3
-rwxr-xr-xtests/ci/experimental.sh11
-rw-r--r--tests/ci/network/docker-compose.yaml (renamed from legacy/tests/helper/version.yaml)27
-rwxr-xr-xtests/ci/periodic.sh8
-rw-r--r--tests/ci/utils/start_services.sh7
102 files changed, 2982 insertions, 2456 deletions
diff --git a/INFO.yaml b/INFO.yaml
new file mode 100644
index 00000000..2722994e
--- /dev/null
+++ b/INFO.yaml
@@ -0,0 +1,85 @@
+---
+project: 'QTIP, Platform Performance Benchmarking'
+project_creation_date: ''
+project_category: 'Integration & Testing'
+lifecycle_state: ''
+project_lead: &opnfv_qtip_ptl
+ name: 'zhihui wu'
+ email: 'wu.zhihui1@zte.com.cn'
+ id: 'wu.zhihui'
+ company: 'zte.com.cn'
+ timezone: 'UTC+8'
+primary_contact: *opnfv_qtip_ptl
+issue_tracking:
+ type: 'jira'
+ url: 'https://jira.opnfv.org/projects/QTIP'
+ key: 'QTIP'
+mailing_list:
+ type: 'mailman2'
+ url: 'opnfv-tech-discuss@lists.opnfv.org'
+ tag: '[qtip]'
+realtime_discussion:
+ type: irc
+ server: 'freenode.net'
+ channel: '#opnfv-qtip'
+meetings:
+ - type: 'irc'
+ agenda: # eg: 'https://wiki.opnfv.org/display/'
+ url: # eg: 'https://global.gotomeeting.com/join/819733085'
+ server: 'freenode.net'
+ channel: '#opnfv-qtip'
+ repeats: 'weekly'
+ time: "07:30 UTC Every Monday"
+repositories:
+ - 'qtip'
+committers:
+ - <<: *opnfv_qtip_ptl
+ - name: 'Prabu Kuppuswamy'
+ email: 'prabu.kuppuswamy@spirent.com'
+ company: 'spirent.com'
+ id: 'Prabu.kuppuswamy'
+ - name: 'Trevor Cooper'
+ email: 'trevor.cooper@intel.com'
+ company: 'intel.com'
+ id: 'trev'
+ - name: 'Prakash Ramchandran'
+ email: 'prakash.ramchandran@huawei.com'
+ company: 'huawei.com'
+ id: 'rprakash'
+ - name: 'Morgan Richomme'
+ email: 'morgan.richomme@orange.com'
+ company: 'orange.com'
+ id: 'mrichomme'
+ - name: 'Yujun Zhang'
+ email: 'zhang.yujunz@zte.com.cn'
+ company: 'zte.com.cn'
+ id: 'yujunz'
+ - name: 'zhifeng jiang'
+ email: 'jiang.zhifeng@zte.com.cn'
+ company: 'zte.com.cn'
+ id: 'zhifeng.jiang'
+ - name: 'Taseer Ahmed'
+ email: 'taseer94@gmail.com'
+ company: 'gmail.com'
+ id: 'linux_geek'
+ - name: 'Akhil Batra'
+ email: 'akhil.batra@research.iiit.ac.in'
+ company: 'research.iiit.ac.in'
+ id: 'akhilbatra898'
+# yamllint disable rule:line-length
+tsc:
+ approval: 'http://meetbot.opnfv.org/meetings/opnfv-meeting/2015/opnfv-meeting.2015-01-27-14.59.html'
+ changes:
+ - type: 'approval'
+ link: 'https://lists.opnfv.org/pipermail/opnfv-tsc/2016-July/002745.html'
+ - type: 'approval'
+ link: 'https://lists.opnfv.org/pipermail/opnfv-tsc/2016-July/002755.html'
+ - type: 'approval'
+ link: 'https://lists.opnfv.org/pipermail/opnfv-tsc/2016-August/002848.html'
+ - type: 'approval'
+ link: 'https://lists.opnfv.org/pipermail/opnfv-tsc/2016-October/003104.html'
+ - type: 'approval'
+ link: 'https://lists.opnfv.org/pipermail/opnfv-tsc/2016-October/003118.html'
+ - type: 'approval'
+ link: 'https://lists.opnfv.org/pipermail/opnfv-tsc/2016-November/003169.html'
+# yamllint enable rule:line-length
diff --git a/Pipfile b/Pipfile
index 58fcaf9f..866502ea 100644
--- a/Pipfile
+++ b/Pipfile
@@ -28,6 +28,7 @@ pyyaml = "*"
humanfriendly = "*"
asq = "*"
requests = "*"
+shade = "*"
[requires]
diff --git a/Pipfile.lock b/Pipfile.lock
index c4dcf444..cc681b03 100644
--- a/Pipfile.lock
+++ b/Pipfile.lock
@@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
- "sha256": "1bd8bf77bc0201b85bfdc4ffe8364bc18ba107caca8a954765e3e161f57dffd7"
+ "sha256": "c943c45ab5594bfc47b9151a80f19b667c0b282e6abbad48d929e21b83539590"
},
"host-environment-markers": {
"implementation_name": "cpython",
@@ -9,12 +9,12 @@
"os_name": "posix",
"platform_machine": "x86_64",
"platform_python_implementation": "CPython",
- "platform_release": "17.3.0",
- "platform_system": "Darwin",
- "platform_version": "Darwin Kernel Version 17.3.0: Thu Nov 9 18:09:22 PST 2017; root:xnu-4570.31.3~1/RELEASE_X86_64",
- "python_full_version": "2.7.13",
+ "platform_release": "4.4.0-108-generic",
+ "platform_system": "Linux",
+ "platform_version": "#131-Ubuntu SMP Sun Jan 7 14:34:49 UTC 2018",
+ "python_full_version": "2.7.12",
"python_version": "2.7",
- "sys_platform": "darwin"
+ "sys_platform": "linux2"
},
"pipfile-spec": 6,
"requires": {
@@ -31,9 +31,16 @@
"default": {
"ansible": {
"hashes": [
- "sha256:315f1580b20bbc2c2f1104f8b5e548c6b4cac943b88711639c5e0d4dfc4d7658"
+ "sha256:714ede4b0f5455eb2dd2536d8cabb42ab439f52b5ac902308ad027b46f68e6dc"
],
- "version": "==2.4.2.0"
+ "version": "==2.5.0"
+ },
+ "appdirs": {
+ "hashes": [
+ "sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e",
+ "sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92"
+ ],
+ "version": "==1.4.3"
},
"asn1crypto": {
"hashes": [
@@ -44,10 +51,10 @@
},
"asq": {
"hashes": [
- "sha256:ca3495eac34b7cf2c4d10636777ed128b4e0872aa6353ceeb5edd45f1cbf32a8",
- "sha256:a7f1cac711fc2834a348aee0075c9a3f4ea06642257add71585d457e3f86e68d"
+ "sha256:6b5764aeb552679cd75e96bd7080a848a18b04f784328b5f6e8476e5176c0cbe",
+ "sha256:aeed1e56369372a6666f69facb890c10a8e8a6c5b8898119ca5330b4bb879f7b"
],
- "version": "==1.2.1"
+ "version": "==1.3"
},
"bcrypt": {
"hashes": [
@@ -85,46 +92,43 @@
},
"certifi": {
"hashes": [
- "sha256:244be0d93b71e93fc0a0a479862051414d0e00e16435707e5bf5000f92e04694",
- "sha256:5ec74291ca1136b40f0379e1128ff80e866597e4e2c1e755739a913bbc3613c0"
+ "sha256:9fa520c1bacfb634fa7af20a76bcbd3d5fb390481724c597da32c719a7dca4b0",
+ "sha256:13e698f54293db9f89122b0581843a782ad0934a4fe0172d2a980ba77fc61bb7"
],
- "version": "==2017.11.5"
+ "version": "==2018.4.16"
},
"cffi": {
"hashes": [
- "sha256:2c707e97ad7b0417713543be7cb87315c015bb5dd97903480168d60ebe3e313e",
- "sha256:6d8c7e20eb90be9e1ccce8e8dd4ee5163b37289fc5708f9eeafc00adc07ba891",
- "sha256:627298d788edcb317b6a01347428501e773f5e8f2988407231c07e50e3f6c1cf",
- "sha256:bdd28cf8302eeca1b4c70ec727de384d4f6ea640b0e698934fd9b4c3bc88eeb1",
- "sha256:248198cb714fe09f5c60b6acba3675d52199c6142641536796cdf89dd45e5590",
- "sha256:c962cb68987cbfb70b034f153bfa467c615c0b55305d39b3237c4bdbdbc8b0f4",
- "sha256:401ba2f6c1f1672b6c38670e1c00fa5f84f841edd30c32742dab5c7151cd89bf",
- "sha256:1c103c0ee8235c47c4892288b2287014f33e7cb24b9d4a665be3aa744377dcb9",
- "sha256:d7461ef8671ae40f991384bbc4a6b1b79f4e7175d8052584be44041996f46517",
- "sha256:3ac9be5763238da1d6fa467c43e3f86472626837a478588c94165df09e62e120",
- "sha256:d54a7c37f954fdbb971873c935a77ddc33690cec9b7ac254d9f948c43c32fa83",
- "sha256:4d9bf1b23896bcd4d042e823f50ad36fb6d8e1e645a3dfb2fe2f070851489b92",
- "sha256:61cf049b1c649d8eec360a1a1d09a61c37b9b2d542364506e8feb4afd232363d",
- "sha256:ce3da410ae2ab8709565cc3b18fbe9a0eb96ea7b2189416098c48d839ecced84",
- "sha256:e72d8b5056f967ecb57e166537408bc913f2f97dc568027fb6342fcfa9f81d64",
- "sha256:11a8ba88ef6ae89110ef029dae7f1a293365e50bdd0c6ca973beed80cec95ae4",
- "sha256:974f69112721ba2e8a6acd0f6b68a5e11432710a3eca4e4e6f4d7aaf99214ed1",
- "sha256:062c66dabc3faf8e0db1ca09a6b8e308846e5d35f43bed1a68c492b0d96ac171",
- "sha256:03a9b9efc280dbe6be149a7fa689f59a822df009eee633fdaf55a6f38795861f",
- "sha256:8b3d6dc9981cedfb1ddcd4600ec0c7f5ac2c6ad2dc482011c7eecb4ae9c819e0",
- "sha256:09b7d195d163b515ef7c2b2e26a689c9816c83d5319cceac6c36ffdab97ab048",
- "sha256:943b94667749d1cfcd964e215a20b9c891deae913202ee8eacaf2b94164b155f",
- "sha256:89829f5cfbcb5ad568a3d61bd23a8e33ad69b488d8f6a385e0097a4c20742a9b",
- "sha256:ba78da7c940b041cdbb5aaff5afe11e8a8f25fe19564c12eefea5c5bd86930ca",
- "sha256:a79b15b9bb4726672865cf5b0f63dee4835974a2b11b49652d70d49003f5d1f4",
- "sha256:f6799913eb510b682de971ddef062bbb4a200f190e55cae81c413bc1fd4733c1",
- "sha256:e7f5ad6b12f21b77d3a37d5c67260e464f4e9068eb0c0622f61d0e30390b31b6",
- "sha256:5f96c92d5f5713ccb71e76dfa14cf819c59ecb9778e94bcb541e13e6d96d1ce5",
- "sha256:5357b465e3d6b98972b7810f9969c913d365e75b09b7ba813f5f0577fe1ac9f4",
- "sha256:75e1de9ba7c155d89bcf67d149b1c741df553c8158536e8d27e63167403159af",
- "sha256:ab87dd91c0c4073758d07334c1e5f712ce8fe48f007b86f8238773963ee700a6"
- ],
- "version": "==1.11.2"
+ "sha256:1b0493c091a1898f1136e3f4f991a784437fac3673780ff9de3bcf46c80b6b50",
+ "sha256:87f37fe5130574ff76c17cab61e7d2538a16f843bb7bca8ebbc4b12de3078596",
+ "sha256:1553d1e99f035ace1c0544050622b7bc963374a00c467edafac50ad7bd276aef",
+ "sha256:151b7eefd035c56b2b2e1eb9963c90c6302dc15fbd8c1c0a83a163ff2c7d7743",
+ "sha256:edabd457cd23a02965166026fd9bfd196f4324fe6032e866d0f3bd0301cd486f",
+ "sha256:ba5e697569f84b13640c9e193170e89c13c6244c24400fc57e88724ef610cd31",
+ "sha256:79f9b6f7c46ae1f8ded75f68cf8ad50e5729ed4d590c74840471fc2823457d04",
+ "sha256:b0f7d4a3df8f06cf49f9f121bead236e328074de6449866515cea4907bbc63d6",
+ "sha256:4c91af6e967c2015729d3e69c2e51d92f9898c330d6a851bf8f121236f3defd3",
+ "sha256:7a33145e04d44ce95bcd71e522b478d282ad0eafaf34fe1ec5bbd73e662f22b6",
+ "sha256:95d5251e4b5ca00061f9d9f3d6fe537247e145a8524ae9fd30a2f8fbce993b5b",
+ "sha256:b75110fb114fa366b29a027d0c9be3709579602ae111ff61674d28c93606acca",
+ "sha256:ae5e35a2c189d397b91034642cb0eab0e346f776ec2eb44a49a459e6615d6e2e",
+ "sha256:fdf1c1dc5bafc32bc5d08b054f94d659422b05aba244d6be4ddc1c72d9aa70fb",
+ "sha256:9d1d3e63a4afdc29bd76ce6aa9d58c771cd1599fbba8cf5057e7860b203710dd",
+ "sha256:be2a9b390f77fd7676d80bc3cdc4f8edb940d8c198ed2d8c0be1319018c778e1",
+ "sha256:ed01918d545a38998bfa5902c7c00e0fee90e957ce036a4000a88e3fe2264917",
+ "sha256:857959354ae3a6fa3da6651b966d13b0a8bed6bbc87a0de7b38a549db1d2a359",
+ "sha256:2ba8a45822b7aee805ab49abfe7eec16b90587f7f26df20c71dd89e45a97076f",
+ "sha256:a36c5c154f9d42ec176e6e620cb0dd275744aa1d804786a71ac37dc3661a5e95",
+ "sha256:e55e22ac0a30023426564b1059b035973ec82186ddddbac867078435801c7801",
+ "sha256:3eb6434197633b7748cea30bf0ba9f66727cdce45117a712b29a443943733257",
+ "sha256:ecbb7b01409e9b782df5ded849c178a0aa7c906cf8c5a67368047daab282b184",
+ "sha256:770f3782b31f50b68627e22f91cb182c48c47c02eb405fd689472aa7b7aa16dc",
+ "sha256:d5d8555d9bfc3f02385c1c37e9f998e2011f0db4f90e250e5bc0c0a85a813085",
+ "sha256:3c85641778460581c42924384f5e68076d724ceac0f267d66c757f7535069c93",
+ "sha256:e90f17980e6ab0f3c2f3730e56d1fe9bcba1891eeea58966e89d352492cc74f4"
+ ],
+ "markers": "platform_python_implementation != 'PyPy'",
+ "version": "==1.11.5"
},
"chardet": {
"hashes": [
@@ -156,31 +160,45 @@
},
"cryptography": {
"hashes": [
- "sha256:69285f5615507b6625f89ea1048addd1d9218585fb886eb90bdebb1d2b2d26f5",
- "sha256:6cb1224da391fa90f1be524eafb375b62baf8d3df9690b32e8cc475ccfccee5e",
- "sha256:4f385ee7d39ee1ed74f1d6b1da03d0734ea82855a7b28a9e6e88c4091bc58664",
- "sha256:a5f2c681fd040ed648513939a1dd2242af19bd5e9e79e53b6dcfa33bdae61217",
- "sha256:fc2208d95d9ecc8032f5e38330d5ace2e3b0b998e42b08c30c35b2ab3a4a3bc8",
- "sha256:0d39a93cf25edeae1f796bbc5960e587f34513a852564f6345ea4491a86c5997",
- "sha256:41f94194ae78f83fd94ca94fb8ad65f92210a76a2421169ffa5c33c3ec7605f4",
- "sha256:7a2409f1564c84bcf2563d379c9b6148c5bc6b0ae46e109f6a7b4bebadf551df",
- "sha256:55555d784cfdf9033e81f044c0df04babed2aa141213765d960d233b0139e353",
- "sha256:9a47a80f65f4feaaf8415a40c339806c7d7d867152ddccc6ca87f707c8b7b565",
- "sha256:6fb22f63e17813f3d1d8e30dd1e249e2c34233ba1d3de977fd31cb5db764c7d0",
- "sha256:ee245f185fae723133511e2450be08a66c2eebb53ad27c0c19b228029f4748a5",
- "sha256:9a2945efcff84830c8e237ab037d0269380d75d400a89cc9e5628e52647e21be",
- "sha256:2cfcee8829c5dec55597826d52c26bc26e7ce39adb4771584459d0636b0b7108",
- "sha256:33b564196dcd563e309a0b07444e31611368afe3a3822160c046f5e4c3b5cdd7",
- "sha256:18d0b0fc21f39b35ea469a82584f55eeecec1f65a92d85af712c425bdef627b3",
- "sha256:d18df9cf3f3212df28d445ea82ce702c4d7a35817ef7a38ee38879ffa8f7e857",
- "sha256:b984523d28737e373c7c35c8b6db6001537609d47534892de189bebebaa42a47",
- "sha256:27a208b9600166976182351174948e128818e7fc95cbdba18143f3106a211546",
- "sha256:28e4e9a97713aa47b5ef9c5003def2eb58aec89781ef3ef82b1c2916a8b0639b",
- "sha256:a3c180d12ffb1d8ee5b33a514a5bcb2a9cc06cc89aa74038015591170c82f55d",
- "sha256:8487524a1212223ca6dc7e2c8913024618f7ff29855c98869088e3818d5f6733",
- "sha256:e4d967371c5b6b2e67855066471d844c5d52d210c36c28d49a8507b96e2c5291"
- ],
- "version": "==2.1.4"
+ "sha256:abd070b5849ed64e6d349199bef955ee0ad99aefbad792f0c587f8effa681a5e",
+ "sha256:3f3b65d5a16e6b52fba63dc860b62ca9832f51f1a2ae5083c78b6840275f12dd",
+ "sha256:77d0ad229d47a6e0272d00f6bf8ac06ce14715a9fd02c9a97f5a2869aab3ccb2",
+ "sha256:808fe471b1a6b777f026f7dc7bd9a4959da4bfab64972f2bbe91e22527c1c037",
+ "sha256:6fef51ec447fe9f8351894024e94736862900d3a9aa2961528e602eb65c92bdb",
+ "sha256:60bda7f12ecb828358be53095fc9c6edda7de8f1ef571f96c00b2363643fa3cd",
+ "sha256:5cb990056b7cadcca26813311187ad751ea644712022a3976443691168781b6f",
+ "sha256:c332118647f084c983c6a3e1dba0f3bcb051f69d12baccac68db8d62d177eb8a",
+ "sha256:f57008eaff597c69cf692c3518f6d4800f0309253bb138b526a37fe9ef0c7471",
+ "sha256:551a3abfe0c8c6833df4192a63371aa2ff43afd8f570ed345d31f251d78e7e04",
+ "sha256:db6013746f73bf8edd9c3d1d3f94db635b9422f503db3fc5ef105233d4c011ab",
+ "sha256:d6f46e862ee36df81e6342c2177ba84e70f722d9dc9c6c394f9f1f434c4a5563",
+ "sha256:9b62fb4d18529c84b961efd9187fecbb48e89aa1a0f9f4161c61b7fc42a101bd",
+ "sha256:9e5bed45ec6b4f828866ac6a6bedf08388ffcfa68abe9e94b34bb40977aba531",
+ "sha256:f6c821ac253c19f2ad4c8691633ae1d1a17f120d5b01ea1d256d7b602bc59887",
+ "sha256:ba6a774749b6e510cffc2fb98535f717e0e5fd91c7c99a61d223293df79ab351",
+ "sha256:9fc295bf69130a342e7a19a39d7bbeb15c0bcaabc7382ec33ef3b2b7d18d2f63"
+ ],
+ "version": "==2.2.2"
+ },
+ "decorator": {
+ "hashes": [
+ "sha256:2c51dff8ef3c447388fe5e4453d24a2bf128d3a4c32af3fabef1f01c6851ab82",
+ "sha256:c39efa13fbdeb4506c476c9b3babf6a718da943dab7811c206005a4a956c080c"
+ ],
+ "version": "==4.3.0"
+ },
+ "deprecation": {
+ "hashes": [
+ "sha256:0ea3d6f95a5b08fe0e14ba728d4baafd79bfd5e2aaa40b779230cf4af62d1128",
+ "sha256:e8d0dc5a17d7d551730e5f23ff3a53fc9e438364b9efb47d41c3e9b05522eabe"
+ ],
+ "version": "==2.0.2"
+ },
+ "dogpile.cache": {
+ "hashes": [
+ "sha256:631197e78b4471bb0e93d0a86264c45736bc9ae43b4205d581dcc34fbe9b5f31"
+ ],
+ "version": "==0.6.5"
},
"enum34": {
"hashes": [
@@ -207,12 +225,20 @@
"markers": "python_version == '2.7'",
"version": "==3.2.3.post2"
},
+ "futures": {
+ "hashes": [
+ "sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1",
+ "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265"
+ ],
+ "markers": "python_version == '2.7' or python_version == '2.6'",
+ "version": "==3.2.0"
+ },
"humanfriendly": {
"hashes": [
- "sha256:abe35f7096e2d27ef6059355a33386b089eecbcd5157201be05dc99e50fb2c28",
- "sha256:928eff707f0682029f1968cefe108fd2870ead5a2f8d80875231a27ba2b20410"
+ "sha256:32f4a21fb3db1f54a3c9f48e2d7e3cc2d9230a066f0e27909de6b191dba3c44b",
+ "sha256:30eeab45eb22eb95e1f7d1e2657d381fa9b2b3f28613b942e5935dc90bb5a6c1"
],
- "version": "==4.6"
+ "version": "==4.10"
},
"idna": {
"hashes": [
@@ -229,10 +255,19 @@
},
"ipaddress": {
"hashes": [
- "sha256:200d8686011d470b5e4de207d803445deee427455cd0cb7c982b68cf82524f81"
+ "sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794",
+ "sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c"
],
"markers": "python_version < '3'",
- "version": "==1.0.19"
+ "version": "==1.0.22"
+ },
+ "iso8601": {
+ "hashes": [
+ "sha256:210e0134677cc0d02f6028087fee1df1e1d76d372ee1db0bf30bf66c5c1c89a3",
+ "sha256:bbbae5fb4a7abfe71d4688fd64bff70b91bbd74ef6a99d964bab18f7fdf286dd",
+ "sha256:49c4b20e1f38aa5cf109ddcd39647ac419f928512c869dc01d5c7098eddede82"
+ ],
+ "version": "==0.1.12"
},
"itsdangerous": {
"hashes": [
@@ -247,6 +282,27 @@
],
"version": "==2.10"
},
+ "jmespath": {
+ "hashes": [
+ "sha256:f11b4461f425740a1d908e9a3f7365c3d2e569f6ca68a2ff8bc5bcd9676edd63",
+ "sha256:6a81d4c9aa62caf061cb517b4d9ad1dd300374cd4706997aff9cd6aedd61fc64"
+ ],
+ "version": "==0.9.3"
+ },
+ "jsonpatch": {
+ "hashes": [
+ "sha256:8bf92fa26bc42c346c03bd4517722a8e4f429225dbe775ac774b2c70d95dbd33",
+ "sha256:49f29cab70e9068db3b1dc6b656cbe2ee4edf7dfe9bf5a0055f17a4b6804a4b9"
+ ],
+ "version": "==1.23"
+ },
+ "jsonpointer": {
+ "hashes": [
+ "sha256:ff379fa021d1b81ab539f5ec467c7745beb1a5671463f9dcc2b2d458bd361c1e",
+ "sha256:c192ba86648e05fdae4f08a17ec25180a9aef5008d973407b581798a83975362"
+ ],
+ "version": "==2.0"
+ },
"jsonschema": {
"hashes": [
"sha256:000e68abd33c972a5248544925a0cae7d1125f9bf6c58280d37546b946769a08",
@@ -254,6 +310,13 @@
],
"version": "==2.6.0"
},
+ "keystoneauth1": {
+ "hashes": [
+ "sha256:5a242ded38cf3f6fb0290ecb24e0db9290507667e520dfa41a129ad769d327ec",
+ "sha256:9f1565eb261677e6d726c1323ce8ed8da3e1b0f70e9cee14f094ebd03fbeb328"
+ ],
+ "version": "==3.4.0"
+ },
"markupsafe": {
"hashes": [
"sha256:a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665"
@@ -268,40 +331,98 @@
"markers": "python_version == '2.6' or python_version == '2.7' or python_version == '3.0' or python_version == '3.1' or python_version == '3.2'",
"version": "==1.4"
},
+ "munch": {
+ "hashes": [
+ "sha256:fc64a4aeb47e34ff1125f69af3cf3f59e51554b96dbaa2b720c6fb6c9e47a551"
+ ],
+ "version": "==2.3.1"
+ },
+ "netifaces": {
+ "hashes": [
+ "sha256:137a77c2e0a68a3e409a532fe73340c3df6a59ffe8eb565ec8b1f0a131402d09",
+ "sha256:ef223d45b73cc96c25a6295f471106b3195d2367b7f153e43490673d89e9240e",
+ "sha256:4ddf0f329d83516bba096b7eb1ad2ee354a98e2483f89ad3a590e653ece963c8",
+ "sha256:61fd2706de21aac11475c921ba0fd98af19d5280702a11c5c8e2e910765dc378",
+ "sha256:48275e627ce9220acfed2e1ca1e4cf01f58940412f2aebac7995750b50232701",
+ "sha256:f8b352247ae4b6731192d33fd35b27f247e3e4618a2d5cf65de41d46bbb53223",
+ "sha256:a0c7c19e1fb62ac6018582f72d15ac056e75c3d2ab222fb25369e7766ed67453",
+ "sha256:8c3a2c7d573511507f0f29c9d1a28ce1b2a958b8d0d7a1b1966c6fd0fa5d2953",
+ "sha256:5a0114933657eebe4985fdf7b0099a27ec75501901000770addca6ad7bd23008",
+ "sha256:3b19bf224b3e46c62f5f5e65a9fbd2e9731cda09289c76aca110a3dbdf0c3332",
+ "sha256:2245677ee3aa1244bbd0fbf3d6e0158d38b612eba406e7be9639e7efe0371bfa",
+ "sha256:7925add91982cb689963cc28fb8718c006f7713b527d262e32b29b4491cec295",
+ "sha256:0c4da523f36d36f1ef92ee183f2512f3ceb9a9d2a45f7d19cda5a42c6689ebe0",
+ "sha256:337f0fae970ab7a9acf5690516f7c7795f41934350cc1e8ad33c5c0331904ac0",
+ "sha256:563a18f942a9c9f64eed27fe2a1b3dfb5866a440cdaf4d833213798699cc1789",
+ "sha256:88d8fa4fcccaca07519141e95b42f52fb650bed2e8f5b29c44e22968b92b7097",
+ "sha256:60f25e5b6d2a682a394c87a6d2bf4d38c8dd8999ee32b955af88ceccaef7fe93",
+ "sha256:c455ca29737bf9b298408fd78a48f8fc6ddaa1f50a6eb92d814a8874412c631b",
+ "sha256:369eb616a6c844987bd4df486bb5f591aa0d5552378c6831f56ed81cfc228cab"
+ ],
+ "version": "==0.10.6"
+ },
"numpy": {
"hashes": [
- "sha256:428cd3c0b197cf857671353d8c85833193921af9fafcc169a1f29c7185833d50",
- "sha256:a476e437d73e5754aa66e1e75840d0163119c3911b7361f4cd06985212a3c3fb",
- "sha256:289ff717138cd9aa133adcbd3c3e284458b9c8230db4d42b39083a3407370317",
- "sha256:c5eccb4bf96dbb2436c61bb3c2658139e779679b6ae0d04c5e268e6608b58053",
- "sha256:75471acf298d455b035226cc609a92aee42c4bb6aa71def85f77fa2c2b646b61",
- "sha256:5c54fb98ecf42da59ed93736d1c071842482b18657eb16ba6e466bd873e1b923",
- "sha256:9ddf384ac3aacb72e122a8207775cc29727cbd9c531ee1a4b95754f24f42f7f3",
- "sha256:781d3197da49c421a07f250750de70a52c42af08ca02a2f7bdb571c0625ae7eb",
- "sha256:93b26d6c06a22e64d56aaca32aaaffd27a4143db0ac2f21a048f0b571f2bfc55",
- "sha256:b2547f57d05ba59df4289493254f29f4c9082d255f1f97b7e286f40f453e33a1",
- "sha256:eef6af1c752eef538a96018ef9bdf8e37bbf28aab50a1436501a4aa47a6467df",
- "sha256:ff8a4b2c3ac831964f529a2da506c28d002562b230261ae5c16885f5f53d2e75",
- "sha256:194074058c22a4066e1b6a4ea432486ee468d24ab16f13630c1030409e6b8666",
- "sha256:4e13f1a848fde960dea33702770265837c72b796a6a3eaac7528cfe75ddefadd",
- "sha256:91101216d72749df63968d86611b549438fb18af2c63849c01f9a897516133c7",
- "sha256:97507349abb7d1f6b76b877258defe8720833881dc7e7fd052bac90c88587387",
- "sha256:1479b46b6040b5c689831496354c8859c456b152d37315673a0c18720b41223b",
- "sha256:98b1ac79c160e36093d7914244e40ee1e7164223e795aa2c71dcce367554e646",
- "sha256:24bbec9a199f938eab75de8390f410969bc33c218e5430fa1ae9401b00865255",
- "sha256:7880f412543e96548374a4bb1d75e4cdb8cad80f3a101ed0f8d0e0428f719c1c",
- "sha256:6112f152b76a28c450bbf665da11757078a724a90330112f5b7ea2d6b6cefd67",
- "sha256:7c5276763646480143d5f3a6c2acb2885460c765051a1baf4d5070f63d05010f",
- "sha256:3de643935b212307b420248018323a44ec51987a336d1d747c1322afc3c099fb"
- ],
- "version": "==1.14.0"
+ "sha256:719d914f564f35cce4dc103808f8297c807c9f0297ac183ed81ae8b5650e698e",
+ "sha256:0f6a5ed0cd7ab1da11f5c07a8ecada73fc55a70ef7bb6311a4109891341d7277",
+ "sha256:d0928076d9bd8a98de44e79b1abe50c1456e7abbb40af7ef58092086f1a6c729",
+ "sha256:d858423f5ed444d494b15c4cc90a206e1b8c31354c781ac7584da0d21c09c1c3",
+ "sha256:20cac3123d791e4bf8482a580d98d6b5969ba348b9d5364df791ba3a666b660d",
+ "sha256:528ce59ded2008f9e8543e0146acb3a98a9890da00adf8904b1e18c82099418b",
+ "sha256:56e392b7c738bd70e6f46cf48c8194d3d1dd4c5a59fae4b30c58bb6ef86e5233",
+ "sha256:99051e03b445117b26028623f1a487112ddf61a09a27e2d25e6bc07d37d94f25",
+ "sha256:768e777cc1ffdbf97c507f65975c8686ebafe0f3dc8925d02ac117acc4669ce9",
+ "sha256:675e0f23967ce71067d12b6944add505d5f0a251f819cfb44bdf8ee7072c090d",
+ "sha256:a958bf9d4834c72dee4f91a0476e7837b8a2966dc6fcfc42c421405f98d0da51",
+ "sha256:bb370120de6d26004358611441e07acda26840e41dfedc259d7f8cc613f96495",
+ "sha256:f2b1378b63bdb581d5d7af2ec0373c8d40d651941d283a2afd7fc71184b3f570",
+ "sha256:a1413d06abfa942ca0553bf3bccaff5fdb36d55b84f2248e36228db871147dab",
+ "sha256:7f76d406c6b998d6410198dcb82688dcdaec7d846aa87e263ccf52efdcfeba30",
+ "sha256:a7157c9ac6bddd2908c35ef099e4b643bc0e0ebb4d653deb54891d29258dd329",
+ "sha256:0fd65cbbfdbf76bbf80c445d923b3accefea0fe2c2082049e0ce947c81fe1d3f",
+ "sha256:8c18ee4dddd5c6a811930c0a7c7947bf16387da3b394725f6063f1366311187d",
+ "sha256:0739146eaf4985962f07c62f7133aca89f3a600faac891ce6c7f3a1e2afe5272",
+ "sha256:07e21f14490324cc1160db101e9b6c1233c33985af4cb1d301dd02650fea1d7f",
+ "sha256:e6120d63b50e2248219f53302af7ec6fa2a42ed1f37e9cda2c76dbaca65036a7",
+ "sha256:6be6b0ca705321c178c9858e5ad5611af664bbdfae1df1541f938a840a103888",
+ "sha256:facc6f925c3099ac01a1f03758100772560a0b020fb9d70f210404be08006bcb"
+ ],
+ "version": "==1.14.2"
+ },
+ "openstacksdk": {
+ "hashes": [
+ "sha256:9819718df593ba825e9e6ac3a65f9bd600ccf37ef39bd8bb37277dcfc3adb201",
+ "sha256:720590c09b4773adbf72f5d3385fb173fbb68205a23da06543172e9b8de5c86e"
+ ],
+ "version": "==0.12.0"
+ },
+ "os-client-config": {
+ "hashes": [
+ "sha256:f602f18ba58e4fe14ff607bebee00d20d34c517bca1289fd0c63f9e777f1ce43",
+ "sha256:e98bdde50e30396d47d237cfb23e209e8c0a6f834ada190a2dcfe5305bd42af0"
+ ],
+ "version": "==1.29.0"
+ },
+ "os-service-types": {
+ "hashes": [
+ "sha256:4dd42c728b7f33e80a44996ace3c044b2544b58c226d7552f5ccc19eb01668b6",
+ "sha256:b08fb4ec1249d313afea2728fa4db916b1907806364126fe46de482671203111"
+ ],
+ "version": "==1.2.0"
+ },
+ "packaging": {
+ "hashes": [
+ "sha256:e9215d2d2535d3ae866c3d6efc77d5b24a0192cce0ff20e42896cc0664f889c0",
+ "sha256:f019b770dd64e585a99714f1fd5e01c7a8f11b45635aa953fd41c689a657375b"
+ ],
+ "version": "==17.1"
},
"paramiko": {
"hashes": [
- "sha256:8851e728e8b7590989e68e3936c48ee3ca4dad91d29e3d7ff0305b6c5fc582db",
- "sha256:486f637f0a33a4792e0e567be37426c287efaa8c4c4a45e3216f9ce7fd70b1fc"
+ "sha256:24fb31c947de85fbdeca09e222d41206781581fb0bdf118d2ef18f6e414cd388",
+ "sha256:33e36775a6c71790ba7692a73f948b329cf9295a72b0102144b031114bd2a4f3"
],
- "version": "==2.4.0"
+ "version": "==2.4.1"
},
"pathlib": {
"hashes": [
@@ -311,10 +432,10 @@
},
"pbr": {
"hashes": [
- "sha256:60c25b7dfd054ef9bb0ae327af949dd4676aa09ac3a9471cdc871d8a9213f9ac",
- "sha256:05f61c71aaefc02d8e37c0a3eeb9815ff526ea28b3b76324769e6158d7f95be1"
+ "sha256:4e8a0ed6a8705a26768f4c3da26026013b157821fe5f95881599556ea9d91c19",
+ "sha256:dae4aaa78eafcad10ce2581fc34d694faa616727837fd8e55c1a00951ad6744f"
],
- "version": "==3.1.1"
+ "version": "==4.0.2"
},
"prettytable": {
"hashes": [
@@ -375,6 +496,18 @@
],
"version": "==1.2.1"
},
+ "pyparsing": {
+ "hashes": [
+ "sha256:fee43f17a9c4087e7ed1605bd6df994c6173c1e977d7ade7b651292fab2bd010",
+ "sha256:0832bcf47acd283788593e7a0f542407bd9550a55a8a8435214a1960e04bcb04",
+ "sha256:9e8143a3e15c13713506886badd96ca4b579a87fbdf49e550dbfc057d6cb218e",
+ "sha256:281683241b25fe9b80ec9d66017485f6deff1af5cde372469134b56ca8447a07",
+ "sha256:b8b3117ed9bdf45e14dcc89345ce638ec7e0e29b2b579fa1ecf32ce45ebac8a5",
+ "sha256:8f1e18d3fd36c6795bb7e02a39fd05c611ffc2596c1e0d995d34d67630426c18",
+ "sha256:e4d45427c6e20a59bf4f88c639dcc03ce30d193112047f94012102f235853a58"
+ ],
+ "version": "==2.2.0"
+ },
"pyyaml": {
"hashes": [
"sha256:3262c96a1ca437e7e4763e2843746588a965426550f3797a79fca9c6199c431f",
@@ -401,6 +534,20 @@
],
"version": "==2.18.4"
},
+ "requestsexceptions": {
+ "hashes": [
+ "sha256:3083d872b6e07dc5c323563ef37671d992214ad9a32b0ca4a3d7f5500bf38ce3",
+ "sha256:b095cbc77618f066d459a02b137b020c37da9f46d9b057704019c9f77dba3065"
+ ],
+ "version": "==1.4.0"
+ },
+ "shade": {
+ "hashes": [
+ "sha256:241ff43e02bb8c2af341340b1a13aea3f171640691558d90d23160dddc694d68",
+ "sha256:d1702b9427eb4146e69a9c61c0f8d83457a9f8b4fa5f43aa775c34672e1907a6"
+ ],
+ "version": "==1.27.1"
+ },
"six": {
"hashes": [
"sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb",
@@ -408,6 +555,13 @@
],
"version": "==1.11.0"
},
+ "stevedore": {
+ "hashes": [
+ "sha256:e3d96b2c4e882ec0c1ff95eaebf7b575a779fd0ccb4c741b9832bed410d58b3d",
+ "sha256:f1c7518e7b160336040fee272174f1f7b29a46febb3632502a8f2055f973d60b"
+ ],
+ "version": "==1.28.0"
+ },
"swagger-spec-validator": {
"hashes": [
"sha256:aedacb6c6b475026a1b5ac218fb590382d08064e227da254eb961d17cfd2b7c1",
@@ -417,11 +571,11 @@
},
"typing": {
"hashes": [
- "sha256:349b1f9c109c84b53ac79ac1d822eaa68fc91d63b321bd9392df15098f746f53",
- "sha256:63a8255fe7c6269916baa440eb9b6a67139b0b97a01af632e7bd2842e1e02f15",
- "sha256:d514bd84b284dd3e844f0305ac07511f097e325171f6cc4a20878d11ad771849"
+ "sha256:b2c689d54e1144bbcfd191b0832980a21c2dbcf7b5ff7a66248a60c90e951eb8",
+ "sha256:3a887b021a77b292e151afb75323dea88a7bc1b3dfa92176cff8e44c8b68bddf",
+ "sha256:d400a9344254803a2368533e4533a4200d21eb7b6b729c173bc38201a74db3f2"
],
- "version": "==3.6.2"
+ "version": "==3.6.4"
},
"urllib3": {
"hashes": [
@@ -448,48 +602,44 @@
},
"coverage": {
"hashes": [
- "sha256:d1ee76f560c3c3e8faada866a07a32485445e16ed2206ac8378bd90dadffb9f0",
- "sha256:007eeef7e23f9473622f7d94a3e029a45d55a92a1f083f0f3512f5ab9a669b05",
- "sha256:17307429935f96c986a1b1674f78079528833410750321d22b5fb35d1883828e",
- "sha256:845fddf89dca1e94abe168760a38271abfc2e31863fbb4ada7f9a99337d7c3dc",
- "sha256:3f4d0b3403d3e110d2588c275540649b1841725f5a11a7162620224155d00ba2",
- "sha256:4c4f368ffe1c2e7602359c2c50233269f3abe1c48ca6b288dcd0fb1d1c679733",
- "sha256:f8c55dd0f56d3d618dfacf129e010cbe5d5f94b6951c1b2f13ab1a2f79c284da",
- "sha256:cdd92dd9471e624cd1d8c1a2703d25f114b59b736b0f1f659a98414e535ffb3d",
- "sha256:2ad357d12971e77360034c1596011a03f50c0f9e1ecd12e081342b8d1aee2236",
- "sha256:e9a0e1caed2a52f15c96507ab78a48f346c05681a49c5b003172f8073da6aa6b",
- "sha256:eea9135432428d3ca7ee9be86af27cb8e56243f73764a9b6c3e0bda1394916be",
- "sha256:700d7579995044dc724847560b78ac786f0ca292867447afda7727a6fbaa082e",
- "sha256:66f393e10dd866be267deb3feca39babba08ae13763e0fc7a1063cbe1f8e49f6",
- "sha256:5ff16548492e8a12e65ff3d55857ccd818584ed587a6c2898a9ebbe09a880674",
- "sha256:d00e29b78ff610d300b2c37049a41234d48ea4f2d2581759ebcf67caaf731c31",
- "sha256:87d942863fe74b1c3be83a045996addf1639218c2cb89c5da18c06c0fe3917ea",
- "sha256:358d635b1fc22a425444d52f26287ae5aea9e96e254ff3c59c407426f44574f4",
- "sha256:81912cfe276e0069dca99e1e4e6be7b06b5fc8342641c6b472cb2fed7de7ae18",
- "sha256:079248312838c4c8f3494934ab7382a42d42d5f365f0cf7516f938dbb3f53f3f",
- "sha256:b0059630ca5c6b297690a6bf57bf2fdac1395c24b7935fd73ee64190276b743b",
- "sha256:493082f104b5ca920e97a485913de254cbe351900deed72d4264571c73464cd0",
- "sha256:e3ba9b14607c23623cf38f90b23f5bed4a3be87cbfa96e2e9f4eabb975d1e98b",
- "sha256:82cbd3317320aa63c65555aa4894bf33a13fb3a77f079059eb5935eea415938d",
- "sha256:9721f1b7275d3112dc7ccf63f0553c769f09b5c25a26ee45872c7f5c09edf6c1",
- "sha256:bd4800e32b4c8d99c3a2c943f1ac430cbf80658d884123d19639bcde90dad44a",
- "sha256:f29841e865590af72c4b90d7b5b8e93fd560f5dea436c1d5ee8053788f9285de",
- "sha256:f3a5c6d054c531536a83521c00e5d4004f1e126e2e2556ce399bef4180fbe540",
- "sha256:dd707a21332615108b736ef0b8513d3edaf12d2a7d5fc26cd04a169a8ae9b526",
- "sha256:2e1a5c6adebb93c3b175103c2f855eda957283c10cf937d791d81bef8872d6ca",
- "sha256:f87f522bde5540d8a4b11df80058281ac38c44b13ce29ced1e294963dd51a8f8",
- "sha256:a7cfaebd8f24c2b537fa6a271229b051cdac9c1734bb6f939ccfc7c055689baa",
- "sha256:309d91bd7a35063ec7a0e4d75645488bfab3f0b66373e7722f23da7f5b0f34cc",
- "sha256:0388c12539372bb92d6dde68b4627f0300d948965bbb7fc104924d715fdc0965",
- "sha256:ab3508df9a92c1d3362343d235420d08e2662969b83134f8a97dc1451cbe5e84",
- "sha256:43a155eb76025c61fc20c3d03b89ca28efa6f5be572ab6110b2fb68eda96bfea",
- "sha256:f98b461cb59f117887aa634a66022c0bd394278245ed51189f63a036516e32de",
- "sha256:b6cebae1502ce5b87d7c6f532fa90ab345cfbda62b95aeea4e431e164d498a3d",
- "sha256:a4497faa4f1c0fc365ba05eaecfb6b5d24e3c8c72e95938f9524e29dadb15e76",
- "sha256:2b4d7f03a8a6632598cbc5df15bbca9f778c43db7cf1a838f4fa2c8599a8691a",
- "sha256:1afccd7e27cac1b9617be8c769f6d8a6d363699c9b86820f40c74cfb3328921c"
- ],
- "version": "==4.4.2"
+ "sha256:7608a3dd5d73cb06c531b8925e0ef8d3de31fed2544a7de6c63960a1e73ea4bc",
+ "sha256:3a2184c6d797a125dca8367878d3b9a178b6fdd05fdc2d35d758c3006a1cd694",
+ "sha256:f3f501f345f24383c0000395b26b726e46758b71393267aeae0bd36f8b3ade80",
+ "sha256:0b136648de27201056c1869a6c0d4e23f464750fd9a9ba9750b8336a244429ed",
+ "sha256:337ded681dd2ef9ca04ef5d93cfc87e52e09db2594c296b4a0a3662cb1b41249",
+ "sha256:3eb42bf89a6be7deb64116dd1cc4b08171734d721e7a7e57ad64cc4ef29ed2f1",
+ "sha256:be6cfcd8053d13f5f5eeb284aa8a814220c3da1b0078fa859011c7fffd86dab9",
+ "sha256:69bf008a06b76619d3c3f3b1983f5145c75a305a0fea513aca094cae5c40a8f5",
+ "sha256:2eb564bbf7816a9d68dd3369a510be3327f1c618d2357fa6b1216994c2e3d508",
+ "sha256:9d6dd10d49e01571bf6e147d3b505141ffc093a06756c60b053a859cb2128b1f",
+ "sha256:701cd6093d63e6b8ad7009d8a92425428bc4d6e7ab8d75efbb665c806c1d79ba",
+ "sha256:5a13ea7911ff5e1796b6d5e4fbbf6952381a611209b736d48e675c2756f3f74e",
+ "sha256:c1bb572fab8208c400adaf06a8133ac0712179a334c09224fb11393e920abcdd",
+ "sha256:03481e81d558d30d230bc12999e3edffe392d244349a90f4ef9b88425fac74ba",
+ "sha256:28b2191e7283f4f3568962e373b47ef7f0392993bb6660d079c62bd50fe9d162",
+ "sha256:de4418dadaa1c01d497e539210cb6baa015965526ff5afc078c57ca69160108d",
+ "sha256:8c3cb8c35ec4d9506979b4cf90ee9918bc2e49f84189d9bf5c36c0c1119c6558",
+ "sha256:7e1fe19bd6dce69d9fd159d8e4a80a8f52101380d5d3a4d374b6d3eae0e5de9c",
+ "sha256:6bc583dc18d5979dc0f6cec26a8603129de0304d5ae1f17e57a12834e7235062",
+ "sha256:198626739a79b09fa0a2f06e083ffd12eb55449b5f8bfdbeed1df4910b2ca640",
+ "sha256:7aa36d2b844a3e4a4b356708d79fd2c260281a7390d678a10b91ca595ddc9e99",
+ "sha256:3d72c20bd105022d29b14a7d628462ebdc61de2f303322c0212a054352f3b287",
+ "sha256:4635a184d0bbe537aa185a34193898eee409332a8ccb27eea36f262566585000",
+ "sha256:e05cb4d9aad6233d67e0541caa7e511fa4047ed7750ec2510d466e806e0255d6",
+ "sha256:76ecd006d1d8f739430ec50cc872889af1f9c1b6b8f48e29941814b09b0fd3cc",
+ "sha256:7d3f553904b0c5c016d1dad058a7554c7ac4c91a789fca496e7d8347ad040653",
+ "sha256:3c79a6f7b95751cdebcd9037e4d06f8d5a9b60e4ed0cd231342aa8ad7124882a",
+ "sha256:56e448f051a201c5ebbaa86a5efd0ca90d327204d8b059ab25ad0f35fbfd79f1",
+ "sha256:ac4fef68da01116a5c117eba4dd46f2e06847a497de5ed1d64bb99a5fda1ef91",
+ "sha256:1c383d2ef13ade2acc636556fd544dba6e14fa30755f26812f54300e401f98f2",
+ "sha256:b8815995e050764c8610dbc82641807d196927c3dbed207f0a079833ffcf588d",
+ "sha256:104ab3934abaf5be871a583541e8829d6c19ce7bde2923b2751e0d3ca44db60a",
+ "sha256:9e112fcbe0148a6fa4f0a02e8d58e94470fc6cb82a5481618fea901699bf34c4",
+ "sha256:15b111b6a0f46ee1a485414a52a7ad1d703bdf984e9ed3c288a4414d3871dcbd",
+ "sha256:e4d96c07229f58cb686120f168276e434660e4358cc9cf3b0464210b04913e77",
+ "sha256:f8a923a85cb099422ad5a2e345fe877bbc89a8a8b23235824a93488150e45f6e"
+ ],
+ "version": "==4.5.1"
},
"docopt": {
"hashes": [
@@ -499,10 +649,10 @@
},
"faker": {
"hashes": [
- "sha256:2f6ccc9da046d4cd20401734cf6a1ac73a4e4d8256e7b283496ee6827ad2eb60",
- "sha256:e928cf853ef69d7471421f2a3716a1239e43de0fa9855f4016ee0c9f1057328a"
+ "sha256:226d8fa67a8cf8b4007aab721f67639f130e9cfdc53a7095a2290ebb07a65c71",
+ "sha256:48fed4b4a191e2b42ad20c14115f1c6d36d338b80192075d7573f0f42d7fb321"
],
- "version": "==0.8.8"
+ "version": "==0.8.13"
},
"funcsigs": {
"hashes": [
@@ -514,10 +664,11 @@
},
"ipaddress": {
"hashes": [
- "sha256:200d8686011d470b5e4de207d803445deee427455cd0cb7c982b68cf82524f81"
+ "sha256:64b28eec5e78e7510698f6d4da08800a5c575caa4a286c93d651c5d3ff7b6794",
+ "sha256:b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c"
],
"markers": "python_version == '2.7'",
- "version": "==1.0.19"
+ "version": "==1.0.22"
},
"mock": {
"hashes": [
@@ -526,39 +677,49 @@
],
"version": "==2.0.0"
},
+ "more-itertools": {
+ "hashes": [
+ "sha256:11a625025954c20145b37ff6309cd54e39ca94f72f6bb9576d1195db6fa2442e",
+ "sha256:0dd8f72eeab0d2c3bd489025bb2f6a1b8342f9b198f6fc37b52d15cfa4531fea",
+ "sha256:c9ce7eccdcb901a2c75d326ea134e0886abfbea5f93e91cc95de9507c0816c44"
+ ],
+ "version": "==4.1.0"
+ },
"pbr": {
"hashes": [
- "sha256:60c25b7dfd054ef9bb0ae327af949dd4676aa09ac3a9471cdc871d8a9213f9ac",
- "sha256:05f61c71aaefc02d8e37c0a3eeb9815ff526ea28b3b76324769e6158d7f95be1"
+ "sha256:4e8a0ed6a8705a26768f4c3da26026013b157821fe5f95881599556ea9d91c19",
+ "sha256:dae4aaa78eafcad10ce2581fc34d694faa616727837fd8e55c1a00951ad6744f"
],
- "version": "==3.1.1"
+ "version": "==4.0.2"
},
"pluggy": {
"hashes": [
+ "sha256:d345c8fe681115900d6da8d048ba67c25df42973bda370783cd58826442dcd7c",
+ "sha256:e160a7fcf25762bb60efc7e171d4497ff1d8d2d75a3d0df7a21b76821ecbf5c5",
"sha256:7f8ae7f5bdf75671a718d2daf0a64b7885f74510bcd98b1a0bb420eb9a9d0cff"
],
"version": "==0.6.0"
},
"py": {
"hashes": [
- "sha256:8cca5c229d225f8c1e3085be4fcf306090b00850fefad892f9d96c7b6e2f310f",
- "sha256:ca18943e28235417756316bfada6cd96b23ce60dd532642690dcfdaba988a76d"
+ "sha256:983f77f3331356039fdd792e9220b7b8ee1aa6bd2b25f567a963ff1de5a64f6a",
+ "sha256:29c9fab495d7528e80ba1e343b958684f4ace687327e6f789a94bf3d1915f881"
],
- "version": "==1.5.2"
+ "version": "==1.5.3"
},
"pykwalify": {
"hashes": [
- "sha256:d382481b3829d057931a1a6c708f03f80e2d7ebc0123a78b267fc626e796fd68",
- "sha256:2298fafe84dc68161835f62a1b8d0d72dd749d5742baa196224882a6ac2ff844"
+ "sha256:0959032cf185c168256a623b80ff3d2ca57d704f78ca286b4155ebcd9fae9d49",
+ "sha256:191fd3f457f23c0aa8538c3a5c0249f70eeb1046e88d0eaaef928e09c44dff8d"
],
- "version": "==1.6.0"
+ "version": "==1.6.1"
},
"pytest": {
"hashes": [
- "sha256:b84878865558194630c6147f44bdaef27222a9f153bbd4a08908b16bf285e0b1",
- "sha256:53548280ede7818f4dc2ad96608b9f08ae2cc2ca3874f2ceb6f97e3583f25bc4"
+ "sha256:6266f87ab64692112e5477eba395cfedda53b1933ccd29478e671e73b420c19c",
+ "sha256:fae491d1874f199537fd5872b5e1f0e74a009b979df9d53d1553fd03da1703e1"
],
- "version": "==3.3.2"
+ "version": "==3.5.0"
},
"pytest-cov": {
"hashes": [
@@ -575,17 +736,17 @@
},
"pytest-mock": {
"hashes": [
- "sha256:7ed6e7e8c636fd320927c5d73aedb77ac2eeb37196c3410e6176b7c92fdf2f69",
- "sha256:920d1167af5c2c2ad3fa0717d0c6c52e97e97810160c15721ac895cac53abb1c"
+ "sha256:40f4f7148a81c94656a4fe7cf067edd62e541599bb70a6d3af50ffac4c2312f2",
+ "sha256:173fd47c872d105368aeb22dca1415db9f269c0ee1c3832c3aebadd3e80f6133"
],
- "version": "==1.6.3"
+ "version": "==1.9.0"
},
"python-dateutil": {
"hashes": [
- "sha256:95511bae634d69bc7329ba55e646499a842bc4ec342ad54a8cdb65645a0aad3c",
- "sha256:891c38b2a02f5bb1be3e4793866c8df49c7d19baabf9c1bad62547e0b4866aca"
+ "sha256:3220490fb9741e2342e1cf29a503394fdac874bc39568288717ee67047ff29df",
+ "sha256:9d8074be4c993fbe4947878ce593052f71dac82932a677d49194d8ce9778002e"
],
- "version": "==2.6.1"
+ "version": "==2.7.2"
},
"pyyaml": {
"hashes": [
@@ -615,10 +776,10 @@
},
"text-unidecode": {
"hashes": [
- "sha256:02efd86b9c0f489f858d8cead62e94d3760dab444054b258734716f7602330a3",
- "sha256:d0afd5e8a7ac69bfb1372e1bbfa3c63c22e3db8ae1284690e96b45c4430d08d0"
+ "sha256:801e38bd550b943563660a91de8d4b6fa5df60a542be9093f7abf819f86050cc",
+ "sha256:5a1375bb2ba7968740508ae38d92e1f889a0832913cb1c447d5e2046061a396d"
],
- "version": "==1.1"
+ "version": "==1.2"
}
}
}
diff --git a/contrib/nettest/Dockerfile b/contrib/nettest/Dockerfile
new file mode 100644
index 00000000..7a73cf54
--- /dev/null
+++ b/contrib/nettest/Dockerfile
@@ -0,0 +1,47 @@
+##########################################################
+# Dockerfile to run a flask-based web application# Based on an ubuntu:16.04
+##########################################################
+
+# Set the base image to use to centos
+FROM ubuntu:16.04
+
+# Set the file maintainer
+MAINTAINER Qiang.Dai@spirent.com
+LABEL version="0.1" description="Spirent networking test Docker container"
+
+# Set env varibles used in this Dockerfile (add a unique prefix, such as DOCKYARD)
+# Local directory with project source
+ENV DOCKYARD_SRC=nettest \
+ DOCKYARD_SRCHOME=/opt \
+ DOCKYARD_SRCPROJ=/opt/nettest
+
+# Update the defualt application repository source list
+RUN apt-get update && apt-get install -y \
+ gcc \
+ python-dev \
+ python-pip \
+ python-setuptools \
+ --no-install-recommends \
+ && rm -rf /var/lib/apt/lists/*
+
+# Copy application source code to SRCDIR
+COPY $DOCKYARD_SRC $DOCKYARD_SRCPROJ
+
+# Create application subdirectories
+WORKDIR $DOCKYARD_SRCPROJ
+RUN mkdir -p log
+VOLUME ["$DOCKYARD_SRCPROJ/log/"]
+
+# Install Python dependencies
+RUN python -m pip install -U pip \
+ && pip install -U setuptools \
+ && pip install -r $DOCKYARD_SRCPROJ/requirements.txt
+
+# Port to expose
+EXPOSE 5000
+
+# Copy entrypoint script into the image
+WORKDIR $DOCKYARD_SRCPROJ
+
+#CMD ["/bin/bash"]
+CMD ["/bin/bash", "start.sh"]
diff --git a/legacy/api/__init__.py b/contrib/nettest/README.md
index e69de29b..e69de29b 100644
--- a/legacy/api/__init__.py
+++ b/contrib/nettest/README.md
diff --git a/contrib/nettest/nettest/heat_2stcv.yaml b/contrib/nettest/nettest/heat_2stcv.yaml
new file mode 100644
index 00000000..77c6e6e8
--- /dev/null
+++ b/contrib/nettest/nettest/heat_2stcv.yaml
@@ -0,0 +1,170 @@
+##############################################################################
+# Copyright (c) 2018 Spirent Communications and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+heat_template_version: 2016-10-14
+
+description: Template for deploying 2 STCv and 1 labserver
+
+parameters:
+ public_net_name: {default: external, description: Public network to allocate floating IPs to VMs', type: string}
+ #public_net_id: {description: public_network id for exernal connectivity,type: string}
+ mgmt_net_name: {default: admin, description: Name of STCv mgmt network to be created, type: string}
+ mgmt_net_cidr: {default: 10.10.10.0/24, description: STCv mgmt network CIDR,type: string}
+ mgmt_net_gw: {default: 10.10.10.1, description: STCv mgmt network gateway address, type: string}
+ mgmt_net_pool_start: {default: 10.10.10.10, description: Start of mgmt network IP address allocation pool, type: string}
+ mgmt_net_pool_end: {default: 10.10.10.20, description: End of mgmt network IP address allocation pool, type: string}
+ tst_net_name: {default: tst, description: Name of STCv private network to be created, type: string}
+ tst_net_cidr: {default: 192.168.1.0/24, description: STCv private network CIDR,type: string}
+ tst_net_gw: {default: 192.168.1.1, description: STCv private network gateway address, type: string}
+ tst_net_pool_start: {default: 192.168.1.10, description: Start of private network IP address allocation pool, type: string}
+ tst_net_pool_end: {default: 192.168.1.20, description: End of private network IP address allocation pool, type: string}
+ stcv_image: {default: "stcv-4.79", description: Image name to use for STCv, type: string}
+ stcv_flavor: {default: "m1.tiny", description: Flavor to use for STCv, type: string}
+ #stcv_user_data: {default: "", description: user data such as ntp server ip for stcv, type: string}
+ #stcv_config_file: {default: "stcv_config_file", description: user data such as ntp server ip for stcv, type: string}
+ ntp_server_ip: {default: "", description: user data such as ntp server ip for stcv, type: string}
+ stcv_sg_name: {default: stcv_sg, description: server group name, type: string}
+ stcv_sg_affinity: {default: affinity, description: server group affinity for stcv, type: string}
+
+resources:
+ stcv_server_group:
+ type: OS::Nova::ServerGroup
+ properties:
+ name: {get_param: stcv_sg_name}
+ policies: [{get_param: stcv_sg_affinity}]
+ mgmt_net:
+ type: OS::Neutron::Net
+ properties:
+ name: {get_param: mgmt_net_name}
+ mgmt_net_subnet:
+ type: OS::Neutron::Subnet
+ properties:
+ allocation_pools:
+ - end: {get_param: mgmt_net_pool_end}
+ start: {get_param: mgmt_net_pool_start}
+ cidr: {get_param: mgmt_net_cidr}
+ gateway_ip: {get_param: mgmt_net_gw}
+ network: {get_resource: mgmt_net}
+ public_router:
+ type: OS::Neutron::Router
+ properties:
+ external_gateway_info:
+ network: {get_param: public_net_name}
+ router_interface:
+ type: OS::Neutron::RouterInterface
+ properties:
+ router: {get_resource: public_router}
+ subnet: {get_resource: mgmt_net_subnet}
+ tst_net:
+ type: OS::Neutron::Net
+ properties:
+ name: {get_param: tst_net_name}
+ tst_subnet:
+ type: OS::Neutron::Subnet
+ properties:
+ allocation_pools:
+ - end: {get_param: tst_net_pool_end}
+ start: {get_param: tst_net_pool_start}
+ cidr: {get_param: tst_net_cidr}
+ gateway_ip: {get_param: tst_net_gw}
+ network: {get_resource: tst_net}
+ stcv_1_port_1:
+ type: OS::Neutron::Port
+ properties:
+ network: {get_resource: mgmt_net}
+ fixed_ips:
+ - subnet: {get_resource: mgmt_net_subnet}
+ floating_ip1:
+ type: OS::Neutron::FloatingIP
+ properties:
+ floating_network: {get_param: public_net_name}
+ port_id: {get_resource: stcv_1_port_1}
+ stcv_1_port_2:
+ type: OS::Neutron::Port
+ properties:
+ network: {get_resource: tst_net}
+ port_security_enabled: False
+ fixed_ips:
+ - subnet: {get_resource: tst_subnet}
+ STCv_1:
+ type: OS::Nova::Server
+ properties:
+ #availability_zone : {get_param: availability_zone_name}
+ flavor: {get_param: stcv_flavor}
+ image: {get_param: stcv_image}
+ name: STCv_1
+ user_data:
+ str_replace:
+ template: |
+ #cloud-config
+ spirent:
+ ntp: $ntp_server_ip
+ params:
+ $ntp_server_ip: {get_param: ntp_server_ip}
+ user_data_format: RAW
+ config_drive: True
+ scheduler_hints:
+ group: {get_resource: stcv_server_group}
+ networks:
+ - port: {get_resource: stcv_1_port_1}
+ - port: {get_resource: stcv_1_port_2}
+ stcv_2_port_1:
+ type: OS::Neutron::Port
+ properties:
+ network: {get_resource: mgmt_net}
+ fixed_ips:
+ - subnet: {get_resource: mgmt_net_subnet}
+ floating_ip2:
+ type: OS::Neutron::FloatingIP
+ properties:
+ floating_network: {get_param: public_net_name}
+ port_id: {get_resource: stcv_2_port_1}
+ stcv_2_port_2:
+ type: OS::Neutron::Port
+ properties:
+ network: {get_resource: tst_net}
+ port_security_enabled: False
+ fixed_ips:
+ - subnet: {get_resource: tst_subnet}
+ STCv_2:
+ type: OS::Nova::Server
+ properties:
+ #availability_zone : {get_param: availability_zone_name}
+ flavor: {get_param: stcv_flavor}
+ image: {get_param: stcv_image}
+ name: STCv_2
+ user_data:
+ str_replace:
+ template: |
+ #cloud-config
+ spirent:
+ ntp: $ntp_server_ip
+ params:
+ $ntp_server_ip: {get_param: ntp_server_ip}
+ user_data_format: RAW
+ config_drive: True
+ scheduler_hints:
+ group: {get_resource: stcv_server_group}
+ networks:
+ - port: {get_resource: stcv_2_port_1}
+ - port: {get_resource: stcv_2_port_2}
+outputs:
+ STCv_1_Mgmt_Ip:
+ value: {get_attr: [floating_ip1, floating_ip_address]}
+ description: STCv_1 Mgmt IP
+ STCv_2_Mgmt_Ip:
+ value: {get_attr: [floating_ip2, floating_ip_address]}
+ description: STCv_2 Mgmt IP
+ STCv_1_Tst_Ip:
+ value: {get_attr: [stcv_1_port_2, fixed_ips]}
+ description: STCv_1 Tst IP
+ STCv_2_Tst_Ip:
+ value: {get_attr: [stcv_2_port_2, fixed_ips]}
+ description: STCv_2 Tst IP
+
diff --git a/contrib/nettest/nettest/nettest.py b/contrib/nettest/nettest/nettest.py
new file mode 100644
index 00000000..c5a203e0
--- /dev/null
+++ b/contrib/nettest/nettest/nettest.py
@@ -0,0 +1,157 @@
+##############################################################################
+# Copyright (c) 2018 Spirent Communications and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import logging
+from time import sleep
+
+from rfc2544test import StcRfc2544Test
+from stcv_stack import StcvStack
+
+
+class NetTestMaster(object):
+
+ def __init__(self):
+ self.logger = logging.getLogger(__name__)
+
+ self.stacks = []
+ self.testcases = []
+
+ self.stack_created = False
+ self.status_reason = ''
+
+ def get_stack_by_id(self, id):
+ for stack in self.stacks:
+ if id == stack.stack_id:
+ return stack
+ return None
+
+ def get_stack_by_name(self, name):
+ for stack in self.stacks:
+ if name == stack.name:
+ return stack
+ return None
+
+ def create_stack(self, name, stack_type, pub_net_name, **kwargs):
+ if stack_type != 'stcv':
+ raise Exception('only support stcv stack type currently')
+
+ try:
+ stack = StcvStack(name=name,
+ pub_net_name=pub_net_name,
+ ntp_server_ip=kwargs.get('license_server_ip'),
+ lab_server_ip=kwargs.get('lab_server_ip'),
+ stcv_image=kwargs.get('stcv_image'),
+ stcv_flavor=kwargs.get('stcv_flavor'),
+ stcv_affinity=kwargs.get('stcv_affinity'))
+ stack.create_stack()
+ self.stacks.append(stack)
+
+ except Exception as err:
+ self.logger.error('create stack fail. err = %s', str(err))
+ raise err
+
+ return stack
+
+ def delete_stack(self, stack_id):
+ stack = self.get_stack_by_id(stack_id)
+ if stack is None:
+ raise Exception('stack does not exist, stack_id = %s', stack_id)
+
+ self.stacks.remove(stack)
+ stack.delete_stack()
+
+ def get_tc_result(self, tc_id):
+ tc = self.get_tc_by_id(tc_id)
+ return tc.get_result()
+
+ def get_tc_status(self, tc_id):
+ tc = self.get_tc_by_id(tc_id)
+ return tc.get_status()
+
+ def execute_testcase(self, name, category, stack_id, **kwargs):
+ if category != 'rfc2544':
+ raise Exception("currently only support rfc2544 test")
+
+ stack = self.get_stack_by_id(stack_id)
+ if stack is None:
+ raise Exception("defined stack not exist, stack_id = %s", stack_id)
+
+ tc = StcRfc2544Test(name=name,
+ lab_server_ip=stack.lab_server_ip,
+ license_server_ip=stack.ntp_server_ip,
+ west_stcv_admin_ip=stack.get_west_stcv_ip(),
+ west_stcv_tst_ip=stack.get_west_stcv_tst_ip(),
+ east_stcv_admin_ip=stack.get_east_stcv_ip(),
+ east_stcv_tst_ip=stack.get_east_stcv_tst_ip(),
+ stack_id=stack_id,
+ **kwargs)
+ self.testcases.append(tc)
+ tc.execute()
+
+ return tc.tc_id
+
+ def get_tc_by_id(self, id):
+ for tc in self.testcases:
+ if id == tc.tc_id:
+ return tc
+ return None
+
+ def delete_testcase(self, tc_id):
+ tc = self.get_tc_by_id(tc_id)
+
+ if tc.status == 'finished':
+ tc.delete_result()
+
+ if tc.status == 'running':
+ tc.cancel_run()
+
+ self.testcases.remove(tc)
+
+
+if __name__ == "__main__":
+ try:
+ nettest = NetTestMaster()
+ stack_params = {
+ "stcv_affinity": True,
+ "stcv_image": "stcv-4.79",
+ "stcv_flavor": "m1.tiny",
+ "lab_server_ip": "192.168.37.122",
+ "license_server_ip": "192.168.37.251"
+ }
+
+ stack = nettest.create_stack(name='stack1',
+ stack_type='stcv',
+ pub_net_name='external',
+ **stack_params)
+ tc_params = {
+ 'metric': 'throughput',
+ 'framesizes': [64, 128]
+ }
+ tc = nettest.execute_testcase(name='tc1',
+ category='rfc2544',
+ stack_id=stack.stack_id,
+ **tc_params)
+
+ print "test case id is %s" % tc.id
+
+ status = tc.get_status()
+ while (status != tc.TC_STATUS_FINISHED):
+ if status == tc.TC_STATUS_ERROR:
+ print "tc exectue fail, reason %s" % tc.get_err_reason()
+ break
+ sleep(2)
+ if status == tc.TC_STATUS_FINISHED:
+ print tc.get_result()
+
+ nettest.delete_testcase(tc.id)
+
+ nettest.delete_stack(stack.stack_id)
+
+ except Exception as err:
+ print err
diff --git a/contrib/nettest/nettest/requirements.txt b/contrib/nettest/nettest/requirements.txt
new file mode 100644
index 00000000..3efb124b
--- /dev/null
+++ b/contrib/nettest/nettest/requirements.txt
@@ -0,0 +1,9 @@
+flask
+flask_cors
+flask_restful
+flask_restful_swagger
+#openstacksdk
+keystoneauth1
+python-heatclient
+stcrestclient
+
diff --git a/contrib/nettest/nettest/rest_server.py b/contrib/nettest/nettest/rest_server.py
new file mode 100644
index 00000000..3558b9ac
--- /dev/null
+++ b/contrib/nettest/nettest/rest_server.py
@@ -0,0 +1,351 @@
+##############################################################################
+# Copyright (c) 2018 Spirent Communications and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import os
+import logging
+
+from flask import Flask, abort, jsonify, request, send_from_directory
+from flask_cors import CORS
+from flask_restful import Api, Resource, fields
+from flask_restful_swagger import swagger
+
+from nettest import NetTestMaster
+
+app = Flask(__name__)
+CORS(app)
+api = swagger.docs(Api(app), apiVersion="1.0")
+
+stcv_master = NetTestMaster()
+
+
+@app.route("/tc_results/<tc_id>", methods=["GET"])
+def download_result_file(tc_id):
+ directory = os.getcwd() + "/tc_results/rfc2544/" + tc_id
+ files = os.listdir(directory)
+ return send_from_directory(directory, files[0], as_attachment=True)
+
+
+@swagger.model
+class StackRequestModel:
+ resource_fields = {
+ 'stack_name': fields.String,
+ 'stack_type': fields.String,
+ 'public_network': fields.String,
+ "stack_params": fields.Nested,
+ }
+
+
+@swagger.model
+class StackResponseModel:
+ resource_fields = {
+ 'stack_name': fields.String,
+ 'stack_created': fields.Boolean,
+ "stack_id": fields.String
+ }
+
+
+class Stack(Resource):
+ def __init__(self):
+ self.logger = logging.getLogger(__name__)
+
+ @swagger.operation(
+ notes='Fetch the stack configuration',
+ parameters=[
+ {
+ "name": "id",
+ "description": "The UUID of the stack in the format "
+ "NNNNNNNN-NNNN-NNNN-NNNN-NNNNNNNNNNNN",
+ "required": True,
+ "type": "string",
+ "allowMultiple": False,
+ "paramType": "query"
+ },
+ ],
+ type=StackResponseModel.__name__
+ )
+ def get(self):
+ stack_id = request.args.get('id')
+ stack = stcv_master.get_stack_by_id(stack_id)
+
+ if not stack:
+ abort(404)
+
+ return jsonify({
+ 'stack_name': stack.name,
+ 'stack_created': True,
+ "stack_id": stack_id})
+
+ @swagger.operation(
+ notes='''set the current agent configuration and create a stack in
+ the controller. Returns once the stack create is completed.''',
+ parameters=[
+ {
+ "name": "stack",
+ "description": '''Configuration to be set. All parameters are
+ necessory.
+ ''',
+ "required": True,
+ "type": "StackRequestModel",
+ "paramType": "body"
+ }
+ ],
+ type=StackResponseModel.__name__
+ )
+ def post(self):
+ if not request.json:
+ abort(400, "ERROR: No data specified")
+
+ self.logger.info(request.json)
+
+ try:
+ params = {
+ 'lab_server_ip': request.json['stack_params'].get('lab_server_ip'),
+ 'license_server_ip': request.json['stack_params'].get('license_server_ip'),
+ 'stcv_image': request.json['stack_params'].get('stcv_image'),
+ 'stcv_flavor': request.json['stack_params'].get('stcv_flavor'),
+ 'stcv_affinity': request.json['stack_params'].get('stcv_affinity')
+ }
+
+ stack = stcv_master.create_stack(name=request.json['stack_name'],
+ stack_type=request.json['stack_type'],
+ pub_net_name=request.json['public_network'],
+ **params)
+ if stack is None:
+ abort(400, "ERROR: create stack fail")
+
+ return jsonify({'stack_name': request.json['stack_name'],
+ 'stack_created': True,
+ 'stack_id': stack.stack_id})
+
+ except Exception as e:
+ abort(400, str(e))
+
+ @swagger.operation(
+ notes='delete deployed stack',
+ parameters=[
+ {
+ "name": "id",
+ "description": "The UUID of the stack in the format "
+ "NNNNNNNN-NNNN-NNNN-NNNN-NNNNNNNNNNNN",
+ "required": True,
+ "type": "string",
+ "allowMultiple": False,
+ "paramType": "query"
+ },
+ ],
+ responseMessages=[
+ {
+ "code": 200,
+ "message": "Stack ID found, response in JSON format"
+ },
+ {
+ "code": 404,
+ "message": "Stack ID not found"
+ }
+ ]
+ )
+ def delete(self):
+ try:
+ stack_id = request.args.get('id')
+ stcv_master.delete_stack(stack_id)
+ except Exception as e:
+ abort(400, str(e))
+
+
+@swagger.model
+class TestcaseRequestModel:
+ resource_fields = {
+ 'name': fields.String,
+ 'category': fields.String,
+ 'stack_id': fields.String,
+ 'params': fields.Nested
+ }
+
+
+@swagger.model
+class TestcaseResponseModel:
+ resource_fields = {
+ 'name': fields.String,
+ 'category': fields.String,
+ 'stack_id': fields.String,
+ 'tc_id': fields.String
+ }
+
+
+class TestCase(Resource):
+
+ """TestCase API"""
+
+ def __init__(self):
+ self.logger = logging.getLogger(__name__)
+
+ @swagger.operation(
+ notes='Fetch the metrics of the specified testcase',
+ parameters=[
+ {
+ "name": "id",
+ "description": "The UUID of the testcase in the format "
+ "NNNNNNNN-NNNN-NNNN-NNNN-NNNNNNNNNNNN",
+ "required": True,
+ "type": "string",
+ "allowMultiple": False,
+ "paramType": "query"
+ },
+ {
+ "name": "type",
+ "description": "The type of metrics to report. May be "
+ "metrics (default), metadata, or status",
+ "required": True,
+ "type": "string",
+ "allowMultiple": False,
+ "paramType": "query"
+ }
+ ],
+ responseMessages=[
+ {
+ "code": 200,
+ "message": "Workload ID found, response in JSON format"
+ },
+ {
+ "code": 404,
+ "message": "Workload ID not found"
+ }
+ ]
+ )
+ def get(self):
+ tc_id = request.args.get('id')
+ query_type = request.args.get('type')
+ ret = {}
+
+ try:
+ tc = stcv_master.get_tc_by_id(tc_id)
+ if query_type == "result":
+ ret = tc.get_result()
+
+ if query_type == "status":
+ status = tc.get_status()
+ ret['status'] = status
+ if 'error' == status:
+ reason = tc.get_err_reason()
+ ret['reason'] = reason
+
+ return jsonify(ret)
+
+ except Exception as err:
+ abort(400, str(err))
+
+ @swagger.operation(
+ parameters=[
+ {
+ "name": "body",
+ "description": """Start execution of a testcase with the
+parameters, only support rfc25cc test
+ """,
+ "required": True,
+ "type": "TestcaseRequestModel",
+ "paramType": "body"
+ }
+ ],
+ type=TestcaseResponseModel.__name__,
+ responseMessages=[
+ {
+ "code": 200,
+ "message": "TestCase submitted"
+ },
+ {
+ "code": 400,
+ "message": "Missing configuration data"
+ }
+ ]
+ )
+ def post(self):
+ if not request.json:
+ abort(400, "ERROR: Missing configuration data")
+
+ self.logger.info(request.json)
+
+ try:
+ name = request.json['name']
+ category = request.json['category']
+ stack_id = request.json['stack_id']
+ tc_id = stcv_master.execute_testcase(name=request.json['name'],
+ category=request.json['category'],
+ stack_id=request.json['stack_id'],
+ **request.json['params'])
+
+ return jsonify({'name': name,
+ 'category': category,
+ 'stack_id': stack_id,
+ 'tc_id': tc_id})
+
+ except Exception as e:
+ abort(400, str(e))
+
+ @swagger.operation(
+ notes='Cancels the currently running testcase or delete testcase result',
+ parameters=[
+ {
+ "name": "id",
+ "description": "The UUID of the testcase in the format "
+ "NNNNNNNN-NNNN-NNNN-NNNN-NNNNNNNNNNNN",
+ "required": True,
+ "type": "string",
+ "allowMultiple": False,
+ "paramType": "query"
+ },
+ ],
+ responseMessages=[
+ {
+ "code": 200,
+ "message": "Wordload ID found, response in JSON format"
+ },
+ ]
+ )
+ def delete(self):
+ try:
+ tc_id = request.args.get("id")
+ self.logger.info("receive delete testcase msg. tc_id = %s", tc_id)
+
+ stcv_master.delete_testcase(tc_id)
+
+ except Exception as e:
+ abort(400, str(e))
+
+
+api.add_resource(Stack, "/api/v1.0/stack")
+api.add_resource(TestCase, "/api/v1.0/testcase")
+
+'''
+@app.route("/")
+def hello_world():
+ return 'hello world'
+
+@app.route("/testcases")
+def get_testcases():
+ return []
+
+
+@app.route("/testcases/<int: tc_id>")
+def query_testcase(tc_id):
+ return []
+
+@app.route("/stctest/api/v1.0/testcase/<string: tc_name>", methods = ['GET'])
+def query_tc_result(tc_name):
+ return []
+
+@app.route("/stctest/api/v1.0/testcase", methods = ['POST'])
+def execut_testcase():
+ return []
+'''
+
+
+if __name__ == "__main__":
+ logger = logging.getLogger("nettest").setLevel(logging.DEBUG)
+
+ app.run(host="0.0.0.0", debug=True, threaded=True)
diff --git a/contrib/nettest/nettest/rfc2544test.py b/contrib/nettest/nettest/rfc2544test.py
new file mode 100644
index 00000000..e8c9cd64
--- /dev/null
+++ b/contrib/nettest/nettest/rfc2544test.py
@@ -0,0 +1,596 @@
+##############################################################################
+# Copyright (c) 2018 Spirent Communications and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import base64
+import copy
+import logging
+import os
+import shutil
+import threading
+from time import sleep
+import uuid
+
+import requests
+from stcrestclient import stchttp
+
+
+class Stcv2Net1Stack(object):
+ ADMIN_NETWORK_NAME = "admin"
+ ADMIN_SUBNET_ADDR = "50.50.50.0/24"
+ ADMIN_GW_IP = "50.50.50.1"
+ TST_NETWORK_NAME = "tst"
+ TST_SUBNET_ADDR = "192.168.0.0/24"
+ TST_GW_IP = "192.168.0.1"
+ ROUTER_NAME = "router"
+ WEST_STCV_NAME = "west_stcv"
+ EAST_STCV_NAME = "east_stcv"
+ AFFINITY_SG_NAME = "affinity"
+ STCV_USER_DATA = '''#cloud-config
+spirent:
+ ntp: '''
+
+ def __init__(self, name, conn, ext_network_name, params):
+ self.logger = logging.getLogger(__name__)
+
+ self.name = name
+ self.conn = conn
+ self.ext_network_name = ext_network_name
+ self.image_name = params['stcv_image']
+ self.flavor_name = params['stcv_flavor']
+ self.ntp_server_ip = params['license_server_ip']
+ self.affinity = params['stcv_affinity']
+
+ self.stack_id = str(uuid.uuid4())
+ self.admin_network = None
+ self.admin_subnet = None
+ self.tst_network = None
+ self.tst_subnet = None
+ self.ext_network = None
+ self.router = None
+ self.affinity_sg = None
+
+ self.west_stcv = None
+ self.west_stcv_ip = ''
+ self.east_stcv = None
+ self.east_stcv_ip = ''
+
+ def _deploy_test_network(self):
+
+ # create tst network and subnet
+ self.tst_network = self.conn.network.create_network(
+ name=self.TST_NETWORK_NAME)
+ self.tst_subnet = self.conn.network.create_subnet(
+ name=self.TST_NETWORK_NAME + '_subnet',
+ network_id=self.tst_network.id,
+ ip_version='4',
+ cidr=self.TST_SUBNET_ADDR,
+ gateway_ip=self.TST_GW_IP,
+ is_dhcp_enabled=True)
+
+ # create admin network and subnet
+ self.admin_network = self.conn.network.create_network(
+ name=self.ADMIN_NETWORK_NAME)
+ self.admin_subnet = self.conn.network.create_subnet(
+ name=self.ADMIN_NETWORK_NAME + '_subnet',
+ network_id=self.admin_network.id,
+ ip_version='4',
+ cidr=self.ADMIN_SUBNET_ADDR,
+ gateway_ip=self.ADMIN_GW_IP,
+ is_dhcp_enabled=True)
+
+ # create external gateway and connect admin subnet to router
+ self.ext_network = self.conn.network.find_network(self.ext_network_name)
+ self.router = self.conn.network.create_router(name=self.ROUTER_NAME,
+ external_gateway_info={"network_id": self.ext_network.id},
+ is_admin_state_up=True)
+ self.conn.network.add_interface_to_router(self.router, subnet_id=self.admin_subnet.id)
+
+ def _depoly_stcv(self, name, image_id, flavor_id, scheduler_hints, user_data):
+
+ stcv = self.conn.compute.create_server(
+ name=name, image_id=image_id, flavor_id=flavor_id,
+ networks=[{"uuid": self.admin_network.id}, {"uuid": self.tst_network.id}],
+ config_drive=True,
+ user_data=base64.encodestring(user_data)
+ )
+ stcv = self.conn.compute.wait_for_server(stcv)
+
+ stcv_fixed_ip = stcv.addresses[self.admin_network.name][0]['addr']
+ stcv_floating_ip = self.conn.network.create_ip(floating_network_id=self.ext_network.id)
+ self.conn.compute.add_floating_ip_to_server(server=stcv, address=stcv_floating_ip.floating_ip_address,
+ fixed_address=stcv_fixed_ip)
+
+ return {'stcv': stcv, 'fixed_ip': stcv_fixed_ip, 'floating_ip': stcv_floating_ip}
+
+ def create_stack(self):
+
+ image = self.conn.compute.find_image(self.image_name)
+ flavor = self.conn.compute.find_flavor(self.flavor_name)
+
+ if self.affinity:
+ self.affinity_sg = \
+ self.conn.compute.create_server_group(name=self.AFFINITY_SG_NAME,
+ policies=["affinity"])
+ else:
+ self.affinity_sg = \
+ self.conn.compute.create_server_group(name=self.AFFINITY_SG_NAME,
+ policies=["anti-affinity"])
+ self._deploy_test_network()
+
+ user_data = self.STCV_USER_DATA + self.ntp_server_ip
+
+ stcv = self._depoly_stcv(name=self.WEST_STCV_NAME,
+ image_id=image.id,
+ flavor_id=flavor.id,
+ scheduler_hints=self.affinity_sg,
+ user_data=user_data)
+ self.west_stcv = stcv['stcv']
+ self.west_stcv_ip = stcv['floating_ip']
+
+ stcv = self._depoly_stcv(name=self.EAST_STCV_NAME,
+ image_id=image.id,
+ flavor_id=flavor.id,
+ scheduler_hints=self.affinity_sg,
+ user_data=user_data)
+ self.east_stcv = stcv['stcv']
+ self.east_stcv_ip = stcv['floating_ip']
+
+ def delete_stack(self):
+
+ self.conn.compute.delete_server(self.west_stcv, ignore_missing=True)
+ self.conn.compute.delete_server(self.east_stcv, ignore_missing=True)
+
+ self.conn.compute.delete_server_group(server_group=self.affinity_sg,
+ ignore_missing=True)
+
+ # delete external gateway
+ self.conn.network.delete_router(self.router, ignore_missing=True)
+
+ # delete tst network
+ self.conn.network.delete_subnet(self.tst_subnet, ignore_missing=True)
+ self.conn.network.delete_network(self.tst_network, ignore_missing=True)
+
+ # delete admin network
+ self.conn.network.delete_subnet(self.admin_subnet, ignore_missing=True)
+ self.conn.network.delete_network(self.admin_network, ignore_missing=True)
+
+
+class StcSession:
+ """ wrapper class for stc session"""
+
+ def __init__(self, labserver_addr, user_name, session_name):
+ self.logger = logging.getLogger(__name__)
+
+ # create connection obj
+ self.stc = stchttp.StcHttp(labserver_addr)
+ self.user_name = user_name
+ self.session_name = session_name
+
+ # create session on labserver
+ self.session_id = self.stc.new_session(self.user_name, self.session_name)
+ self.stc.join_session(self.session_id)
+ return
+
+ def __del__(self):
+ # destroy resource on labserver
+ self.stc.end_session()
+
+ def clean_all_session(self):
+ session_urls = self.stc.session_urls()
+ for session in session_urls:
+ resp = requests.delete(session)
+ self.logger.info("delete session resp: %s", str(resp))
+ return
+
+
+class StcRfc2544Test:
+ """ RFC2544 test class"""
+
+ RESULT_PATH_PREFIX = './tc_results/rfc2544/'
+ TC_STATUS_INIT = 'init'
+ TC_STATUS_RUNNING = 'running'
+ TC_STATUS_FINISHED = 'finished'
+ TC_STATUS_ERROR = 'error'
+
+ throughput_additional_params = {
+ "AcceptableFrameLoss": 0.0,
+ "Duration": 30,
+ "FrameSizeList": 64,
+ "LearningMode": 'AUTO',
+ "NumOfTrials": 1,
+ # "RateInitial": 99.0,
+ # "RateLowerLimit": 99.0,
+ # "RateStep": 10.0,
+ # "RateUpperLimit": 99.0,
+ "Resolution": 1.0,
+ "SearchMode": 'BINARY',
+ "TrafficPattern": 'PAIR'
+ }
+
+ latency_additional_params = {
+ "Duration": 30,
+ "ExecuteSynchronous": True,
+ "FrameSizeList": 64,
+ "LearningMode": 'AUTO',
+ # "LoadType": 'STEP',
+ # "LoadStart": 10.0,
+ # "LoadEnd": 100.0,
+ # "LoadStep": 10.0,
+ "LoadList": [10, 20, 30, 40, 50, 60, 70, 80, 90, 100],
+ "LoadUnits": "PERCENT_LINE_RATE",
+ "NumOfTrials": 1,
+ "TrafficPattern": 'PAIR'
+ }
+
+ def __init__(self, name, lab_server_ip, license_server_ip,
+ west_stcv_admin_ip, west_stcv_tst_ip,
+ east_stcv_admin_ip, east_stcv_tst_ip,
+ stack_id=None, **kwargs):
+ self.logger = logging.getLogger(__name__)
+
+ self.name = name
+ self.lab_server_ip = lab_server_ip
+ self.license_server_ip = license_server_ip
+ self.west_stcv_ip = west_stcv_admin_ip
+ self.west_stcv_tst_ip = west_stcv_tst_ip
+ self.east_stcv_ip = east_stcv_admin_ip
+ self.east_stcv_tst_ip = east_stcv_tst_ip
+ self.stack_id = stack_id
+ self.metric = kwargs.get('metric')
+ if self.metric == 'throughput':
+ self.additional_params = copy.copy(self.throughput_additional_params)
+ elif self.metric == 'latency':
+ self.additional_params = copy.copy(self.latency_additional_params)
+ else:
+ raise Exception('invalid metric, metric = ' + str(self.metric))
+ self.additional_params['FrameSizeList'] = kwargs.get('framesizes')
+
+ self.tc_id = str(uuid.uuid4())
+
+ self.stc = None
+ self.sess = None
+ self.executor = None
+ self.status = 'init'
+ self.err_reason = ''
+
+ def config_license(self, license_server_addr):
+ license_mgr = self.stc.get("system1", "children-licenseservermanager")
+ self.stc.create("LicenseServer",
+ under=license_mgr,
+ attributes={"server": license_server_addr})
+ return
+
+ def create_project(self, traffic_custom=None):
+ self.project = self.stc.get("System1", "children-Project")
+ # Configure any custom traffic parameters
+ if traffic_custom == "cont":
+ self.stc.create("ContinuousTestConfig", under=self.project)
+ return
+
+ def config_test_port(self, chassis_addr, slot_no, port_no, intf_addr, gateway_addr):
+ # create test port
+ port_loc = "//%s/%s/%s" % (chassis_addr, slot_no, port_no)
+ chassis_port = self.stc.create('port', self.project)
+ self.stc.config(chassis_port, {'location': port_loc})
+
+ # Create emulated genparam for east port
+ device_gen_params = self.stc.create("EmulatedDeviceGenParams",
+ under=self.project,
+ attributes={"Port": chassis_port})
+ # Create the DeviceGenEthIIIfParams object
+ self.stc.create("DeviceGenEthIIIfParams",
+ under=device_gen_params,
+ attributes={"UseDefaultPhyMac": "True"})
+
+ # Configuring Ipv4 interfaces
+ self.stc.create("DeviceGenIpv4IfParams",
+ under=device_gen_params,
+ attributes={"Addr": intf_addr, "Gateway": gateway_addr})
+
+ # Create Devices using the Device Wizard
+ self.stc.perform("DeviceGenConfigExpand",
+ params={"DeleteExisting": "No", "GenParams": device_gen_params})
+
+ return
+
+ def do_test(self):
+ if self.metric == "throughput":
+ self.stc.perform("Rfc2544SetupThroughputTestCommand", self.additional_params)
+ elif self.metric == "backtoback":
+ self.stc.perform("Rfc2544SetupBackToBackTestCommand", self.additional_params)
+ elif self.metric == "frameloss":
+ self.stc.perform("Rfc2544SetupFrameLossTestCommand", self.additional_params)
+ elif self.metric == "latency":
+ self.stc.perform("Rfc2544SetupLatencyTestCommand", self.additional_params)
+ else:
+ raise Exception("invalid rfc2544 test metric.")
+
+ # Save the configuration
+ self.stc.perform("SaveToTcc", params={"Filename": "2544.tcc"})
+
+ # Connect to the hardware...
+ self.stc.perform("AttachPorts",
+ params={"portList": self.stc.get("system1.project", "children-port"),
+ "autoConnect": "TRUE"})
+
+ # Apply configuration.
+ self.stc.apply()
+ self.stc.perform("SequencerStart")
+ self.stc.wait_until_complete()
+
+ return
+
+ def write_query_results_to_csv(self, results_path, csv_results_file_prefix, query_results):
+ filec = os.path.join(results_path, csv_results_file_prefix + ".csv")
+ with open(filec, "wb") as result_file:
+ result_file.write(query_results["Columns"].replace(" ", ",") + "\n")
+ for row in (query_results["Output"].replace("} {", ",").replace("{", "").replace("}", "").split(",")):
+ result_file.write(row.replace(" ", ",") + "\n")
+
+ def format_result(self, metric, original_result_dict):
+ result = {}
+ if metric == 'throughput':
+ columns = original_result_dict["Columns"].split(' ')
+ index_framesize = columns.index("ConfiguredFrameSize")
+ index_result = columns.index("Result")
+ index_throughput = columns.index("Throughput(%)")
+ index_ForwardingRate = columns.index("ForwardingRate(fps)")
+ outputs = \
+ original_result_dict["Output"].replace('} {', ',').replace("{", "").replace("}", "").split(",")
+
+ for row in outputs:
+ output = row.split(' ')
+ result[output[index_framesize]] = {'Result': output[index_result],
+ "Throughput(%)": output[index_throughput],
+ "ForwardingRate(fps)": output[index_ForwardingRate]}
+
+ elif self.metric == "latency":
+ pass
+
+ elif self.metric == "frameloss":
+ pass
+
+ elif self.metric == "backtoback":
+ pass
+
+ return result
+
+ def collect_result(self, local_dir):
+ # Determine what the results database filename is...
+ lab_server_resultsdb = self.stc.get(
+ "system1.project.TestResultSetting", "CurrentResultFileName")
+ self.stc.perform("CSSynchronizeFiles",
+ params={"DefaultDownloadDir": local_dir})
+
+ resultsdb = local_dir + lab_server_resultsdb.split("/Results")[1]
+
+ if not os.path.exists(resultsdb):
+ resultsdb = lab_server_resultsdb
+ self.logger.info("Failed to create the local summary DB File, using"
+ " the remote DB file instead.")
+ else:
+ self.logger.info(
+ "The local summary DB file has been saved to %s", resultsdb)
+
+ if self.metric == "throughput":
+ resultsdict = self.stc.perform("QueryResult",
+ params={
+ "DatabaseConnectionString": lab_server_resultsdb,
+ "ResultPath": "RFC2544ThroughputTestResultDetailedSummaryView"})
+ elif self.metric == "backtoback":
+ resultsdict = self.stc.perform("QueryResult",
+ params={
+ "DatabaseConnectionString": lab_server_resultsdb,
+ "ResultPath": "RFC2544Back2BackTestResultDetailedSummaryView"})
+ elif self.metric == "frameloss":
+ resultsdict = self.stc.perform("QueryResult",
+ params={
+ "DatabaseConnectionString": lab_server_resultsdb,
+ "ResultPath": "RFC2544FrameLossTestResultDetailedSummaryView"})
+ elif self.metric == "latency":
+ resultsdict = self.stc.perform("QueryResult",
+ params={
+ "DatabaseConnectionString": lab_server_resultsdb,
+ "ResultPath": "RFC2544LatencyTestResultDetailedSummaryView"})
+ else:
+ raise Exception("invalid rfc2544 test metric.")
+
+ self.write_query_results_to_csv(self.results_dir, self.metric, resultsdict)
+
+ self.result = self.format_result(self.metric, resultsdict)
+
+ return
+
+ def thread_entry(self):
+ self.status = self.TC_STATUS_RUNNING
+ try:
+ # create session on lab server
+ self.sess = StcSession(self.lab_server_ip, session_name=self.name, user_name=self.name)
+ self.stc = self.sess.stc
+
+ # create test result directory
+ self.results_dir = self.RESULT_PATH_PREFIX + self.tc_id + '/'
+ os.makedirs(self.results_dir)
+
+ # Bring up license server
+ self.config_license(self.license_server_ip)
+
+ self.logger.info("config license success, license_server_addr = %s.", self.license_server_ip)
+
+ # Create the root project object and Configure any custom traffic parameters
+ self.create_project()
+
+ self.logger.info("create project success.")
+
+ # configure test port
+ self.config_test_port(self.west_stcv_ip, 1, 1, self.west_stcv_tst_ip, self.east_stcv_tst_ip)
+ self.config_test_port(self.east_stcv_ip, 1, 1, self.east_stcv_tst_ip, self.west_stcv_tst_ip)
+
+ self.logger.info("config test port success, west_chassis_addr = %s, east_chassis_addr = %s.",
+ self.west_stcv_ip, self.east_stcv_ip)
+
+ # execute test
+ self.do_test()
+
+ self.logger.info("execute test success.")
+
+ # collect test result
+ self.collect_result(self.results_dir)
+
+ self.logger.info("collect result file success, results_dir = %s.", self.results_dir)
+
+ self.status = self.TC_STATUS_FINISHED
+
+ except Exception as err:
+ self.logger.error("Failed to execute Rfc2544 testcase, err: %s", str(err))
+ self.err_reason = str(err)
+ self.status = self.TC_STATUS_ERROR
+
+ finally:
+ if self.sess is not None:
+ self.sess.clean_all_session()
+
+ def execute(self):
+
+ self.executor = threading.Thread(name='rfc2544', target=self.thread_entry)
+ self.executor.start()
+
+ def get_result(self):
+ if self.status != self.TC_STATUS_FINISHED:
+ return {'name': self.name,
+ 'tc_id': self.tc_id,
+ 'status': self.status
+ }
+
+ return {'name': self.name,
+ 'category': 'rfc2544',
+ 'id': self.tc_id,
+ 'params': {
+ 'metric': self.metric,
+ 'framesizes': self.additional_params.get('FrameSizeList')},
+ 'result': self.result}
+
+ def get_status(self):
+ return self.status
+
+ def delete_result(self):
+ shutil.rmtree(self.results_dir)
+ pass
+
+ def cancel_run(self):
+ pass
+
+ def get_err_reason(self):
+ return self.err_reason
+
+
+if __name__ == '__main__':
+
+ lab_server_ip = '192.168.37.122'
+ license_server_ip = '192.168.37.251'
+ west_stcv_admin_ip = '192.168.37.202'
+ west_stcv_tst_ip = '192.168.1.20'
+ east_stcv_admin_ip = '192.168.37.212'
+ east_stcv_tst_ip = '192.168.1.17'
+
+ tc = StcRfc2544Test(name='tc1',
+ lab_server_ip=lab_server_ip,
+ license_server_ip=license_server_ip,
+ west_stcv_admin_ip=west_stcv_admin_ip,
+ west_stcv_tst_ip=west_stcv_tst_ip,
+ east_stcv_admin_ip=east_stcv_admin_ip,
+ east_stcv_tst_ip=east_stcv_tst_ip,
+ metric="throughput",
+ framesizes=[64, 128, 256, 512, 1024])
+ tc.execute()
+ status = tc.get_status()
+ while(status != tc.TC_STATUS_FINISHED):
+ if status == tc.TC_STATUS_ERROR:
+ print "tc exectue fail, reason %s" % tc.get_err_reason()
+ break
+ sleep(2)
+ if status == tc.TC_STATUS_FINISHED:
+ print tc.get_result()
+'''
+ tc = StcRfc2544Test(name='tc2',
+ lab_server_ip=lab_server_ip,
+ license_server_ip=license_server_ip,
+ west_stcv_admin_ip=west_stcv_admin_ip,
+ west_stcv_tst_ip=west_stcv_tst_ip,
+ east_stcv_admin_ip=east_stcv_admin_ip,
+ east_stcv_tst_ip=east_stcv_tst_ip,
+ metric="latency",
+ framesizes=[64, 128, 256, 512, 1024])
+ tc.execute()
+ status = tc.get_status()
+ while(status != tc.TC_STATUS_FINISHED):
+ if status == tc.TC_STATUS_ERROR:
+ print "tc exectue fail, reason %s" % tc.get_err_reason()
+ break
+ sleep(2)
+ if status == tc.TC_STATUS_FINISHED:
+ print tc.get_result()
+
+ tc = StcRfc2544Test(name='tc3',
+ lab_server_ip=lab_server_ip,
+ license_server_ip=license_server_ip,
+ west_stcv_admin_ip=west_stcv_admin_ip,
+ west_stcv_tst_ip=west_stcv_tst_ip,
+ east_stcv_admin_ip=east_stcv_admin_ip,
+ east_stcv_tst_ip=east_stcv_tst_ip,
+ metric="backtoback",
+ framesizes=[64, 128, 256, 512, 1024])
+ tc.execute()
+ status = tc.get_status()
+ while(status != tc.TC_STATUS_FINISHED):
+ if status == tc.TC_STATUS_ERROR:
+ print "tc exectue fail, reason %s" % tc.get_err_reason()
+ break
+ sleep(2)
+ if status == tc.TC_STATUS_FINISHED:
+ print tc.get_result()
+
+ tc = StcRfc2544Test(name='tc4',
+ lab_server_ip=lab_server_ip,
+ license_server_ip=license_server_ip,
+ west_stcv_admin_ip=west_stcv_admin_ip,
+ west_stcv_tst_ip=west_stcv_tst_ip,
+ east_stcv_admin_ip=east_stcv_admin_ip,
+ east_stcv_tst_ip=east_stcv_tst_ip,
+ metric="frameloss",
+ framesizes=[64, 128, 256, 512, 1024])
+ tc.execute()
+ status = tc.get_status()
+ while(status != tc.TC_STATUS_FINISHED):
+ if status == tc.TC_STATUS_ERROR:
+ print "tc exectue fail, reason %s" % tc.get_err_reason()
+ break
+ sleep(2)
+ if status == tc.TC_STATUS_FINISHED:
+ print tc.get_result()
+'''
+
+'''
+class Testcase(object):
+
+ def __init__(self, stack):
+ self.stack = stack
+
+ def execute(self):
+ pass
+
+class TestcaseFactory(object):
+
+ def __init__(self):
+
+ def create_tc(self, tc_metadata):
+ self.tc_name = tc_metadata['tc_name']
+ self.tc_id = str(uuid.uuid4())
+ if
+'''
diff --git a/legacy/tests/helper/suite.yaml b/contrib/nettest/nettest/start.sh
index 84bf9239..12ae3eb0 100644
--- a/legacy/tests/helper/suite.yaml
+++ b/contrib/nettest/nettest/start.sh
@@ -1,14 +1,11 @@
+#!/bin/bash
##############################################################################
-# Copyright (c) 2017 taseer94@gmail.com and others.
+# Copyright (c) 2018 Spirent Communications and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
----
-
- tests:
- - command: ['suite', 'run']
- output: "Run a suite\n"
+exec /usr/bin/python rest_server.py
diff --git a/contrib/nettest/nettest/stcv_stack.py b/contrib/nettest/nettest/stcv_stack.py
new file mode 100644
index 00000000..7c1d4336
--- /dev/null
+++ b/contrib/nettest/nettest/stcv_stack.py
@@ -0,0 +1,165 @@
+##############################################################################
+# Copyright (c) 2018 Spirent Communications and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import logging
+import os
+from time import sleep
+import traceback
+
+import heatclient.client as heatclient
+from keystoneauth1 import loading
+from keystoneauth1 import session
+
+
+class StcvStack(object):
+ STCV_CONFIG_FILE = 'stcv_config_file'
+ STCV_HEAT_FILE = './heat_2stcv.yaml'
+
+ def __init__(self, name, **kwargs):
+ self.logger = logging.getLogger(__name__)
+
+ self.name = name
+ self.pub_net_name = kwargs.get('pub_net_name')
+ self.ntp_server_ip = kwargs.get('ntp_server_ip')
+ self.lab_server_ip = kwargs.get('lab_server_ip')
+ self.stcv_image = kwargs.get('stcv_image')
+ self.stcv_flavor = kwargs.get('stcv_flavor')
+ if kwargs.get('stcv_affinity'):
+ self.stcv_affinity = 'affinity'
+ else:
+ self.stcv_affinity = 'anti-affinity'
+
+ self.stack_id = None
+ self._heat_client = None
+
+ def _attach_to_openstack(self):
+ creds = {"username": os.environ.get('OS_USERNAME'),
+ "password": os.environ.get('OS_PASSWORD'),
+ "auth_url": os.environ.get('OS_AUTH_URL'),
+ "project_domain_id": os.environ.get('OS_PROJECT_DOMAIN_ID'),
+ "project_domain_name": os.environ.get('OS_PROJECT_DOMAIN_NAME'),
+ "project_id": os.environ.get('OS_PROJECT_ID'),
+ "project_name": os.environ.get('OS_PROJECT_NAME'),
+ "tenant_name": os.environ.get('OS_TENANT_NAME'),
+ "tenant_id": os.environ.get("OS_TENANT_ID"),
+ "user_domain_id": os.environ.get('OS_USER_DOMAIN_ID'),
+ "user_domain_name": os.environ.get('OS_USER_DOMAIN_NAME')
+ }
+
+ self.logger.debug("Creds: %s" % creds)
+
+ loader = loading.get_plugin_loader('password')
+ auth = loader.load_from_options(**creds)
+ sess = session.Session(auth)
+ self._heat_client = heatclient.Client("1", session=sess)
+
+ def _make_parameters(self):
+ return {
+ 'public_net_name': self.pub_net_name,
+ 'stcv_image': self.stcv_image,
+ 'stcv_flavor': self.stcv_flavor,
+ 'stcv_sg_affinity': self.stcv_affinity,
+ 'ntp_server_ip': self.ntp_server_ip
+ }
+
+ def acquire_ip_from_stack_output(self, output, key_name):
+ ip = None
+ for item in output:
+ if item['output_key'] == key_name:
+ ip = item['output_value']
+ if isinstance(ip, list):
+ ip = ip[0]['ip_address']
+ break
+
+ return ip
+
+ def create_stack(self):
+ with open(self.STCV_HEAT_FILE) as fd:
+ template = fd.read()
+
+ self._attach_to_openstack()
+
+ self.logger.debug("Creating stack")
+
+ stack = self._heat_client.stacks.create(
+ stack_name=self.name,
+ template=template,
+ parameters=self._make_parameters())
+
+ self.stack_id = stack['stack']['id']
+
+ while True:
+ stack = self._heat_client.stacks.get(self.stack_id)
+ status = getattr(stack, 'stack_status')
+ self.logger.debug("Stack status=%s" % (status,))
+ if (status == u'CREATE_COMPLETE'):
+ self.stcv1_ip = self.acquire_ip_from_stack_output(stack.outputs, "STCv_1_Mgmt_Ip")
+ self.stcv2_ip = self.acquire_ip_from_stack_output(stack.outputs, "STCv_2_Mgmt_Ip")
+ self.stcv1_tst_ip = self.acquire_ip_from_stack_output(stack.outputs, "STCv_1_Tst_Ip")
+ self.stcv2_tst_ip = self.acquire_ip_from_stack_output(stack.outputs, "STCv_2_Tst_Ip")
+ break
+ if (status == u'DELETE_COMPLETE'):
+ self.stack_id = None
+ break
+ if (status == u'CREATE_FAILED'):
+ self.status_reason = getattr(stack, 'stack_status_reason')
+ sleep(5)
+ self._heat_client.stacks.delete(stack_id=self.stack_id)
+ sleep(2)
+
+ def delete_stack(self):
+ if self.stack_id is None:
+ raise Exception('stack does not exist')
+
+ self._attach_to_openstack()
+ while True:
+ stack = self._heat_client.stacks.get(self.stack_id)
+ status = getattr(stack, 'stack_status')
+ self.logger.debug("Stack status=%s" % (status,))
+ if (status == u'CREATE_COMPLETE'):
+ self._heat_client.stacks.delete(stack_id=self.stack_id)
+ if (status == u'DELETE_COMPLETE'):
+ self.stack_id = None
+ break
+ if (status == u'DELETE_FAILED'):
+ sleep(5)
+ self._heat_client.stacks.delete(stack_id=self.stack_id)
+ sleep(2)
+
+ def get_west_stcv_ip(self):
+ return self.stcv1_ip
+
+ def get_west_stcv_tst_ip(self):
+ return self.stcv1_tst_ip
+
+ def get_east_stcv_ip(self):
+ return self.stcv2_ip
+
+ def get_east_stcv_tst_ip(self):
+ return self.stcv2_tst_ip
+
+
+if __name__ == '__main__':
+ try:
+ stack = StcvStack(name='stack1',
+ pub_net_name='external',
+ ntp_server_ip='192.168.37.151',
+ stcv_image='stcv-4.79',
+ stcv_flavor='m1.tiny',
+ stcv_affinity=False)
+ stack.create_stack()
+
+ print stack.get_east_stcv_ip()
+ print stack.get_east_stcv_tst_ip()
+ print stack.get_west_stcv_ip()
+ print stack.get_west_stcv_tst_ip()
+
+ except Exception as err:
+ excstr = traceback.format_exc()
+ print excstr
diff --git a/contrib/nettest_client/nettest_client.py b/contrib/nettest_client/nettest_client.py
new file mode 100644
index 00000000..4e08cad3
--- /dev/null
+++ b/contrib/nettest_client/nettest_client.py
@@ -0,0 +1,191 @@
+##############################################################################
+# Copyright (c) 2018 Spirent Communications and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import os
+import logging
+import requests
+import json
+import time
+
+
+class NettestClient(object):
+
+ def __init__(self, rest_server_ip, port, version):
+ self.logger = logging.getLogger(__name__)
+
+ self.rest_server_ip = rest_server_ip
+ self.port = port
+ self.version = version
+ self.base_url = "http://" + self.rest_server_ip + ":" + str(port) + "/api/" + "v" + self.version + "/"
+ self.headers = {"Content-Type": "application/json"}
+
+ def write_log(self, log):
+ self.logger.info(log)
+ print log
+
+ def create_stack(self, name, stack_type, public_network_name, **kargs):
+ stack_id = None
+
+ try:
+ payload = {
+ "stack_name": name,
+ "stack_type": stack_type,
+ "public_network": public_network_name,
+ "stack_params": {
+ "stcv_affinity": kargs.get("stcv_affinity"),
+ "stcv_image": kargs.get("stcv_image"),
+ "stcv_flavor": kargs.get("stcv_flavor"),
+ "lab_server_ip": kargs.get("lab_server_ip"),
+ "license_server_ip": kargs.get("license_server_ip")
+ }
+ }
+
+ stack_url = self.base_url + "stack"
+ response = requests.post(url=stack_url, headers=self.headers, json=payload)
+ if requests.codes.ok != response.status_code:
+ self.write_log("create stack fail, response_content = " + response.content)
+ return None
+ print response.content
+ stack_id = json.loads(response.content)["stack_id"]
+ except Exception as err:
+ self.write_log("create stack fail, error = " + str(err))
+
+ return stack_id
+
+ def destroy_stack(self, stack_id):
+ payload = {"id": stack_id}
+ url = self.base_url + "stack"
+ try:
+ response = requests.delete(url, params=payload)
+ if requests.codes.ok != response.status_code:
+ self.write_log("delete stack fail, err: " + response.content)
+ except Exception as err:
+ self.write_log("delete stack fail, error = " + str(err))
+ return
+
+ self.write_log("delete stack success")
+
+ def run_rfc2544_testcase(self, stack_id, tc_name, metric_type, framesizes):
+ url = self.base_url + "testcase"
+ payload = {
+ "name": tc_name,
+ "stack_id": stack_id,
+ "category": "rfc2544",
+ "params": {
+ "metric": metric_type,
+ "framesizes": framesizes
+ }
+ }
+ try:
+ response = requests.post(url, headers=self.headers, json=payload)
+ if requests.codes.ok != response.status_code:
+ self.write_log("run rfc2544 testcase fail, err = " + response.content)
+ return None
+ except Exception as err:
+ self.write_log("run rfc2544 testcase fail, err = " + str(err))
+ return None
+
+ self.write_log("run rfc2544 testcase success")
+
+ tc_id = json.loads(response.content)["tc_id"]
+
+ return tc_id
+
+ def delete_testcase(self, tc_id):
+ url = self.base_url + "testcase"
+ params = {"tc_id": tc_id}
+ try:
+ response = requests.delete(url, params=params)
+ if requests.codes.ok != response.status_code:
+ self.write_log("delete testcase fail, err = " + response.content)
+ except Exception as err:
+ self.write_log("delete testcase fail, err = " + str(err))
+
+ def write_result(self, result):
+ pass
+
+ def get_tc_result(self, tc_id):
+ ret = False
+ url = self.base_url + "testcase"
+ status_params = {
+ "id": tc_id,
+ "type": "status"
+ }
+ while True:
+ response = requests.get(url, params=status_params)
+ if requests.codes.ok == response.status_code:
+ status = json.loads(response.content)["status"]
+ if status == "running":
+ time.sleep(2)
+ continue
+ elif status == "finished":
+ url = "http://" + self.rest_server_ip + ":" + str(self.port) + "/tc_results/" + tc_id
+ response = requests.get(url)
+ if requests.codes.ok == response.status_code:
+ self.write_log("get tc result success")
+ with open(os.getcwd() + "/" + tc_id + ".csv", "w") as fd:
+ fd.write(response.content)
+ break
+ ret = True
+ else:
+ self.write_log(response.content)
+ break
+ else:
+ self.write_log(response.content)
+ break
+ else:
+ self.write_log(response.content)
+ break
+
+ return ret
+
+
+def run(public_network_name, stcv_image, stcv_flavor,
+ lab_server_ip, license_server_ip):
+
+ nc = NettestClient(rest_server_ip="127.0.0.1", port=5000, version="1.0")
+
+ stack_params = {
+ "name": "s2",
+ "stack_type": "stcv",
+ "public_network_name": public_network_name,
+ "stcv_affinity": True,
+ "stcv_image": stcv_image,
+ "stcv_flavor": stcv_flavor,
+ "lab_server_ip": lab_server_ip,
+ "license_server_ip": license_server_ip,
+ }
+
+ stack_id = nc.create_stack(**stack_params)
+ if stack_id is None:
+ print "create stack fail"
+ # exit(1)
+
+ # wait stcv vm into stable status
+ time.sleep(30)
+
+ tc_params = {
+ "stack_id": stack_id,
+ "tc_name": "tc1",
+ "metric_type": "throughput",
+ "framesizes": [64]
+ }
+ tc_id = nc.run_rfc2544_testcase(**tc_params)
+ if tc_id is None:
+ print "run testcase fail"
+ nc.destroy_stack(stack_id)
+ exit(1)
+
+ result = nc.get_tc_result(tc_id)
+ if result is False:
+ print "get testcase result fail"
+
+ nc.delete_testcase(tc_id)
+
+ nc.destroy_stack(stack_id)
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 5d93d591..8d9313d2 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -41,7 +41,7 @@ RUN apt-get update && apt-get install -y \
&& rm -rf /var/lib/apt/lists/*
RUN locale-gen en_US en_US.UTF-8 \
- && pip install -U pip \
+ && python -m pip install -U pip \
&& pip install -U setuptools
#Cloning Repos
diff --git a/docker/Dockerfile.local b/docker/Dockerfile.local
index 86e86dcd..246895b7 100644
--- a/docker/Dockerfile.local
+++ b/docker/Dockerfile.local
@@ -41,7 +41,7 @@ RUN apt-get update && apt-get install -y \
&& rm -rf /var/lib/apt/lists/*
RUN locale-gen en_US en_US.UTF-8 \
- && pip install -U pip \
+ && python -m pip install -U pip \
&& pip install -U setuptools
COPY . $REPOS_DIR/qtip
diff --git a/docs/conf.py b/docs/conf.py
index 4469b229..5c21c3a6 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2017 ZTE Corporation and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
# -*- coding: utf-8 -*-
#
# QTIP documentation build configuration file, created by
diff --git a/docs/index.rst b/docs/index.rst
index f3e275e5..4e1a4591 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -20,3 +20,13 @@ for performance, simple but supported by comprehensive testing data and transpar
testing/user/configguide/index.rst
testing/user/userguide/index.rst
testing/developer/devguide/index.rst
+
+=========
+Proposals
+=========
+
+.. toctree::
+ :glob:
+ :maxdepth: 1
+
+ proposal/*
diff --git a/docs/proposal/NetworPerformanceIndicator.ipynb b/docs/proposal/NetworPerformanceIndicator.ipynb
deleted file mode 100755
index 92ac06fe..00000000
--- a/docs/proposal/NetworPerformanceIndicator.ipynb
+++ /dev/null
@@ -1,235 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Network performance Indicator: A Proposal.\n",
- " Sridhar K. N. Rao, Spirent Communications"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Network performance is an important measure that should be considered for design and deployment of virtual network functions in the cloud. In this document, we propose an indicator for network performance. We consider following parameters for the indicator.\n",
- "\n",
- "1. The network throughput.\n",
- "2. The network delay\n",
- "3. Application SLAs\n",
- "4. The topology - Path Length and Number of Virtual Network-Elements.\n",
- "5. Network Virtualization - Vxlan, GRE, VLAN, etc. \n",
- "\n",
- "The most commonly used, and well measured, network-performance metrics are throughput and delay. However, considering the NFV environments, we add additional metrics to come up with a single indicator value. With these additional metrics, we plan to cover various deployment scenarios of the virtualized network functions.\n",
- "\n",
- "The proposed network performance indicator value ranges from $$0 - 1.0$$. As majority of indicators, these values should mainly be used for comparative analysis, and not to be seen as a absolute indicator.\n",
- "\n",
- "Note: Additional parameters such as - total load on the network - can be considered in future. \n",
- "\n",
- "The network performance indicator (I) can be represented as:"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "\n",
- "$$ I = w_t(1- \\frac{E_t-O_t}{E_t}) + w_d(1-\\frac{O_d - E_d}{O_d}) + w_a(1-\\frac{E_a - O_a }{E_a}) + w_s (1-\\frac{T_n - V_n}{T_n}) + w_p(1-\\frac{1}{T_n + 1}) + w_v * {C_{nv}} $$\n",
- "Where, \n",
- "$$ w_t + w_d + w_a + w_s + w_p + w_v = 1.0 $$\n"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "<table>\n",
- "<tr>\n",
- "<td>\n",
- "Notation \n",
- "</td>\n",
- "<td>\n",
- "Description \n",
- "</td>\n",
- "<td>\n",
- "Example Value\n",
- "</td>\n",
- "</tr>\n",
- "<tr>\n",
- "<td>\n",
- "$w_t$\n",
- "</td>\n",
- "<td>\n",
- "Weightage for the Throughput \n",
- "</td>\n",
- "<td>\n",
- "0.3\n",
- "</td>\n",
- "</tr>\n",
- "<tr>\n",
- "<td>\n",
- "$w_d$\n",
- "</td>\n",
- "<td>\n",
- "Weightage for the Delay\n",
- "</td>\n",
- "<td>\n",
- "0.3\n",
- "</td>\n",
- "</tr>\n",
- "<tr>\n",
- "<td>\n",
- "$w_a$\n",
- "</td>\n",
- "<td>\n",
- "Weightage for the Application SLA\n",
- "</td>\n",
- "<td>\n",
- "0.1\n",
- "</td>\n",
- "</tr>\n",
- "<tr>\n",
- "<td>\n",
- "$w_s$\n",
- "</td>\n",
- "<td>\n",
- "Weightage for the Topology - Network Elements \n",
- "</td>\n",
- "<td>\n",
- "0.1\n",
- "</td>\n",
- "</tr>\n",
- "<tr>\n",
- "<td>\n",
- "$w_p$ \n",
- "</td>\n",
- "<td>\n",
- "Weightage for the Topology - Path Length \n",
- "</td>\n",
- "<td>\n",
- "0.1 \n",
- "</td>\n",
- "</tr>\n",
- "<tr>\n",
- "<td>\n",
- "$w_v$ \n",
- "</td>\n",
- "<td>\n",
- "Weightage for the Virtualization \n",
- "</td>\n",
- "<td>\n",
- "0.1\n",
- "</td>\n",
- "</tr>\n",
- "</table>"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "<table>\n",
- "<tr>\n",
- "<th>\n",
- "Notation\n",
- "</th>\n",
- "<th>\n",
- "Description\n",
- "</th>\n",
- "</tr>\n",
- "<tr>\n",
- "<td>\n",
- "$$E_t$$ $$\\&$$ $$O_t$$\n",
- "</td>\n",
- "<td>\n",
- "Expected (theoretical Max) and Obtained Average Throughput \n",
- "</td>\n",
- "</tr>\n",
- "<tr>\n",
- "<td>\n",
- "$$E_d$$ $$\\&$$ $$O_d$$ \n",
- "</td>\n",
- "<td>\n",
- "Expected and Otained Minimum Delay \n",
- "</td>\n",
- "</tr>\n",
- "<tr>\n",
- "<td>\n",
- "$$E_a$$ $$\\&$$ $$O_a$$ \n",
- "</td>\n",
- "<td>\n",
- "Expected and Obtained Application SLA Metric \n",
- "</td>\n",
- "</tr>\n",
- "<tr>\n",
- "<td>\n",
- "$T_n$\n",
- "</td>\n",
- "<td>\n",
- "Total number of Network Elements (Switches and Routers) \n",
- "</td>\n",
- "</tr>\n",
- "<tr>\n",
- "<td>\n",
- "$V_n$\n",
- "</td>\n",
- "<td>\n",
- "Total number of Virtual Network Elements \n",
- "</td>\n",
- "</tr>\n",
- "<tr>\n",
- "<td>\n",
- "$C_{nv}$ \n",
- "</td>\n",
- "<td>\n",
- "Network Virtualization Constant\n",
- "</td>\n",
- "</tr>\n",
- "</table>"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "It would be interesting to explore the following alternative:\n",
- "$$I = I_E - I_O$$, where\n",
- "\n",
- "$$ I_E = w_t * E_t + w_d* \\frac{1}{E_d} + w_a.\\frac{1}{E_a} + w_s * \\frac{1}{T_n} + w_p * V_n + W_v * C_{nv} $$\n",
- "\n",
- "and \n",
- "$$ I_O = w_t * O_t + w_d* \\frac{1}{O_d} + w_a.\\frac{1}{O_a} + w_s * \\frac{1}{T_n} + w_p * V_n + W_v * C_{nv} $$"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": true
- },
- "outputs": [],
- "source": []
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.6.1"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/docs/proposal/network-performance-indicator.rst b/docs/proposal/network-performance-indicator.rst
new file mode 100644
index 00000000..bfb5239c
--- /dev/null
+++ b/docs/proposal/network-performance-indicator.rst
@@ -0,0 +1,78 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+.. (c) 2016 ZTE Corp.
+
+
+*****************************
+Network Performance Indicator
+*****************************
+
+Sridhar K. N. Rao, Spirent Communications
+
+Network performance is an important measure that should be considered for design and deployment of virtual network functions in the cloud. In this document, we propose an indicator for network performance. We consider following parameters for the indicator.
+
+#. The network throughput.
+#. The network delay
+#. Application SLAs
+#. The topology - Path Length and Number of Virtual Network-Elements.
+#. Network Virtualization - Vxlan, GRE, VLAN, etc.
+
+The most commonly used, and well measured, network-performance metrics are throughput and delay. However, considering the NFV environments, we add additional metrics to come up with a single indicator value. With these additional metrics, we plan to cover various deployment scenarios of the virtualized network functions.
+
+The proposed network performance indicator value ranges from 0 - 1.0
+
+As with the majority of indicators, these values should mainly be used for comparative analysis, and not be seen as an absolute indicator.
+
+Note: Additional parameters such as - total load on the network - can be considered in future.
+
+The network performance indicator (I) can be represented as:
+
+:math:`I = w_t(1- \frac{E_t-O_t}{E_t}) + w_d(1-\frac{O_d - E_d}{O_d}) + w_a(1-\frac{E_a - O_a }{E_a}) + w_s (1-\frac{T_n - V_n}{T_n}) + w_p(1-\frac{1}{T_n + 1}) + w_v * {C_{nv}}`
+
+Where,
+
++-------------+-----------------------------------------------+---------------+
+| Notation | Description | Example Value |
++=============+===============================================+===============+
+| :math:`w_t` | Weightage for the Throughput | 0.3 |
++-------------+-----------------------------------------------+---------------+
+| :math:`w_d` | Weightage for the Delay | 0.3 |
++-------------+-----------------------------------------------+---------------+
+| :math:`w_a` | Weightage for the Application SLA | 0.1 |
++-------------+-----------------------------------------------+---------------+
+| :math:`w_s` | Weightage for the Topology - Network Elements | 0.1 |
++-------------+-----------------------------------------------+---------------+
+| :math:`w_p` | Weightage for the Topology - Path Length | 0.1 |
++-------------+-----------------------------------------------+---------------+
+| :math:`w_v` | Weightage for the Virtualization | 0.1 |
++-------------+-----------------------------------------------+---------------+
+
+And
+
++---------------------------+------------------------------------------------------------+
+| Notation | Description |
++===========================+============================================================+
+| :math:`E_t` & :math:`O_t` | Expected (theoretical Max) and Obtained Average Throughput |
++---------------------------+------------------------------------------------------------+
| :math:`E_d` & :math:`O_d` | Expected and Obtained Minimum Delay |
++---------------------------+------------------------------------------------------------+
+| :math:`E_a` & :math:`O_a` | Expected and Obtained Application SLA Metric |
++---------------------------+------------------------------------------------------------+
+| :math:`T_n` | Total number of Network Elements (Switches and Routers) |
++---------------------------+------------------------------------------------------------+
+| :math:`V_n` | Total number of Virtual Network Elements |
++---------------------------+------------------------------------------------------------+
+| :math:`C_{nv}` | Network Virtualization Constant |
++---------------------------+------------------------------------------------------------+
+
+It would be interesting to explore the following alternative:
+
+:math:`I = I_E - I_O`
+
+where
+
+:math:`I_E = w_t * E_t + w_d* \frac{1}{E_d} + w_a.\frac{1}{E_a} + w_s * \frac{1}{T_n} + w_p * V_n + W_v * C_{nv}`
+
+and
+
+:math:`I_O = w_t * O_t + w_d* \frac{1}{O_d} + w_a.\frac{1}{O_a} + w_s * \frac{1}{T_n} + w_p * V_n + W_v * C_{nv}` \ No newline at end of file
diff --git a/docs/release/release-notes/fraser.rst b/docs/release/release-notes/fraser.rst
new file mode 100644
index 00000000..3d7d13a8
--- /dev/null
+++ b/docs/release/release-notes/fraser.rst
@@ -0,0 +1,97 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+
+******
+Fraser
+******
+
+This document provides the release notes of QTIP for OPNFV Fraser release
+
+.. contents::
+ :depth: 3
+ :local:
+
+Version history
+===============
+
++--------------------+--------------------+--------------------+--------------------+
+| **Date** | **Ver.** | **Author** | **Comment** |
+| | | | |
++--------------------+--------------------+--------------------+--------------------+
+| 2018-04-25 | Fraser 1.0 | Zhihui Wu | |
+| | | | |
++--------------------+--------------------+--------------------+--------------------+
+
+Summary
+=======
+
+QTIP Fraser release supports the compute QPI(QTIP Performance Index) for **VNF**. In order to
+simplify the implementation, a Ubuntu 16.04 virtual machine is regarded as a simple VNF. The
+end users can try to run QTIP with a **real** VNF.
+
+Release Data
+============
+
++--------------------------------------+--------------------------------------+
+| **Project** | QTIP |
+| | |
++--------------------------------------+--------------------------------------+
+| **Repo/commit-ID** | qtip/opnfv-6.0.0 |
+| | |
++--------------------------------------+--------------------------------------+
+| **Release designation** | stable version |
+| | |
++--------------------------------------+--------------------------------------+
+| **Release date** | 2018-04-18 |
+| | |
++--------------------------------------+--------------------------------------+
+| **Purpose of the delivery** | release with OPNFV cycle |
+| | |
++--------------------------------------+--------------------------------------+
+
+Version change
+--------------
+
+Python packaging tool
+^^^^^^^^^^^^^^^^^^^^^
+
+Pipenv is the officially recommended Python packaging tool from Python.org.
+
+Pipenv uses the ``Pipfile`` and ``Pipfile.lock`` instead of ``requirements.txt`` to manage
+the dependency packages.
+
+Reason for version
+------------------
+
+Features additions
+^^^^^^^^^^^^^^^^^^
+
+* Support the compute QPI for **VNF**
+
+Deliverables
+------------
+
+Software
+^^^^^^^^
+
+- `QTIP Docker image <https://hub.docker.com/r/opnfv/qtip>`_ (tag: opnfv-6.0.0)
+
+Documentation
+^^^^^^^^^^^^^
+
+- `Installation & Configuration <http://docs.opnfv.org/en/stable-fraser/qtip/docs/testing/user/configguide>`_
+- `User Guide <http://docs.opnfv.org/en/stable-fraser/submodules/qtip/docs/testing/user/userguide>`_
+- `Developer Guide <http://docs.opnfv.org/en/stable-fraser/submodules/qtip/docs/testing/developer/devguide>`_
+
+Known Limitations, Issues and Workarounds
+=========================================
+
+Limitations
+-----------
+
+N/A
+
+Known issues
+------------
+
+N/A
diff --git a/docs/release/release-notes/index.rst b/docs/release/release-notes/index.rst
index a7d87a6e..d89ee124 100644
--- a/docs/release/release-notes/index.rst
+++ b/docs/release/release-notes/index.rst
@@ -13,6 +13,7 @@ QTIP Release Notes
.. toctree::
:maxdepth: 2
+ fraser.rst
euphrates.rst
danube.rst
brahmaputra.rst
diff --git a/docs/testing/developer/devguide/index.rst b/docs/testing/developer/devguide/index.rst
index ab411005..0b583cc5 100644
--- a/docs/testing/developer/devguide/index.rst
+++ b/docs/testing/developer/devguide/index.rst
@@ -16,6 +16,5 @@ QTIP Developer Guide
framework.rst
cli.rst
api.rst
- web.rst
compute-qpi.rst
storage-qpi.rst
diff --git a/docs/testing/developer/devguide/web.rst b/docs/testing/developer/devguide/web.rst
deleted file mode 100644
index ae4e3156..00000000
--- a/docs/testing/developer/devguide/web.rst
+++ /dev/null
@@ -1,100 +0,0 @@
-.. This work is licensed under a Creative Commons Attribution 4.0 International License.
-.. http://creativecommons.org/licenses/by/4.0
-
-
-***************************************
-Web Portal for Benchmarking Services
-***************************************
-
-QTIP consists of different tools(metrics) to benchmark the NFVI. These metrics
-fall under different NFVI subsystems(QPI's) such as compute, storage and network.
-QTIP benchmarking tasks are built upon `Ansible`_ playbooks and roles.
-QTIP web portal is a platform to expose QTIP as a benchmarking service hosted on a central host.
-
-Framework
-=========
-
-The web travel has been developed on Python `Django`_ framework. Dig into the documentation to learn about Django.
-
-Design
-======
-
-Django is a MTV (Model Template View) framework. Database objects are mapped to models in ``models.py``. Views handle the
-requests from client side and interact with database using Django ORM. Templates are responsible for
-UI rendering based on response context from Views.
-
-Models
-------
-
-Repo
-~~~~
-
-Model for `workspace`_ repos
-
-::
-
- Repo:
- name
- git_link
-
-
-Task
-~~~~
-
-Tasks keep track of every benchmark run through QTIP-Web Services. Whenever you run a benchmark,
-a new task is created which keep track of time stats and log task progress and ansible output for
-the respective playbook.
-
-::
-
- Task
- start_time
- end_time
- status
- run_time
- repo
- log
-
-
-Views
------
-
-Dashboard
-~~~~~~~~~
-
- - Base class - TemplateVIew
-
-Class based view serving as home page for the application.
-
-
-ReposView
-~~~~~~~~~
-
- - Base class - LoginRequiredMixin, CreateView
-
-Class based view for listing and add new repos
-
-
-RepoUpdate
-~~~~~~~~~~
-
- - Base class - LoginRequiredMixin, UpdateView
-
-Class based View for listing and updating an existing repo details.
-
-*Both ReposView and RepoUpdate View use same template ``repo_form.html``. The context has an extra variable ``template_role`` which is used to distinguish if repo form is for create or edit operation.*
-
-
-Run
-~~~
-
- - Base class - LoginRequiredMixin, View
- - template name - run.html
-
-Class based View for adding new task and run benchmark based on task details. The logs are saved
-in ``logs/run_<log_id>`` directory.
-
-
-.. _Ansible: https://www.ansible.com/
-.. _Django: https://docs.djangoproject.com/en/1.11/
-.. _workspace: https://github.com/opnfv/qtip/blob/master/docs/testing/developer/devguide/ansible.rst#create-workspace
diff --git a/docs/testing/user/configguide/configuration.rst b/docs/testing/user/configguide/configuration.rst
index ae745341..d04f5bab 100644
--- a/docs/testing/user/configguide/configuration.rst
+++ b/docs/testing/user/configguide/configuration.rst
@@ -40,8 +40,9 @@ Run and enter the docker instance
1. If you want to run benchmarks:
::
- envs="INSTALLER_TYPE={INSTALLER_TYPE} -e INSTALLER_IP={INSTALLER_IP} -e NODE_NAME={NODE_NAME}"
+ envs="INSTALLER_TYPE={INSTALLER_TYPE} -e INSTALLER_IP={INSTALLER_IP} -e NODE_NAME={NODE_NAME}"
docker run -p [HOST_IP:]<HOST_PORT>:5000 --name qtip -id -e $envs opnfv/qtip
+ docker start qtip
docker exec -i -t qtip /bin/bash
``INSTALLER_TYPE`` should be one of OPNFV installer, e.g. apex, compass, daisy, fuel
@@ -90,7 +91,7 @@ Environment configuration
Hardware configuration
----------------------
-QTIP does not have specific hardware requriements, and it can runs over any
+QTIP does not have specific hardware requirements, and it can run over any
OPNFV installer.
diff --git a/docs/testing/user/configguide/index.rst b/docs/testing/user/configguide/index.rst
index fa893e5e..9c72ecd2 100644
--- a/docs/testing/user/configguide/index.rst
+++ b/docs/testing/user/configguide/index.rst
@@ -12,4 +12,3 @@ QTIP Installation Guide
:maxdepth: 2
./configuration.rst
- ./web.rst
diff --git a/docs/testing/user/configguide/web.rst b/docs/testing/user/configguide/web.rst
deleted file mode 100644
index 83365abe..00000000
--- a/docs/testing/user/configguide/web.rst
+++ /dev/null
@@ -1,74 +0,0 @@
-.. This work is licensed under a Creative Commons Attribution 4.0 International License.
-.. http://creativecommons.org/licenses/by/4.0
-
-
-***************************************
-Web Portal installation & configuration
-***************************************
-
-Web Portal for Benchmarking is developed on python `Django`_ Framework. Right now the installation
-is need to be done from source.
-
-
-
-Clone QTIP Repo
-===============
-
-::
-
- git clone https://github.com/opnfv/qtip.git
-
-
-Setup database and Initialize user data
-=======================================
-
-CD into `web` directory.
-------------------------
-
-::
-
- cd qtip/qtip/web
-
-
-Setup migrations
-----------------
-
-::
-
- python manage.py makemigrations
-
-
-In usual case migrations will be already available with source. Console willll notify you
-of the same.
-
-Run migrations
---------------
-
-::
-
- python manage.py migrate
-
-
-Create superuser
-----------------
-::
-
- python manage.py createsuperuser
-
-
-Console will prompt for adding new web admin. Enter new credentials.
-
-
-
-Collecting Static Dependencies
-------------------------------
-::
-
- python manage.py importstatic
-
-
-This will import js and css dependencies for UI in static directory. Now the web application is
-ready to run.
-
-
-.. _Django: https://docs.djangoproject.com/en/1.11/
diff --git a/docs/testing/user/userguide/compute.rst b/docs/testing/user/userguide/compute.rst
index f889bfe6..7c5adc26 100644
--- a/docs/testing/user/userguide/compute.rst
+++ b/docs/testing/user/userguide/compute.rst
@@ -16,10 +16,11 @@ test compute components.
All the compute benchmarks could be run in the scenario:
On Baremetal Machines provisioned by an OPNFV installer (Host machines)
+On Virtual machines provisioned by OpenStack deployed by an OPNFV installer
Note: The Compute benchmank constains relatively old benchmarks such as dhrystone
and whetstone. The suite would be updated for better benchmarks such as Linbench for
-the OPNFV E release.
+the OPNFV future release.
Getting started
@@ -32,7 +33,7 @@ Inventory File
QTIP uses Ansible to trigger benchmark test. Ansible uses an inventory file to
determine what hosts to work against. QTIP can automatically generate a inventory
-file via OPNFV installer. Users also can write their own inventory infomation into
+file via OPNFV installer. Users also can write their own inventory information into
``/home/opnfv/qtip/hosts``. This file is just a text file containing a list of host
IP addresses. For example:
::
@@ -53,19 +54,33 @@ manual. If *CI_DEBUG* is not set or set to *false*, QTIP will delete the key fro
remote hosts before the execution ends. Please make sure the key deleted from remote
hosts or it can introduce a security flaw.
-Commands
---------
+Execution
+---------
-In a QTIP container, you can run compute QPI by using QTIP CLI:
-::
+There are two ways to execute compute QPI:
+
+* Script
+
+ You can run compute QPI with docker exec:
+ ::
+
+ # run with baremetal machines provisioned by an OPNFV installer
+ docker exec <qtip container> bash -x /home/opnfv/repos/qtip/qtip/scripts/quickstart.sh -q compute
+
+ # run with virtual machines provisioned by OpenStack
+ docker exec <qtip container> bash -x /home/opnfv/repos/qtip/qtip/scripts/quickstart.sh -q compute -u vnf
+
+* Commands
+
+ In a QTIP container, you can run compute QPI by using QTIP CLI. You can get more details from
+ *userguide/cli.rst*.
- mkdir result
- qtip plan run <plan_name> -p $PWD/result
+Test result
+------------
-QTIP generates results in the ``$PWD/result`` directory are listed down under the
+QTIP generates results in the ``/home/opnfv/<project_name>/results/`` directory are listed down under the
timestamp name.
-you can get more details from *userguide/cli.rst*.
Metrics
-------
diff --git a/docs/testing/user/userguide/index.rst b/docs/testing/user/userguide/index.rst
index 262ddd70..93adc8a9 100644
--- a/docs/testing/user/userguide/index.rst
+++ b/docs/testing/user/userguide/index.rst
@@ -15,6 +15,7 @@ QTIP User Guide
getting-started.rst
cli.rst
api.rst
- web.rst
compute.rst
storage.rst
+ network.rst
+ network_testcase_description.rst
diff --git a/docs/testing/user/userguide/network.rst b/docs/testing/user/userguide/network.rst
new file mode 100644
index 00000000..68c39974
--- /dev/null
+++ b/docs/testing/user/userguide/network.rst
@@ -0,0 +1,114 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+.. (c) 2018 Spirent Communications Corp.
+
+
+********************************
+Network Performance Benchmarking
+********************************
+Like compute or storage QPI, network QPI gives users an overall score for system network performance.
For now it focuses on L2 virtual switch performance on NFVI. Current test cases are from the RFC2544 standard and
the implementation is based on Spirent TestCenter Virtual.
+
+For now, network QPI runs against on the baremetal/virtual scenario deployed by
+the OPNFV installer `APEX`_.
+
+Getting started
+===============
+Notice: All descriptions are based on containers.
+
+Requirements
+------------
+
+* Git must be installed.
+* Docker and docker-compose must be installed.
+* Spirent Testcenter Virtual image must be uploaded to the target cloud and the
+ associated flavor must be created before test.
+* Spirent License Server and Spirent LabServer must be set up and keep them ip
+ reachable from target cloud external network before test.
+
+Git Clone QTIP Repo
+-------------------
+
+::
+
+ git clone https://git.opnfv.org/qtip
+
+Running QTIP container and Nettest Containers
+----------------------------------------------
+
+With Docker Compose, we can use a YAML file to configure application's services and
+use a single command to create and start all the services.
+
+There is a YAML file ``./qtip/tests/ci/network/docker-compose.yaml`` from QTIP repos.
+It can help you to create and start the network QPI service.
+
+Before running docker-compose, you must specify these three variables:
+
+* DOCKER_TAG, which specified the Docker tag(ie: latest)
+* SSH_CREDENTIALS, a directory which includes an SSH key pair will be mounted into QTIP container.
+ QTIP use this SSH key pair to connect to remote hosts.
+* ENV_FILE, which includes the environment variables required by QTIP and Storperf containers
+
+ A example of ENV_FILE:
+
+ ::
+
+ INSTALLER_TYPE=apex
+ INSTALLER_IP=192.168.122.247
+ TEST_SUITE=network
+ NODE_NAME=zte-virtual5
+ SCENARIO=generic
+ TESTAPI_URL=
+ OPNFV_RELEASE=euphrates
+ # The below environment variables are Openstack Credentials.
+ OS_USERNAME=admin
+ OS_USER_DOMAIN_NAME=Default
+ OS_PROJECT_DOMAIN_NAME=Default
+ OS_BAREMETAL_API_VERSION=1.29
+ NOVA_VERSION=1.1
+ OS_PROJECT_NAME=admin
+ OS_PASSWORD=ZjmZJmkCvVXf9ry9daxgwmz3s
+ OS_NO_CACHE=True
+ COMPUTE_API_VERSION=1.1
+ no_proxy=,192.168.37.10,192.0.2.5
+ OS_CLOUDNAME=overcloud
+ OS_AUTH_URL=http://192.168.37.10:5000/v3
+ IRONIC_API_VERSION=1.29
+ OS_IDENTITY_API_VERSION=3
+ OS_AUTH_TYPE=password
+ # The below environment variables are extra info with Spirent.
+ SPT_LICENSE_SERVER_IP=192.168.37.251
+ SPT_LAB_SERVER_IP=192.168.37.122
+ SPT_STCV_IMAGE_NAME=stcv-4.79
+ SPT_STCV_FLAVOR_NAME=m1.tiny
+
+Then, you use the following commands to start network QPI service.
+
+::
+
+ docker-compose -f docker-compose.yaml pull
+ docker-compose -f docker-compose.yaml up -d
+
+Execution
+---------
+
+You can run network QPI with docker exec:
+::
+
+ docker exec <qtip container> bash -x /home/opnfv/repos/qtip/qtip/scripts/quickstart.sh
+
+QTIP generates results in the ``$PWD/results/`` directory are listed down under the
+timestamp name.
+
+Metrics
+-------
+
+Nettest provides the following `metrics`_:
+
+* RFC2544 througput
+* RFC2544 latency
+
+
+.. _APEX: https://wiki.opnfv.org/display/apex
+.. _metrics: https://tools.ietf.org/html/rfc2544
diff --git a/docs/testing/user/userguide/network_testcase_description.rst b/docs/testing/user/userguide/network_testcase_description.rst
new file mode 100644
index 00000000..0f1a0b45
--- /dev/null
+++ b/docs/testing/user/userguide/network_testcase_description.rst
@@ -0,0 +1,90 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+.. (c) 2018 Spirent Communications Corp.
+.. Template to be used for test case descriptions in QTIP Project.
+
+
+Test Case Description
+=====================
+
++-----------------------------------------------------------------------------+
+|Network throughput |
++==============+==============================================================+
+|test case id | qtip_throughput |
++--------------+--------------------------------------------------------------+
+|metric | rfc2544 throughput |
++--------------+--------------------------------------------------------------+
|test purpose | get the max throughput of the pathway on same host or across |
+| | hosts |
++--------------+--------------------------------------------------------------+
+|configuration | None |
++--------------+--------------------------------------------------------------+
+|test tool | Spirent Test Center Virtual |
++--------------+--------------------------------------------------------------+
+|references | RFC2544 |
++--------------+--------------------------------------------------------------+
|applicability | 1. test the switch throughput on same host or across hosts |
+| | 2. test the switch throughput for different packet sizes |
++--------------+--------------------------------------------------------------+
+|pre-test | 1. deploy STC license server and LabServer on public network |
|conditions | and verify it can operate correctly |
+| | 2. upload STC virtual image and create STCv flavor on the |
+| | deployed cloud environment |
++--------------+------+----------------------------------+--------------------+
+|test sequence | step | description | result |
+| +------+----------------------------------+--------------------+
+| | 1 | deploy STCv stack on the target | 2 STCv VM will be |
+| | | cloud with affinity attribute | established on the |
+| | | according to requirements. | cloud |
+| +------+----------------------------------+--------------------+
+| | 2 | run rfc2544 throughput test with | test result report |
+| | | different packet size | will be produced in|
+| | | | QTIP container |
+| +------+----------------------------------+--------------------+
| | 3 | destroy STCv stack | STCv stack |
| | | | destroyed |
++--------------+------+----------------------------------+--------------------+
+|test verdict | find the test result report in QTIP container running |
+| | directory |
++--------------+--------------------------------------------------------------+
+
++-----------------------------------------------------------------------------+
+|Network throughput |
++==============+==============================================================+
+|test case id | qtip_latency |
++--------------+--------------------------------------------------------------+
|metric | rfc2544 latency |
++--------------+--------------------------------------------------------------+
|test purpose | get the latency value of the pathway on same host or across |
+| | hosts |
++--------------+--------------------------------------------------------------+
+|configuration | None |
++--------------+--------------------------------------------------------------+
+|test tool | Spirent Test Center Virtual |
++--------------+--------------------------------------------------------------+
+|references | RFC2544 |
++--------------+--------------------------------------------------------------+
|applicability | 1. test the switch latency on same host or across hosts |
+| | 2. test the switch latency for different packet sizes |
++--------------+--------------------------------------------------------------+
+|pre-test | 1. deploy STC license server and LabServer on public network |
|conditions | and verify it can operate correctly |
+| | 2. upload STC virtual image and create STCv flavor on the |
+| | deployed cloud environment |
++--------------+------+----------------------------------+--------------------+
+|test sequence | step | description | result |
+| +------+----------------------------------+--------------------+
+| | 1 | deploy STCv stack on the target | 2 STCv VM will be |
+| | | cloud with affinity attribute | established on the |
+| | | according to requirements. | cloud |
+| +------+----------------------------------+--------------------+
+| | 2 | run rfc2544 latency test with | test result report |
+| | | different packet size | will be produced in|
+| | | | QTIP container |
+| +------+----------------------------------+--------------------+
+| | 3 | destroy STCv stack | STCv stack |
| | | | destroyed |
++--------------+------+----------------------------------+--------------------+
+|test verdict | find the test result report in QTIP container running |
+| | directory |
++--------------+--------------------------------------------------------------+
diff --git a/docs/testing/user/userguide/storage.rst b/docs/testing/user/userguide/storage.rst
index 7681ff7a..9457e67e 100644
--- a/docs/testing/user/userguide/storage.rst
+++ b/docs/testing/user/userguide/storage.rst
@@ -87,12 +87,23 @@ Then, you use the following commands to start storage QPI service.
Execution
---------
-You can run storage QPI with docker exec:
-::
+* Script
+
+ You can run storage QPI with docker exec:
+ ::
+
+ docker exec <qtip container> bash -x /home/opnfv/repos/qtip/qtip/scripts/quickstart.sh
+
+* Commands
- docker exec <qtip container> bash -x /home/opnfv/repos/qtip/qtip/scripts/quickstart.sh
+ In a QTIP container, you can run storage QPI by using QTIP CLI. You can get more
+ details from *userguide/cli.rst*.
+
+
+Test result
+------------
-QTIP generates results in the ``$PWD/results/`` directory are listed down under the
+QTIP generates results in the ``/home/opnfv/<project_name>/results/`` directory are listed down under the
timestamp name.
Metrics
diff --git a/docs/testing/user/userguide/web.rst b/docs/testing/user/userguide/web.rst
deleted file mode 100644
index 79f180d9..00000000
--- a/docs/testing/user/userguide/web.rst
+++ /dev/null
@@ -1,70 +0,0 @@
-.. This work is licensed under a Creative Commons Attribution 4.0 International License.
-.. http://creativecommons.org/licenses/by/4.0
-
-
-**********************
-Web Portal User Manual
-**********************
-
-QTIP consists of different tools(metrics) to benchmark the NFVI. These metrics
-fall under different NFVI subsystems(QPI's) such as compute, storage and network.
-QTIP benchmarking tasks are built upon `Ansible`_ playbooks and roles.
-QTIP web portal is a platform to expose QTIP as a benchmarking service hosted on a central host.
-
-
-Running
-=======
-
-After setting up the web portal as instructed in config guide, cd into the `web` directory.
-
-and run.
-
-::
-
- python manage.py runserver 0.0.0.0
-
-
-You can access the portal by logging onto `<host>:8000/bench/login/`
-
-If you want to use port 80, you may need sudo permission.
-
-::
-
- sudo python manage.py runserver 0.0.0.0:80
-
-To Deploy on `wsgi`_, Use the Django `deployment tutorial`_
-
-
-Features
-========
-
-After logging in You'll be redirect to QTIP-Web Dashboard. You'll see following menus on left.
-
- * Repos
- * Run Benchmarks
- * Tasks
-
-Repo
-----
-
- Repos are links to qtip `workspaces`_. This menu list all the aded repos. Links to new repos
- can be added here.
-
-Run Benchmarks
---------------
-
- To run a benchmark, select the corresponding repo and run. QTIP Benchmarking service will clone
- the workspace and run the benchmarks. Inventories used are predefined in the workspace repo in the `/hosts/` config file.
-
-Tasks
------
-
- All running or completed benchmark jobs can be seen in Tasks menu with their status.
-
-
-*New users can be added by Admin on the Django Admin app by logging into `/admin/'.*
-
-.. _Ansible: https://www.ansible.com/
-.. _wsgi: https://wsgi.readthedocs.io/en/latest/what.html
-.. _deployment tutorial: https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
-.. _workspaces: https://github.com/opnfv/qtip/blob/master/docs/testing/developer/devguide/ansible.rst#create-workspace
diff --git a/legacy/api/cmd/__init__.py b/legacy/api/cmd/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/legacy/api/cmd/__init__.py
+++ /dev/null
diff --git a/legacy/api/cmd/server.py b/legacy/api/cmd/server.py
deleted file mode 100644
index eea45ad3..00000000
--- a/legacy/api/cmd/server.py
+++ /dev/null
@@ -1,31 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 ZTE Corp and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-from flask import Flask
-from flask_restful import Api
-from flask_restful_swagger import swagger
-
-import legacy.api.router.mapper as mapper
-
-app = Flask(__name__)
-api = swagger.docs(Api(app), apiVersion='0.1', description='QTIP API specs')
-
-
-def add_routers():
- for (handler, url) in mapper.mappers:
- api.add_resource(handler, url)
-
-
-def main():
- add_routers()
- app.run(host='0.0.0.0')
-
-
-if __name__ == "__main__":
- main()
diff --git a/legacy/api/handler/__init__.py b/legacy/api/handler/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/legacy/api/handler/__init__.py
+++ /dev/null
diff --git a/legacy/api/handler/db.py b/legacy/api/handler/db.py
deleted file mode 100644
index 24fc27a5..00000000
--- a/legacy/api/handler/db.py
+++ /dev/null
@@ -1,98 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 ZTE Corp and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-from datetime import datetime
-from operator import add
-import uuid
-
-jobs = {}
-threads = {}
-
-
-def create_job(args):
- if len(filter(lambda x: jobs[x]['state'] == 'processing', jobs.keys())) > 0:
- return None
- else:
- job = {'job_id': str(uuid.uuid4()),
- 'installer_type': args["installer_type"],
- 'installer_ip': args["installer_ip"],
- 'pod_name': args["pod_name"],
- 'suite_name': args["suite_name"],
- 'max_minutes': args["max_minutes"],
- 'type': args["type"],
- 'testdb_url': args["testdb_url"],
- 'node_name': args["node_name"],
- 'start_time': str(datetime.now()),
- 'end_time': None,
- 'state': 'processing',
- 'state_detail': [],
- 'result': None,
- 'result_detail': []}
- jobs[job['job_id']] = job
- return job['job_id']
-
-
-def delete_job(job_id):
- if job_id in threads:
- stop_thread(job_id)
- if job_id in jobs:
- jobs[job_id]['end_time'] = str(datetime.now())
- jobs[job_id]['state'] = 'terminated'
- return True
- else:
- return False
-
-
-def get_job_info(job_id):
- if job_id in jobs:
- return jobs[job_id]
- else:
- return None
-
-
-def finish_job(job_id):
- jobs[job_id]['end_time'] = str(datetime.now())
- jobs[job_id]['state'] = 'finished'
- jobs[job_id]['result'] = reduce(add, map(lambda x: x['result'],
- jobs[job_id]['result_detail']))
- del threads[job_id]
-
-
-def update_job_state_detail(job_id, state_detail):
- jobs[job_id]['state_detail'] = state_detail
-
-
-def update_job_result_detail(job_id, benchmark, result):
- result['benchmark'] = benchmark
- jobs[job_id]['result_detail'].append(result)
-
-
-def is_job_timeout(job_id):
- period = datetime.now() - datetime.strptime(jobs[job_id]['start_time'],
- "%Y-%m-%d %H:%M:%S.%f")
- return True if jobs[job_id]['max_minutes'] * 60 < period.total_seconds()\
- else False
-
-
-def start_thread(job_id, thread, thread_stop):
- threads[job_id] = {'thread': thread,
- 'thread_stop': thread_stop}
- thread.start()
-
-
-def stop_thread(job_id):
- if threads[job_id]['thread'].isAlive():
- threads[job_id]['thread_stop'].set()
- threads[job_id]['thread'].join()
- if job_id in threads:
- del threads[job_id]
-
-
-def update_benchmark_state(job_id, benchmark, benchmark_state):
- filter(lambda x: x["benchmark"] == benchmark,
- get_job_info(job_id)["state_detail"])[0]['state'] = benchmark_state
diff --git a/legacy/api/handler/job_handler.py b/legacy/api/handler/job_handler.py
deleted file mode 100644
index 4ecc1cee..00000000
--- a/legacy/api/handler/job_handler.py
+++ /dev/null
@@ -1,174 +0,0 @@
-##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-import threading
-from copy import copy
-
-from flask_restful import Resource, reqparse
-from flask_restful_swagger import swagger
-from qtip.api.model.job_model import JobResponseModel
-from qtip.utils import args_handler as args_handler
-from werkzeug.exceptions import abort
-
-from legacy.api.handler import db, result_handler
-
-
-class Job(Resource):
- @swagger.operation(
- notes='get a job by ID',
- nickname='get',
- parameters=[],
- responseMessages=[
- {
- "code": 200,
- "message": "Job detail info."
- },
- {
- "code": 404,
- "message": "Can't not find the job id XXXXXXX"
- }
- ]
- )
- def get(self, id):
- ret = db.get_job_info(id)
- return ret if ret else abort(404, " Can't not find the job id %s" % id)
-
- @swagger.operation(
- notes='delete a job by ID',
- nickname='delete',
- parameters=[],
- responseMessages=[
- {
- "code": 200,
- "message": "Delete successfully"
- },
- {
- "code": 404,
- "message": "Can not find job_id XXXXXXXXX"
- }
- ]
- )
- def delete(self, id):
- ret = db.delete_job(id)
- return {'result': "Delete successfully"} if ret else abort(404, "Can not find job_id %s" % id)
-
-
-class JobList(Resource):
- @swagger.operation(
- note='create a job with parameters',
- nickname='create',
- parameters=[
- {
- "name": "body",
- "description": """
-"installer_type": The installer type, for example fuel, compass..,
-
-"installer_ip": The installer ip of the pod,
-
-"max_minutes": If specified, the maximum duration in minutes
-for any single test iteration, default is '60',
-
-"pod_name": If specified, the Pod name, default is 'default',
-
-"suite_name": If specified, Test suite name, for example 'compute', 'network', 'storage',
-default is 'compute',
-
-"type": BM or VM,default is 'BM',
-
-"benchmark_name": If specified, benchmark name in suite, for example 'dhrystone_bm.yaml',
-default is all benchmarks in suite with specified type,
-
-"testdb_url": test db http url, for example 'http://testresults.opnfv.org/test/api/v1',
-
-"node_name": node name reported to test db
- """,
- "required": True,
- "type": "JobModel",
- "paramType": "body"
- }
- ],
- type=JobResponseModel.__name__,
- responseMessages=[
- {
- "code": 200,
- "message": "Job submitted"
- },
- {
- "code": 400,
- "message": "Missing configuration data"
- },
- {
- "code": 409,
- "message": "It already has one job running now!"
- }
- ]
- )
- def post(self):
- parser = reqparse.RequestParser()
- parser.add_argument('installer_type', type=str, required=True, help='installer_type is required')
- parser.add_argument('installer_ip', type=str, required=True, help='installer_ip is required')
- parser.add_argument('max_minutes', type=int, required=False, default=60, help='max_minutes should be integer')
- parser.add_argument('pod_name', type=str, required=False, default='default', help='pod_name should be string')
- parser.add_argument('suite_name', type=str, required=False, default='compute', help='suite_name should be string')
- parser.add_argument('type', type=str, required=False, default='BM', help='type should be BM, VM and ALL')
- parser.add_argument('benchmark_name', type=str, required=False, default='all', help='benchmark_name should be string')
- parser.add_argument('testdb_url', type=str, required=False, default=None,
- help='testdb_url should be test db http url,for example http://testresults.opnfv.org/test/api/v1')
- parser.add_argument('node_name', type=str, required=False, default=None, help='node_name should be string')
- args = parser.parse_args()
- if not args_handler.check_suite(args["suite_name"]):
- return abort(404, 'message:Test suite {0} does not exist under benchmarks/suite'.format(args["suite_name"]))
- if not args_handler.check_lab_name(args["pod_name"]):
- return abort(404, 'message: You have specified a lab {0}\
- that is not present in test_cases'.format(args['pod_name']))
-
- job_id = db.create_job(args)
- if not job_id:
- return abort(409, 'message:It already has one job running now!')
-
- benchmarks = args_handler.get_files_in_suite(args["suite_name"],
- args["type"].lower())
- test_cases = args_handler.get_files_in_test_plan(args["pod_name"],
- args["suite_name"],
- args["type"].lower())
- benchmarks_list = filter(lambda x: x in test_cases, benchmarks)
- if args["benchmark_name"] in benchmarks_list:
- benchmarks_list = [args["benchmark_name"]]
- if (args["benchmark_name"] is not 'all') and args["benchmark_name"] not in benchmarks_list:
- return abort(404, 'message: Benchmark name {0} does not exist in suit {1}'.format(args["benchmark_name"],
- args["suite_name"]))
- state_detail = map(lambda x: {'benchmark': x, 'state': 'idle'}, benchmarks_list)
- db.update_job_state_detail(job_id, copy(state_detail))
- thread_stop = threading.Event()
- post_thread = threading.Thread(target=self.thread_post, args=(args["installer_type"],
- benchmarks_list,
- args["pod_name"],
- args["suite_name"],
- job_id,
- args["testdb_url"],
- args["node_name"],
- thread_stop))
- db.start_thread(job_id, post_thread, thread_stop)
- return {'job_id': str(job_id)}
-
- def thread_post(self, installer_type, benchmarks_list, pod_name, suite_name,
- job_id, testdb_url, node_name, stop_event):
- for benchmark in benchmarks_list:
- if db.is_job_timeout(job_id) or stop_event.is_set():
- break
- db.update_benchmark_state(job_id, benchmark, 'processing')
- result = args_handler.prepare_and_run_benchmark(installer_type,
- '/home',
- args_handler.get_benchmark_path(pod_name,
- suite_name,
- benchmark))
- db.update_job_result_detail(job_id, benchmark, copy(result))
- db.update_benchmark_state(job_id, benchmark, 'finished')
- if (result_handler.dump_suite_result(suite_name) and testdb_url):
- result_handler.push_suite_result_to_db(suite_name, testdb_url, installer_type, node_name)
- db.finish_job(job_id)
diff --git a/legacy/api/handler/result_handler.py b/legacy/api/handler/result_handler.py
deleted file mode 100644
index 3d1d592e..00000000
--- a/legacy/api/handler/result_handler.py
+++ /dev/null
@@ -1,58 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 ZTE Corp and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-import importlib
-import json
-from os.path import expanduser
-
-import qtip.utils.dashboard.pushtoDB as push_to_db
-from qtip.utils import logger_utils
-
-logger = logger_utils.QtipLogger('suite_result').get
-
-
-def get_benchmark_result(benchmark_name, suite_name):
- benchmark_indices = importlib.import_module('scripts.ref_results'
- '.{0}_benchmarks_indices'.format(suite_name))
- methodToCall = getattr(benchmark_indices, '{0}_index'.format(benchmark_name))
- return methodToCall()
-
-
-def dump_suite_result(suite_name):
- suite_dict = {}
- suite_bench_list = {'compute': ['DPI', 'Dhrystone', 'Whetstone', 'SSL', 'RamSpeed'],
- 'storage': ['FIO'],
- 'network': ['IPERF']}
- temp = 0
- l = len(suite_bench_list[suite_name])
- for benchmark in suite_bench_list[suite_name]:
- try:
- suite_dict[benchmark] = get_benchmark_result(benchmark.lower(), suite_name)
- temp = temp + float(suite_dict[benchmark]['index'])
- except OSError:
- l = l - 1
- pass
-
- if l == 0:
- logger.info("No {0} suite results found".format(suite_name))
- return False
- else:
- suite_index = temp / l
- suite_dict_f = {'index': suite_index,
- 'suite_results': suite_dict}
- result_path = expanduser('~') + '/qtip/results'
- with open('{0}/{1}_result.json'.format(result_path, suite_name), 'w+') as result_json:
- json.dump(suite_dict_f, result_json, indent=4, sort_keys=True)
- return True
-
-
-def push_suite_result_to_db(suite_name, test_db_url, installer_type, node_name):
- with open('results/{0}_result.json'.format(suite_name), 'r') as result_file:
- j = json.load(result_file)
- push_to_db.push_results_to_db(test_db_url, '{0}_test_suite'.format(suite_name),
- j, installer_type, node_name)
diff --git a/legacy/api/model/__init__.py b/legacy/api/model/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/legacy/api/model/__init__.py
+++ /dev/null
diff --git a/legacy/api/model/job_model.py b/legacy/api/model/job_model.py
deleted file mode 100644
index 73baf660..00000000
--- a/legacy/api/model/job_model.py
+++ /dev/null
@@ -1,33 +0,0 @@
-##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-from flask_restful import fields
-from flask_restful_swagger import swagger
-
-
-@swagger.model
-class JobModel:
- resource_fields = {
- 'installer_type': fields.String,
- 'installer_ip': fields.String,
- 'max_minutes': fields.Integer,
- 'pod_name': fields.String,
- 'suite_name': fields.String,
- 'type': fields.String,
- 'benchmark_name': fields.String,
- 'testdb_url': fields.String,
- 'node_name': fields.String
- }
- required = ['installer_type', 'installer_ip']
-
-
-@swagger.model
-class JobResponseModel:
- resource_fields = {
- 'job_id': fields.String
- }
diff --git a/legacy/api/router/__init__.py b/legacy/api/router/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/legacy/api/router/__init__.py
+++ /dev/null
diff --git a/legacy/api/router/mapper.py b/legacy/api/router/mapper.py
deleted file mode 100644
index 470d18e2..00000000
--- a/legacy/api/router/mapper.py
+++ /dev/null
@@ -1,15 +0,0 @@
-##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-from legacy.api.handler.job_handler import Job, JobList
-
-
-mappers = [
- (JobList, '/api/v1.0/jobs'),
- (Job, '/api/v1.0/jobs/<string:id>'),
-]
diff --git a/legacy/api/router/mapper.py.orig b/legacy/api/router/mapper.py.orig
deleted file mode 100644
index 1acb40b5..00000000
--- a/legacy/api/router/mapper.py.orig
+++ /dev/null
@@ -1,19 +0,0 @@
-<<<<<<< HEAD
-from legacy.api.handler.job_handler import Job, JobList
-=======
-##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-from legacy.api.handler import Job, JobList
->>>>>>> 615b529... Add licence header according to OPNFV contribution guidelines[1] by script[2]
-
-
-mappers = [
- (JobList, '/api/v1.0/jobs'),
- (Job, '/api/v1.0/jobs/<string:id>'),
-]
diff --git a/legacy/cli/helper.py b/legacy/cli/helper.py
deleted file mode 100644
index acfecf8d..00000000
--- a/legacy/cli/helper.py
+++ /dev/null
@@ -1,14 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 ZTE Corp and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import os
-
-
-def fetch_root():
- return os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'benchmarks/')
diff --git a/legacy/docker/README.md b/legacy/docker/README.md
deleted file mode 100644
index 35ac0935..00000000
--- a/legacy/docker/README.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# QTIP The Indices for Performance
-
-[QTIP] is an [OPNFV] project.
-
-It aims to build a platform for creating and sharing indices of [NFVI] performance.
-
-See the [project vision](https://wiki.opnfv.org/display/qtip/Vision) for more details.
-
-[QTIP]: https://wiki.opnfv.org/display/qtip
-[OPNFV]: https://www.opnfv.org
-[NFVI]: https://en.wikipedia.org/wiki/Network_function_virtualization
diff --git a/legacy/docker/cleanup_qtip_image.sh b/legacy/docker/cleanup_qtip_image.sh
deleted file mode 100644
index 9c2b59db..00000000
--- a/legacy/docker/cleanup_qtip_image.sh
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/bash
-
-if [[ ! -f ${QTIP_DIR}/openrc ]];then
- source ${REPOS_DIR}/releng/utils/fetch_os_creds.sh \
- -d ${QTIP_DIR}/openrc \
- -i ${INSTALLER_TYPE} \
- -a ${INSTALLER_IP}
-fi
-
-source ${QTIP_DIR}/openrc
-
-cleanup_image()
-{
- echo
- if ! glance image-list; then
- return
- fi
-
- echo "Deleting image QTIP_CentOS..."
- glance image-delete $(glance image-list | grep -e QTIP_CentOS | awk '{print $2}')
-
-}
-
-cleanup_image
diff --git a/legacy/docker/prepare_qtip_image.sh b/legacy/docker/prepare_qtip_image.sh
deleted file mode 100644
index 4095c806..00000000
--- a/legacy/docker/prepare_qtip_image.sh
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/bin/bash
-IMGNAME='QTIP_CentOS.qcow2'
-IMGPATH='/home/opnfv/imgstore'
-IMGURL='http://build.opnfv.org/artifacts.opnfv.org/qtip/QTIP_CentOS.qcow2'
-
-load_image()
-{
- if [[ -n $( glance image-list | grep -e QTIP_CentOS) ]]; then
- return
- fi
-
- test -d $IMGPATH || mkdir -p $IMGPATH
- if [[ ! -f "$IMGPATH/$IMGNAME" ]];then
- echo
- echo "========== Downloading QTIP_CentOS image =========="
- cd $IMGPATH
- wget -c --progress=dot:giga $IMGURL
- fi
-
- echo
- echo "========== Loading QTIP_CentOS image =========="
- output=$(glance image-create \
- --name QTIP_CentOS \
- --visibility public \
- --disk-format qcow2 \
- --container-format bare \
- --file $IMGPATH/$IMGNAME )
- echo "$output"
-
- IMAGE_ID=$(echo "$output" | grep " id " | awk '{print $(NF-1)}')
-
- if [ -z "$IMAGE_ID" ]; then
- echo 'Failed uploading QTIP_CentOS image to cloud'.
- exit 1
- fi
-
- echo "QTIP_CentOS image id: $IMAGE_ID"
-}
-
-rm -rf ${QTIP_DIR}/openrc
-
-${REPOS_DIR}/releng/utils/fetch_os_creds.sh \
--d ${QTIP_DIR}/openrc \
--i ${INSTALLER_TYPE} \
--a ${INSTALLER_IP}
-
-source ${QTIP_DIR}/openrc
-
-load_image
diff --git a/legacy/docker/push_db.sh b/legacy/docker/push_db.sh
deleted file mode 100755
index 50341eac..00000000
--- a/legacy/docker/push_db.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-cd ${QTIP_DIR} && python qtip/utils/dashboard/pushtoDB.py
diff --git a/legacy/docker/run_qtip.sh b/legacy/docker/run_qtip.sh
deleted file mode 100755
index 98abf139..00000000
--- a/legacy/docker/run_qtip.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#! /bin/bash
-
-QTIP=qtip/run.py
-
-run_test_suite()
-{
- if [ "$TEST_CASE" == "compute" ]; then
- cd ${QTIP_DIR} && python ${QTIP} -l default -f compute
- cd ${QTIP_DIR} && python scripts/ref_results/suite_result.py compute
- elif [ "$TEST_CASE" == "storage" ]; then
- cd ${QTIP_DIR} && python ${QTIP} -l default -f storage
- cd ${QTIP_DIR} && python scripts/ref_results/suite_result.py storage
- elif [ "$TEST_CASE" == "network" ]; then
- cd ${QTIP_DIR} && python ${QTIP} -l default -f network
- cd ${QTIP_DIR} && python scripts/ref_results/suite_result.py network
- elif [ "$TEST_CASE" == "all" ]; then
- cd ${QTIP_DIR} && python ${QTIP} -l default -f compute
- cd ${QTIP_DIR} && python ${QTIP} -l default -f storage
- cd ${QTIP_DIR} && python ${QTIP} -l default -f network
-
- cd ${QTIP_DIR} && python scripts/ref_results/suite_result.py compute
- cd ${QTIP_DIR} && python scripts/ref_results/suite_result.py storage
- cd ${QTIP_DIR} && python scripts/ref_results/suite_result.py network
- fi
-}
-
-rm -f ${QTIP_DIR}/config/QtipKey*
-
-echo "Generating ssh keypair"
-ssh-keygen -t rsa -N "" -f ${QTIP_DIR}/config/QtipKey -q
-
-source ${QTIP_DIR}/docker/prepare_qtip_image.sh
-
-run_test_suite
-
-source ${QTIP_DIR}/docker/cleanup_qtip_image.sh
-
-echo "Remove ssh keypair"
-rm -f ${QTIP_DIR}/config/QtipKey*
diff --git a/legacy/docs/_testcase_description.rst b/legacy/docs/_testcase_description.rst
deleted file mode 100644
index d60ca949..00000000
--- a/legacy/docs/_testcase_description.rst
+++ /dev/null
@@ -1,46 +0,0 @@
-.. Template to be used for test case descriptions in QTIP Project.
- Write one .rst per test case.
- Borrowed Heavily from Yardstick
- Upload the .rst for the test case in /docs/ directory.
- Review in Gerrit.
-
-Test Case Description
-=====================
-
-+-----------------------------------------------------------------------------+
-|test case slogan e.g. Network throughput |
-+==============+==============================================================+
-|test case id | e.g. qtip_throughput |
-+--------------+--------------------------------------------------------------+
-|metric | what will be measured, e.g. latency |
-+--------------+--------------------------------------------------------------+
-|test purpose | describe what is the purpose of the test case |
-+--------------+--------------------------------------------------------------+
-|configuration | what .yaml file to use, state SLA if applicable, state |
-| | test duration, list and describe the scenario options used in|
-| | this TC and also list the options using default values. |
-+--------------+--------------------------------------------------------------+
-|test tool | e.g. ping |
-+--------------+--------------------------------------------------------------+
-|references | e.g. RFCxxx, ETSI-NFVyyy |
-+--------------+--------------------------------------------------------------+
-|applicability | describe variations of the test case which can be |
-| | performend, e.g. run the test for different packet sizes |
-+--------------+--------------------------------------------------------------+
-|pre-test | describe configuration in the tool(s) used to perform |
-|conditions | the measurements (e.g. fio, pktgen), POD-specific |
-| | configuration required to enable running the test |
-+--------------+------+----------------------------------+--------------------+
-|test sequence | step | description | result |
-| +------+----------------------------------+--------------------+
-| | 1 | use this to describe tests that | what happens in |
-| | | require several steps e.g. | this step |
-| | | step 1 collect logs | e.g. logs collected|
-| +------+----------------------------------+--------------------+
-| | 2 | remove interface | interface down |
-| +------+----------------------------------+--------------------+
-| | N | what is done in step N | what happens |
-+--------------+------+----------------------------------+--------------------+
-|test verdict | expected behavior, or SLA, pass/fail criteria |
-+--------------+--------------------------------------------------------------+
-
diff --git a/legacy/tests/__init__.py b/legacy/tests/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/legacy/tests/__init__.py
+++ /dev/null
diff --git a/legacy/tests/api/__init__.py b/legacy/tests/api/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/legacy/tests/api/__init__.py
+++ /dev/null
diff --git a/legacy/tests/api/test_server.py b/legacy/tests/api/test_server.py
deleted file mode 100644
index bf316f5d..00000000
--- a/legacy/tests/api/test_server.py
+++ /dev/null
@@ -1,131 +0,0 @@
-##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-import json
-import time
-
-import mock
-import pytest
-
-import qtip.api.cmd.server as server
-
-
-def setup_module():
- server.add_routers()
-
-
-@pytest.fixture
-def app():
- return server.app
-
-
-@pytest.fixture
-def app_client(app):
- client = app.test_client()
- return client
-
-
-def side_effect_sleep(sleep_time):
- time.sleep(sleep_time)
-
-
-def side_effect_pass():
- pass
-
-
-class TestClass:
- @pytest.mark.parametrize("body, expected", [
- ({'installer_type': 'fuel',
- 'installer_ip': '10.20.0.2'},
- {'job_id': '',
- 'installer_type': 'fuel',
- 'installer_ip': '10.20.0.2',
- 'pod_name': 'default',
- 'suite_name': 'compute',
- 'max_minutes': 60,
- 'type': 'BM',
- 'testdb_url': None,
- 'node_name': None,
- 'state': 'finished',
- 'state_detail': [{'state': 'finished', 'benchmark': 'dhrystone_bm.yaml'},
- {'state': 'finished', 'benchmark': 'whetstone_bm.yaml'},
- {'state': 'finished', 'benchmark': 'ramspeed_bm.yaml'},
- {'state': 'finished', 'benchmark': 'dpi_bm.yaml'},
- {'state': 'finished', 'benchmark': 'ssl_bm.yaml'}],
- 'result': 0}),
- ({'installer_type': 'fuel',
- 'installer_ip': '10.20.0.2',
- 'pod_name': 'default',
- 'max_minutes': 20,
- 'suite_name': 'compute',
- 'type': 'VM',
- 'benchmark_name': 'dhrystone_vm.yaml',
- 'testdb_url': 'http://testresults.opnfv.org/test/api/v1',
- 'node_name': 'zte-pod2'},
- {'job_id': '',
- 'installer_type': 'fuel',
- 'installer_ip': '10.20.0.2',
- 'pod_name': 'default',
- 'suite_name': 'compute',
- 'max_minutes': 20,
- 'type': 'VM',
- 'testdb_url': 'http://testresults.opnfv.org/test/api/v1',
- 'node_name': 'zte-pod2',
- 'state': 'finished',
- 'state_detail': [{u'state': u'finished', u'benchmark': u'dhrystone_vm.yaml'}],
- 'result': 0})
- ])
- @mock.patch('qtip.utils.args_handler.prepare_and_run_benchmark')
- def test_post_get_delete_job_successful(self, mock_args_handler, app_client, body, expected):
- mock_args_handler.return_value = {'result': 0,
- 'detail': {'host': [(u'10.20.6.14', {'unreachable': 0,
- 'skipped': 13,
- 'ok': 27,
- 'changed': 26,
- 'failures': 0}),
- ('localhost', {'unreachable': 0,
- 'skipped': 0,
- 'ok': 6,
- 'changed': 6,
- 'failures': 0}),
- (u'10.20.6.13', {'unreachable': 0,
- 'skipped': 13,
- 'ok': 27,
- 'changed': 26,
- 'failures': 0})]}}
-
- reply = app_client.post("/api/v1.0/jobs", data=body)
- print(reply.data)
- id = json.loads(reply.data)['job_id']
- expected['job_id'] = id
- post_process = ''
- while post_process != 'finished':
- get_reply = app_client.get("/api/v1.0/jobs/%s" % id)
- reply_data = json.loads(get_reply.data)
- post_process = reply_data['state']
- print(reply_data)
- assert len(filter(lambda x: reply_data[x] == expected[x], expected.keys())) == len(expected)
- delete_reply = app_client.delete("/api/v1.0/jobs/%s" % id)
- assert "successful" in delete_reply.data
-
- @pytest.mark.parametrize("body, expected", [
- ([{'installer_type': 'fuel',
- 'installer_ip': '10.20.0.2'},
- {'installer_type': 'compass',
- 'installer_ip': '192.168.20.50'}],
- ['job_id',
- 'It already has one job running now!'])
- ])
- @mock.patch('qtip.utils.args_handler.prepare_and_run_benchmark',
- side_effect=[side_effect_sleep(0.5), side_effect_pass])
- def test_post_two_jobs_unsuccessful(self, mock_args_hanler, app_client, body, expected):
- reply_1 = app_client.post("/api/v1.0/jobs", data=body[0])
- reply_2 = app_client.post("/api/v1.0/jobs", data=body[1])
- assert expected[0] in json.loads(reply_1.data).keys()
- app_client.delete("/api/v1.0/jobs/%s" % json.loads(reply_1.data)['job_id'])
- assert expected[1] in json.dumps(reply_2.data)
diff --git a/legacy/tests/create_zones_test.py b/legacy/tests/create_zones_test.py
deleted file mode 100644
index 1aa37477..00000000
--- a/legacy/tests/create_zones_test.py
+++ /dev/null
@@ -1,118 +0,0 @@
-##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-import pytest
-import mock
-from mock import Mock, MagicMock
-import os
-from qtip.utils.create_zones import AvailabilityZone
-
-return_list = []
-
-
-def get_agg_mock(host):
- agg = Mock()
- agg.name = host
- agg.id = host
- return agg
-
-
-class HyperMock(MagicMock):
- def list(self):
- mock_hypervisor = [Mock(service={'host': '10.20.0.4'}), Mock(service={'host': '10.20.0.5'})]
- return mock_hypervisor
-
-
-class AggMock(MagicMock):
- def get_details(self, agg_id):
- print "get_details:{0}".format(agg_id)
- return Mock(hosts=[])
-
- def create(self, host, agg):
- print "create:{0}:{1}".format(host, agg)
- return agg
-
- def list(self):
- return return_list
-
- def delete(self, agg_id):
- print "delete:{0}".format(agg_id)
- pass
-
- def add_host(self, aggregate, host):
- print "add_host:{0}:{1}".format(aggregate, host)
- pass
-
- def remove_host(self, agg_id, host):
- print "remove_host:{0}:{1}".format(agg_id, host)
- pass
-
-
-class NovaMock(MagicMock):
- hypervisors = HyperMock()
- aggregates = AggMock()
-
-
-@pytest.mark.xfail(reason="unstable result")
-class TestClass:
- @pytest.mark.parametrize("test_input, expected", [
- (['compute1', 'compute2'],
- ['create:compute1:compute1',
- 'add_host:compute1:10.20.0.4',
- 'create:compute2:compute2',
- 'add_host:compute2:10.20.0.5']),
- (['compute1'],
- ['create:compute1:compute1',
- 'add_host:compute1:10.20.0.4']),
- ])
- @mock.patch('qtip.utils.create_zones.client', autospec=True)
- @mock.patch('qtip.utils.create_zones.v2', autospec=True)
- @mock.patch('qtip.utils.create_zones.session')
- def test_create_zones_success(self, mock_keystone_session, mock_keystone_v2, mock_nova_client, test_input, expected, capfd):
- nova_obj = NovaMock()
- mock_nova_client.Client.return_value = nova_obj()
- k = mock.patch.dict(os.environ, {'OS_AUTH_URL': 'http://172.10.0.5:5000',
- 'OS_USERNAME': 'admin',
- 'OS_PASSWORD': 'admin',
- 'OS_TENANT_NAME': 'admin'})
- k.start()
- azone = AvailabilityZone()
- azone.create_aggs(test_input)
- k.stop()
- resout, reserr = capfd.readouterr()
- for x in expected:
- assert x in resout
-
- @pytest.mark.parametrize("test_input, expected", [
- ([get_agg_mock('10.20.0.4'), get_agg_mock('10.20.0.5')],
- ['get_details:10.20.0.4',
- 'delete:10.20.0.4',
- 'get_details:10.20.0.5',
- 'delete:10.20.0.5']),
- ([],
- []),
- ])
- @mock.patch('qtip.utils.create_zones.client', autospec=True)
- @mock.patch('qtip.utils.create_zones.v2', autospec=True)
- @mock.patch('qtip.utils.create_zones.session')
- def test_clean_all_aggregates(self, mock_keystone_session, mock_keystone_v2, mock_nova_client, test_input, expected, capfd):
- global return_list
- return_list = test_input
- nova_obj = NovaMock()
- mock_nova_client.Client.return_value = nova_obj()
- k = mock.patch.dict(os.environ, {'OS_AUTH_URL': 'http://172.10.0.5:5000',
- 'OS_USERNAME': 'admin',
- 'OS_PASSWORD': 'admin',
- 'OS_TENANT_NAME': 'admin'})
- k.start()
- azone = AvailabilityZone()
- azone.clean_all_aggregates()
- k.stop()
- resout, reserr = capfd.readouterr()
- for x in expected:
- assert x in resout
diff --git a/legacy/tests/functional/__init__.py b/legacy/tests/functional/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/legacy/tests/functional/__init__.py
+++ /dev/null
diff --git a/legacy/tests/functional/yaml_schema_test.py b/legacy/tests/functional/yaml_schema_test.py
deleted file mode 100644
index 3c7994a5..00000000
--- a/legacy/tests/functional/yaml_schema_test.py
+++ /dev/null
@@ -1,24 +0,0 @@
-##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-import os
-import os.path
-from pykwalify.core import Core
-
-
-class TestClass:
- def test_schema_success(self):
- for root, dirs, files in os.walk("test_cases"):
- for name in files:
- print root + "/" + name
- if "_bm" in name:
- schema = "tests/schema/test_bm_schema.yaml"
- if "_vm" in name:
- schema = "tests/schema/test_vm_schema.yaml"
- c = Core(source_file=root + "/" + name, schema_files=[schema])
- c.validate(raise_exception=True)
diff --git a/legacy/tests/helper/perftest.yaml b/legacy/tests/helper/perftest.yaml
deleted file mode 100644
index 57948b62..00000000
--- a/legacy/tests/helper/perftest.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
-##############################################################################
-# Copyright (c) 2017 taseer94@gmail.com and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
----
-
- tests:
- - command: ['perftest', 'run']
- output: "Run a perftest\n"
diff --git a/legacy/tests/spawn_vm_test.py b/legacy/tests/spawn_vm_test.py
deleted file mode 100644
index ac58db27..00000000
--- a/legacy/tests/spawn_vm_test.py
+++ /dev/null
@@ -1,64 +0,0 @@
-##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-import pytest
-import mock
-from mock import Mock, MagicMock
-import os
-from qtip.utils.spawn_vm import SpawnVM
-
-
-class KeystoneMock(MagicMock):
- auth_token = Mock()
- v2_0 = Mock()
-
-
-class StackMock(MagicMock):
- status = 'COMPLETE'
- outputs = [{'output_key': 'availability_instance_1',
- 'output_value': 'output_value_1'},
- {'output_key': 'instance_ip_1',
- "output_value": "172.10.0.154"},
- {"output_key": "instance_PIP_1",
- "output_value": "10.10.17.5"}]
-
-
-class HeatMock(MagicMock):
- def list(self):
- return []
-
- def get(self, stackname):
- return StackMock()
-
- def create(self, stack_name, template):
- pass
-
-
-class TestClass:
- @pytest.mark.parametrize("test_input, expected", [
- ({'availability_zone': ['compute1', 'compute1'],
- 'OS_image': ['QTIP_CentOS', 'QTIP_CentOS'],
- 'public_network': ['admin-floating_net', 'admin-floating_net'],
- 'flavor': ['m1.large', 'm1.large'],
- 'role': ['1-server', '2-host']},
- [('172.10.0.154', '')]),
- ])
- @mock.patch('qtip.utils.spawn_vm.Env_setup')
- @mock.patch('qtip.utils.spawn_vm.AvailabilityZone')
- @mock.patch('qtip.utils.spawn_vm.keystoneclient.v2_0', autospec=True)
- @mock.patch('qtip.utils.spawn_vm.heatclient.client', autospec=True)
- def test_create_zones_success(self, mock_heat, mock_keystone,
- mock_zone, mock_setup, test_input, expected):
- open('./config/QtipKey.pub', 'a').close()
- mock_heat.Client.return_value = Mock(stacks=HeatMock())
- k = mock.patch.dict(os.environ, {'INSTALLER_TYPE': 'fuel'})
- k.start()
- SpawnVM(test_input)
- k.stop()
- os.remove('./config/QtipKey.pub')
- mock_setup.ip_pw_list.append.assert_called_with(expected[0])
diff --git a/legacy/utils/__init__.py b/legacy/utils/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/legacy/utils/__init__.py
+++ /dev/null
diff --git a/legacy/utils/create_zones.py b/legacy/utils/create_zones.py
deleted file mode 100644
index 5e378c83..00000000
--- a/legacy/utils/create_zones.py
+++ /dev/null
@@ -1,86 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Dell Inc, ZTE and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-from keystoneclient.auth.identity import v2
-from keystoneclient import session
-from novaclient import client
-import os
-import random
-import logger_utils
-
-logger = logger_utils.QtipLogger('create_zones').get
-
-
-class AvailabilityZone:
-
- def __init__(self):
- self._keystone_client = None
- self._nova_client = None
-
- def _get_keystone_client(self):
- """returns a keystone client instance"""
-
- if self._keystone_client is None:
- '''
- self._keystone_client = keystoneclient.v2_0.client.Client(
- auth_url=os.environ.get('OS_AUTH_URL'),
- username=os.environ.get('OS_USERNAME'),
- password=os.environ.get('OS_PASSWORD'),
- tenant_name=os.environ.get('OS_TENANT_NAME'))
- '''
- auth = v2.Password(auth_url=os.environ.get('OS_AUTH_URL'),
- username=os.environ.get('OS_USERNAME'),
- password=os.environ.get('OS_PASSWORD'),
- tenant_name=os.environ.get('OS_TENANT_NAME'))
-
- sess = session.Session(auth=auth)
- else:
- return self._keystone_client
-
- return sess
-
- def _get_nova_client(self):
- if self._nova_client is None:
- keystone = self._get_keystone_client()
- self._nova_client = client.Client('2', session=keystone)
- return self._nova_client
-
- def clean_all_aggregates(self):
- logger.info("clean all aggregates")
- nova = self._get_nova_client()
- agg_list = nova.aggregates.list()
-
- for agg in agg_list:
- agg_info = nova.aggregates.get_details(agg.id)
- agg_hosts = agg_info.hosts
- if len(agg_hosts):
- for host in agg_hosts:
- nova.aggregates.remove_host(agg.id, host)
- nova.aggregates.delete(agg.id)
-
- def create_aggs(self, args):
- azone_list = list(set(args))
- azone_list.sort()
-
- nova = self._get_nova_client()
- hyper_list = nova.hypervisors.list()
-
- if len(azone_list) > len(hyper_list):
- logger.error("required available zones > compute nodes")
- return None
-
- compute_nodes = map(lambda x: x.service['host'], hyper_list)
- sample_nodes = random.sample(compute_nodes, len(azone_list))
- sample_nodes.sort()
-
- for index, item in enumerate(azone_list):
- logger.info("create aggregates: %s" % str(item))
- agg_id = nova.aggregates.create(item, item)
-
- logger.info("add host: %s" % sample_nodes[index])
- nova.aggregates.add_host(aggregate=agg_id, host=sample_nodes[index])
diff --git a/legacy/utils/dashboard/__init__.py b/legacy/utils/dashboard/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/legacy/utils/dashboard/__init__.py
+++ /dev/null
diff --git a/legacy/utils/dashboard/pushtoDB.py b/legacy/utils/dashboard/pushtoDB.py
deleted file mode 100644
index ce54aebd..00000000
--- a/legacy/utils/dashboard/pushtoDB.py
+++ /dev/null
@@ -1,82 +0,0 @@
-##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-import requests
-import json
-import datetime
-import os
-import sys
-from qtip.utils import logger_utils
-
-logger = logger_utils.QtipLogger('push_db').get
-
-TEST_DB = 'http://testresults.opnfv.org/test/api/v1'
-
-suite_list = [('compute_result.json', 'compute_test_suite'),
- ('network_result.json', 'network_test_suite'),
- ('storage_result.json', 'storage_test_suite')]
-payload_list = {}
-
-
-def push_results_to_db(db_url, case_name, payload, installer, pod_name):
-
- url = db_url + "/results"
- creation_date = str(datetime.datetime.utcnow().isoformat())
-
- params = {"project_name": "qtip", "case_name": case_name,
- "pod_name": pod_name, "installer": installer, "start_date": creation_date,
- "version": "test", "details": payload}
-
- headers = {'Content-Type': 'application/json'}
- logger.info('pod_name:{0},installer:{1},creation_data:{2}'.format(pod_name,
- installer,
- creation_date))
- # temporary code, will be deleted after Bigergia dashboard is ready
- try:
- qtip_testapi_url = "http://testapi.qtip.openzero.net/results"
- qtip_testapi_r = requests.post(qtip_testapi_url, data=json.dumps(params), headers=headers)
- logger.info('Pushing Results to qtip_testapi: %s'.format(qtip_testapi_r))
- except:
- logger.info("Pushing Results to qtip_testapi Error:{0}".format(sys.exc_info()[0]))
-
- try:
- r = requests.post(url, data=json.dumps(params), headers=headers)
- logger.info(r)
- return True
- except:
- logger.info("Error:{0}".format(sys.exc_info()[0]))
- return False
-
-
-def populate_payload(suite_list):
-
- global payload_list
- for k, v in suite_list:
-
- if os.path.isfile('results/' + str(k)):
- payload_list[k] = v
-
-
-def main():
-
- global payload_list
- populate_payload(suite_list)
- if payload_list:
- logger.info(payload_list)
- for suite, case in payload_list.items():
- with open('results/' + suite, 'r') as result_file:
- j = json.load(result_file)
- push_results_to_db(TEST_DB, case, j,
- os.environ['INSTALLER_TYPE'],
- os.environ['NODE_NAME'])
- elif not payload_list:
- logger.info('Results not found')
-
-
-if __name__ == "__main__":
- main()
diff --git a/legacy/utils/report/__init__.py b/legacy/utils/report/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/legacy/utils/report/__init__.py
+++ /dev/null
diff --git a/legacy/utils/report/get_indices.py b/legacy/utils/report/get_indices.py
deleted file mode 100644
index 42db6584..00000000
--- a/legacy/utils/report/get_indices.py
+++ /dev/null
@@ -1,16 +0,0 @@
-##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-import json
-
-
-def get_index(suite):
- with open('../../results/' + suite + '.json') as result_file:
- result_djson = json.load(result_file)
- index = result_djson['index']
- return index
diff --git a/legacy/utils/report/get_results.py b/legacy/utils/report/get_results.py
deleted file mode 100644
index 6df88235..00000000
--- a/legacy/utils/report/get_results.py
+++ /dev/null
@@ -1,58 +0,0 @@
-##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-import os
-import json
-
-
-def report_concat(targ_dir, testcase):
- machine_temp = []
- machines = []
-
- for file in os.listdir(targ_dir):
- if file.endswith(".json"):
- machine_temp.append(file)
-
- l = len(machine_temp)
-
- for x in range(0, l):
- file_t = machine_temp[x]
- with open(targ_dir + file_t) as result_file:
- result_djson = json.load(result_file)
- if result_djson['1 Testcase Name'] == str(testcase):
- machines.append(result_djson)
- return machines
-
-
-def space_count(l):
- spc = ''
- for x in range(l):
- spc = spc + ' '
- return spc
-
-
-def custom_dict(list1, list2, k):
- string_1 = ''
- for num_1 in range(0, len(list1)):
- string_1 = string_1 + space_count(k) + str(list1[num_1][0]) + "=" + str(list2[num_1]) + "\n"
- return string_1
-
-
-def generate_result(dict_a, k):
- list_1 = []
- list_2 = []
- count = 0
- for i, j in sorted(dict_a.iteritems()):
- list_1.append([])
- list_1[count].append(i)
- if (str(type(dict_a.get(i)))) == "<type 'dict'>":
- list_2.append(str("\n" + generate_result(dict_a.get(i), int(k + 1))))
- else:
- list_2.append(dict_a.get(i))
- count = count + 1
- return custom_dict(list_1, list_2, k)
diff --git a/legacy/utils/report/qtip_graph.py b/legacy/utils/report/qtip_graph.py
deleted file mode 100644
index 68ed660f..00000000
--- a/legacy/utils/report/qtip_graph.py
+++ /dev/null
@@ -1,38 +0,0 @@
-##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-import matplotlib
-import matplotlib.pyplot as plt
-import numpy as np
-
-matplotlib.use('Agg')
-
-
-def plot_indices(a, b, c):
- N = 3
- ind = np.arange(N)
- y_axis = (a, b, c)
- width = 0.35
- f = plt.figure()
- ax = f.gca()
- ax.set_autoscale_on(True)
- my_bars = ax.bar(ind, y_axis, width, color='b')
- ax.set_ylabel('Index Score*')
- ax.set_xlabel('Suite')
- ax.set_title(' QTIP benchmark scores')
- ax.axis('on')
- my_bars = ax.bar(ind, y_axis, width)
- ax.set_xticks(ind + width / 2)
- ax.set_xticklabels(['Compute', 'Storage', 'Network'])
- ax.axis([0, 3, 0, 1.25])
- f.text(0.7, 0.01, '* With Comparison to Refernece POD', fontsize=9)
-
- for rect in my_bars:
- height = rect.get_height()
- ax.text(rect.get_x() + rect.get_width() / 2., 1.05 * height, height, ha='center', va='bottom')
- f.savefig('qtip_graph.jpeg')
diff --git a/legacy/utils/report/qtip_report.py b/legacy/utils/report/qtip_report.py
deleted file mode 100644
index 1097df5f..00000000
--- a/legacy/utils/report/qtip_report.py
+++ /dev/null
@@ -1,117 +0,0 @@
-##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Image
-from reportlab.lib.styles import getSampleStyleSheet
-from reportlab.lib.units import inch
-from reportlab.lib.pagesizes import letter
-import qtip_graph as graph
-import get_indices as results
-from get_results import report_concat
-from get_results import generate_result
-
-
-def dump_result(Stor, directory, testcase):
- try:
- lower_s = testcase.lower()
- Stor.append(Paragraph(testcase, Style['h3']))
- l1 = report_concat(directory, lower_s)
- l = 1
- for a in l1:
- Stor.append(Paragraph(testcase + " result_" + str(l), Style['h5']))
- raw_string = generate_result(a, 0)
- replaced_string = raw_string.replace('\n', '<br/> ').replace(' ', '&nbsp;')
- Stor.append(Paragraph(replaced_string, Style['BodyText']))
- l = l + 1
- except OSError:
- print "Results for {0} not found".format(testcase)
-
-
-doc = SimpleDocTemplate("../../results/QTIP_results.pdf", pagesize=letter,
- rightMargin=72, leftMargin=72,
- topMargin=72, bottomMargin=18)
-Stor = []
-Style = getSampleStyleSheet()
-Title = "QTIP Benchmark Suite"
-Stor.append(Paragraph(Title, Style['Title']))
-H1 = "Results"
-Stor.append(Spacer(0, 36))
-Stor.append(Paragraph(H1, Style['h2']))
-compute = 0
-storage = 0
-network = 0
-try:
- compute = results.get_index('compute_result')
-except IOError:
- pass
-
-try:
- storage = results.get_index('storage_result')
-except IOError:
- pass
-try:
- network = results.get_index('network_result')
-except IOError:
- pass
-
-Stor.append(Paragraph("Compute Suite: %f" % compute, Style['h5']))
-Stor.append(Paragraph("Storage Suite: %f" % storage, Style['h5']))
-Stor.append(Paragraph("Network Suite: %f" % network, Style['h5']))
-graph.plot_indices(compute, storage, network)
-qtip_graph = ('qtip_graph.jpeg')
-im = Image(qtip_graph, 5 * inch, 4 * inch)
-Stor.append(im)
-Stor.append(Spacer(0, 12))
-Stor.append(Paragraph("Reference POD", Style['h5']))
-ptext = "The Dell OPNFV Lab POD3 has been taken as the reference POD against which the reference results have been collected. The POD consists of 6 identical servers. The details of such a server are:"
-Stor.append(Paragraph(ptext, Style['Normal']))
-ptext = "<bullet>&bull;</bullet>Server Type: Dell PowerEdge R630 Server"
-Stor.append(Paragraph(ptext, Style['Bullet']))
-ptext = "<bullet>&bull;</bullet>CPU: Intel Xeon E5-2698 @ 2300 MHz"
-Stor.append(Paragraph(ptext, Style["Bullet"]))
-ptext = "<bullet>&bull;</bullet>RAM: 128GB"
-Stor.append(Paragraph(ptext, Style["Bullet"]))
-ptext = "<bullet>&bull;</bullet>Storage SSD: 420GB"
-Stor.append(Paragraph(ptext, Style["Bullet"]))
-ptext = "<bullet>&bull;</bullet>Network Card: Intel 2P X520/2P I350 rNDC"
-Stor.append(Paragraph(ptext, Style["Bullet"]))
-ptext = "Servers interconnected through a DELL S4810 switch using a 10Gbps physical link"
-Stor.append(Paragraph(ptext, Style["Bullet"]))
-Stor.append(Spacer(0, 12))
-ptext = "For Further Details of the Reference POD hardware, please visit: https://wiki.opnfv.org/reference_pod_hardware_details"
-Stor.append(Paragraph(ptext, Style['Normal']))
-Stor.append(Spacer(0, 12))
-ptext = "For Details of the Reference POD Results, please visit: https://wiki.opnfv.org/reference_pod_qtip_results"
-Stor.append(Spacer(0, 12))
-Stor.append(Paragraph(ptext, Style['Normal']))
-Stor.append(Paragraph("RAW Results", Style['h1']))
-Stor.append(Paragraph("Compute Results", Style['h2']))
-
-dump_result(Stor, "../../results/dhrystone/", "Dhrystone_bm")
-dump_result(Stor, "../../results/dhrystone/", "Dhrystone_vm")
-
-dump_result(Stor, "../../results/whetstone/", "Whetstone_bm")
-dump_result(Stor, "../../results/whetstone/", "Whetstone_vm")
-
-dump_result(Stor, "../../results/ramspeed/", "Ramspeed_bm")
-dump_result(Stor, "../../results/ramspeed/", "Ramspeed_vm")
-
-dump_result(Stor, "../../results/ssl/", "SSL_bm")
-dump_result(Stor, "../../results/ssl/", "SSL_vm")
-
-Stor.append(Paragraph("Network Results", Style['h2']))
-dump_result(Stor, "../../results/iperf/", "IPERF_bm")
-dump_result(Stor, "../../results/iperf/", "IPERF_vm")
-dump_result(Stor, "../../results/iperf/", "IPERF_vm_2")
-
-Stor.append(Paragraph("Storage Results", Style['h2']))
-dump_result(Stor, "../../results/fio/", "fio_bm")
-dump_result(Stor, "../../results/fio/", "fio_vm")
-
-
-doc.build(Stor)
diff --git a/legacy/utils/spawn_vm.py b/legacy/utils/spawn_vm.py
deleted file mode 100644
index f38c9a3a..00000000
--- a/legacy/utils/spawn_vm.py
+++ /dev/null
@@ -1,206 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 Dell Inc, ZTE and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-import os
-import sys
-import yaml
-import heatclient.client
-import keystoneclient
-import time
-from env_setup import Env_setup
-from create_zones import AvailabilityZone
-import logger_utils
-
-logger = logger_utils.QtipLogger('spawn_vm').get
-
-
-class SpawnVM(Env_setup):
-
- def __init__(self, vm_info):
- logger.info('vm_info: %s' % vm_info)
- vm_role_ip_dict = vm_info.copy()
- self._keystone_client = None
- self._heat_client = None
- self._glance_client = None
- self._nova_client = None
- self.azone = AvailabilityZone()
- # TODO: it should clean up aggregates and stack after test case finished.
- self.azone.clean_all_aggregates()
- self.azone.create_aggs(vm_info['availability_zone'])
- self.heat_template = self.generate_heat_template(vm_info)
- self.create_stack(vm_role_ip_dict)
-
- @staticmethod
- def get_public_network():
-
- """
- TODO: GET THE NAMES OF THE PUBLIC NETWORKS for OTHER PROJECTS
- """
- installer = os.environ['INSTALLER_TYPE']
-
- if installer.lower() == 'fuel':
- return 'admin_floating_net'
- if installer.lower() == 'apex':
- return 'external'
- if installer.lower() == 'compass':
- return 'ext-net'
- if installer.lower() == 'joid':
- return 'ext-net'
-
- def generate_heat_template(self, vm_params):
- logger.info('Generating Heat Template')
- heat_dict = {}
- try:
- with open('./config/SampleHeat.yaml', 'r+') as H_temp:
- heat_dict = yaml.safe_load(H_temp)
- except yaml.YAMLError as exc:
- if hasattr(exc, 'problem_mark'):
- mark = exc.problem_mark
- logger.error(
- 'Error in qtip/config/SampleHeat.yaml at: (%s,%s)' % (mark.line + 1,
- mark.column + 1))
- logger.error('EXITING PROGRAM. Correct File and restart')
- sys.exit(1)
-
- fopen = open('./config/QtipKey.pub', 'r')
- fopenstr = fopen.read()
- fopenstr = fopenstr.rstrip()
- scriptcmd = '#!/bin/bash \n echo {0} >> foo.txt \n echo {1} >> /root/.ssh/authorized_keys'.format(
- fopenstr, fopenstr)
-
- netName = self.get_public_network()
- heat_dict['heat_template_version'] = '2015-04-30'
-
- heat_dict['parameters']['public_network'] = {
- 'type': 'string',
- 'default': netName
- }
-
- for x in range(1, len(vm_params['availability_zone']) + 1):
- avail_zone = vm_params['availability_zone'][x - 1]
-
- heat_dict['parameters']['availability_zone_' + str(x)] = \
- {'description': 'Availability Zone of the instance',
- 'default': avail_zone,
- 'type': 'string'}
-
- heat_dict['resources']['public_port_' + str(x)] = \
- {'type': 'OS::Neutron::Port',
- 'properties': {'network': {'get_resource': 'network'},
- 'security_groups': [{'get_resource': 'security_group'}],
- 'fixed_ips': [{'subnet_id': {'get_resource': 'subnet'}}]}}
-
- heat_dict['resources']['floating_ip_' + str(x)] = {
- 'type': 'OS::Neutron::FloatingIP',
- 'properties': {'floating_network': {'get_param': 'external_net_name'}}}
-
- heat_dict['resources']['floating_ip_assoc_' + str(x)] = {
- 'type': 'OS::Neutron::FloatingIPAssociation',
- 'properties': {
- 'floatingip_id': {'get_resource': 'floating_ip_' + str(x)},
- 'port_id': {'get_resource': 'public_port_' + str(x)}}}
-
- heat_dict['resources']['my_instance_' + str(x)] = \
- {'type': 'OS::Nova::Server',
- 'properties': {'image': {'get_param': 'image'},
- 'networks':
- [{'port': {'get_resource': 'public_port_' + str(x)}}],
- 'flavor': {'get_resource': 'flavor'},
- 'availability_zone': avail_zone,
- 'security_groups': [{'get_resource': 'security_group'}],
- 'name': 'instance' + str(x),
- 'user_data_format': 'RAW',
- 'user_data': scriptcmd}}
-
- heat_dict['outputs']['instance_PIP_' + str(x)] = {
- 'description': 'IP address of the instance',
- 'value': {'get_attr': ['my_instance_' + str(x), 'first_address']}}
-
- heat_dict['outputs']['instance_ip_' + str(x)] = {
- 'description': 'IP address of the instance',
- 'value': {'get_attr': ['floating_ip_' + str(x), 'floating_ip_address']}}
-
- heat_dict['outputs']['availability_instance_' + str(x)] = {
- 'description': 'Availability Zone of the Instance',
- 'value': {'get_param': 'availability_zone_' + str(x)}}
-
- del heat_dict['outputs']['description']
- logger.info(heat_dict)
-
- return heat_dict
-
- def _get_keystone_client(self):
- """returns a keystone client instance"""
-
- if self._keystone_client is None:
- self._keystone_client = keystoneclient.v2_0.client.Client(
- auth_url=os.environ.get('OS_AUTH_URL'),
- username=os.environ.get('OS_USERNAME'),
- password=os.environ.get('OS_PASSWORD'),
- tenant_name=os.environ.get('OS_TENANT_NAME'))
- return self._keystone_client
-
- def _get_heat_client(self):
- """returns a heat client instance"""
- if self._heat_client is None:
- keystone = self._get_keystone_client()
- heat_endpoint = keystone.service_catalog.url_for(
- service_type='orchestration')
- self._heat_client = heatclient.client.Client(
- '1', endpoint=heat_endpoint, token=keystone.auth_token)
- return self._heat_client
-
- def create_stack(self, vm_role_ip_dict):
- stackname = 'QTIP'
- heat = self._get_heat_client()
-
- self.delete_stack(stackname)
-
- logger.info('Start to create stack %s' % stackname)
- heat.stacks.create(stack_name=stackname, template=self.heat_template)
-
- stack_status = "IN_PROGRESS"
- while stack_status != 'COMPLETE':
- if stack_status == 'IN_PROGRESS':
- logger.debug('Create in Progress')
- if stack_status == 'CREATE_FAILED':
- raise RuntimeError("Stack %s created failed!" % stackname)
- stack_status = heat.stacks.get(stackname).status
- time.sleep(15)
- logger.info('Stack %s Created Complete!' % stackname)
-
- stack_outputs = heat.stacks.get(stackname).outputs
-
- for vm in range(len(vm_role_ip_dict['OS_image'])):
- for i in stack_outputs:
- instanceKey = "instance_ip_" + str(vm + 1)
- privateIPkey = 'instance_PIP_' + str(vm + 1)
- if i['output_key'] == instanceKey:
- Env_setup.roles_dict[vm_role_ip_dict['role'][vm]] \
- .append(str(i['output_value']))
- Env_setup.ip_pw_list.append((str(i['output_value']), ''))
-
- if i['output_key'] == privateIPkey:
- Env_setup.ip_pw_dict[vm_role_ip_dict['role'][vm]] = str(i['output_value'])
-
- logger.info('Getting Public IP(s): %s' % Env_setup.ip_pw_list)
-
- def delete_stack(self, stack_name):
- heat = self._get_heat_client()
-
- stacks = heat.stacks.list()
- exists = map(lambda x: x.stack_name, stacks)
- if stack_name in exists:
- logger.info("Delete stack %s" % stack_name)
- heat.stacks.delete(stack_name)
- while stack_name in exists:
- time.sleep(10)
- stacks = heat.stacks.list()
- exists = map(lambda x: x.stack_name, stacks)
- logger.debug("exists_stacks: %s" % exists)
- logger.info("%s doesn't exist" % stack_name)
diff --git a/legacy/utils/transform/__init__.py b/legacy/utils/transform/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/legacy/utils/transform/__init__.py
+++ /dev/null
diff --git a/legacy/utils/transform/fio_transform.py b/legacy/utils/transform/fio_transform.py
deleted file mode 100644
index e8de2f9a..00000000
--- a/legacy/utils/transform/fio_transform.py
+++ /dev/null
@@ -1,37 +0,0 @@
-##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-import json
-import pickle
-import os
-import datetime
-
-
-def get_fio_job_result(fio_job_data):
- return {'read': {'io_bytes': fio_job_data["read"]["io_bytes"],
- 'io_ps': fio_job_data["read"]["iops"],
- 'io_runtime_millisec': fio_job_data["read"]["runtime"],
- 'mean_io_latenchy_microsec': fio_job_data["read"]["lat"]["mean"]},
- 'write': {'io_bytes': fio_job_data["write"]["io_bytes"],
- 'io_ps': fio_job_data["write"]["iops"],
- 'io_runtime_millisec': fio_job_data["write"]["runtime"],
- 'mean_io_latenchy_microsec': fio_job_data["write"]["lat"]["mean"]}}
-
-
-with open("fio_result.json") as fio_raw:
- fio_data = json.load(fio_raw)
-
-fio_result_dict = {}
-for x, result in enumerate(map(get_fio_job_result, fio_data["jobs"])):
- fio_result_dict['job_{0}'.format(x)] = result
-
-host_name = (os.popen("hostname").read().rstrip())
-report_time = str(datetime.datetime.utcnow().isoformat())
-os.system("mv fio_result.json " + str(host_name) + "-" + report_time + ".log")
-with open('./result_temp', 'w + ')as out_fio_result:
- pickle.dump(fio_result_dict, out_fio_result)
diff --git a/legacy/utils/transform/iperf_transform.py b/legacy/utils/transform/iperf_transform.py
deleted file mode 100644
index c5eef6f5..00000000
--- a/legacy/utils/transform/iperf_transform.py
+++ /dev/null
@@ -1,35 +0,0 @@
-##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-import json
-import datetime
-import pickle
-with open('iperf_raw.json', 'r') as ifile:
- raw_iperf_data = json.loads(ifile.read().rstrip())
-
-bits_sent = raw_iperf_data['end']['sum_sent']['bits_per_second']
-bits_received = raw_iperf_data['end']['sum_received']['bits_per_second']
-total_byte_sent = raw_iperf_data['end']['sum_sent']['bytes']
-total_byte_received = raw_iperf_data['end']['sum_received']['bytes']
-cpu_host_total_percent = raw_iperf_data['end']['cpu_utilization_percent']['host_total']
-cpu_remote_total_percent = raw_iperf_data['end']['cpu_utilization_percent']['remote_total']
-
-time_stamp = str(datetime.datetime.utcnow().isoformat())
-
-result = {'version': raw_iperf_data['start']['version'],
- 'bandwidth': {'sender_throughput': bits_sent,
- 'received_throughput': bits_received},
- 'cpu': {'cpu_host': cpu_host_total_percent,
- 'cpu_remote': cpu_remote_total_percent}
- }
-
-with open('iperf_raw-' + time_stamp + '.log', 'w+') as ofile:
- ofile.write(json.dumps(raw_iperf_data))
-
-with open('./result_temp', 'w+') as result_file:
- pickle.dump(result, result_file)
diff --git a/opt/infra/roles/user/files/Taseer - taseer94@gmail.com/taseer.authorized_keys b/opt/infra/roles/user/files/Taseer - taseer94@gmail.com/taseer.authorized_keys
new file mode 100644
index 00000000..7d69a7dc
--- /dev/null
+++ b/opt/infra/roles/user/files/Taseer - taseer94@gmail.com/taseer.authorized_keys
@@ -0,0 +1 @@
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDb3wIGllbpllLwkyr16AQKI8H0lvlbzfgsFDrlGkc3Kc8UeXuGn+x/CTkTuz3XBL9f49KzipMLcrcd/vBbXbI5EsK8eYC9s6j6vqzqHV5fUj8oOvQxya2z2bUUAwu62AxK9Mzruzm2pKoVTsQMiUe0uCY7Z7IKw60LymiFG1AB2eXm2J9KNjgeJfCo3T5LiCHn42lgDqiWLV2VqZWZYzvRvuxKhYRIENYpkeKW+h6kFkYdi0EBZkCXxUPIUlncELGbCaHqKvEai2CrXFZJLSYuqAFzLvjfG4IQFgp3H8y2px7GnAn8WngUcf5VfbMdMiqZhWSL0q79hAfGAuOGmD/v taseer@taseers-MacBook-Pro.local
diff --git a/opt/infra/roles/user/files/Taseer - taseer94@gmail.com/taseer_gpg_key b/opt/infra/roles/user/files/Taseer - taseer94@gmail.com/taseer_gpg_key
new file mode 100644
index 00000000..dea75fe8
--- /dev/null
+++ b/opt/infra/roles/user/files/Taseer - taseer94@gmail.com/taseer_gpg_key
@@ -0,0 +1,51 @@
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+
+mQINBFqewdgBEACpWGFDmb9enTUR6AuzDPi4tIwq8htejyzEvgFL3iBW31neJGjl
+1m3P+KZC7eBk25FOeY8vJwQotYHyAL98H4DUD+yMIfWE0lrj5HfaK1K5785z9IIp
+2h6rMw4EeKDmR9r0nRoFl5bshiacZ/ZBuc60hv9lrhwSEvqS1SAtJE8QTZJqDW8r
+fBM9UIK1ETRYb61DhiBRhlFjCC1dGY5Nx7HLXawWHP5eI70rNbOsV28l/TYKAoya
+NnazgrdFxlGwWKVECvyh1LJX3eG9/D7/iMZa8MxR/ycMCUfcuAV2mRQocDOqPclc
+jHdQ+LuBkq2T4iugA5+z/qX9M/aaHPk8bTZ+A3+IJoTvrpRhIoN2oqjdQ0bXBbTO
+V8nB3Idal0pd0jvrrBrag1gvrAy4tKjxVZaJPq+v4pvTgJ93tQk1SHbrA0QiVLjv
+oDIDtEfj1JLaf76BVpMOBqzf86D6BhDs/+oZV23qfQPaPUjM5gZXEzyOQiFW903M
+J0UaL6WfvKm+npz4KLfTThJAfni8M/J82+deZ1iD41p2vhvP9Jat6SccvoKTKTmJ
+Lf9Ai13PoLq/K27HhaicmNNKr8eWyoTUjma4MUNUGsMNpjBR0KBeVwfjrfPMTcRc
+fIcA5JGyp8FjaQcQXQvhQvp8e3AHRVokO8nsKPcorojEO/FK158ocgSH8wARAQAB
+tCJUYXNlZXIgKE5vbmUpIDx0YXNlZXI5NEBnbWFpbC5jb20+iQJOBBMBCAA4FiEE
+wmDK3XRv/shlmaCjqhVyx98V7GQFAlqewdgCGwMFCwkIBwIGFQoJCAsCBBYCAwEC
+HgECF4AACgkQqhVyx98V7GRhYQ/8CUYdFE4UJdXMSgXscYN4/YEUmyrR+MNupQsL
+LMzXyS/nxNMpjL8RTMKLfqaCFzV9CxHoE1XIaaqc30SyLymxGds2QhYNS7ir4Xod
+xsUVumcJsnyAXPDcGSGrtMdbsyifCIPHRJdhfitO2FI6cURzehvg4aICj8ji9XNl
+Xqkjy0nIY9n1n9h4RgemxWtAUxRpO3MNoec0vU3xxXv54P2fGzOvXYvAaa+W/4Lt
+A0Xte+F/YZoK6Lc9MKMykkibKBgNTQ22LVpmDqOux/NTDbQuPe+LzV3uBDFx5/cA
+5JfcbhtBSCFYloJpISG7AsoNlBHLtF1sbKYAufaV6mFFzXfbHbHSS+n8DA/mDC/V
+tpWQQS+mYq4KdphXn9gmlIseIAbkTeJRX+xZlaeN5dD1hSeA5iouIMnGeDoKIhuE
+0KSZP8je8XmgcvfdZf45N9Bti0rCiLTklmfxUfPgMdJWyB/Q+QdvFvNP6YaNTPYQ
+z+cN9Bax0INu+VvhhyaNZRIQLvfoxnCvgnFs6JT8vwxMlW8dk5mjN4KsVD03iNxV
+Tej3YKYjNmSbD51MypcYU7OzSFlLt/bNxDVELzVgjmyj7l/65QXvoWJfH9nCk0+3
+fJLcOiGOwxsSslcXCJ8X78Z/p5QORhhWzRRXcNq9FdPRvIHsD9kMuHIopF+NPJoK
+7OKzd7O5Ag0EWp7B2AEQANo74CP9bXKZmT37tprTDT8BSVEVRjkKVmfx/OSR4CFP
+Oo6x9innRdSdJhP3MRwhkSCrG9PCJOZOcJgfEi8RS9LfL04yQ0pIomGXEmcP4vlO
+nWcufG6NtSVg7LrmSIDafEmKxRRvkiq7UkTsq9eg0x0vVvDHzGyEPrEvwnnrgzvr
+drlPZTMrMhf4QaOs8rlouMqHs2bb1XxPis/o8kes2VZ1TnkyZLoGBo9NHdqk1QOZ
+na+42DQxuI5JYA4D8BlrACJfitn5ox/NOk5fJ8XrU0NEEe6cOs2s2ikD2+9fZV+5
+1wywMguRBUqq10mqzTQ95zRrzWhDJ2BeLaYRrE52QHP4Rc1lDZ1V7zdNUZ6Ru+IZ
+CMmsgqMfbffXBCBpssVaZcQnDZnVDFm/och0pRZTbgpHuPd++hBsOe40drWNuwko
+T6ZCsgqQanSNjECAqOAcUooFAJwT8VyxIXD9OSorsI7G10D8r7FIdCepwfunXBSm
+FvDz6H1XPpLnQlNdH0S5J2y046GnPM6TU84Cibp3yaVXFSCQbRpBOD0rJTZ/UL6z
+dy7pr5P3sVHCG/Qhey53MJ3BVbsjXFtENngZB+RXSF3KN4RYP/SdyHBeKrdLc429
+fEiozNvC/8iRtwAaP8XGNgMoV8gL9ZnL2ezwqKXNEj/XJMFMSo1zN2++i+7QDJsD
+ABEBAAGJAjYEGAEIACAWIQTCYMrddG/+yGWZoKOqFXLH3xXsZAUCWp7B2AIbDAAK
+CRCqFXLH3xXsZGqID/4qsHJBPH+J+4J5Tz9/3JcQhNUQhleiCayY1J70+S7kjzih
+Uu+R72nIryNGnXxSXXPvat8ZaX6xaoELut1QCN6D8ri+6Wt0nhwt2n3yIsRv2+s5
+OeqLBWO1KPecnvfWd1Ofl5F92g+z9aYpgXXshyGV6Cv5AFlEhv592nZMI+NnJ7Xl
+UzS8g3NCV2/iTygqlwP5XPEL4NbOUjtdmsxmBn5ij6CPWJCbhqlTO12tETbrKjVw
+NTW1EH8YsQsw/XRQtCaOXvJfO6rHBvYkyBL1C2XCGd5op/qdJ63SZaEePTbjhdhx
+3F2Xd9yu/HauMeI8xuJFB4SvBrn90O7f0KQKPVn8iCyfb93dgAAUJ3R5ifO2RHdN
+D9SaX7WCN3v/Ejl0JxbY1sCiqLkZqvvuRl0e7Kw6fQSLsodytsVyIYuIPcqUZo3Q
+U65jrM4offsWexIMEslZpdMUq1qZUvueSs/Qia/jJO7pfplm1iM4G3xh8XT4NFLI
+0YbtUI7z+ZCbsa2ReHMGTokdOZaXWyOvANgYCwVmv7xUwFLhZzEIa5HwucduJuqg
+MwXdKZJkpv49fnAR8ssL5cdypgWikT8LK3JqE+z4CqF1+yPDwH/2lZDtwTKvW62M
+/vnWPEDdyklZHNrNnvA4xSyCZqh7IVlFUfkG0+TwlXfV/afdOCMkFzM9o3d/xA==
+=q7sQ
+-----END PGP PUBLIC KEY BLOCK-----
diff --git a/opt/infra/roles/user/files/taseer.authorized_keys b/opt/infra/roles/user/files/taseer.authorized_keys
deleted file mode 100644
index 2efb95e8..00000000
--- a/opt/infra/roles/user/files/taseer.authorized_keys
+++ /dev/null
@@ -1 +0,0 @@
-ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDCxCG9Z+8rBLQiJ9yt+RzRkAZZQIlou83e899Kkuf/UgXd+1TUgIu4AJUDvayrorTxk2kw7ra+DoysEa2NBkLL6FNnG+NpYV+XTaG+Z3tvp4l62ROV+5+O4soiWtBroRghQPDkrpiTQuFJ7/BKQt9bKZYPb3t2bxCLeFVWtUzCpFDIj6wDPGwDeTLnLMPtQNQtIlgPV+XGAet17rNSrm7EcDzqdDQcdZFmfeH5YvBsY9ZO+qwcbelEpMBWPsyEMU6OgwTqUj7mm3o+Quew35y13Zrhf+GAx5ZMXho/Cpjp1TzognDkwAFpFzZG7zWSNexXrD15Wzgyae2cl/vD75I7 taseer@123.org \ No newline at end of file
diff --git a/qtip/ansible_library/modules/nettest.py b/qtip/ansible_library/modules/nettest.py
new file mode 100644
index 00000000..5f141c95
--- /dev/null
+++ b/qtip/ansible_library/modules/nettest.py
@@ -0,0 +1,83 @@
+#!/usr/bin/python
+
+###############################################################
+# Copyright (c) 2018 ZTE Corporation and Others
+# taseer94@gmail.com
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import contrib.nettest_client.nettest_client as qtip_nettest
+from ansible.module_utils.basic import AnsibleModule
+
+
+ANSIBLE_METADATA = {'metadata_version': '1.0',
+ 'status': ['preview'],
+ 'supported_by': 'community'}
+
+DOCUMENTATION = '''
+---
+module: nettest
+short_description: execute Spirent testcases
+description:
+ - Use this module to execute network performance benchmarking
+version_added: "1.0"
+author: "Taseer Ahmed"
+'''
+
+RETURN = '''
+ansible_facts:
+ description: network benchmark tests
+ returned: success
+ type: dictionary
+ contains:
+ result:
+'''
+
+EXAMPLES = '''
+---
+- hosts: apex-undercloud
+ tasks:
+ - name: execute network performance test
+ nettest:
+ name: s1,
+ stack_type: stcv,
+ public_network_name: public,
+ stcv_affinity: True,
+ stcv_image: STCv-4.80.2426,
+ stcv_flavor: small.shared,
+ lab_server_ip: 10.61.67.53,
+ license_server_ip: 10.140.88.61
+'''
+
+
+def main():
+ module = AnsibleModule(
+ argument_spec=dict(
+ public_network_name=dict(type='str'),
+ stcv_image=dict(type='str'),
+ stcv_flavor=dict(type='str'),
+ lab_server_ip=dict(type='str'),
+ license_server_ip=dict(type='str')
+ )
+ )
+
+ public_network_name = module.params['public_network_name']
+ stcv_image = module.params['stcv_image']
+ stcv_flavor = module.params['stcv_flavor']
+ lab_server_ip = module.params['lab_server_ip']
+ license_server_ip = module.params['license_server_ip']
+
+ module.exit_json(changed=True,
+ ansible_facts=qtip_nettest.run(public_network_name,
+ stcv_image,
+ stcv_flavor,
+ lab_server_ip,
+ license_server_ip))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/qtip/cli/commands/cmd_project.py b/qtip/cli/commands/cmd_project.py
index 2836fa69..11fa63e5 100644
--- a/qtip/cli/commands/cmd_project.py
+++ b/qtip/cli/commands/cmd_project.py
@@ -59,7 +59,16 @@ def cli():
help='Host configured for ssh client or IP addresses and domain name')
@click.option('--scenario',
help='OPNFV scenario')
-def create(project_name, project_template, pod_name, installer_type, installer_host, scenario):
+@click.option('--sut',
+ prompt='System Under Test type',
+ help='Type of system can be vnf')
+def create(project_name,
+ project_template,
+ pod_name,
+ installer_type,
+ installer_host,
+ scenario,
+ sut):
qtip_generator_role = os.path.join(utils.QTIP_ANSIBLE_ROLES, 'qtip-generator')
extra_vars = {
'qtip_package': utils.QTIP_PACKAGE,
@@ -69,7 +78,8 @@ def create(project_name, project_template, pod_name, installer_type, installer_h
'pod_name': pod_name,
'installer_type': installer_type,
'installer_host': installer_host,
- 'scenario': scenario
+ 'scenario': scenario,
+ 'sut': sut
}
os.system("ANSIBLE_ROLES_PATH={roles_path} ansible-playbook"
" -i {hosts}"
diff --git a/qtip/scripts/quickstart.sh b/qtip/scripts/quickstart.sh
index b430aa3f..22257f5b 100644
--- a/qtip/scripts/quickstart.sh
+++ b/qtip/scripts/quickstart.sh
@@ -13,7 +13,8 @@ set -o pipefail
set -x
usage(){
- echo "usage: $0 -q <qtip_test_suite> -t <installer_type> -i <installer_ip> -p <pod_name> -s <scenario> -r <report_url>" >&2
+ echo "usage: $0 -q <qtip_test_suite> -t <installer_type> -i <installer_ip> -p <pod_name> -s <scenario> -r <report_url>
+ -u <sut>" >&2
}
verify_connectivity(){
@@ -30,7 +31,7 @@ verify_connectivity(){
}
#Getoptions
-while getopts ":t:i:p:s:r:he" optchar; do
+while getopts ":t:i:p:s:r:u:he" optchar; do
case "${optchar}" in
q) test_suite=${OPTARG} ;;
t) installer_type=${OPTARG} ;;
@@ -38,6 +39,7 @@ while getopts ":t:i:p:s:r:he" optchar; do
p) pod_name=${OPTARG} ;;
s) scenario=${OPTARG} ;;
r) testapi_url=${OPTARG} ;;
+ u) sut=${OPTARG} ;;
h) usage
exit 0
;;
@@ -55,6 +57,7 @@ test_suite=${test_suite:-$TEST_SUITE}
pod_name=${pod_name:-$NODE_NAME}
scenario=${scenario:-$SCENARIO}
testapi_url=${testapi_url:-$TESTAPI_URL}
+sut=${sut:-node}
# we currently support fuel, apex and mcp
if [[ ! "$installer_type" =~ (fuel|apex|mcp) ]]; then
@@ -76,7 +79,7 @@ esac
cd /home/opnfv
qtip create --project-template ${test_suite} --pod-name ${pod_name} --installer-type ${installer_type} \
---installer-host ${installer_ip} --scenario ${scenario} ${test_suite}
+--installer-host ${installer_ip} --scenario ${scenario} --sut ${sut} ${test_suite}
cd ${test_suite}
diff --git a/resources/ansible_roles/ceph-info/tasks/main.yml b/resources/ansible_roles/ceph-info/tasks/main.yml
index 04cc859c..cb10b4c1 100644
--- a/resources/ansible_roles/ceph-info/tasks/main.yml
+++ b/resources/ansible_roles/ceph-info/tasks/main.yml
@@ -9,6 +9,6 @@
---
-- include: cache.yml
-- include: disk.yml
-- include: network.yml
+- import_tasks: cache.yml
+- import_tasks: disk.yml
+- import_tasks: network.yml
diff --git a/resources/ansible_roles/openstack/defaults/main.yml b/resources/ansible_roles/openstack/defaults/main.yml
index 3b83d6c0..261504ea 100644
--- a/resources/ansible_roles/openstack/defaults/main.yml
+++ b/resources/ansible_roles/openstack/defaults/main.yml
@@ -9,9 +9,11 @@
---
+image_url: https://cloud-images.ubuntu.com/releases/16.04/release-20180222/ubuntu-16.04-server-cloudimg-amd64-disk1.img
+checksum: 027b3e9d219f0f6c17b5448ed67dc41e
+temp_dir: /tmp/qtip
+
+flavor_name: qtip_flavor
stack_name: qtip_stack
image_name: qtip_image
-flavor_name: m1.large
-net_name: qtip_net
-subnet_name: qtip_subnet
-instance_name: qtip_vm \ No newline at end of file
+ansible_user: ubuntu
diff --git a/resources/ansible_roles/openstack/tasks/main.yml b/resources/ansible_roles/openstack/tasks/main.yml
index f4d3d18c..57942260 100644
--- a/resources/ansible_roles/openstack/tasks/main.yml
+++ b/resources/ansible_roles/openstack/tasks/main.yml
@@ -1,6 +1,6 @@
##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
-# taseer94@gmail.com
+# Copyright (c) 2018 ZTE Corporation and others.
+#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
@@ -9,14 +9,97 @@
---
+- name: Delete QTIP stack
+ os_stack:
+ name: "{{ stack_name }}"
+ state: absent
+
+- name: Delete QTIP keypair
+ os_keypair:
+ name: QtipKey
+ state: absent
+
+- name: Delete QTIP flavor
+ os_nova_flavor:
+ name: "{{ flavor_name }}"
+ state: absent
+
+- name: Delete qtip image
+ os_image:
+ name: "{{ image_name }}"
+ state: absent
+
+- name: Create temp directory for QTIP
+ file:
+ path: "{{ temp_dir }}"
+ state: directory
+
+- name: Clean the existing SSH keypair
+ file:
+ state: absent
+ path: "{{ temp_dir }}/{{ item }}"
+ with_items:
+ - QtipKey.pub
+ - QtipKey
+
+- name: Generate a SSH key for QTIP VM
+ shell: ssh-keygen -t rsa -N "" -f "{{ temp_dir }}/QtipKey" -q
+
+- name: Create QTIP keypair
+ os_keypair:
+ name: QtipKey
+ public_key_file: "{{ temp_dir }}/QtipKey.pub"
+ state: present
+
+- name: Create QTIP flavor
+ os_nova_flavor:
+ name: "{{ flavor_name }}"
+ ram: 2048
+ vcpus: 3
+ disk: 4
+ state: present
+
+- name: Download image as qtip image
+ get_url:
+ url: "{{ image_url }}"
+ dest: "{{ temp_dir }}/{{ image_name }}.img"
+ checksum: "md5:{{ checksum }}"
+ when: image_url | search("https://")
+
+- name: Upload qtip image
+ os_image:
+ name: "{{ image_name }}"
+ container_format: bare
+ disk_format: qcow2
+ state: present
+ filename: "{{ temp_dir }}/{{ image_name }}.img"
+
- name: create qtip stack
os_stack:
name: "{{ stack_name }}"
state: present
- template: heat_template.yml
+ template: "{{ heat_template }}"
parameters:
- image_name: "{{ image_name }}"
- flavor_name: "{{ flavor_name }}"
- net_name: "{{ net_name }}"
- subnet_name: "{{ subnet_name }}"
- instance_name: "{{ inst_name }}"
+ image: "{{ image_name }}"
+ flavor: "{{ flavor_name }}"
+ keypair: QtipKey
+ external_network: "{{ external_network }}"
+ register: stack
+
+- name: Wait 600s for vm connection to become reachable/usable
+ wait_for:
+ host: "{{ item.output_value }}"
+ port: 22
+ timeout: 600
+ search_regex: OpenSSH
+ with_items: "{{ stack.stack.outputs }}"
+
+- name: generating inventory file
+ template:
+ src: hosts.j2
+ dest: ./hosts
+
+- name: generating ssh.cfg
+ template:
+ src: ssh.cfg.j2
+ dest: ./ssh.cfg
diff --git a/resources/ansible_roles/openstack/templates/heat_template.yml b/resources/ansible_roles/openstack/templates/heat_template.yml
deleted file mode 100644
index ed5a3ab7..00000000
--- a/resources/ansible_roles/openstack/templates/heat_template.yml
+++ /dev/null
@@ -1,70 +0,0 @@
-##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
-# taseer94@gmail.com
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
----
-
-heat_template_version: 2015-04-30
-
-description: Simple template to deploy a single compute instance
-
-parameters:
- image_name:
- type: string
- label: Image ID
- description: Image to be used for compute instance
- default: Ubuntu 16.04 x86_64
- flavor_name:
- type: string
- label: Instance Type
- description: Type of instance (flavor) to be used
- default: m1.large
- net_name:
- type: string
- label: Test network name
- description: The name of the stack's network
- default: qtip_net
- subnet_name:
- type: string
- label: Test subnet name
- description: The name of the stack's subnet
- default: qtip_subnet
- instance_name:
- type: string
- label: Test VM name
- description: The name of the spawned vm
- default: qtip_vm
-
-resources:
- private_net:
- type: OS::Neutron::Net
- properties:
- name: { get_param: net_name }
-
- private_subnet:
- type: OS::Neutron::Subnet
- properties:
- name: { get_param: subnet_name }
- network_id: { get_resource: private_net }
- cidr: 10.0.0.0/24
-
- server1_port:
- type: OS::Neutron::Port
- properties:
- network_id: { get_resource: private_net }
- fixed_ips:
- - subnet_id: { get_resource: private_subnet }
-
- my_instance:
- type: OS::Nova::Server
- properties:
- name: { get_param: inst_name }
- image: { get_param: image_name }
- flavor: { get_param: flavor_name }
- networks:
- - port: { get_resource: server1_port }
diff --git a/resources/ansible_roles/openstack/templates/hosts.j2 b/resources/ansible_roles/openstack/templates/hosts.j2
new file mode 100644
index 00000000..6756d2a2
--- /dev/null
+++ b/resources/ansible_roles/openstack/templates/hosts.j2
@@ -0,0 +1,17 @@
+localhost ansible_connection=local
+
+[SUT:children]
+compute
+
+[node-groups:children]
+compute
+
+[node-groups:vars]
+ansible_ssh_common_args=-F ./ssh.cfg
+
+[compute]
+{% for item in stack.stack.outputs %}
+{% if item.output_key == 'instance_ip' %}
+{{item.output_value}}
+{% endif %}
+{% endfor %}
diff --git a/resources/ansible_roles/openstack/templates/ssh.cfg.j2 b/resources/ansible_roles/openstack/templates/ssh.cfg.j2
new file mode 100644
index 00000000..c0a1f94e
--- /dev/null
+++ b/resources/ansible_roles/openstack/templates/ssh.cfg.j2
@@ -0,0 +1,11 @@
+# Connect to target node
+
+{% for item in stack.stack.outputs %}
+{% if item.output_key == 'instance_ip' %}
+Host {{ item.output_value }}
+ HostName {{ item.output_value }}
+ User {{ ansible_user }}
+ IdentityFile {{ temp_dir }}/QtipKey
+{% endif %}
+
+{% endfor %}
diff --git a/resources/ansible_roles/opnfv-testapi/tasks/main.yml b/resources/ansible_roles/opnfv-testapi/tasks/main.yml
index 270ffb59..334e06f4 100644
--- a/resources/ansible_roles/opnfv-testapi/tasks/main.yml
+++ b/resources/ansible_roles/opnfv-testapi/tasks/main.yml
@@ -9,4 +9,4 @@
---
-- include: "{{ tasks }}.yml"
+- include_tasks: "{{ tasks }}.yml"
diff --git a/resources/ansible_roles/opnfv-testapi/tasks/report.yml b/resources/ansible_roles/opnfv-testapi/tasks/report.yml
index 0633eafb..6db3a785 100644
--- a/resources/ansible_roles/opnfv-testapi/tasks/report.yml
+++ b/resources/ansible_roles/opnfv-testapi/tasks/report.yml
@@ -21,7 +21,7 @@
project_name: "{{ project_name }}"
case_name: "{{ case_name }}"
pod_name: "{{ pod_name }}"
- installer: "{{ installer_type }}"
+ installer: "{{ installer_type_adapter[installer_type] }}"
version: "{{ version }}"
scenario: "{{ scenario }}"
start_date: "{{ ansible_date_time.date }}"
diff --git a/resources/ansible_roles/qtip-generator/defaults/main.yml b/resources/ansible_roles/qtip-generator/defaults/main.yml
index 344feb54..6665d11f 100644
--- a/resources/ansible_roles/qtip-generator/defaults/main.yml
+++ b/resources/ansible_roles/qtip-generator/defaults/main.yml
@@ -16,6 +16,10 @@ installer_group:
apex: apex-underclouds
mcp: salt-master
+network_group:
+ mcp: floating_net
+ apex: external
+
project_name: 'qtip-project'
project_template: 'compute'
diff --git a/resources/ansible_roles/qtip-generator/files/compute/group_vars/all.yml b/resources/ansible_roles/qtip-generator/files/compute/group_vars/all.yml
index efb2bdb7..1b34fd69 100644
--- a/resources/ansible_roles/qtip-generator/files/compute/group_vars/all.yml
+++ b/resources/ansible_roles/qtip-generator/files/compute/group_vars/all.yml
@@ -21,3 +21,7 @@ installer_group:
fuel: fuel-masters
apex: apex-underclouds
mcp: salt-master
+
+network_group:
+ mcp: floating_net
+ apex: external
diff --git a/legacy/config/SampleHeat.yaml b/resources/ansible_roles/qtip-generator/files/compute/heat_template.yml
index 650c6a0c..cb67e624 100644
--- a/legacy/config/SampleHeat.yaml
+++ b/resources/ansible_roles/qtip-generator/files/compute/heat_template.yml
@@ -1,5 +1,5 @@
##############################################################################
-# Copyright (c) 2017 ZTE Corporation and others.
+# Copyright (c) 2018 ZTE Corporation and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
@@ -15,20 +15,23 @@ parameters:
image:
type: string
description: Name of the image
- default: QTIP_CentOS
+ default: qtip_image
- external_net_name:
+ flavor:
+ type: string
+ description: Name of flavor
+ default: qtip_flavor
+
+ keypair:
type: string
- description: Name of the external network which management network will connect to
- default: admin_floating_net
+ description: Name of keypair
+ default: QtipKey
+
+ external_network:
+ type: string
+ description: Name of the external network
resources:
- flavor:
- type: OS::Nova::Flavor
- properties:
- ram: 8192
- vcpus: 8
- disk: 80
network:
type: OS::Neutron::Net
@@ -49,7 +52,7 @@ resources:
properties:
name: qtip_router
external_gateway_info:
- network: { get_param: external_net_name }
+ network: { get_param: external_network }
management_router_interface:
type: OS::Neutron::RouterInterface
@@ -57,6 +60,17 @@ resources:
router: { get_resource: management_router }
subnet: { get_resource: subnet }
+ floating_ip:
+ type: OS::Neutron::FloatingIP
+ properties:
+ floating_network: { get_param: external_network }
+
+ floating_ip_association:
+ type: OS::Nova::FloatingIPAssociation
+ properties:
+ floating_ip: { get_resource: floating_ip }
+ server_id: { get_resource: qtip_instance }
+
security_group:
type: OS::Neutron::SecurityGroup
properties:
@@ -70,5 +84,18 @@ resources:
protocol: udp
- protocol: icmp
+ qtip_instance:
+ type: OS::Nova::Server
+ depends_on: [subnet]
+ properties:
+ name: { get_param: "OS::stack_name" }
+ image: { get_param: image }
+ flavor: { get_param: flavor }
+ key_name: { get_param: keypair }
+ security_groups: [{ get_resource: security_group }]
+ networks:
+ - network: { get_resource: network }
outputs:
- description: 'none'
+ instance_ip:
+ description: The IP address of the instance
+ value: { get_attr: [floating_ip, floating_ip_address] }
diff --git a/resources/ansible_roles/qtip-generator/files/compute/host_vars/localhost.yml b/resources/ansible_roles/qtip-generator/files/compute/host_vars/localhost.yml
index cc587c69..7f339045 100644
--- a/resources/ansible_roles/qtip-generator/files/compute/host_vars/localhost.yml
+++ b/resources/ansible_roles/qtip-generator/files/compute/host_vars/localhost.yml
@@ -14,3 +14,9 @@ case_name: "{{ case_name|default('compute') }}"
pod_name: "{{ pod_name|default('qtip-pod') }}"
scenario: "{{ scenario|default('generic') }}"
version: "{{ lookup('env','OPNFV_RELEASE')|default('master') }}"
+
+installer_type_adapter:
+ fuel: fuel
+ mcp: fuel
+ apex: apex
+ manual: manual \ No newline at end of file
diff --git a/resources/ansible_roles/qtip-generator/files/compute/run.yml b/resources/ansible_roles/qtip-generator/files/compute/run.yml
index f8e71f0c..c166e488 100644
--- a/resources/ansible_roles/qtip-generator/files/compute/run.yml
+++ b/resources/ansible_roles/qtip-generator/files/compute/run.yml
@@ -18,6 +18,22 @@
- hosts: SUT
+{% if sut == 'vnf' %}
+ gather_facts: no
+ pre_tasks:
+
+ - name: check whether install python 2.x in remote target
+ become: yes
+ raw: test -e /usr/bin/python || (apt-get -y update && apt-get install -y python-minimal)
+ register: rs
+ # Sometimes vm's network is not ready, have to give some attempts to install packages
+ until: rs.stdout.find("Setting up python-minimal") != -1
+ retries: 10
+ delay: 10
+ - name: gather facts
+ setup:
+{% endif %}
+
roles:
# prepare environment
- { role: qtip, tasks: setup-node, tags: [setup] }
diff --git a/resources/ansible_roles/qtip-generator/files/compute/setup.yml b/resources/ansible_roles/qtip-generator/files/compute/setup.yml
index f5920c1f..c3e8f58e 100644
--- a/resources/ansible_roles/qtip-generator/files/compute/setup.yml
+++ b/resources/ansible_roles/qtip-generator/files/compute/setup.yml
@@ -10,13 +10,24 @@
# Prepare connection to SUT (System Under Test)
---
+
+{% if sut == 'vnf' %}
+- hosts: localhost
+ gather_facts: no
+ roles:
+ - { role: openstack, external_network: {{ network_group[installer_type] }}, heat_template: heat_template.yml }
+
+{% elif sut == 'node' %}
+
{% if installer_type == 'manual' %}
- hosts: localhost
gather_facts: no
tasks:
- - debug: "msg='please add hosts to SUT group in `hosts` file manually.'"
+ - debug: "msg='please add hosts to SUT group in `hosts` file manually.'"
+
{% else %}
- hosts: [{{ installer_group[installer_type] }}]
roles:
- - { role: qtip, tasks: generate-inventory }
+ - { role: qtip, tasks: generate-inventory }
+{% endif %}
{% endif %}
diff --git a/resources/ansible_roles/qtip-generator/files/compute/teardown.yml b/resources/ansible_roles/qtip-generator/files/compute/teardown.yml
index dc659930..e483d7f6 100644
--- a/resources/ansible_roles/qtip-generator/files/compute/teardown.yml
+++ b/resources/ansible_roles/qtip-generator/files/compute/teardown.yml
@@ -7,8 +7,12 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
+
- hosts: SUT
roles:
# teardown environment
- { role: qtip, tasks: teardown }
+
+
+
diff --git a/resources/ansible_roles/qtip-generator/files/doctor/group_vars/all.yml b/resources/ansible_roles/qtip-generator/files/doctor/group_vars/all.yml
index 55d5b250..766520b9 100644
--- a/resources/ansible_roles/qtip-generator/files/doctor/group_vars/all.yml
+++ b/resources/ansible_roles/qtip-generator/files/doctor/group_vars/all.yml
@@ -1,3 +1,12 @@
+##############################################################################
+# Copyright (c) 2017 ZTE Corporation and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
{% raw %}
doctor_project: doctor
doctor_user: doctor
diff --git a/resources/ansible_roles/qtip/tasks/generate-inventory.yml b/resources/ansible_roles/qtip/tasks/generate-inventory.yml
index aee25260..8aa36294 100644
--- a/resources/ansible_roles/qtip/tasks/generate-inventory.yml
+++ b/resources/ansible_roles/qtip/tasks/generate-inventory.yml
@@ -11,7 +11,7 @@
---
-- include: "gather-facts-{{ installer_type }}.yml"
+- include_tasks: "gather-facts-{{ installer_type }}.yml"
- name: generating inventory file
template: src=templates/hosts dest=./hosts
diff --git a/resources/ansible_roles/qtip/tasks/install-deps.yml b/resources/ansible_roles/qtip/tasks/install-deps.yml
index e9590f4b..81d054f6 100644
--- a/resources/ansible_roles/qtip/tasks/install-deps.yml
+++ b/resources/ansible_roles/qtip/tasks/install-deps.yml
@@ -8,4 +8,4 @@
##############################################################################
---
-- include: "install-deps-{{ ansible_os_family|lower }}.yml"
+- include_tasks: "install-deps-{{ ansible_os_family|lower }}.yml"
diff --git a/resources/ansible_roles/qtip/tasks/main.yml b/resources/ansible_roles/qtip/tasks/main.yml
index 270ffb59..334e06f4 100644
--- a/resources/ansible_roles/qtip/tasks/main.yml
+++ b/resources/ansible_roles/qtip/tasks/main.yml
@@ -9,4 +9,4 @@
---
-- include: "{{ tasks }}.yml"
+- include_tasks: "{{ tasks }}.yml"
diff --git a/resources/ansible_roles/qtip/tasks/setup-node.yml b/resources/ansible_roles/qtip/tasks/setup-node.yml
index af848f70..1fa7f9c4 100644
--- a/resources/ansible_roles/qtip/tasks/setup-node.yml
+++ b/resources/ansible_roles/qtip/tasks/setup-node.yml
@@ -26,4 +26,4 @@
path: "{{ qtip_workdir }}"
state: directory
-- include: install-deps.yml
+- import_tasks: install-deps.yml
diff --git a/tests/ci/compute/docker-compose.yaml b/tests/ci/compute/docker-compose.yaml
index 712c9c54..896908d1 100644
--- a/tests/ci/compute/docker-compose.yaml
+++ b/tests/ci/compute/docker-compose.yaml
@@ -11,8 +11,9 @@ version: '2'
services:
qtip:
- container_name: compute_qtip
+ container_name: compute_qtip_${SUT}
image: opnfv/qtip:${DOCKER_TAG}
env_file: ${ENV_FILE}
volumes:
- ${SSH_CREDENTIALS}:/root/.ssh
+ - ${IMAGE_DIR}:/tmp
diff --git a/tests/ci/experimental.sh b/tests/ci/experimental.sh
index 7fa18f2f..e45611e5 100755
--- a/tests/ci/experimental.sh
+++ b/tests/ci/experimental.sh
@@ -17,21 +17,22 @@ script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
export DEPLOY_SCENARIO='generic'
export DOCKER_TAG='latest'
export CI_DEBUG='false'
-export TEST_SUITE='storage'
+export TEST_SUITE='compute'
export TESTAPI_URL=''
export SSH_CREDENTIALS='/root/.ssh'
-
export WORKSPACE=${WORKSPACE:-$(pwd)}
+export SUT='vnf'
source ${script_dir}/utils/start_services.sh
cd ${WORKSPACE}
qtip_repo='/home/opnfv/repos/qtip'
-docker cp . ${TEST_SUITE}_qtip:${qtip_repo}
-docker exec ${TEST_SUITE}_qtip bash -c "cd ${qtip_repo} && pip install -U -e ."
+docker cp . ${TEST_SUITE}_qtip_${SUT}:${qtip_repo}
+docker exec ${TEST_SUITE}_qtip_${SUT} bash -c "cd ${qtip_repo} && pip install -U -e ."
+
+docker exec ${TEST_SUITE}_qtip_${SUT} bash -x ${qtip_repo}/qtip/scripts/quickstart.sh -u "${SUT}"
-docker exec ${TEST_SUITE}_qtip bash -x ${qtip_repo}/qtip/scripts/quickstart.sh
echo "QTIP: Verify ${TEST_SUITE} done!"
exit 0
diff --git a/legacy/tests/helper/version.yaml b/tests/ci/network/docker-compose.yaml
index 59be4256..d4f2c904 100644
--- a/legacy/tests/helper/version.yaml
+++ b/tests/ci/network/docker-compose.yaml
@@ -1,20 +1,27 @@
##############################################################################
-# Copyright (c) 2017 taseer94@gmail.com and others.
+# Copyright (c) 2017 Dell EMC, ZTE and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
----
- tests:
- - command: ['version', 'list']
- output: "Lists all the different versions\n"
+version: '2'
+services:
- - command: ['version', 'install', 'Colorado']
- output: "Install: Colorado\n"
-
- - command: ['version', 'uninstall', 'Arno']
- output: "Uninstall: Arno\n"
+ nettest:
+ container_name: nettest_qtip
+ image: opnfv/qtip-nettest:${DOCKER_TAG}
+ ports:
+ - 5001:5000
+ env_file: ${ENV_FILE}
+ qtip:
+ container_name: network_qtip
+ image: opnfv/qtip:${DOCKER_TAG}
+ env_file: ${ENV_FILE}
+ volumes:
+ - ${SSH_CREDENTIALS}:/root/.ssh
+ links:
+ - nettest
diff --git a/tests/ci/periodic.sh b/tests/ci/periodic.sh
index 87fa7627..ea2c269e 100755
--- a/tests/ci/periodic.sh
+++ b/tests/ci/periodic.sh
@@ -13,7 +13,13 @@ script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source ${script_dir}/utils/start_services.sh
-docker exec ${TEST_SUITE}_qtip bash -x /home/opnfv/repos/qtip/qtip/scripts/quickstart.sh
+if [[ "${TEST_SUITE}" =~ "compute" ]];then
+ docker exec ${TEST_SUITE}_qtip_${SUT} bash -x /home/opnfv/repos/qtip/qtip/scripts/quickstart.sh -u "${SUT}"
+elif [[ "${TEST_SUITE}" =~ "network" ]];then
+ :
+else
+ docker exec ${TEST_SUITE}_qtip bash -x /home/opnfv/repos/qtip/qtip/scripts/quickstart.sh
+fi
echo "${TEST_SUITE} QPI done!"
diff --git a/tests/ci/utils/start_services.sh b/tests/ci/utils/start_services.sh
index c2129570..6280be36 100644
--- a/tests/ci/utils/start_services.sh
+++ b/tests/ci/utils/start_services.sh
@@ -19,6 +19,7 @@ OPNFV_RELEASE=${OPNFV_RELEASE:-}
EOF
export SSH_CREDENTIALS=${SSH_CREDENTIALS:-/root/.ssh}
+export IMAGE_DIR=${IMAGE_DIR:-$HOME/tmp}
TMPFILE=`mktemp /tmp/qtip.XXXXXX` || exit 1
curl https://git.opnfv.org/releng/plain/utils/fetch_os_creds.sh | bash -s -- \
@@ -28,8 +29,10 @@ grep "export" ${TMPFILE} | sed "s/export //" >> $ENV_FILE
sed -i '/^PYTHONWARNINGS=/d' $ENV_FILE
# In CI job, it still uses fuel to represent MCP.
-if [[ "${BRANCH}" =~ "master" && "${INSTALLER_TYPE}" =~ "fuel" ]];then
- sed -i 's/^INSTALLER_TYPE=fuel/INSTALLER_TYPE=mcp/g' $ENV_FILE
+if [[ "${INSTALLER_TYPE}" =~ "fuel" ]];then
+ if [[ ! "${BRANCH}" =~ "euphrates" ]];then
+ sed -i 's/^INSTALLER_TYPE=fuel/INSTALLER_TYPE=mcp/g' $ENV_FILE
+ fi
fi
docker-compose -f $script_dir/${TEST_SUITE}/docker-compose.yaml pull