27 files changed, 1953 insertions, 198 deletions
diff --git a/Pipfile.lock b/Pipfile.lock index c4dcf444..cbdd51be 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "1bd8bf77bc0201b85bfdc4ffe8364bc18ba107caca8a954765e3e161f57dffd7" + "sha256": "c943c45ab5594bfc47b9151a80f19b667c0b282e6abbad48d929e21b83539590" }, "host-environment-markers": { "implementation_name": "cpython", @@ -9,12 +9,12 @@ "os_name": "posix", "platform_machine": "x86_64", "platform_python_implementation": "CPython", - "platform_release": "17.3.0", - "platform_system": "Darwin", - "platform_version": "Darwin Kernel Version 17.3.0: Thu Nov 9 18:09:22 PST 2017; root:xnu-4570.31.3~1/RELEASE_X86_64", - "python_full_version": "2.7.13", + "platform_release": "4.4.0-108-generic", + "platform_system": "Linux", + "platform_version": "#131-Ubuntu SMP Sun Jan 7 14:34:49 UTC 2018", + "python_full_version": "2.7.12", "python_version": "2.7", - "sys_platform": "darwin" + "sys_platform": "linux2" }, "pipfile-spec": 6, "requires": { @@ -31,9 +31,16 @@ "default": { "ansible": { "hashes": [ - "sha256:315f1580b20bbc2c2f1104f8b5e548c6b4cac943b88711639c5e0d4dfc4d7658" + "sha256:0e98b3a56928d03979d5f8e7ae5d8e326939111b298729b03f00b3ad8f998a3d" ], - "version": "==2.4.2.0" + "version": "==2.4.3.0" + }, + "appdirs": { + "hashes": [ + "sha256:d8b24664561d0d34ddfaec54636d502d7cea6e29c3eaf68f3df6180863e2166e", + "sha256:9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92" + ], + "version": "==1.4.3" }, "asn1crypto": { "hashes": [ @@ -44,10 +51,10 @@ }, "asq": { "hashes": [ - "sha256:ca3495eac34b7cf2c4d10636777ed128b4e0872aa6353ceeb5edd45f1cbf32a8", - "sha256:a7f1cac711fc2834a348aee0075c9a3f4ea06642257add71585d457e3f86e68d" + "sha256:6b5764aeb552679cd75e96bd7080a848a18b04f784328b5f6e8476e5176c0cbe", + "sha256:aeed1e56369372a6666f69facb890c10a8e8a6c5b8898119ca5330b4bb879f7b" ], - "version": "==1.2.1" + "version": "==1.3" }, "bcrypt": { "hashes": [ @@ -85,46 +92,42 @@ }, "certifi": { "hashes": [ - "sha256:244be0d93b71e93fc0a0a479862051414d0e00e16435707e5bf5000f92e04694", - "sha256:5ec74291ca1136b40f0379e1128ff80e866597e4e2c1e755739a913bbc3613c0" + "sha256:14131608ad2fd56836d33a71ee60fa1c82bc9d2c8d98b7bdbc631fe1b3cd1296", + "sha256:edbc3f203427eef571f79a7692bb160a2b0f7ccaa31953e99bd17e307cf63f7d" ], - "version": "==2017.11.5" + "version": "==2018.1.18" }, "cffi": { "hashes": [ - "sha256:2c707e97ad7b0417713543be7cb87315c015bb5dd97903480168d60ebe3e313e", - "sha256:6d8c7e20eb90be9e1ccce8e8dd4ee5163b37289fc5708f9eeafc00adc07ba891", - "sha256:627298d788edcb317b6a01347428501e773f5e8f2988407231c07e50e3f6c1cf", - "sha256:bdd28cf8302eeca1b4c70ec727de384d4f6ea640b0e698934fd9b4c3bc88eeb1", - "sha256:248198cb714fe09f5c60b6acba3675d52199c6142641536796cdf89dd45e5590", - "sha256:c962cb68987cbfb70b034f153bfa467c615c0b55305d39b3237c4bdbdbc8b0f4", - "sha256:401ba2f6c1f1672b6c38670e1c00fa5f84f841edd30c32742dab5c7151cd89bf", - "sha256:1c103c0ee8235c47c4892288b2287014f33e7cb24b9d4a665be3aa744377dcb9", - "sha256:d7461ef8671ae40f991384bbc4a6b1b79f4e7175d8052584be44041996f46517", - "sha256:3ac9be5763238da1d6fa467c43e3f86472626837a478588c94165df09e62e120", - "sha256:d54a7c37f954fdbb971873c935a77ddc33690cec9b7ac254d9f948c43c32fa83", - "sha256:4d9bf1b23896bcd4d042e823f50ad36fb6d8e1e645a3dfb2fe2f070851489b92", - "sha256:61cf049b1c649d8eec360a1a1d09a61c37b9b2d542364506e8feb4afd232363d", - "sha256:ce3da410ae2ab8709565cc3b18fbe9a0eb96ea7b2189416098c48d839ecced84", - "sha256:e72d8b5056f967ecb57e166537408bc913f2f97dc568027fb6342fcfa9f81d64", - 
"sha256:11a8ba88ef6ae89110ef029dae7f1a293365e50bdd0c6ca973beed80cec95ae4", - "sha256:974f69112721ba2e8a6acd0f6b68a5e11432710a3eca4e4e6f4d7aaf99214ed1", - "sha256:062c66dabc3faf8e0db1ca09a6b8e308846e5d35f43bed1a68c492b0d96ac171", - "sha256:03a9b9efc280dbe6be149a7fa689f59a822df009eee633fdaf55a6f38795861f", - "sha256:8b3d6dc9981cedfb1ddcd4600ec0c7f5ac2c6ad2dc482011c7eecb4ae9c819e0", - "sha256:09b7d195d163b515ef7c2b2e26a689c9816c83d5319cceac6c36ffdab97ab048", - "sha256:943b94667749d1cfcd964e215a20b9c891deae913202ee8eacaf2b94164b155f", - "sha256:89829f5cfbcb5ad568a3d61bd23a8e33ad69b488d8f6a385e0097a4c20742a9b", - "sha256:ba78da7c940b041cdbb5aaff5afe11e8a8f25fe19564c12eefea5c5bd86930ca", - "sha256:a79b15b9bb4726672865cf5b0f63dee4835974a2b11b49652d70d49003f5d1f4", - "sha256:f6799913eb510b682de971ddef062bbb4a200f190e55cae81c413bc1fd4733c1", - "sha256:e7f5ad6b12f21b77d3a37d5c67260e464f4e9068eb0c0622f61d0e30390b31b6", - "sha256:5f96c92d5f5713ccb71e76dfa14cf819c59ecb9778e94bcb541e13e6d96d1ce5", - "sha256:5357b465e3d6b98972b7810f9969c913d365e75b09b7ba813f5f0577fe1ac9f4", - "sha256:75e1de9ba7c155d89bcf67d149b1c741df553c8158536e8d27e63167403159af", - "sha256:ab87dd91c0c4073758d07334c1e5f712ce8fe48f007b86f8238773963ee700a6" - ], - "version": "==1.11.2" + "sha256:5d0d7023b72794ea847725680e2156d1d01bc698a9007fccce46d03c904fe093", + "sha256:86903c0afab4a3390170aca61f753f5adad8ffff947030719ee44dedc5b68403", + "sha256:7d35678a54da0d3f1bc30e3a58a232043753d57c691875b5a75e4e062793bc9a", + "sha256:824cac33906be5c8e976f0d950924d88ec058989ef9cd2f77f5cd53cec417635", + "sha256:6ca52651f6bd4b8647cb7dee15c82619de3e13490f8e0bc0620830a2245b51d1", + "sha256:a183959a4b1e01d6172aeed356e2523ec8682596075aa6cf0003fe08da959a49", + "sha256:9532c5bc0108bd0fe43c0eb3faa2ef98a2db60fc0d4019f106b88d46803dd663", + "sha256:96652215ef328262b5f1d5647632bd342ac6b31dfbc495b21f1ab27cb06d621d", + "sha256:6c99d19225e3135f6190a3bfce2a614cae8eaa5dcaf9e0705d4ccb79a3959a3f", + "sha256:12cbf4c04c1ad07124bfc9e928c01e282feac9ec7dd72a18042d4fc56456289a", + "sha256:69c37089ccf10692361c8d14dbf4138b00b46741ffe9628755054499f06ed548", + "sha256:b8d1454ef627098dc76ccfd6211a08065e6f84efe3754d8d112049fec3768e71", + "sha256:cd13f347235410c592f6e36395ee1c136a64b66534f10173bfa4df1dc88f47d0", + "sha256:0640f12f04f257c4467075a804a4920a5d07ef91e11c525fc65d715c08231c81", + "sha256:89a8d05b96bdeca8fdc89c5fa9469a357d30f6c066262e92c0c8d2e4d3c53cae", + "sha256:a67c430a9bde73ae85b0c885fcf41b556760e42ea74c16dc70431a349989b448", + "sha256:7a831170b621e98f45ed1d5758325be19619a593924127a0a47af9a72a117319", + "sha256:796d0379102e6da5215acfcd20e8e69cca9d97309215b4ce088fe175b1c2f586", + "sha256:0fe3b3d571543a4065059d1d3d6d39f4ca6da0f2207ad13547094522e32ead46", + "sha256:678135090c311780382b1dd3f828f715583ea8a69687ed053c047d3cec6625d6", + "sha256:f4992cd7b4c867f453d44c213ee29e8fd484cf81cfece4b6e836d0982b6fa1cf", + "sha256:6d191fb20138fe1948727b20e7b96582b7b7e676135eabf72d910e10bf7bfa65", + "sha256:ec208ca16e57904dd7f4c7568665f80b1f7eb7e3214be014560c28def219060d", + "sha256:b3653644d6411bf4bd64c1f2ca3cb1b093f98c68439ade5cef328609bbfabf8c", + "sha256:f4719d0bafc5f0a67b2ec432086d40f653840698d41fa6e9afa679403dea9d78", + "sha256:87f837459c3c78d75cb4f5aadf08a7104db15e8c7618a5c732e60f252279c7a6", + "sha256:df9083a992b17a28cd4251a3f5c879e0198bb26c9e808c4647e0a18739f1d11d" + ], + "version": "==1.11.4" }, "chardet": { "hashes": [ @@ -182,6 +185,19 @@ ], "version": "==2.1.4" }, + "decorator": { + "hashes": [ + "sha256:94d1d8905f5010d74bbbd86c30471255661a14187c45f8d7f3e5aa8540fdb2e5", + 
"sha256:7d46dd9f3ea1cf5f06ee0e4e1277ae618cf48dfb10ada7c8427cd46c42702a0e" + ], + "version": "==4.2.1" + }, + "dogpile.cache": { + "hashes": [ + "sha256:a73aa3049cd88d7ec57a1c2e8946abdf4f14188d429c1023943fcc55c4568da1" + ], + "version": "==0.6.4" + }, "enum34": { "hashes": [ "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", @@ -207,12 +223,20 @@ "markers": "python_version == '2.7'", "version": "==3.2.3.post2" }, + "futures": { + "hashes": [ + "sha256:ec0a6cb848cc212002b9828c3e34c675e0c9ff6741dc445cab6fdd4e1085d1f1", + "sha256:9ec02aa7d674acb8618afb127e27fde7fc68994c0437ad759fa094a574adb265" + ], + "markers": "python_version == '2.7' or python_version == '2.6'", + "version": "==3.2.0" + }, "humanfriendly": { "hashes": [ - "sha256:abe35f7096e2d27ef6059355a33386b089eecbcd5157201be05dc99e50fb2c28", - "sha256:928eff707f0682029f1968cefe108fd2870ead5a2f8d80875231a27ba2b20410" + "sha256:587b16ce804bec8e3cbb8c420decea051b38e3d895272b2c1e38fc69b4286b1c", + "sha256:d0e74171b87318a94b99520e4f0c5651e944b5f11d696c46be3330bb82b85300" ], - "version": "==4.6" + "version": "==4.8" }, "idna": { "hashes": [ @@ -231,9 +255,17 @@ "hashes": [ "sha256:200d8686011d470b5e4de207d803445deee427455cd0cb7c982b68cf82524f81" ], - "markers": "python_version < '3'", + "markers": "python_version < '3.3'", "version": "==1.0.19" }, + "iso8601": { + "hashes": [ + "sha256:210e0134677cc0d02f6028087fee1df1e1d76d372ee1db0bf30bf66c5c1c89a3", + "sha256:bbbae5fb4a7abfe71d4688fd64bff70b91bbd74ef6a99d964bab18f7fdf286dd", + "sha256:49c4b20e1f38aa5cf109ddcd39647ac419f928512c869dc01d5c7098eddede82" + ], + "version": "==0.1.12" + }, "itsdangerous": { "hashes": [ "sha256:cbb3fcf8d3e33df861709ecaf89d9e6629cff0a217bc2848f1b41cd30d360519" @@ -247,6 +279,27 @@ ], "version": "==2.10" }, + "jmespath": { + "hashes": [ + "sha256:f11b4461f425740a1d908e9a3f7365c3d2e569f6ca68a2ff8bc5bcd9676edd63", + "sha256:6a81d4c9aa62caf061cb517b4d9ad1dd300374cd4706997aff9cd6aedd61fc64" + ], + "version": "==0.9.3" + }, + "jsonpatch": { + "hashes": [ + "sha256:404811758da710e58d703adf74f1935c59f8d9450137f971765931083042ee45", + "sha256:11f5ffdf543a83047a2f54ac28f8caad7f34724cb1ea26b27547fd974f1a2153" + ], + "version": "==1.21" + }, + "jsonpointer": { + "hashes": [ + "sha256:ff379fa021d1b81ab539f5ec467c7745beb1a5671463f9dcc2b2d458bd361c1e", + "sha256:c192ba86648e05fdae4f08a17ec25180a9aef5008d973407b581798a83975362" + ], + "version": "==2.0" + }, "jsonschema": { "hashes": [ "sha256:000e68abd33c972a5248544925a0cae7d1125f9bf6c58280d37546b946769a08", @@ -254,6 +307,13 @@ ], "version": "==2.6.0" }, + "keystoneauth1": { + "hashes": [ + "sha256:5a242ded38cf3f6fb0290ecb24e0db9290507667e520dfa41a129ad769d327ec", + "sha256:9f1565eb261677e6d726c1323ce8ed8da3e1b0f70e9cee14f094ebd03fbeb328" + ], + "version": "==3.4.0" + }, "markupsafe": { "hashes": [ "sha256:a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665" @@ -268,6 +328,36 @@ "markers": "python_version == '2.6' or python_version == '2.7' or python_version == '3.0' or python_version == '3.1' or python_version == '3.2'", "version": "==1.4" }, + "munch": { + "hashes": [ + "sha256:62fb4fb318e965a464b088e6af52a63e0905a50500b770596a939d3855e7aa15" + ], + "version": "==2.2.0" + }, + "netifaces": { + "hashes": [ + "sha256:137a77c2e0a68a3e409a532fe73340c3df6a59ffe8eb565ec8b1f0a131402d09", + "sha256:ef223d45b73cc96c25a6295f471106b3195d2367b7f153e43490673d89e9240e", + "sha256:4ddf0f329d83516bba096b7eb1ad2ee354a98e2483f89ad3a590e653ece963c8", + 
"sha256:61fd2706de21aac11475c921ba0fd98af19d5280702a11c5c8e2e910765dc378", + "sha256:48275e627ce9220acfed2e1ca1e4cf01f58940412f2aebac7995750b50232701", + "sha256:f8b352247ae4b6731192d33fd35b27f247e3e4618a2d5cf65de41d46bbb53223", + "sha256:a0c7c19e1fb62ac6018582f72d15ac056e75c3d2ab222fb25369e7766ed67453", + "sha256:8c3a2c7d573511507f0f29c9d1a28ce1b2a958b8d0d7a1b1966c6fd0fa5d2953", + "sha256:5a0114933657eebe4985fdf7b0099a27ec75501901000770addca6ad7bd23008", + "sha256:3b19bf224b3e46c62f5f5e65a9fbd2e9731cda09289c76aca110a3dbdf0c3332", + "sha256:2245677ee3aa1244bbd0fbf3d6e0158d38b612eba406e7be9639e7efe0371bfa", + "sha256:7925add91982cb689963cc28fb8718c006f7713b527d262e32b29b4491cec295", + "sha256:0c4da523f36d36f1ef92ee183f2512f3ceb9a9d2a45f7d19cda5a42c6689ebe0", + "sha256:337f0fae970ab7a9acf5690516f7c7795f41934350cc1e8ad33c5c0331904ac0", + "sha256:563a18f942a9c9f64eed27fe2a1b3dfb5866a440cdaf4d833213798699cc1789", + "sha256:88d8fa4fcccaca07519141e95b42f52fb650bed2e8f5b29c44e22968b92b7097", + "sha256:60f25e5b6d2a682a394c87a6d2bf4d38c8dd8999ee32b955af88ceccaef7fe93", + "sha256:c455ca29737bf9b298408fd78a48f8fc6ddaa1f50a6eb92d814a8874412c631b", + "sha256:369eb616a6c844987bd4df486bb5f591aa0d5552378c6831f56ed81cfc228cab" + ], + "version": "==0.10.6" + }, "numpy": { "hashes": [ "sha256:428cd3c0b197cf857671353d8c85833193921af9fafcc169a1f29c7185833d50", @@ -296,6 +386,13 @@ ], "version": "==1.14.0" }, + "os-client-config": { + "hashes": [ + "sha256:f602f18ba58e4fe14ff607bebee00d20d34c517bca1289fd0c63f9e777f1ce43", + "sha256:e98bdde50e30396d47d237cfb23e209e8c0a6f834ada190a2dcfe5305bd42af0" + ], + "version": "==1.29.0" + }, "paramiko": { "hashes": [ "sha256:8851e728e8b7590989e68e3936c48ee3ca4dad91d29e3d7ff0305b6c5fc582db", @@ -401,6 +498,20 @@ ], "version": "==2.18.4" }, + "requestsexceptions": { + "hashes": [ + "sha256:3083d872b6e07dc5c323563ef37671d992214ad9a32b0ca4a3d7f5500bf38ce3", + "sha256:b095cbc77618f066d459a02b137b020c37da9f46d9b057704019c9f77dba3065" + ], + "version": "==1.4.0" + }, + "shade": { + "hashes": [ + "sha256:0964299431967f7ea46c6f310d7d2d58788e70c56a064a07c3a17c93bcea5e34", + "sha256:c46307010f1bd5b84b700e542fa0942c59b613ab88372343f6e52e333306c32e" + ], + "version": "==1.26.0" + }, "six": { "hashes": [ "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb", @@ -408,6 +519,13 @@ ], "version": "==1.11.0" }, + "stevedore": { + "hashes": [ + "sha256:e3d96b2c4e882ec0c1ff95eaebf7b575a779fd0ccb4c741b9832bed410d58b3d", + "sha256:f1c7518e7b160336040fee272174f1f7b29a46febb3632502a8f2055f973d60b" + ], + "version": "==1.28.0" + }, "swagger-spec-validator": { "hashes": [ "sha256:aedacb6c6b475026a1b5ac218fb590382d08064e227da254eb961d17cfd2b7c1", @@ -417,11 +535,11 @@ }, "typing": { "hashes": [ - "sha256:349b1f9c109c84b53ac79ac1d822eaa68fc91d63b321bd9392df15098f746f53", - "sha256:63a8255fe7c6269916baa440eb9b6a67139b0b97a01af632e7bd2842e1e02f15", - "sha256:d514bd84b284dd3e844f0305ac07511f097e325171f6cc4a20878d11ad771849" + "sha256:b2c689d54e1144bbcfd191b0832980a21c2dbcf7b5ff7a66248a60c90e951eb8", + "sha256:3a887b021a77b292e151afb75323dea88a7bc1b3dfa92176cff8e44c8b68bddf", + "sha256:d400a9344254803a2368533e4533a4200d21eb7b6b729c173bc38201a74db3f2" ], - "version": "==3.6.2" + "version": "==3.6.4" }, "urllib3": { "hashes": [ @@ -448,48 +566,44 @@ }, "coverage": { "hashes": [ - "sha256:d1ee76f560c3c3e8faada866a07a32485445e16ed2206ac8378bd90dadffb9f0", - "sha256:007eeef7e23f9473622f7d94a3e029a45d55a92a1f083f0f3512f5ab9a669b05", - 
"sha256:17307429935f96c986a1b1674f78079528833410750321d22b5fb35d1883828e", - "sha256:845fddf89dca1e94abe168760a38271abfc2e31863fbb4ada7f9a99337d7c3dc", - "sha256:3f4d0b3403d3e110d2588c275540649b1841725f5a11a7162620224155d00ba2", - "sha256:4c4f368ffe1c2e7602359c2c50233269f3abe1c48ca6b288dcd0fb1d1c679733", - "sha256:f8c55dd0f56d3d618dfacf129e010cbe5d5f94b6951c1b2f13ab1a2f79c284da", - "sha256:cdd92dd9471e624cd1d8c1a2703d25f114b59b736b0f1f659a98414e535ffb3d", - "sha256:2ad357d12971e77360034c1596011a03f50c0f9e1ecd12e081342b8d1aee2236", - "sha256:e9a0e1caed2a52f15c96507ab78a48f346c05681a49c5b003172f8073da6aa6b", - "sha256:eea9135432428d3ca7ee9be86af27cb8e56243f73764a9b6c3e0bda1394916be", - "sha256:700d7579995044dc724847560b78ac786f0ca292867447afda7727a6fbaa082e", - "sha256:66f393e10dd866be267deb3feca39babba08ae13763e0fc7a1063cbe1f8e49f6", - "sha256:5ff16548492e8a12e65ff3d55857ccd818584ed587a6c2898a9ebbe09a880674", - "sha256:d00e29b78ff610d300b2c37049a41234d48ea4f2d2581759ebcf67caaf731c31", - "sha256:87d942863fe74b1c3be83a045996addf1639218c2cb89c5da18c06c0fe3917ea", - "sha256:358d635b1fc22a425444d52f26287ae5aea9e96e254ff3c59c407426f44574f4", - "sha256:81912cfe276e0069dca99e1e4e6be7b06b5fc8342641c6b472cb2fed7de7ae18", - "sha256:079248312838c4c8f3494934ab7382a42d42d5f365f0cf7516f938dbb3f53f3f", - "sha256:b0059630ca5c6b297690a6bf57bf2fdac1395c24b7935fd73ee64190276b743b", - "sha256:493082f104b5ca920e97a485913de254cbe351900deed72d4264571c73464cd0", - "sha256:e3ba9b14607c23623cf38f90b23f5bed4a3be87cbfa96e2e9f4eabb975d1e98b", - "sha256:82cbd3317320aa63c65555aa4894bf33a13fb3a77f079059eb5935eea415938d", - "sha256:9721f1b7275d3112dc7ccf63f0553c769f09b5c25a26ee45872c7f5c09edf6c1", - "sha256:bd4800e32b4c8d99c3a2c943f1ac430cbf80658d884123d19639bcde90dad44a", - "sha256:f29841e865590af72c4b90d7b5b8e93fd560f5dea436c1d5ee8053788f9285de", - "sha256:f3a5c6d054c531536a83521c00e5d4004f1e126e2e2556ce399bef4180fbe540", - "sha256:dd707a21332615108b736ef0b8513d3edaf12d2a7d5fc26cd04a169a8ae9b526", - "sha256:2e1a5c6adebb93c3b175103c2f855eda957283c10cf937d791d81bef8872d6ca", - "sha256:f87f522bde5540d8a4b11df80058281ac38c44b13ce29ced1e294963dd51a8f8", - "sha256:a7cfaebd8f24c2b537fa6a271229b051cdac9c1734bb6f939ccfc7c055689baa", - "sha256:309d91bd7a35063ec7a0e4d75645488bfab3f0b66373e7722f23da7f5b0f34cc", - "sha256:0388c12539372bb92d6dde68b4627f0300d948965bbb7fc104924d715fdc0965", - "sha256:ab3508df9a92c1d3362343d235420d08e2662969b83134f8a97dc1451cbe5e84", - "sha256:43a155eb76025c61fc20c3d03b89ca28efa6f5be572ab6110b2fb68eda96bfea", - "sha256:f98b461cb59f117887aa634a66022c0bd394278245ed51189f63a036516e32de", - "sha256:b6cebae1502ce5b87d7c6f532fa90ab345cfbda62b95aeea4e431e164d498a3d", - "sha256:a4497faa4f1c0fc365ba05eaecfb6b5d24e3c8c72e95938f9524e29dadb15e76", - "sha256:2b4d7f03a8a6632598cbc5df15bbca9f778c43db7cf1a838f4fa2c8599a8691a", - "sha256:1afccd7e27cac1b9617be8c769f6d8a6d363699c9b86820f40c74cfb3328921c" - ], - "version": "==4.4.2" + "sha256:464d85d6959497cc4adfa9f0d36fca809e2ca7ec5f4625f548317892cac6ed7c", + "sha256:e958ab5b6a7f3b88289a25c95d031f2b62bc73219141c09d261fd97f244c124c", + "sha256:67288f8834a0a64c1af66286b22fd325b5524ceaa153a51c3e2e30f7e8b3f826", + "sha256:cfb6b7035c6605e2a87abe7d84ea35a107e6c432014a3f1ca243ab57a558fbcd", + "sha256:c86a12b3dc004bcbe97a3849354bd1f93eb6fb69b0e4eb58831fd7adba7740ec", + "sha256:8ddcf308f894d01a1a0ae01283d19b613751815b7190113266a0b7f9d076e86d", + "sha256:adab01e4c63a01bdf036f57f0114497994aa2195d8659d12a3d69673c3f27939", + 
"sha256:54d73fe68a7ac9c847af69a234a7461bbaf3cad95f258317d4584d14dd53f679", + "sha256:a0d98c71d026c1757c7393a99d24c6e42091ff41e20e68238b17e145252c2d0a", + "sha256:464e0eda175c7fe2dc730d9d02acde5b8a8518d9417413dee6ca187d1f65ef89", + "sha256:2890cb40464686c0c1dccc1223664bbc34d85af053bc5dbcd71ea13959e264f2", + "sha256:0f2315c793b1360f80a9119fff76efb7b4e5ab5062651dff515e681719f29689", + "sha256:85c028e959312285225040cdac5ad3db6189e958a234f09ae6b4ba5f539c842d", + "sha256:da6585339fc8a25086003a2b2c0167438b8ab0cd0ccae468d22ed603e414bba1", + "sha256:e837865a7b20c01a8a2f904c05fba36e8406b146649ff9174cbddf32e217b777", + "sha256:b718efb33097c7651a60a03b4b38b14776f92194bc0e9e598ce05ddaef7c70e7", + "sha256:7413f078fbba267de44814584593a729f88fc37f2d938263844b7f4daf1e36ec", + "sha256:47ad00a0c025f87a7528cc13d013c54e4691ae8730430e49ec9c7ace7e0e1fba", + "sha256:95f9f5072afeb2204401401cbd0ab978a9f86ef1ebc5cd267ba431cfa581cc4d", + "sha256:ca8827a5dad1176a8da6bf5396fd07e66549d1bc842047b76cdf69e196597a80", + "sha256:c68164c4f49cfc2e66ca1ded62e4a1092a6bd4b2c65222059b867700ad19151c", + "sha256:61e0bcf15aa0385e15d1fe4a86022a6b813d08c785855e3fab56ba6d7ac3dd21", + "sha256:981a64063242a2c6c88dda33ccafe3583026847961fe56636b6a00c47674e258", + "sha256:21e47d2ff9c75e25880dd12b316db11379e9afc98b39e9516149d189c15c564b", + "sha256:f6b822c68f68f48d480d23fcfcd1d4df7d42ff03cf5d7b574d09e662c0b95b43", + "sha256:53fa7aa7643a22eeadcf8b781b97a51f37d43ba1d897a05238aa7e4d11bc0667", + "sha256:95ce1a70323d47c0f6b8d6cfd3c14c38cb30d51fd1ab4f6414734fa33a78b17e", + "sha256:b7a06a523dfeaf417da630d46ad4f4e11ca1bae6202c9312c4cb987dde5792fc", + "sha256:585e8db44b8f3af2a4152b00dd8a7a36bc1d2aba7de5e50fc17a54178428f0d6", + "sha256:102933e14b726bd4fdfafb541e122ad36c150732aee36db409d8c8766e11537e", + "sha256:15f92238487d93f7f34a3ba03be3bd4615c69cffc88388b4dd1ea99af74fc1bf", + "sha256:319190dd7fa08c23332215782b563a9ef12b76fb15e4a325915592b825eca9ed", + "sha256:af14e9628c0a3152b6a1fbba4471e6a3e5f5567ecae614f84b84ff3441c58692", + "sha256:72bc3f91a25a87fd87eb57983c8cefbb8aa5bacd50d73516ade398271d652d77", + "sha256:c3905f10786dcf386f3f6cebe0ae4a36f47e5e256471161fb944ca537e97e928", + "sha256:3344079d73a4849341aaaecd9b391141824b8c9a96732fbd6ef95ba9566895d3" + ], + "version": "==4.5" }, "docopt": { "hashes": [ @@ -499,17 +613,17 @@ }, "faker": { "hashes": [ - "sha256:2f6ccc9da046d4cd20401734cf6a1ac73a4e4d8256e7b283496ee6827ad2eb60", - "sha256:e928cf853ef69d7471421f2a3716a1239e43de0fa9855f4016ee0c9f1057328a" + "sha256:91c3e79db508f6e2b8ff3d3099fee0967c95ce02caa1f4028096641696336fc0", + "sha256:9da4c829d5592202a614d2db3c3e05d17684f5b590b7dbd6d7e70dc1053213a6" ], - "version": "==0.8.8" + "version": "==0.8.10" }, "funcsigs": { "hashes": [ "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" ], - "markers": "python_version < '3.3'", + "markers": "python_version < '3.0'", "version": "==1.0.2" }, "ipaddress": { @@ -555,10 +669,10 @@ }, "pytest": { "hashes": [ - "sha256:b84878865558194630c6147f44bdaef27222a9f153bbd4a08908b16bf285e0b1", - "sha256:53548280ede7818f4dc2ad96608b9f08ae2cc2ca3874f2ceb6f97e3583f25bc4" + "sha256:95fa025cd6deb5d937e04e368a00552332b58cae23f63b76c8c540ff1733ab6d", + "sha256:6074ea3b9c999bd6d0df5fa9d12dd95ccd23550df2a582f5f5b848331d2e82ca" ], - "version": "==3.3.2" + "version": "==3.4.0" }, "pytest-cov": { "hashes": [ diff --git a/contrib/nettest/Dockerfile b/contrib/nettest/Dockerfile new file mode 100644 index 00000000..a0ecabf9 --- /dev/null +++ 
b/contrib/nettest/Dockerfile @@ -0,0 +1,47 @@ +########################################################## +# Dockerfile to run a flask-based web application# Based on an ubuntu:16.04 +########################################################## + +# Set the base image to use to centos +FROM ubuntu:16.04 + +# Set the file maintainer +MAINTAINER Qiang.Dai@spirent.com +LABEL version="0.1" description="Spirent networking test Docker container" + +# Set env varibles used in this Dockerfile (add a unique prefix, such as DOCKYARD) +# Local directory with project source +ENV DOCKYARD_SRC=nettest \ + DOCKYARD_SRCHOME=/opt \ + DOCKYARD_SRCPROJ=/opt/nettest + +# Update the defualt application repository source list +RUN apt-get update && apt-get install -y \ + gcc \ + python-dev \ + python-pip \ + python-setuptools \ + --no-install-recommends \ + && rm -rf /var/lib/apt/lists/* + +# Copy application source code to SRCDIR +COPY $DOCKYARD_SRC $DOCKYARD_SRCPROJ + +# Create application subdirectories +WORKDIR $DOCKYARD_SRCPROJ +RUN mkdir -p log +VOLUME ["$DOCKYARD_SRCPROJ/log/"] + +# Install Python dependencies +RUN pip install -U pip \ + && pip install -U setuptools \ + && pip install -r $DOCKYARD_SRCPROJ/requirements.txt + +# Port to expose +EXPOSE 5001 + +# Copy entrypoint script into the image +WORKDIR $DOCKYARD_SRCPROJ + +#CMD ["/bin/bash"] +CMD ["/bin/bash", "start.sh"] diff --git a/contrib/nettest/README.md b/contrib/nettest/README.md new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/contrib/nettest/README.md diff --git a/contrib/nettest/nettest/heat_2stcv.yaml b/contrib/nettest/nettest/heat_2stcv.yaml new file mode 100644 index 00000000..77c6e6e8 --- /dev/null +++ b/contrib/nettest/nettest/heat_2stcv.yaml @@ -0,0 +1,170 @@ +##############################################################################
+# Copyright (c) 2018 Spirent Communications and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+heat_template_version: 2016-10-14
+
+description: Template for deploying 2 STCv instances on a management and a test network
+
+parameters:
+ public_net_name: {default: external, description: Public network to allocate floating IPs to VMs, type: string}
+ #public_net_id: {description: public network id for external connectivity, type: string}
+ mgmt_net_name: {default: admin, description: Name of STCv mgmt network to be created, type: string}
+ mgmt_net_cidr: {default: 10.10.10.0/24, description: STCv mgmt network CIDR, type: string}
+ mgmt_net_gw: {default: 10.10.10.1, description: STCv mgmt network gateway address, type: string}
+ mgmt_net_pool_start: {default: 10.10.10.10, description: Start of mgmt network IP address allocation pool, type: string}
+ mgmt_net_pool_end: {default: 10.10.10.20, description: End of mgmt network IP address allocation pool, type: string}
+ tst_net_name: {default: tst, description: Name of STCv private network to be created, type: string}
+ tst_net_cidr: {default: 192.168.1.0/24, description: STCv private network CIDR, type: string}
+ tst_net_gw: {default: 192.168.1.1, description: STCv private network gateway address, type: string}
+ tst_net_pool_start: {default: 192.168.1.10, description: Start of private network IP address allocation pool, type: string}
+ tst_net_pool_end: {default: 192.168.1.20, description: End of private network IP address allocation pool, type: string}
+ stcv_image: {default: "stcv-4.79", description: Image name to use for STCv, type: string}
+ stcv_flavor: {default: "m1.tiny", description: Flavor to use for STCv, type: string}
+ #stcv_user_data: {default: "", description: user data such as ntp server ip for stcv, type: string}
+ #stcv_config_file: {default: "stcv_config_file", description: user data such as ntp server ip for stcv, type: string}
+ ntp_server_ip: {default: "", description: user data such as ntp server ip for stcv, type: string}
+ stcv_sg_name: {default: stcv_sg, description: server group name, type: string}
+ stcv_sg_affinity: {default: affinity, description: server group affinity policy for STCv (affinity or anti-affinity), type: string}
+
+resources:
+ stcv_server_group:
+ type: OS::Nova::ServerGroup
+ properties:
+ name: {get_param: stcv_sg_name}
+ policies: [{get_param: stcv_sg_affinity}]
+ mgmt_net:
+ type: OS::Neutron::Net
+ properties:
+ name: {get_param: mgmt_net_name}
+ mgmt_net_subnet:
+ type: OS::Neutron::Subnet
+ properties:
+ allocation_pools:
+ - end: {get_param: mgmt_net_pool_end}
+ start: {get_param: mgmt_net_pool_start}
+ cidr: {get_param: mgmt_net_cidr}
+ gateway_ip: {get_param: mgmt_net_gw}
+ network: {get_resource: mgmt_net}
+ public_router:
+ type: OS::Neutron::Router
+ properties:
+ external_gateway_info:
+ network: {get_param: public_net_name}
+ router_interface:
+ type: OS::Neutron::RouterInterface
+ properties:
+ router: {get_resource: public_router}
+ subnet: {get_resource: mgmt_net_subnet}
+ tst_net:
+ type: OS::Neutron::Net
+ properties:
+ name: {get_param: tst_net_name}
+ tst_subnet:
+ type: OS::Neutron::Subnet
+ properties:
+ allocation_pools:
+ - end: {get_param: tst_net_pool_end}
+ start: {get_param: tst_net_pool_start}
+ cidr: {get_param: tst_net_cidr}
+ gateway_ip: {get_param: tst_net_gw}
+ network: {get_resource: tst_net}
+ stcv_1_port_1:
+ type: OS::Neutron::Port
+ properties:
+ network: {get_resource: mgmt_net}
+ fixed_ips:
+ - subnet: {get_resource: mgmt_net_subnet}
+ floating_ip1:
+ type: OS::Neutron::FloatingIP
+ properties:
+ floating_network: {get_param: public_net_name}
+ port_id: {get_resource: stcv_1_port_1}
+ stcv_1_port_2:
+ type: OS::Neutron::Port
+ properties:
+ network: {get_resource: tst_net}
+ port_security_enabled: False
+ fixed_ips:
+ - subnet: {get_resource: tst_subnet}
+ STCv_1:
+ type: OS::Nova::Server
+ properties:
+ #availability_zone : {get_param: availability_zone_name}
+ flavor: {get_param: stcv_flavor}
+ image: {get_param: stcv_image}
+ name: STCv_1
+ user_data:
+ str_replace:
+ template: |
+ #cloud-config
+ spirent:
+ ntp: $ntp_server_ip
+ params:
+ $ntp_server_ip: {get_param: ntp_server_ip}
+ user_data_format: RAW
+ config_drive: True
+ scheduler_hints:
+ group: {get_resource: stcv_server_group}
+ networks:
+ - port: {get_resource: stcv_1_port_1}
+ - port: {get_resource: stcv_1_port_2}
+ stcv_2_port_1:
+ type: OS::Neutron::Port
+ properties:
+ network: {get_resource: mgmt_net}
+ fixed_ips:
+ - subnet: {get_resource: mgmt_net_subnet}
+ floating_ip2:
+ type: OS::Neutron::FloatingIP
+ properties:
+ floating_network: {get_param: public_net_name}
+ port_id: {get_resource: stcv_2_port_1}
+ stcv_2_port_2:
+ type: OS::Neutron::Port
+ properties:
+ network: {get_resource: tst_net}
+ port_security_enabled: False
+ fixed_ips:
+ - subnet: {get_resource: tst_subnet}
+ STCv_2:
+ type: OS::Nova::Server
+ properties:
+ #availability_zone : {get_param: availability_zone_name}
+ flavor: {get_param: stcv_flavor}
+ image: {get_param: stcv_image}
+ name: STCv_2
+ user_data:
+ str_replace:
+ template: |
+ #cloud-config
+ spirent:
+ ntp: $ntp_server_ip
+ params:
+ $ntp_server_ip: {get_param: ntp_server_ip}
+ user_data_format: RAW
+ config_drive: True
+ scheduler_hints:
+ group: {get_resource: stcv_server_group}
+ networks:
+ - port: {get_resource: stcv_2_port_1}
+ - port: {get_resource: stcv_2_port_2}
+outputs:
+ STCv_1_Mgmt_Ip:
+ value: {get_attr: [floating_ip1, floating_ip_address]}
+ description: STCv_1 Mgmt IP
+ STCv_2_Mgmt_Ip:
+ value: {get_attr: [floating_ip2, floating_ip_address]}
+ description: STCv_2 Mgmt IP
+ STCv_1_Tst_Ip:
+ value: {get_attr: [stcv_1_port_2, fixed_ips]}
+ description: STCv_1 Tst IP
+ STCv_2_Tst_Ip:
+ value: {get_attr: [stcv_2_port_2, fixed_ips]}
+ description: STCv_2 Tst IP
+
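For reference, a minimal sketch of launching heat_2stcv.yaml directly with python-heatclient, mirroring the approach stcv_stack.py below takes (keystoneauth1 password auth from the usual OS_* environment variables). The stack name and parameter values here are placeholders for illustration, not part of the change:

import os

import heatclient.client as heatclient
from keystoneauth1 import loading
from keystoneauth1 import session

# Build a session from OS_* environment variables (assumed to be exported).
loader = loading.get_plugin_loader('password')
auth = loader.load_from_options(
    auth_url=os.environ['OS_AUTH_URL'],
    username=os.environ['OS_USERNAME'],
    password=os.environ['OS_PASSWORD'],
    project_name=os.environ.get('OS_PROJECT_NAME'),
    user_domain_name=os.environ.get('OS_USER_DOMAIN_NAME', 'Default'),
    project_domain_name=os.environ.get('OS_PROJECT_DOMAIN_NAME', 'Default'))
heat = heatclient.Client('1', session=session.Session(auth))

with open('heat_2stcv.yaml') as f:
    template = f.read()

# Parameter values are illustrative; keys map to the template parameters above.
stack = heat.stacks.create(
    stack_name='nettest_2stcv',
    template=template,
    parameters={
        'public_net_name': 'external',
        'stcv_image': 'stcv-4.79',
        'stcv_flavor': 'm1.tiny',
        'stcv_sg_affinity': 'affinity',
        'ntp_server_ip': '192.168.37.251',
    })
print(stack['stack']['id'])  # poll stacks.get() until CREATE_COMPLETE, then read the outputs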
diff --git a/contrib/nettest/nettest/nettest.py b/contrib/nettest/nettest/nettest.py new file mode 100644 index 00000000..c5a203e0 --- /dev/null +++ b/contrib/nettest/nettest/nettest.py @@ -0,0 +1,157 @@ +############################################################################## +# Copyright (c) 2018 Spirent Communications and others. +# +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## + +import logging +from time import sleep + +from rfc2544test import StcRfc2544Test +from stcv_stack import StcvStack + + +class NetTestMaster(object): + + def __init__(self): + self.logger = logging.getLogger(__name__) + + self.stacks = [] + self.testcases = [] + + self.stack_created = False + self.status_reason = '' + + def get_stack_by_id(self, id): + for stack in self.stacks: + if id == stack.stack_id: + return stack + return None + + def get_stack_by_name(self, name): + for stack in self.stacks: + if name == stack.name: + return stack + return None + + def create_stack(self, name, stack_type, pub_net_name, **kwargs): + if stack_type != 'stcv': + raise Exception('only support stcv stack type currently') + + try: + stack = StcvStack(name=name, + pub_net_name=pub_net_name, + ntp_server_ip=kwargs.get('license_server_ip'), + lab_server_ip=kwargs.get('lab_server_ip'), + stcv_image=kwargs.get('stcv_image'), + stcv_flavor=kwargs.get('stcv_flavor'), + stcv_affinity=kwargs.get('stcv_affinity')) + stack.create_stack() + self.stacks.append(stack) + + except Exception as err: + self.logger.error('create stack fail. 
err = %s', str(err)) + raise err + + return stack + + def delete_stack(self, stack_id): + stack = self.get_stack_by_id(stack_id) + if stack is None: + raise Exception('stack does not exist, stack_id = %s', stack_id) + + self.stacks.remove(stack) + stack.delete_stack() + + def get_tc_result(self, tc_id): + tc = self.get_tc_by_id(tc_id) + return tc.get_result() + + def get_tc_status(self, tc_id): + tc = self.get_tc_by_id(tc_id) + return tc.get_status() + + def execute_testcase(self, name, category, stack_id, **kwargs): + if category != 'rfc2544': + raise Exception("currently only support rfc2544 test") + + stack = self.get_stack_by_id(stack_id) + if stack is None: + raise Exception("defined stack not exist, stack_id = %s", stack_id) + + tc = StcRfc2544Test(name=name, + lab_server_ip=stack.lab_server_ip, + license_server_ip=stack.ntp_server_ip, + west_stcv_admin_ip=stack.get_west_stcv_ip(), + west_stcv_tst_ip=stack.get_west_stcv_tst_ip(), + east_stcv_admin_ip=stack.get_east_stcv_ip(), + east_stcv_tst_ip=stack.get_east_stcv_tst_ip(), + stack_id=stack_id, + **kwargs) + self.testcases.append(tc) + tc.execute() + + return tc.tc_id + + def get_tc_by_id(self, id): + for tc in self.testcases: + if id == tc.tc_id: + return tc + return None + + def delete_testcase(self, tc_id): + tc = self.get_tc_by_id(tc_id) + + if tc.status == 'finished': + tc.delete_result() + + if tc.status == 'running': + tc.cancel_run() + + self.testcases.remove(tc) + + +if __name__ == "__main__": + try: + nettest = NetTestMaster() + stack_params = { + "stcv_affinity": True, + "stcv_image": "stcv-4.79", + "stcv_flavor": "m1.tiny", + "lab_server_ip": "192.168.37.122", + "license_server_ip": "192.168.37.251" + } + + stack = nettest.create_stack(name='stack1', + stack_type='stcv', + pub_net_name='external', + **stack_params) + tc_params = { + 'metric': 'throughput', + 'framesizes': [64, 128] + } + tc = nettest.execute_testcase(name='tc1', + category='rfc2544', + stack_id=stack.stack_id, + **tc_params) + + print "test case id is %s" % tc.id + + status = tc.get_status() + while (status != tc.TC_STATUS_FINISHED): + if status == tc.TC_STATUS_ERROR: + print "tc exectue fail, reason %s" % tc.get_err_reason() + break + sleep(2) + if status == tc.TC_STATUS_FINISHED: + print tc.get_result() + + nettest.delete_testcase(tc.id) + + nettest.delete_stack(stack.stack_id) + + except Exception as err: + print err diff --git a/contrib/nettest/nettest/requirements.txt b/contrib/nettest/nettest/requirements.txt new file mode 100644 index 00000000..3efb124b --- /dev/null +++ b/contrib/nettest/nettest/requirements.txt @@ -0,0 +1,9 @@ +flask +flask_cors +flask_restful +flask_restful_swagger +#openstacksdk +keystoneauth1 +python-heatclient +stcrestclient + diff --git a/contrib/nettest/nettest/rest_server.py b/contrib/nettest/nettest/rest_server.py new file mode 100644 index 00000000..ee13c91b --- /dev/null +++ b/contrib/nettest/nettest/rest_server.py @@ -0,0 +1,343 @@ +############################################################################## +# Copyright (c) 2018 Spirent Communications and others. +# +# All rights reserved. 
This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## + +import logging + +from flask import Flask, abort, jsonify, request +from flask_cors import CORS +from flask_restful import Api, Resource, fields +from flask_restful_swagger import swagger + +from nettest import NetTestMaster + +app = Flask(__name__) +CORS(app) +api = swagger.docs(Api(app), apiVersion="1.0") + +stcv_master = NetTestMaster() + + +@swagger.model +class StackRequestModel: + resource_fields = { + 'stack_name': fields.String, + 'stack_type': fields.String, + 'public_network': fields.String, + "stack_params": fields.Nested, + } + + +@swagger.model +class StackResponseModel: + resource_fields = { + 'stack_name': fields.String, + 'stack_created': fields.Boolean, + "stack_id": fields.String + } + + +class Stack(Resource): + def __init__(self): + self.logger = logging.getLogger(__name__) + + @swagger.operation( + notes='Fetch the stack configuration', + parameters=[ + { + "name": "id", + "description": "The UUID of the stack in the format " + "NNNNNNNN-NNNN-NNNN-NNNN-NNNNNNNNNNNN", + "required": True, + "type": "string", + "allowMultiple": False, + "paramType": "query" + }, + ], + type=StackResponseModel.__name__ + ) + def get(self): + stack_id = request.args.get('id') + stack = stcv_master.get_stack_by_id(stack_id) + + if not stack: + abort(404) + + return jsonify({ + 'stack_name': stack.name, + 'stack_created': True, + "stack_id": stack_id}) + + @swagger.operation( + notes='''set the current agent configuration and create a stack in + the controller. Returns once the stack create is completed.''', + parameters=[ + { + "name": "stack", + "description": '''Configuration to be set. All parameters are + necessory. 
+ ''', + "required": True, + "type": "StackRequestModel", + "paramType": "body" + } + ], + type=StackResponseModel.__name__ + ) + def post(self): + if not request.json: + abort(400, "ERROR: No data specified") + + self.logger.info(request.json) + + try: + params = { + 'lab_server_ip': request.json['stack_params'].get('lab_server_ip'), + 'license_server_ip': request.json['stack_params'].get('license_server_ip'), + 'stcv_image': request.json['stack_params'].get('stcv_image'), + 'stcv_flavor': request.json['stack_params'].get('stcv_flavor'), + 'stcv_affinity': request.json['stack_params'].get('stcv_affinity') + } + + stack = stcv_master.create_stack(name=request.json['stack_name'], + stack_type=request.json['stack_type'], + pub_net_name=request.json['public_network'], + **params) + if stack is None: + abort(400, "ERROR: create stack fail") + + return jsonify({'stack_name': request.json['stack_name'], + 'stack_created': True, + 'stack_id': stack.stack_id}) + + except Exception as e: + abort(400, str(e)) + + @swagger.operation( + notes='delete deployed stack', + parameters=[ + { + "name": "id", + "description": "The UUID of the stack in the format " + "NNNNNNNN-NNNN-NNNN-NNNN-NNNNNNNNNNNN", + "required": True, + "type": "string", + "allowMultiple": False, + "paramType": "query" + }, + ], + responseMessages=[ + { + "code": 200, + "message": "Stack ID found, response in JSON format" + }, + { + "code": 404, + "message": "Stack ID not found" + } + ] + ) + def delete(self): + try: + stack_id = request.args.get('id') + stcv_master.delete_stack(stack_id) + except Exception as e: + abort(400, str(e)) + + +@swagger.model +class TestcaseRequestModel: + resource_fields = { + 'name': fields.String, + 'category': fields.String, + 'stack_id': fields.String, + 'params': fields.Nested + } + + +@swagger.model +class TestcaseResponseModel: + resource_fields = { + 'name': fields.String, + 'category': fields.String, + 'stack_id': fields.String, + 'tc_id': fields.String + } + + +class TestCase(Resource): + + """TestCase API""" + + def __init__(self): + self.logger = logging.getLogger(__name__) + + @swagger.operation( + notes='Fetch the metrics of the specified testcase', + parameters=[ + { + "name": "id", + "description": "The UUID of the testcase in the format " + "NNNNNNNN-NNNN-NNNN-NNNN-NNNNNNNNNNNN", + "required": True, + "type": "string", + "allowMultiple": False, + "paramType": "query" + }, + { + "name": "type", + "description": "The type of metrics to report. 
May be " + "metrics (default), metadata, or status", + "required": True, + "type": "string", + "allowMultiple": False, + "paramType": "query" + } + ], + responseMessages=[ + { + "code": 200, + "message": "Workload ID found, response in JSON format" + }, + { + "code": 404, + "message": "Workload ID not found" + } + ] + ) + def get(self): + tc_id = request.args.get('id') + query_type = request.args.get('type') + ret = {} + + try: + tc = stcv_master.get_tc_by_id(tc_id) + if query_type == "result": + ret = tc.get_result() + + if query_type == "status": + status = tc.get_status() + ret['status'] = status + if 'error' == status: + reason = tc.get_err_reason() + ret['reason'] = reason + + return jsonify(ret) + + except Exception as err: + abort(400, str(err)) + + @swagger.operation( + parameters=[ + { + "name": "body", + "description": """Start execution of a testcase with the +parameters, only support rfc25cc test + """, + "required": True, + "type": "TestcaseRequestModel", + "paramType": "body" + } + ], + type=TestcaseResponseModel.__name__, + responseMessages=[ + { + "code": 200, + "message": "TestCase submitted" + }, + { + "code": 400, + "message": "Missing configuration data" + } + ] + ) + def post(self): + if not request.json: + abort(400, "ERROR: Missing configuration data") + + self.logger.info(request.json) + + try: + name = request.json['name'] + category = request.json['category'] + stack_id = request.json['stack_id'] + tc_id = stcv_master.execute_testcase(name=request.json['name'], + category=request.json['category'], + stack_id=request.json['stack_id'], + **request.json['params']) + + return jsonify({'name': name, + 'category': category, + 'stack_id': stack_id, + 'tc_id': tc_id}) + + except Exception as e: + abort(400, str(e)) + + @swagger.operation( + notes='Cancels the currently running testcase or delete testcase result', + parameters=[ + { + "name": "id", + "description": "The UUID of the testcase in the format " + "NNNNNNNN-NNNN-NNNN-NNNN-NNNNNNNNNNNN", + "required": True, + "type": "string", + "allowMultiple": False, + "paramType": "query" + }, + ], + responseMessages=[ + { + "code": 200, + "message": "Wordload ID found, response in JSON format" + }, + ] + ) + def delete(self): + try: + tc_id = request.args.get("id") + self.logger.info("receive delete testcase msg. tc_id = %s", tc_id) + + stcv_master.delete_testcase(tc_id) + + except Exception as e: + abort(400, str(e)) + + +api.add_resource(Stack, "/api/v1.0/stack") +api.add_resource(TestCase, "/api/v1.0/testcase") + +''' +@app.route("/") +def hello_world(): + return 'hello world' + +@app.route("/testcases") +def get_testcases(): + return [] + + +@app.route("/testcases/<int: tc_id>") +def query_testcase(tc_id): + return [] + +@app.route("/stctest/api/v1.0/testcase/<string: tc_name>", methods = ['GET']) +def query_tc_result(tc_name): + return [] + +@app.route("/stctest/api/v1.0/testcase", methods = ['POST']) +def execut_testcase(): + return [] +''' + + +if __name__ == "__main__": + logger = logging.getLogger("nettest").setLevel(logging.DEBUG) + + app.run(host="0.0.0.0", debug=True, threaded=True) diff --git a/contrib/nettest/nettest/rfc2544test.py b/contrib/nettest/nettest/rfc2544test.py new file mode 100644 index 00000000..688b4d12 --- /dev/null +++ b/contrib/nettest/nettest/rfc2544test.py @@ -0,0 +1,576 @@ +############################################################################## +# Copyright (c) 2018 Spirent Communications and others. +# +# All rights reserved. 
This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## + +import base64 +import copy +import logging +import os +import shutil +import threading +from time import sleep +import uuid + +import requests +from stcrestclient import stchttp + + +class Stcv2Net1Stack(object): + ADMIN_NETWORK_NAME = "admin" + ADMIN_SUBNET_ADDR = "50.50.50.0/24" + ADMIN_GW_IP = "50.50.50.1" + TST_NETWORK_NAME = "tst" + TST_SUBNET_ADDR = "192.168.0.0/24" + TST_GW_IP = "192.168.0.1" + ROUTER_NAME = "router" + WEST_STCV_NAME = "west_stcv" + EAST_STCV_NAME = "east_stcv" + AFFINITY_SG_NAME = "affinity" + STCV_USER_DATA = '''#cloud-config +spirent: + ntp: ''' + + def __init__(self, name, conn, ext_network_name, params): + self.logger = logging.getLogger(__name__) + + self.name = name + self.conn = conn + self.ext_network_name = ext_network_name + self.image_name = params['stcv_image'] + self.flavor_name = params['stcv_flavor'] + self.ntp_server_ip = params['license_server_ip'] + self.affinity = params['stcv_affinity'] + + self.stack_id = str(uuid.uuid4()) + self.admin_network = None + self.admin_subnet = None + self.tst_network = None + self.tst_subnet = None + self.ext_network = None + self.router = None + self.affinity_sg = None + + self.west_stcv = None + self.west_stcv_ip = '' + self.east_stcv = None + self.east_stcv_ip = '' + + def _deploy_test_network(self): + + # create tst network and subnet + self.tst_network = self.conn.network.create_network( + name=self.TST_NETWORK_NAME) + self.tst_subnet = self.conn.network.create_subnet( + name=self.TST_NETWORK_NAME + '_subnet', + network_id=self.tst_network.id, + ip_version='4', + cidr=self.TST_SUBNET_ADDR, + gateway_ip=self.TST_GW_IP, + is_dhcp_enabled=True) + + # create admin network and subnet + self.admin_network = self.conn.network.create_network( + name=self.ADMIN_NETWORK_NAME) + self.admin_subnet = self.conn.network.create_subnet( + name=self.ADMIN_NETWORK_NAME + '_subnet', + network_id=self.admin_network.id, + ip_version='4', + cidr=self.ADMIN_SUBNET_ADDR, + gateway_ip=self.ADMIN_GW_IP, + is_dhcp_enabled=True) + + # create external gateway and connect admin subnet to router + self.ext_network = self.conn.network.find_network(self.ext_network_name) + self.router = self.conn.network.create_router(name=self.ROUTER_NAME, + external_gateway_info={"network_id": self.ext_network.id}, + is_admin_state_up=True) + self.conn.network.add_interface_to_router(self.router, subnet_id=self.admin_subnet.id) + + def _depoly_stcv(self, name, image_id, flavor_id, scheduler_hints, user_data): + + stcv = self.conn.compute.create_server( + name=name, image_id=image_id, flavor_id=flavor_id, + networks=[{"uuid": self.admin_network.id}, {"uuid": self.tst_network.id}], + config_drive=True, + user_data=base64.encodestring(user_data) + ) + stcv = self.conn.compute.wait_for_server(stcv) + + stcv_fixed_ip = stcv.addresses[self.admin_network.name][0]['addr'] + stcv_floating_ip = self.conn.network.create_ip(floating_network_id=self.ext_network.id) + self.conn.compute.add_floating_ip_to_server(server=stcv, address=stcv_floating_ip.floating_ip_address, + fixed_address=stcv_fixed_ip) + + return {'stcv': stcv, 'fixed_ip': stcv_fixed_ip, 'floating_ip': stcv_floating_ip} + + def create_stack(self): + + image = self.conn.compute.find_image(self.image_name) + 
flavor = self.conn.compute.find_flavor(self.flavor_name) + + if self.affinity: + self.affinity_sg = \ + self.conn.compute.create_server_group(name=self.AFFINITY_SG_NAME, + policies=["affinity"]) + else: + self.affinity_sg = \ + self.conn.compute.create_server_group(name=self.AFFINITY_SG_NAME, + policies=["anti-affinity"]) + self._deploy_test_network() + + user_data = self.STCV_USER_DATA + self.ntp_server_ip + + stcv = self._depoly_stcv(name=self.WEST_STCV_NAME, + image_id=image.id, + flavor_id=flavor.id, + scheduler_hints=self.affinity_sg, + user_data=user_data) + self.west_stcv = stcv['stcv'] + self.west_stcv_ip = stcv['floating_ip'] + + stcv = self._depoly_stcv(name=self.EAST_STCV_NAME, + image_id=image.id, + flavor_id=flavor.id, + scheduler_hints=self.affinity_sg, + user_data=user_data) + self.east_stcv = stcv['stcv'] + self.east_stcv_ip = stcv['floating_ip'] + + def delete_stack(self): + + self.conn.compute.delete_server(self.west_stcv, ignore_missing=True) + self.conn.compute.delete_server(self.east_stcv, ignore_missing=True) + + self.conn.compute.delete_server_group(server_group=self.affinity_sg, + ignore_missing=True) + + # delete external gateway + self.conn.network.delete_router(self.router, ignore_missing=True) + + # delete tst network + self.conn.network.delete_subnet(self.tst_subnet, ignore_missing=True) + self.conn.network.delete_network(self.tst_network, ignore_missing=True) + + # delete admin network + self.conn.network.delete_subnet(self.admin_subnet, ignore_missing=True) + self.conn.network.delete_network(self.admin_network, ignore_missing=True) + + +class StcSession: + """ wrapper class for stc session""" + + def __init__(self, labserver_addr, user_name, session_name): + self.logger = logging.getLogger(__name__) + + # create connection obj + self.stc = stchttp.StcHttp(labserver_addr) + self.user_name = user_name + self.session_name = session_name + + # create session on labserver + self.session_id = self.stc.new_session(self.user_name, self.session_name) + self.stc.join_session(self.session_id) + return + + def __del__(self): + # destroy resource on labserver + self.stc.end_session() + + def clean_all_session(self): + session_urls = self.stc.session_urls() + for session in session_urls: + resp = requests.delete(session) + self.logger.info("delete session resp: %s", str(resp)) + return + + +class StcRfc2544Test: + """ RFC2544 test class""" + + RESULT_PATH_PREFIX = './tc_results/rfc2544/' + TC_STATUS_INIT = 'init' + TC_STATUS_RUNNING = 'running' + TC_STATUS_FINISHED = 'finished' + TC_STATUS_ERROR = 'error' + + default_additional_params = { + "AcceptableFrameLoss": 0.0, + "Duration": 60, + "FrameSizeList": 64, + "LearningMode": 'AUTO', + "NumOfTrials": 1, + "RateInitial": 99.0, + "RateLowerLimit": 99.0, + "RateStep": 10.0, + "RateUpperLimit": 99.0, + "Resolution": 1.0, + "SearchMode": 'BINARY', + "TrafficPattern": 'PAIR' + } + + def __init__(self, name, lab_server_ip, license_server_ip, + west_stcv_admin_ip, west_stcv_tst_ip, + east_stcv_admin_ip, east_stcv_tst_ip, + stack_id=None, **kwargs): + self.logger = logging.getLogger(__name__) + + self.name = name + self.lab_server_ip = lab_server_ip + self.license_server_ip = license_server_ip + self.west_stcv_ip = west_stcv_admin_ip + self.west_stcv_tst_ip = west_stcv_tst_ip + self.east_stcv_ip = east_stcv_admin_ip + self.east_stcv_tst_ip = east_stcv_tst_ip + self.stack_id = stack_id + self.metric = kwargs.get('metric') + self.additional_params = copy.copy(self.default_additional_params) + self.additional_params['FrameSizeList'] = 
kwargs.get('framesizes') + + self.tc_id = str(uuid.uuid4()) + + self.stc = None + self.sess = None + self.executor = None + self.status = 'init' + self.err_reason = '' + + def config_license(self, license_server_addr): + license_mgr = self.stc.get("system1", "children-licenseservermanager") + self.stc.create("LicenseServer", + under=license_mgr, + attributes={"server": license_server_addr}) + return + + def create_project(self, traffic_custom=None): + self.project = self.stc.get("System1", "children-Project") + # Configure any custom traffic parameters + if traffic_custom == "cont": + self.stc.create("ContinuousTestConfig", under=self.project) + return + + def config_test_port(self, chassis_addr, slot_no, port_no, intf_addr, gateway_addr): + # create test port + port_loc = "//%s/%s/%s" % (chassis_addr, slot_no, port_no) + chassis_port = self.stc.create('port', self.project) + self.stc.config(chassis_port, {'location': port_loc}) + + # Create emulated genparam for east port + device_gen_params = self.stc.create("EmulatedDeviceGenParams", + under=self.project, + attributes={"Port": chassis_port}) + # Create the DeviceGenEthIIIfParams object + self.stc.create("DeviceGenEthIIIfParams", + under=device_gen_params, + attributes={"UseDefaultPhyMac": "True"}) + + # Configuring Ipv4 interfaces + self.stc.create("DeviceGenIpv4IfParams", + under=device_gen_params, + attributes={"Addr": intf_addr, "Gateway": gateway_addr}) + + # Create Devices using the Device Wizard + self.stc.perform("DeviceGenConfigExpand", + params={"DeleteExisting": "No", "GenParams": device_gen_params}) + + return + + def do_test(self): + if self.metric == "throughput": + self.stc.perform("Rfc2544SetupThroughputTestCommand", self.additional_params) + elif self.metric == "backtoback": + self.stc.perform("Rfc2544SetupBackToBackTestCommand", self.additional_params) + elif self.metric == "frameloss": + self.stc.perform("Rfc2544SetupFrameLossTestCommand", self.additional_params) + elif self.metric == "latency": + self.stc.perform("Rfc2544SetupLatencyTestCommand", self.additional_params) + else: + raise Exception("invalid rfc2544 test metric.") + + # Save the configuration + self.stc.perform("SaveToTcc", params={"Filename": "2544.tcc"}) + + # Connect to the hardware... + self.stc.perform("AttachPorts", + params={"portList": self.stc.get("system1.project", "children-port"), + "autoConnect": "TRUE"}) + + # Apply configuration. 
+ self.stc.apply() + self.stc.perform("SequencerStart") + self.stc.wait_until_complete() + + return + + def write_query_results_to_csv(self, results_path, csv_results_file_prefix, query_results): + filec = os.path.join(results_path, csv_results_file_prefix + ".csv") + with open(filec, "wb") as result_file: + result_file.write(query_results["Columns"].replace(" ", ",") + "\n") + for row in (query_results["Output"].replace("} {", ",").replace("{", "").replace("}", "").split(",")): + result_file.write(row.replace(" ", ",") + "\n") + + def format_result(self, metric, original_result_dict): + result = {} + if metric == 'throughput': + columns = original_result_dict["Columns"].split(' ') + index_framesize = columns.index("ConfiguredFrameSize") + index_result = columns.index("Result") + index_throughput = columns.index("Throughput(%)") + index_ForwardingRate = columns.index("ForwardingRate(fps)") + outputs = \ + original_result_dict["Output"].replace('} {', ',').replace("{", "").replace("}", "").split(",") + + for row in outputs: + output = row.split(' ') + result[output[index_framesize]] = {'Result': output[index_result], + "Throughput(%)": output[index_throughput], + "ForwardingRate(fps)": output[index_ForwardingRate]} + + elif self.metric == "latency": + pass + + elif self.metric == "frameloss": + pass + + elif self.metric == "backtoback": + pass + + return result + + def collect_result(self, local_dir): + # Determine what the results database filename is... + lab_server_resultsdb = self.stc.get( + "system1.project.TestResultSetting", "CurrentResultFileName") + self.stc.perform("CSSynchronizeFiles", + params={"DefaultDownloadDir": local_dir}) + + resultsdb = local_dir + lab_server_resultsdb.split("/Results")[1] + + if not os.path.exists(resultsdb): + resultsdb = lab_server_resultsdb + self.logger.info("Failed to create the local summary DB File, using" + " the remote DB file instead.") + else: + self.logger.info( + "The local summary DB file has been saved to %s", resultsdb) + + if self.metric == "throughput": + resultsdict = self.stc.perform("QueryResult", + params={ + "DatabaseConnectionString": lab_server_resultsdb, + "ResultPath": "RFC2544ThroughputTestResultDetailedSummaryView"}) + elif self.metric == "backtoback": + resultsdict = self.stc.perform("QueryResult", + params={ + "DatabaseConnectionString": lab_server_resultsdb, + "ResultPath": "RFC2544Back2BackTestResultDetailedSummaryView"}) + elif self.metric == "frameloss": + resultsdict = self.stc.perform("QueryResult", + params={ + "DatabaseConnectionString": lab_server_resultsdb, + "ResultPath": "RFC2544LatencyTestResultDetailedSummaryView"}) + elif self.metric == "latency": + resultsdict = self.stc.perform("QueryResult", + params={ + "DatabaseConnectionString": lab_server_resultsdb, + "ResultPath": "RFC2544FrameLossTestResultDetailedSummaryView"}) + else: + raise Exception("invalid rfc2544 test metric.") + + self.write_query_results_to_csv(self.results_dir, self.metric, resultsdict) + + self.result = self.format_result(self.metric, resultsdict) + + return + + def thread_entry(self): + self.status = self.TC_STATUS_RUNNING + try: + # create session on lab server + self.sess = StcSession(self.lab_server_ip, session_name=self.name, user_name=self.name) + self.stc = self.sess.stc + + # create test result directory + self.results_dir = self.RESULT_PATH_PREFIX + self.tc_id + '/' + os.makedirs(self.results_dir) + + # Bring up license server + self.config_license(self.license_server_ip) + + self.logger.info("config license success, 
license_server_addr = %s.", self.license_server_ip) + + # Create the root project object and Configure any custom traffic parameters + self.create_project() + + self.logger.info("create project success.") + + # configure test port + self.config_test_port(self.west_stcv_ip, 1, 1, self.west_stcv_tst_ip, self.east_stcv_tst_ip) + self.config_test_port(self.east_stcv_ip, 1, 1, self.east_stcv_tst_ip, self.west_stcv_tst_ip) + + self.logger.info("config test port success, west_chassis_addr = %s, east_chassis_addr = %s.", + self.west_stcv_ip, self.east_stcv_ip) + + # execute test + self.do_test() + + self.logger.info("execute test success.") + + # collect test result + self.collect_result(self.results_dir) + + self.logger.info("collect result file success, results_dir = %s.", self.results_dir) + + self.status = self.TC_STATUS_FINISHED + + except Exception as err: + self.logger.error("Failed to execute Rfc2544 testcase, err: %s", str(err)) + self.err_reason = str(err) + self.status = self.TC_STATUS_ERROR + + finally: + if self.sess is not None: + self.sess.clean_all_session() + + def execute(self): + + self.executor = threading.Thread(name='rfc2544', target=self.thread_entry()) + self.executor.start() + + def get_result(self): + if self.status != self.TC_STATUS_FINISHED: + return {'name': self.name, + 'tc_id': self.tc_id, + 'status': self.status + } + + return {'name': self.name, + 'category': 'rfc2544', + 'id': self.tc_id, + 'params': { + 'metric': self.metric, + 'framesizes': self.additional_params.get('FrameSizeList')}, + 'result': self.result} + + def get_status(self): + return self.status + + def delete_result(self): + shutil.rmtree(self.results_dir) + pass + + def cancel_run(self): + pass + + def get_err_reason(self): + return self.err_reason + + +if __name__ == '__main__': + + lab_server_ip = '192.168.37.122' + license_server_ip = '192.168.37.251' + west_stcv_admin_ip = '192.168.37.202' + west_stcv_tst_ip = '192.168.1.20' + east_stcv_admin_ip = '192.168.37.212' + east_stcv_tst_ip = '192.168.1.17' + + tc = StcRfc2544Test(name='tc1', + lab_server_ip=lab_server_ip, + license_server_ip=license_server_ip, + west_stcv_admin_ip=west_stcv_admin_ip, + west_stcv_tst_ip=west_stcv_tst_ip, + east_stcv_admin_ip=east_stcv_admin_ip, + east_stcv_tst_ip=east_stcv_tst_ip, + metric="throughput", + framesizes=[64, 128, 256, 512, 1024]) + tc.execute() + status = tc.get_status() + while(status != tc.TC_STATUS_FINISHED): + if status == tc.TC_STATUS_ERROR: + print "tc exectue fail, reason %s" % tc.get_err_reason() + break + sleep(2) + if status == tc.TC_STATUS_FINISHED: + print tc.get_result() +''' + tc = StcRfc2544Test(name='tc2', + lab_server_ip=lab_server_ip, + license_server_ip=license_server_ip, + west_stcv_admin_ip=west_stcv_admin_ip, + west_stcv_tst_ip=west_stcv_tst_ip, + east_stcv_admin_ip=east_stcv_admin_ip, + east_stcv_tst_ip=east_stcv_tst_ip, + metric="latency", + framesizes=[64, 128, 256, 512, 1024]) + tc.execute() + status = tc.get_status() + while(status != tc.TC_STATUS_FINISHED): + if status == tc.TC_STATUS_ERROR: + print "tc exectue fail, reason %s" % tc.get_err_reason() + break + sleep(2) + if status == tc.TC_STATUS_FINISHED: + print tc.get_result() + + tc = StcRfc2544Test(name='tc3', + lab_server_ip=lab_server_ip, + license_server_ip=license_server_ip, + west_stcv_admin_ip=west_stcv_admin_ip, + west_stcv_tst_ip=west_stcv_tst_ip, + east_stcv_admin_ip=east_stcv_admin_ip, + east_stcv_tst_ip=east_stcv_tst_ip, + metric="backtoback", + framesizes=[64, 128, 256, 512, 1024]) + tc.execute() + status = 
tc.get_status() + while(status != tc.TC_STATUS_FINISHED): + if status == tc.TC_STATUS_ERROR: + print "tc execute fail, reason %s" % tc.get_err_reason() + break + sleep(2) + status = tc.get_status() + if status == tc.TC_STATUS_FINISHED: + print tc.get_result() + + tc = StcRfc2544Test(name='tc4', + lab_server_ip=lab_server_ip, + license_server_ip=license_server_ip, + west_stcv_admin_ip=west_stcv_admin_ip, + west_stcv_tst_ip=west_stcv_tst_ip, + east_stcv_admin_ip=east_stcv_admin_ip, + east_stcv_tst_ip=east_stcv_tst_ip, + metric="frameloss", + framesizes=[64, 128, 256, 512, 1024]) + tc.execute() + status = tc.get_status() + while(status != tc.TC_STATUS_FINISHED): + if status == tc.TC_STATUS_ERROR: + print "tc execute fail, reason %s" % tc.get_err_reason() + break + sleep(2) + status = tc.get_status() + if status == tc.TC_STATUS_FINISHED: + print tc.get_result() +''' + +''' +class Testcase(object): + + def __init__(self, stack): + self.stack = stack + + def execute(self): + pass + +class TestcaseFactory(object): + + def __init__(self): + + def create_tc(self, tc_metadata): + self.tc_name = tc_metadata['tc_name'] + self.tc_id = str(uuid.uuid4()) + if +''' diff --git a/contrib/nettest/nettest/start.sh b/contrib/nettest/nettest/start.sh new file mode 100644 index 00000000..12ae3eb0 --- /dev/null +++ b/contrib/nettest/nettest/start.sh @@ -0,0 +1,11 @@ +#!/bin/bash +############################################################################## +# Copyright (c) 2018 Spirent Communications and others. +# +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## + +exec /usr/bin/python rest_server.py diff --git a/contrib/nettest/nettest/stcv_stack.py b/contrib/nettest/nettest/stcv_stack.py new file mode 100644 index 00000000..6e69f479 --- /dev/null +++ b/contrib/nettest/nettest/stcv_stack.py @@ -0,0 +1,174 @@ +############################################################################## +# Copyright (c) 2018 Spirent Communications and others. +# +# All rights reserved.
This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## + +import logging +import os +from time import sleep +import traceback + +import heatclient.client as heatclient +from keystoneauth1 import loading +from keystoneauth1 import session + + +class StcvStack(object): + STCV_CONFIG_FILE = 'stcv_config_file' + STCV_HEAT_FILE = './heat_2stcv.yaml' + STCV_USER_DATA = '''#cloud-config + spirent: + ntp: ''' + + def __init__(self, name, **kwargs): + self.logger = logging.getLogger(__name__) + + self.name = name + self.pub_net_name = kwargs.get('pub_net_name') + self.ntp_server_ip = kwargs.get('ntp_server_ip') + self.lab_server_ip = kwargs.get('lab_server_ip') + self.stcv_image = kwargs.get('stcv_image') + self.stcv_flavor = kwargs.get('stcv_flavor') + if kwargs.get('stcv_affinity'): + self.stcv_affinity = 'affinity' + else: + self.stcv_affinity = 'anti-affinity' + + self.stack_id = None + self._heat_client = None + + def _attach_to_openstack(self): + creds = {"username": os.environ.get('OS_USERNAME'), + "password": os.environ.get('OS_PASSWORD'), + "auth_url": os.environ.get('OS_AUTH_URL'), + "project_domain_id": os.environ.get('OS_PROJECT_DOMAIN_ID'), + "project_domain_name": os.environ.get('OS_PROJECT_DOMAIN_NAME'), + "project_id": os.environ.get('OS_PROJECT_ID'), + "project_name": os.environ.get('OS_PROJECT_NAME'), + "tenant_name": os.environ.get('OS_TENANT_NAME'), + "tenant_id": os.environ.get("OS_TENANT_ID"), + "user_domain_id": os.environ.get('OS_USER_DOMAIN_ID'), + "user_domain_name": os.environ.get('OS_USER_DOMAIN_NAME') + } + + self.logger.debug("Creds: %s" % creds) + + loader = loading.get_plugin_loader('password') + auth = loader.load_from_options(**creds) + sess = session.Session(auth) + self._heat_client = heatclient.Client("1", session=sess) + + def _make_parameters(self): + user_data = self.STCV_USER_DATA + self.ntp_server_ip + file_path = os.getcwd() + '/' + self.STCV_CONFIG_FILE + fd = open(file_path, 'w') + fd.write(user_data) + fd.close() + + return { + 'public_net_name': self.pub_net_name, + 'stcv_image': self.stcv_image, + 'stcv_flavor': self.stcv_flavor, + 'stcv_sg_affinity': self.stcv_affinity, + 'ntp_server_ip': self.ntp_server_ip + } + + def acquire_ip_from_stack_output(self, output, key_name): + ip = None + for item in output: + if item['output_key'] == key_name: + ip = item['output_value'] + if isinstance(ip, list): + ip = ip[0]['ip_address'] + break + + return ip + + def create_stack(self): + with open(self.STCV_HEAT_FILE) as fd: + template = fd.read() + + self._attach_to_openstack() + + self.logger.debug("Creating stack") + + stack = self._heat_client.stacks.create( + stack_name=self.name, + template=template, + parameters=self._make_parameters()) + + self.stack_id = stack['stack']['id'] + + while True: + stack = self._heat_client.stacks.get(self.stack_id) + status = getattr(stack, 'stack_status') + self.logger.debug("Stack status=%s" % (status,)) + if (status == u'CREATE_COMPLETE'): + self.stcv1_ip = self.acquire_ip_from_stack_output(stack.outputs, "STCv_1_Mgmt_Ip") + self.stcv2_ip = self.acquire_ip_from_stack_output(stack.outputs, "STCv_2_Mgmt_Ip") + self.stcv1_tst_ip = self.acquire_ip_from_stack_output(stack.outputs, "STCv_1_Tst_Ip") + self.stcv2_tst_ip = self.acquire_ip_from_stack_output(stack.outputs,
"STCv_2_Tst_Ip") + break + if (status == u'DELETE_COMPLETE'): + self.stack_id = None + break + if (status == u'CREATE_FAILED'): + self.status_reason = getattr(stack, 'stack_status_reason') + sleep(5) + self._heat_client.stacks.delete(stack_id=self.stack_id) + sleep(2) + + def delete_stack(self): + if self.stack_id is None: + raise Exception('stack does not exist') + + self._attach_to_openstack() + while True: + stack = self._heat_client.stacks.get(self.stack_id) + status = getattr(stack, 'stack_status') + self.logger.debug("Stack status=%s" % (status,)) + if (status == u'CREATE_COMPLETE'): + self._heat_client.stacks.delete(stack_id=self.stack_id) + if (status == u'DELETE_COMPLETE'): + self.stack_id = None + break + if (status == u'DELETE_FAILED'): + sleep(5) + self._heat_client.stacks.delete(stack_id=self.stack_id) + sleep(2) + + def get_west_stcv_ip(self): + return self.stcv1_ip + + def get_west_stcv_tst_ip(self): + return self.stcv1_tst_ip + + def get_east_stcv_ip(self): + return self.stcv2_ip + + def get_east_stcv_tst_ip(self): + return self.stcv2_tst_ip + + +if __name__ == '__main__': + try: + stack = StcvStack(name='stack1', + pub_net_name='external', + ntp_server_ip='192.168.37.151', + stcv_image='stcv-4.79', + stcv_flavor='m1.tiny', + stcv_affinity=False) + stack.create_stack() + + print stack.get_east_stcv_ip() + print stack.get_east_stcv_tst_ip() + print stack.get_west_stcv_ip() + print stack.get_west_stcv_tst_ip() + + except Exception: + excstr = traceback.format_exc() + print excstr diff --git a/docs/conf.py b/docs/conf.py index 4469b229..5c21c3a6 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,3 +1,12 @@ +############################################################################## +# Copyright (c) 2017 ZTE Corporation and others. +# +# All rights reserved.
This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## + # -*- coding: utf-8 -*- # # QTIP documentation build configuration file, created by diff --git a/qtip/cli/commands/cmd_project.py b/qtip/cli/commands/cmd_project.py index 2836fa69..11fa63e5 100644 --- a/qtip/cli/commands/cmd_project.py +++ b/qtip/cli/commands/cmd_project.py @@ -59,7 +59,16 @@ def cli(): help='Host configured for ssh client or IP addresses and domain name') @click.option('--scenario', help='OPNFV scenario') -def create(project_name, project_template, pod_name, installer_type, installer_host, scenario): +@click.option('--sut', + prompt='System Under Test type', + help='Type of the system under test, e.g. node (default) or vnf') +def create(project_name, + project_template, + pod_name, + installer_type, + installer_host, + scenario, + sut): qtip_generator_role = os.path.join(utils.QTIP_ANSIBLE_ROLES, 'qtip-generator') extra_vars = { 'qtip_package': utils.QTIP_PACKAGE, @@ -69,7 +78,8 @@ def create(project_name, project_template, pod_name, installer_type, installer_h 'pod_name': pod_name, 'installer_type': installer_type, 'installer_host': installer_host, - 'scenario': scenario + 'scenario': scenario, + 'sut': sut } os.system("ANSIBLE_ROLES_PATH={roles_path} ansible-playbook" " -i {hosts}" diff --git a/qtip/scripts/quickstart.sh b/qtip/scripts/quickstart.sh index b430aa3f..22257f5b 100644 --- a/qtip/scripts/quickstart.sh +++ b/qtip/scripts/quickstart.sh @@ -13,7 +13,8 @@ set -o pipefail set -x usage(){ - echo "usage: $0 -q <qtip_test_suite> -t <installer_type> -i <installer_ip> -p <pod_name> -s <scenario> -r <report_url>" >&2 + echo "usage: $0 -q <qtip_test_suite> -t <installer_type> -i <installer_ip> -p <pod_name> -s <scenario> -r <report_url> + -u <sut>" >&2 } verify_connectivity(){ @@ -30,7 +31,7 @@ verify_connectivity(){ } #Getoptions -while getopts ":t:i:p:s:r:he" optchar; do +while getopts ":q:t:i:p:s:r:u:he" optchar; do case "${optchar}" in q) test_suite=${OPTARG} ;; t) installer_type=${OPTARG} ;; @@ -38,6 +39,7 @@ while getopts ":t:i:p:s:r:he" optchar; do p) pod_name=${OPTARG} ;; s) scenario=${OPTARG} ;; r) testapi_url=${OPTARG} ;; + u) sut=${OPTARG} ;; h) usage exit 0 ;; @@ -55,6 +57,7 @@ test_suite=${test_suite:-$TEST_SUITE} pod_name=${pod_name:-$NODE_NAME} scenario=${scenario:-$SCENARIO} testapi_url=${testapi_url:-$TESTAPI_URL} +sut=${sut:-node} # we currently support fuel, apex and mcp if [[ !
"$installer_type" =~ (fuel|apex|mcp) ]]; then @@ -76,7 +79,7 @@ esac cd /home/opnfv qtip create --project-template ${test_suite} --pod-name ${pod_name} --installer-type ${installer_type} \ ---installer-host ${installer_ip} --scenario ${scenario} ${test_suite} +--installer-host ${installer_ip} --scenario ${scenario} --sut ${sut} ${test_suite} cd ${test_suite} diff --git a/resources/ansible_roles/openstack/defaults/main.yml b/resources/ansible_roles/openstack/defaults/main.yml index 3b83d6c0..830def62 100644 --- a/resources/ansible_roles/openstack/defaults/main.yml +++ b/resources/ansible_roles/openstack/defaults/main.yml @@ -9,9 +9,10 @@ --- +image_url: https://cloud-images.ubuntu.com/releases/16.04/release-20180222/ubuntu-16.04-server-cloudimg-amd64-disk1.img +checksum: 027b3e9d219f0f6c17b5448ed67dc41e +temp_dir: /tmp/qtip + +flavor_name: qtip_flavor stack_name: qtip_stack image_name: qtip_image -flavor_name: m1.large -net_name: qtip_net -subnet_name: qtip_subnet -instance_name: qtip_vm
\ No newline at end of file diff --git a/resources/ansible_roles/openstack/tasks/main.yml b/resources/ansible_roles/openstack/tasks/main.yml index f4d3d18c..ed043831 100644 --- a/resources/ansible_roles/openstack/tasks/main.yml +++ b/resources/ansible_roles/openstack/tasks/main.yml @@ -1,6 +1,6 @@ ############################################################################## -# Copyright (c) 2017 ZTE Corporation and others. -# taseer94@gmail.com +# Copyright (c) 2018 ZTE Corporation and others. +# # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, Version 2.0 # which accompanies this distribution, and is available at @@ -9,14 +9,78 @@ --- +- name: Delete QTIP stack + os_stack: + name: "{{ stack_name }}" + state: absent + +- name: Delete QTIP keypair + os_keypair: + name: QtipKey + state: absent + +- name: Delete QTIP flavor + os_nova_flavor: + name: "{{ flavor_name }}" + state: absent + +- name: Delete qtip image + os_image: + name: "{{ image_name }}" + state: absent + +- name: Create temp directory for QTIP + file: + path: "{{ temp_dir }}" + state: directory + +- name: Clean the existing SSH keypair + file: + state: absent + path: "{{ temp_dir }}/{{ item }}" + with_items: + - QtipKey.pub + - QtipKey + +- name: Generate a SSH key for QTIP VM + shell: ssh-keygen -t rsa -N "" -f "{{ temp_dir }}/QtipKey" -q + +- name: Create QTIP keypair + os_keypair: + name: QtipKey + public_key_file: "{{ temp_dir }}/QtipKey.pub" + state: present + +- name: Create QTIP flavor + os_nova_flavor: + name: "{{ flavor_name }}" + ram: 2048 + vcpus: 3 + disk: 4 + state: present + +- name: Download image as qtip image + get_url: + url: "{{ image_url }}" + dest: "{{ temp_dir }}/{{ image_name }}.img" + checksum: "md5:{{ checksum }}" + when: image_url | search("https://") + +- name: Upload qtip image + os_image: + name: "{{ image_name }}" + container_format: bare + disk_format: qcow2 + state: present + filename: "{{ temp_dir }}/{{ image_name }}.img" + - name: create qtip stack os_stack: name: "{{ stack_name }}" state: present - template: heat_template.yml + template: "{{ heat_template }}" parameters: - image_name: "{{ image_name }}" - flavor_name: "{{ flavor_name }}" - net_name: "{{ net_name }}" - subnet_name: "{{ subnet_name }}" - instance_name: "{{ inst_name }}" + image: "{{ image_name }}" + flavor: "{{ flavor_name }}" + keypair: QtipKey + external_network: "{{ external_network }}" diff --git a/resources/ansible_roles/openstack/templates/heat_template.yml b/resources/ansible_roles/openstack/templates/heat_template.yml deleted file mode 100644 index ed5a3ab7..00000000 --- a/resources/ansible_roles/openstack/templates/heat_template.yml +++ /dev/null @@ -1,70 +0,0 @@ -############################################################################## -# Copyright (c) 2017 ZTE Corporation and others. -# taseer94@gmail.com -# All rights reserved. 
This program and the accompanying materials -# are made available under the terms of the Apache License, Version 2.0 -# which accompanies this distribution, and is available at -# http://www.apache.org/licenses/LICENSE-2.0 -############################################################################## - ---- - -heat_template_version: 2015-04-30 - -description: Simple template to deploy a single compute instance - -parameters: - image_name: - type: string - label: Image ID - description: Image to be used for compute instance - default: Ubuntu 16.04 x86_64 - flavor_name: - type: string - label: Instance Type - description: Type of instance (flavor) to be used - default: m1.large - net_name: - type: string - label: Test network name - description: The name of the stack's network - default: qtip_net - subnet_name: - type: string - label: Test subnet name - description: The name of the stack's subnet - default: qtip_subnet - instance_name: - type: string - label: Test VM name - description: The name of the spawned vm - default: qtip_vm - -resources: - private_net: - type: OS::Neutron::Net - properties: - name: { get_param: net_name } - - private_subnet: - type: OS::Neutron::Subnet - properties: - name: { get_param: subnet_name } - network_id: { get_resource: private_net } - cidr: 10.0.0.0/24 - - server1_port: - type: OS::Neutron::Port - properties: - network_id: { get_resource: private_net } - fixed_ips: - - subnet_id: { get_resource: private_subnet } - - my_instance: - type: OS::Nova::Server - properties: - name: { get_param: inst_name } - image: { get_param: image_name } - flavor: { get_param: flavor_name } - networks: - - port: { get_resource: server1_port } diff --git a/resources/ansible_roles/opnfv-testapi/tasks/report.yml b/resources/ansible_roles/opnfv-testapi/tasks/report.yml index 0633eafb..6db3a785 100644 --- a/resources/ansible_roles/opnfv-testapi/tasks/report.yml +++ b/resources/ansible_roles/opnfv-testapi/tasks/report.yml @@ -21,7 +21,7 @@ project_name: "{{ project_name }}" case_name: "{{ case_name }}" pod_name: "{{ pod_name }}" - installer: "{{ installer_type }}" + installer: "{{ installer_type_adapter[installer_type] }}" version: "{{ version }}" scenario: "{{ scenario }}" start_date: "{{ ansible_date_time.date }}" diff --git a/resources/ansible_roles/qtip-generator/defaults/main.yml b/resources/ansible_roles/qtip-generator/defaults/main.yml index 344feb54..6665d11f 100644 --- a/resources/ansible_roles/qtip-generator/defaults/main.yml +++ b/resources/ansible_roles/qtip-generator/defaults/main.yml @@ -16,6 +16,10 @@ installer_group: apex: apex-underclouds mcp: salt-master +network_group: + mcp: floating_net + apex: external + project_name: 'qtip-project' project_template: 'compute' diff --git a/resources/ansible_roles/qtip-generator/files/compute/group_vars/all.yml b/resources/ansible_roles/qtip-generator/files/compute/group_vars/all.yml index efb2bdb7..1b34fd69 100644 --- a/resources/ansible_roles/qtip-generator/files/compute/group_vars/all.yml +++ b/resources/ansible_roles/qtip-generator/files/compute/group_vars/all.yml @@ -21,3 +21,7 @@ installer_group: fuel: fuel-masters apex: apex-underclouds mcp: salt-master + +network_group: + mcp: floating_net + apex: external diff --git a/resources/ansible_roles/qtip-generator/files/compute/heat_template.yml b/resources/ansible_roles/qtip-generator/files/compute/heat_template.yml new file mode 100644 index 00000000..cb67e624 --- /dev/null +++ b/resources/ansible_roles/qtip-generator/files/compute/heat_template.yml @@ -0,0 +1,101 @@ 
+############################################################################## +# Copyright (c) 2018 ZTE Corporation and others. +# +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## +heat_template_version: 2015-04-30 + +description: > + Used to run VMs for QTIP + +parameters: + image: + type: string + description: Name of the image + default: qtip_image + + flavor: + type: string + description: Name of flavor + default: qtip_flavor + + keypair: + type: string + description: Name of keypair + default: QtipKey + + external_network: + type: string + description: Name of the external network + +resources: + + network: + type: OS::Neutron::Net + properties: + name: qtip_net + + subnet: + type: OS::Neutron::Subnet + properties: + name: qtip_subnet + ip_version: 4 + cidr: 192.168.0.0/24 + network: { get_resource: network } + dns_nameservers: [8.8.8.8] + + management_router: + type: OS::Neutron::Router + properties: + name: qtip_router + external_gateway_info: + network: { get_param: external_network } + + management_router_interface: + type: OS::Neutron::RouterInterface + properties: + router: { get_resource: management_router } + subnet: { get_resource: subnet } + + floating_ip: + type: OS::Neutron::FloatingIP + properties: + floating_network: { get_param: external_network } + + floating_ip_association: + type: OS::Nova::FloatingIPAssociation + properties: + floating_ip: { get_resource: floating_ip } + server_id: { get_resource: qtip_instance } + + security_group: + type: OS::Neutron::SecurityGroup + properties: + name: qtip_security_group + rules: + - port_range_min: 22 + port_range_max: 5201 + protocol: tcp + - port_range_min: 22 + port_range_max: 5201 + protocol: udp + - protocol: icmp + + qtip_instance: + type: OS::Nova::Server + depends_on: [subnet] + properties: + name: { get_param: "OS::stack_name" } + image: { get_param: image } + flavor: { get_param: flavor } + key_name: { get_param: keypair } + security_groups: [{ get_resource: security_group }] + networks: + - network: { get_resource: network } +outputs: + instance_ip: + description: The IP address of the instance + value: { get_attr: [floating_ip, floating_ip_address] } diff --git a/resources/ansible_roles/qtip-generator/files/compute/host_vars/localhost.yml b/resources/ansible_roles/qtip-generator/files/compute/host_vars/localhost.yml index cc587c69..7f339045 100644 --- a/resources/ansible_roles/qtip-generator/files/compute/host_vars/localhost.yml +++ b/resources/ansible_roles/qtip-generator/files/compute/host_vars/localhost.yml @@ -14,3 +14,9 @@ case_name: "{{ case_name|default('compute') }}" pod_name: "{{ pod_name|default('qtip-pod') }}" scenario: "{{ scenario|default('generic') }}" version: "{{ lookup('env','OPNFV_RELEASE')|default('master') }}" + +installer_type_adapter: + fuel: fuel + mcp: fuel + apex: apex + manual: manual
\ No newline at end of file diff --git a/resources/ansible_roles/qtip-generator/files/compute/setup.yml b/resources/ansible_roles/qtip-generator/files/compute/setup.yml index f5920c1f..037f5746 100644 --- a/resources/ansible_roles/qtip-generator/files/compute/setup.yml +++ b/resources/ansible_roles/qtip-generator/files/compute/setup.yml @@ -10,6 +10,14 @@ # Prepare connection to SUT (System Under Test) --- + +{% if sut == 'vnf' %} +- hosts: localhost + gather_facts: no + roles: + - { role: openstack, external_network: {{ network_group[installer_type] }}, heat_template: heat_template.yml } +{% endif %} + {% if installer_type == 'manual' %} - hosts: localhost gather_facts: no diff --git a/resources/ansible_roles/qtip-generator/files/compute/teardown.yml b/resources/ansible_roles/qtip-generator/files/compute/teardown.yml index dc659930..e483d7f6 100644 --- a/resources/ansible_roles/qtip-generator/files/compute/teardown.yml +++ b/resources/ansible_roles/qtip-generator/files/compute/teardown.yml @@ -7,8 +7,12 @@ # http://www.apache.org/licenses/LICENSE-2.0 ############################################################################## + - hosts: SUT roles: # teardown environment - { role: qtip, tasks: teardown } + + + diff --git a/resources/ansible_roles/qtip-generator/files/doctor/group_vars/all.yml b/resources/ansible_roles/qtip-generator/files/doctor/group_vars/all.yml index 55d5b250..766520b9 100644 --- a/resources/ansible_roles/qtip-generator/files/doctor/group_vars/all.yml +++ b/resources/ansible_roles/qtip-generator/files/doctor/group_vars/all.yml @@ -1,3 +1,12 @@ +############################################################################## +# Copyright (c) 2017 ZTE Corporation and others. +# +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## + {% raw %} doctor_project: doctor doctor_user: doctor diff --git a/tests/ci/compute/docker-compose.yaml b/tests/ci/compute/docker-compose.yaml index 712c9c54..b6bdf13d 100644 --- a/tests/ci/compute/docker-compose.yaml +++ b/tests/ci/compute/docker-compose.yaml @@ -11,7 +11,7 @@ version: '2' services: qtip: - container_name: compute_qtip + container_name: compute_qtip_${SUT} image: opnfv/qtip:${DOCKER_TAG} env_file: ${ENV_FILE} volumes: diff --git a/tests/ci/experimental.sh b/tests/ci/experimental.sh index e4bcc577..e45611e5 100755 --- a/tests/ci/experimental.sh +++ b/tests/ci/experimental.sh @@ -20,18 +20,19 @@ export CI_DEBUG='false' export TEST_SUITE='compute' export TESTAPI_URL='' export SSH_CREDENTIALS='/root/.ssh' - export WORKSPACE=${WORKSPACE:-$(pwd)} +export SUT='vnf' source ${script_dir}/utils/start_services.sh cd ${WORKSPACE} qtip_repo='/home/opnfv/repos/qtip' -docker cp . ${TEST_SUITE}_qtip:${qtip_repo} -docker exec ${TEST_SUITE}_qtip bash -c "cd ${qtip_repo} && pip install -U -e ." +docker cp . ${TEST_SUITE}_qtip_${SUT}:${qtip_repo} +docker exec ${TEST_SUITE}_qtip_${SUT} bash -c "cd ${qtip_repo} && pip install -U -e ." + +docker exec ${TEST_SUITE}_qtip_${SUT} bash -x ${qtip_repo}/qtip/scripts/quickstart.sh -u "${SUT}" -docker exec ${TEST_SUITE}_qtip bash -x ${qtip_repo}/qtip/scripts/quickstart.sh echo "QTIP: Verify ${TEST_SUITE} done!" 
exit 0 diff --git a/tests/ci/periodic.sh b/tests/ci/periodic.sh index 87fa7627..e37f236e 100755 --- a/tests/ci/periodic.sh +++ b/tests/ci/periodic.sh @@ -13,7 +13,7 @@ script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" source ${script_dir}/utils/start_services.sh -docker exec ${TEST_SUITE}_qtip bash -x /home/opnfv/repos/qtip/qtip/scripts/quickstart.sh +docker exec ${TEST_SUITE}_qtip_${SUT} bash -x /home/opnfv/repos/qtip/qtip/scripts/quickstart.sh -u "${SUT}" echo "${TEST_SUITE} QPI done!"
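
The StcRfc2544Test class in this change hands the actual run to a background thread, so callers are expected to call execute() and then poll get_status() until the run finishes or fails, as the __main__ block above does. The sketch below shows one way to wrap that polling pattern so the status is re-read on every pass and the wait is bounded. It is a minimal, hypothetical helper: the wait_for_testcase name and the timeout/poll_interval parameters are not part of this change, and it assumes only the execute(), get_status(), get_err_reason() and get_result() methods and the TC_STATUS_* constants shown above.

import time

def wait_for_testcase(tc, timeout=3600, poll_interval=2):
    # Start the testcase on its background thread, then poll until it
    # reports FINISHED or ERROR, or until the timeout expires.
    tc.execute()
    deadline = time.time() + timeout
    while time.time() < deadline:
        status = tc.get_status()  # refresh the status on every iteration
        if status == tc.TC_STATUS_FINISHED:
            return tc.get_result()
        if status == tc.TC_STATUS_ERROR:
            raise RuntimeError("testcase failed: %s" % tc.get_err_reason())
        time.sleep(poll_interval)
    raise RuntimeError("testcase timed out after %s seconds" % timeout)

With a helper like this, each of the tc1-tc4 blocks in the __main__ section would reduce to a single wait_for_testcase(tc) call after the testcase object is constructed.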