-rw-r--r--   docs/release-notes/functest-release.rst  |  32
-rw-r--r--   testcases/features/sfc/SSHUtils.py       | 120
-rw-r--r--   testcases/features/sfc/ovs_utils.py      | 117
-rwxr-xr-x   testcases/features/sfc/sfc.py            |  75
-rwxr-xr-x   testcases/features/sfc/sfc_colorado1.py  |  77
5 files changed, 403 insertions, 18 deletions
diff --git a/docs/release-notes/functest-release.rst b/docs/release-notes/functest-release.rst
index 6361aaec5..1c9819f5a 100644
--- a/docs/release-notes/functest-release.rst
+++ b/docs/release-notes/functest-release.rst
@@ -206,6 +206,17 @@ precised, the scenario is a HA scenario):
| multisite           |         |         | X       |         |
+---------------------+---------+---------+---------+---------+
+For Colorado 2.0, the following scenarios have been released:
+
++---------------------+---------+---------+---------+---------+
+| Scenario            | Apex    | Compass | Fuel    | Joid    |
++=====================+=========+=========+=========+=========+
+| onos-nofeature      | X       | X       | X       | X       |
++---------------------+---------+---------+---------+---------+
+| odl_l2-sfc-ha       |         |         | X       |         |
++---------------------+---------+---------+---------+---------+
+
+
In Colorado, the functional tests have been divided into 6 different
categories:
@@ -280,6 +291,9 @@ Apex
+------------------+---------+---------+-------------------+
| odl_l2-sfc-noha  | 18/24   | 75%     | `apex-res-217`_   |
+------------------+---------+---------+-------------------+
+| onos-nofeature-ha| 20/21   | 95%     | `apex-res-423`_   |
++------------------+---------+---------+-------------------+
+
Compass
-------
@@ -300,6 +314,8 @@ Compass
+------------------+---------+---------+-------------------+
| onos-sfc-ha      | 17/18   | 95%     | `compass-res-76`_ |
+------------------+---------+---------+-------------------+
+| onos-nofeature-ha| 15/15   | 100%    | `compass-res-285`_|
++------------------+---------+---------+-------------------+
Note: all the Compass tests for Colorado have been executed in a virtual
environment. Bare metal resources were used for the master branch.
@@ -334,6 +350,8 @@ Fuel
+---------------------+---------+---------+-------------------+
| odl_l2-sfc-noha     | 6/21    | 29%     | `fuel-res-219`_   |
+---------------------+---------+---------+-------------------+
+| odl_l2-sfc-ha       | 16/21   | 76%     | `fuel-res-376`_   |
++---------------------+---------+---------+-------------------+
| odl_l3              | 15/18   | 83%     | `fuel-res-115`_   |
+---------------------+---------+---------+-------------------+
| odl_l3-noha         | 12/15   | 80%     | `fuel-res-164`_   |
@@ -346,6 +364,8 @@ Fuel
+---------------------+---------+---------+-------------------+
| onos-sfc-noha       | 21/21   | 100%    | `fuel-res-129`_   |
+---------------------+---------+---------+-------------------+
+| onos-nofeature-ha   | 18/18   | 100%    | `fuel-res-492`_   |
++---------------------+---------+---------+-------------------+
| multisite           | N.R     | 100%    | `fuel-res-8`_     |
+---------------------+---------+---------+-------------------+
@@ -388,6 +408,8 @@ Joid
+---------------------+---------+---------+-----------------+
| onos-sfc            | 24/24   | 100%    | `joid-res-97`_  |
+---------------------+---------+---------+-----------------+
+| onos-nofeature-ha   | 21/21   | 100%    | `joid-res-345`_ |
++---------------------+---------+---------+-----------------+
It is highly recommended to install a JSON viewer in your browser
(e.g. https://addons.mozilla.org/fr/firefox/addon/jsonview/)
@@ -541,6 +563,8 @@ Useful links
.. _`apex-res-235`: http://testresults.opnfv.org/test/api/v1/results?build_tag=jenkins-functest-apex-apex-daily-colorado-daily-colorado-235
+.. _`apex-res-423`: http://testresults.opnfv.org/test/api/v1/results?build_tag=jenkins-functest-apex-apex-daily-colorado-daily-colorado-423
+
.. _`compass-res-55`: http://testresults.opnfv.org/test/api/v1/results?build_tag=jenkins-functest-compass-virtual-daily-colorado-55
.. _`compass-res-59`: http://testresults.opnfv.org/test/api/v1/results?build_tag=jenkins-functest-compass-virtual-daily-colorado-59
@@ -553,6 +577,8 @@ Useful links
.. _`compass-res-567`: http://testresults.opnfv.org/test/api/v1/results?build_tag=jenkins-functest-compass-virtual-daily-master-567
+.. _`compass-res-285`: http://testresults.opnfv.org/test/api/v1/results?build_tag=jenkins-functest-compass-virtual-daily-master-285
+
.. _`fuel-res-8`: https://build.opnfv.org/ci/view/functest/job/functest-fuel-virtual-suite-colorado/lastSuccessfulBuild/console
.. _`fuel-res-115`: http://testresults.opnfv.org/test/api/v1/results?build_tag=jenkins-functest-fuel-baremetal-daily-colorado-115
@@ -587,6 +613,10 @@ Useful links
.. _`fuel-res-219`: http://testresults.opnfv.org/test/api/v1/results?build_tag=jenkins-functest-fuel-virtual-daily-colorado-219
+.. _`fuel-res-376`: http://testresults.opnfv.org/test/api/v1/results?build_tag=jenkins-functest-fuel-virtual-daily-colorado-376
+
+.. _`fuel-res-492`: http://testresults.opnfv.org/test/api/v1/results?build_tag=jenkins-functest-fuel-virtual-daily-colorado-492
+
.. _`fuel-arm-res-29`: https://build.opnfv.org/ci/view/armband/job/functest-fuel-armband-baremetal-daily-colorado/29/console
.. _`fuel-arm-res-24`: https://build.opnfv.org/ci/view/armband/job/functest-fuel-armband-baremetal-daily-colorado/24/console
@@ -609,3 +639,5 @@ Useful links
.. _`joid-res-104`: http://testresults.opnfv.org/test/api/v1/results?build_tag=jenkins-functest-joid-baremetal-daily-colorado-104
+.. _`joid-res-345`: http://testresults.opnfv.org/test/api/v1/results?build_tag=jenkins-functest-joid-baremetal-daily-colorado-345
+
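For convenience, here is a minimal sketch of querying one of the result links
above programmatically instead of in a browser (the requests dependency and the
'results'/'case_name'/'criteria' field names are assumptions, not part of this
patch):

    import requests

    # One of the build_tag result URLs listed above (apex-res-423)
    URL = ('http://testresults.opnfv.org/test/api/v1/results?build_tag='
           'jenkins-functest-apex-apex-daily-colorado-daily-colorado-423')

    response = requests.get(URL, timeout=10)
    response.raise_for_status()
    for result in response.json().get('results', []):
        print('{0}: {1}'.format(result.get('case_name'), result.get('criteria')))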
diff --git a/testcases/features/sfc/SSHUtils.py b/testcases/features/sfc/SSHUtils.py
new file mode 100644
index 000000000..9c8c2c727
--- /dev/null
+++ b/testcases/features/sfc/SSHUtils.py
@@ -0,0 +1,120 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+# Authors: George Paraskevopoulos (geopar@intracom-telecom.com)
+#          Jose Lausuch (jose.lausuch@ericsson.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+import paramiko
+import functest.utils.functest_logger as rl
+import os
+
+logger = rl.Logger('SSHUtils').getLogger()
+
+
+def get_ssh_client(hostname, username, password=None, proxy=None):
+    client = None
+    try:
+        if proxy is None:
+            client = paramiko.SSHClient()
+        else:
+            client = ProxyHopClient()
+            client.configure_jump_host(proxy['ip'],
+                                       proxy['username'],
+                                       proxy['password'])
+
+        if client is None:
+            raise Exception('Could not connect to client')
+
+        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+        client.connect(hostname,
+                       username=username,
+                       password=password)
+        return client
+    except Exception as e:
+        logger.error(e)
+        return None
+
+
+def get_file(ssh_conn, src, dest):
+    try:
+        sftp = ssh_conn.open_sftp()
+        sftp.get(src, dest)
+        return True
+    except Exception as e:
+        logger.error("Error [get_file(ssh_conn, '%s', '%s')]: %s" %
+                     (src, dest, e))
+        return None
+
+
+def put_file(ssh_conn, src, dest):
+    try:
+        sftp = ssh_conn.open_sftp()
+        sftp.put(src, dest)
+        return True
+    except Exception as e:
+        logger.error("Error [put_file(ssh_conn, '%s', '%s')]: %s" %
+                     (src, dest, e))
+        return None
+
+
+class ProxyHopClient(paramiko.SSHClient):
+    '''
+    Connect to a remote server using a proxy hop
+    '''
+    def __init__(self, *args, **kwargs):
+        self.logger = rl.Logger("ProxyHopClient").getLogger()
+        self.proxy_ssh = None
+        self.proxy_transport = None
+        self.proxy_channel = None
+        self.proxy_ip = None
+        self.proxy_ssh_key = None
+        self.local_ssh_key = os.path.join(os.getcwd(), 'id_rsa')
+        super(ProxyHopClient, self).__init__(*args, **kwargs)
+
+    def configure_jump_host(self, jh_ip, jh_user, jh_pass,
+                            jh_ssh_key='/root/.ssh/id_rsa'):
+        self.proxy_ip = jh_ip
+        self.proxy_ssh_key = jh_ssh_key
+        self.proxy_ssh = paramiko.SSHClient()
+        self.proxy_ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+        self.proxy_ssh.connect(jh_ip,
+                               username=jh_user,
+                               password=jh_pass)
+        self.proxy_transport = self.proxy_ssh.get_transport()
+
+    def connect(self, hostname, port=22, username='root', password=None,
+                pkey=None, key_filename=None, timeout=None, allow_agent=True,
+                look_for_keys=True, compress=False, sock=None, gss_auth=False,
+                gss_kex=False, gss_deleg_creds=True, gss_host=None,
+                banner_timeout=None):
+        try:
+            if self.proxy_ssh is None:
+                raise Exception('You must configure the jump '
+                                'host before calling connect')
+
+            get_file_res = get_file(self.proxy_ssh,
+                                    self.proxy_ssh_key,
+                                    self.local_ssh_key)
+            if get_file_res is None:
+                raise Exception('Couldn\'t fetch SSH key from jump host')
+            proxy_key = (paramiko.RSAKey
+                         .from_private_key_file(self.local_ssh_key))
+
+            self.proxy_channel = self.proxy_transport.open_channel(
+                "direct-tcpip",
+                (hostname, 22),
+                (self.proxy_ip, 22))
+
+            self.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+            super(ProxyHopClient, self).connect(hostname,
+                                                username=username,
+                                                pkey=proxy_key,
+                                                sock=self.proxy_channel)
+            os.remove(self.local_ssh_key)
+        except Exception as e:
+            self.logger.error(e)
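A minimal usage sketch of the helpers added above (the target IP below is a
placeholder; the proxy values mirror the PROXY dictionary introduced in
sfc.py further down):

    import SSHUtils as ssh_utils

    # The Fuel master acts as the jump host; 192.168.1.10 stands in for a
    # node that is only reachable through it.
    proxy = {'ip': '10.20.0.2', 'username': 'root', 'password': 'r00tme'}
    client = ssh_utils.get_ssh_client('192.168.1.10', 'root', proxy=proxy)
    if client is not None:
        _, stdout, _ = client.exec_command('hostname -I')
        print(stdout.read().strip())
        # Copy a remote file back through the same proxied connection
        ssh_utils.get_file(client, '/var/log/syslog', './syslog')
        client.close()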
diff --git a/testcases/features/sfc/ovs_utils.py b/testcases/features/sfc/ovs_utils.py
new file mode 100644
index 000000000..29eed3844
--- /dev/null
+++ b/testcases/features/sfc/ovs_utils.py
@@ -0,0 +1,117 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+# Author: George Paraskevopoulos (geopar@intracom-telecom.com)
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import functest.utils.functest_logger as rl
+import os
+import time
+import shutil
+
+logger = rl.Logger('ovs_utils').getLogger()
+
+
+class OVSLogger(object):
+    def __init__(self, basedir, ft_resdir):
+        self.ovs_dir = basedir
+        self.ft_resdir = ft_resdir
+        self.__mkdir_p(self.ovs_dir)
+
+    def __mkdir_p(self, dirpath):
+        if not os.path.exists(dirpath):
+            os.makedirs(dirpath)
+
+    def __ssh_host(self, ssh_conn, host_prefix='10.20.0'):
+        try:
+            _, stdout, _ = ssh_conn.exec_command('hostname -I')
+            hosts = stdout.readline().strip().split(' ')
+            found_host = [h for h in hosts if h.startswith(host_prefix)][0]
+            return found_host
+        except Exception as e:
+            logger.error(e)
+
+    def __dump_to_file(self, operation, host, text, timestamp=None):
+        ts = (timestamp if timestamp is not None
+              else time.strftime("%Y%m%d-%H%M%S"))
+        dumpdir = os.path.join(self.ovs_dir, ts)
+        self.__mkdir_p(dumpdir)
+        fname = '{0}_{1}'.format(operation, host)
+        with open(os.path.join(dumpdir, fname), 'w') as f:
+            f.write(text)
+
+    def __remote_cmd(self, ssh_conn, cmd):
+        try:
+            _, stdout, stderr = ssh_conn.exec_command(cmd)
+            errors = stderr.readlines()
+            if len(errors) > 0:
+                host = self.__ssh_host(ssh_conn)
+                logger.error(''.join(errors))
+                raise Exception('Could not execute {0} in {1}'
+                                .format(cmd, host))
+            output = ''.join(stdout.readlines())
+            return output
+        except Exception as e:
+            logger.error('[__remote_command(ssh_client, {0})]: {1}'
+                         .format(cmd, e))
+            return None
+
+    def create_artifact_archive(self):
+        shutil.make_archive(self.ovs_dir,
+                            'zip',
+                            root_dir=os.path.dirname(self.ovs_dir),
+                            base_dir=self.ovs_dir)
+        shutil.copy2('{0}.zip'.format(self.ovs_dir), self.ft_resdir)
+
+    def ofctl_dump_flows(self, ssh_conn, br='br-int',
+                         choose_table=None, timestamp=None):
+        try:
+            cmd = 'ovs-ofctl -OOpenFlow13 dump-flows {0}'.format(br)
+            if choose_table is not None:
+                cmd = '{0} table={1}'.format(cmd, choose_table)
+            output = self.__remote_cmd(ssh_conn, cmd)
+            operation = 'ofctl_dump_flows'
+            host = self.__ssh_host(ssh_conn)
+            self.__dump_to_file(operation, host, output, timestamp=timestamp)
+            return output
+        except Exception as e:
+            logger.error('[ofctl_dump_flows(ssh_client, {0}, {1})]: {2}'
+                         .format(br, choose_table, e))
+            return None
+
+    def vsctl_show(self, ssh_conn, timestamp=None):
+        try:
+            cmd = 'ovs-vsctl show'
+            output = self.__remote_cmd(ssh_conn, cmd)
+            operation = 'vsctl_show'
+            host = self.__ssh_host(ssh_conn)
+            self.__dump_to_file(operation, host, output, timestamp=timestamp)
+            return output
+        except Exception as e:
+            logger.error('[vsctl_show(ssh_client)]: {0}'.format(e))
+            return None
+
+    def dump_ovs_logs(self, controller_clients, compute_clients,
+                      related_error=None, timestamp=None):
+        if timestamp is None:
+            timestamp = time.strftime("%Y%m%d-%H%M%S")
+
+        for controller_client in controller_clients:
+            self.ofctl_dump_flows(controller_client,
+                                  timestamp=timestamp)
+            self.vsctl_show(controller_client,
+                            timestamp=timestamp)
+
+        for compute_client in compute_clients:
+            self.ofctl_dump_flows(compute_client,
+                                  timestamp=timestamp)
+            self.vsctl_show(compute_client,
+                            timestamp=timestamp)
+
+        if related_error is not None:
+            dumpdir = os.path.join(self.ovs_dir, timestamp)
+            with open(os.path.join(dumpdir, 'error'), 'w') as f:
+                f.write(related_error)
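A short sketch of how OVSLogger is meant to be driven (the node IPs are
placeholders; the results directory matches the FUNCTEST_RESULTS_DIR constant
added to sfc.py below):

    import os
    import ovs_utils
    import SSHUtils as ssh_utils

    PROXY = {'ip': '10.20.0.2', 'username': 'root', 'password': 'r00tme'}
    ovs_logger = ovs_utils.OVSLogger(os.path.join(os.getcwd(), 'ovs-logs'),
                                     '/home/opnfv/functest/results/odl-sfc')

    # Placeholder controller/compute IPs reached through the Fuel master
    controllers = [ssh_utils.get_ssh_client('10.20.0.3', 'root', proxy=PROXY)]
    computes = [ssh_utils.get_ssh_client('10.20.0.4', 'root', proxy=PROXY)]

    # Dump ovs-ofctl/ovs-vsctl output from every node into a per-timestamp
    # directory, then archive it into the Functest results directory.
    ovs_logger.dump_ovs_logs(controllers, computes,
                             related_error='TEST 1 [FAILED] ==> SSH NOT BLOCKED')
    ovs_logger.create_artifact_archive()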
diff --git a/testcases/features/sfc/sfc.py b/testcases/features/sfc/sfc.py
index 122ed5d9d..96ea3ab39 100755
--- a/testcases/features/sfc/sfc.py
+++ b/testcases/features/sfc/sfc.py
@@ -2,13 +2,14 @@ import os
import subprocess
import sys
import time
-
import argparse
import paramiko
import functest.utils.functest_logger as ft_logger
import functest.utils.functest_utils as ft_utils
import functest.utils.openstack_utils as os_utils
+import SSHUtils as ssh_utils
+import ovs_utils
parser = argparse.ArgumentParser()
@@ -21,6 +22,7 @@ args = parser.parse_args()
""" logging configuration """
logger = ft_logger.Logger("ODL_SFC").getLogger()
+FUNCTEST_RESULTS_DIR = '/home/opnfv/functest/results/odl-sfc'
FUNCTEST_REPO = ft_utils.FUNCTEST_REPO
HOME = os.environ['HOME'] + "/"
@@ -54,6 +56,12 @@ TACKER_CHANGECLASSI = "sfc_change_classi.bash"
ssh_options = '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
+PROXY = {
+    'ip': '10.20.0.2',
+    'username': 'root',
+    'password': 'r00tme'
+}
+
def check_ssh(ip):
cmd = "sshpass -p opnfv ssh " + ssh_options + " -q " + ip + " exit"
@@ -82,6 +90,15 @@ def main():
                               stderr=subprocess.PIPE)
    ip_server = process.stdout.readline().rstrip()
+    comp_cmd = ("sshpass -p r00tme ssh " + ssh_options + " root@10.20.0.2"
+                " 'fuel node'|grep compute|awk '{print $10}'")
+    logger.info("Executing script to get compute IPs: '%s'" % comp_cmd)
+    process = subprocess.Popen(comp_cmd,
+                               shell=True,
+                               stdout=subprocess.PIPE,
+                               stderr=subprocess.PIPE)
+    ip_computes = [ip.strip() for ip in process.stdout.readlines()]
+
    iptable_cmd1 = ("sshpass -p r00tme ssh " + ssh_options + " root@10.20.0.2"
                    " ssh " + ip_server + " iptables -P INPUT ACCEPT ")
    iptable_cmd2 = ("sshpass -p r00tme ssh " + ssh_options + " root@10.20.0.2"
@@ -106,6 +123,20 @@ def main():
    neutron_client = os_utils.get_neutron_client()
    glance_client = os_utils.get_glance_client()
+    ovs_logger = ovs_utils.OVSLogger(
+        os.path.join(os.getcwd(), 'ovs-logs'),
+        FUNCTEST_RESULTS_DIR)
+
+    controller_clients = [ssh_utils.get_ssh_client(ip_server,
+                                                   'root',
+                                                   proxy=PROXY)]
+    compute_clients = []
+    for c_ip in ip_computes:
+        c_client = ssh_utils.get_ssh_client(c_ip,
+                                            'root',
+                                            proxy=PROXY)
+        compute_clients.append(c_client)
+
    # Download the image
    if not os.path.isfile(IMAGE_PATH):
@@ -382,8 +413,14 @@ def main():
        i = i + 1
        json_results.update({"Test 1: SSH Blocked": "Passed"})
    else:
-        logger.error('\033[91m' + "TEST 1 [FAILED] "
-                     "==> SSH NOT BLOCKED" + '\033[0m')
+        timestamp = time.strftime("%Y%m%d-%H%M%S")
+        error = ('\033[91m' + "TEST 1 [FAILED] "
+                 "==> SSH NOT BLOCKED" + '\033[0m')
+        logger.error(error)
+        ovs_logger.dump_ovs_logs(controller_clients,
+                                 compute_clients,
+                                 related_error=error,
+                                 timestamp=timestamp)
        status = "FAIL"
        json_results.update({"Test 1: SSH Blocked": "Failed"})
        failures += 1
@@ -405,8 +442,14 @@ def main():
        i = i + 1
        json_results.update({"Test 2: HTTP works": "Passed"})
    else:
-        logger.error('\033[91m' + "TEST 2 [FAILED] "
-                     "==> HTTP BLOCKED" + '\033[0m')
+        timestamp = time.strftime("%Y%m%d-%H%M%S")
+        error = ('\033[91m' + "TEST 2 [FAILED] "
+                 "==> HTTP BLOCKED" + '\033[0m')
+        logger.error(error)
+        ovs_logger.dump_ovs_logs(controller_clients,
+                                 compute_clients,
+                                 related_error=error,
+                                 timestamp=timestamp)
        status = "FAIL"
        json_results.update({"Test 2: HTTP works": "Failed"})
        failures += 1
@@ -438,8 +481,14 @@ def main():
        i = i + 1
        json_results.update({"Test 3: HTTP Blocked": "Passed"})
    else:
-        logger.error('\033[91m' + "TEST 3 [FAILED] "
-                     "==> HTTP NOT BLOCKED" + '\033[0m')
+        timestamp = time.strftime("%Y%m%d-%H%M%S")
+        error = ('\033[91m' + "TEST 3 [FAILED] "
+                 "==> HTTP NOT BLOCKED" + '\033[0m')
+        logger.error(error)
+        ovs_logger.dump_ovs_logs(controller_clients,
+                                 compute_clients,
+                                 related_error=error,
+                                 timestamp=timestamp)
        status = "FAIL"
        json_results.update({"Test 3: HTTP Blocked": "Failed"})
        failures += 1
@@ -461,8 +510,14 @@ def main():
        i = i + 1
        json_results.update({"Test 4: SSH works": "Passed"})
    else:
-        logger.error('\033[91m' + "TEST 4 [FAILED] "
-                     "==> SSH BLOCKED" + '\033[0m')
+        timestamp = time.strftime("%Y%m%d-%H%M%S")
+        error = ('\033[91m' + "TEST 4 [FAILED] "
+                 "==> SSH BLOCKED" + '\033[0m')
+        logger.error(error)
+        ovs_logger.dump_ovs_logs(controller_clients,
+                                 compute_clients,
+                                 related_error=error,
+                                 timestamp=timestamp)
        status = "FAIL"
        json_results.update({"Test 4: SSH works": "Failed"})
        failures += 1
@@ -471,6 +526,8 @@ def main():
        time.sleep(6)
        # timeout -= 1
+    ovs_logger.create_artifact_archive()
+
    if i == 4:
        for x in range(0, 5):
            logger.info('\033[92m' + "SFC TEST WORKED"
diff --git a/testcases/features/sfc/sfc_colorado1.py b/testcases/features/sfc/sfc_colorado1.py
index a9c397e68..8dd6c808e 100755
--- a/testcases/features/sfc/sfc_colorado1.py
+++ b/testcases/features/sfc/sfc_colorado1.py
@@ -8,6 +8,8 @@ import paramiko
import functest.utils.functest_logger as ft_logger
import functest.utils.functest_utils as ft_utils
import functest.utils.openstack_utils as os_utils
+import SSHUtils as ssh_utils
+import ovs_utils
parser = argparse.ArgumentParser()
@@ -20,6 +22,7 @@ args = parser.parse_args()
""" logging configuration """
logger = ft_logger.Logger("ODL_SFC").getLogger()
+FUNCTEST_RESULTS_DIR = '/home/opnfv/functest/results/odl-sfc'
FUNCTEST_REPO = ft_utils.FUNCTEST_REPO
HOME = os.environ['HOME'] + "/"
@@ -53,6 +56,12 @@ TACKER_CHANGECLASSI = "sfc_change_classi.bash"
ssh_options = '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
+PROXY = {
+    'ip': '10.20.0.2',
+    'username': 'root',
+    'password': 'r00tme'
+}
+
def check_ssh(ip):
cmd = "sshpass -p opnfv ssh " + ssh_options + " -q " + ip + " exit"
@@ -81,6 +90,15 @@ def main():
                               stderr=subprocess.PIPE)
    ip_server = process.stdout.readline().rstrip()
+    comp_cmd = ("sshpass -p r00tme ssh " + ssh_options + " root@10.20.0.2"
+                " 'fuel node'|grep compute|awk '{print $10}'")
+    logger.info("Executing script to get compute IPs: '%s'" % comp_cmd)
+    process = subprocess.Popen(comp_cmd,
+                               shell=True,
+                               stdout=subprocess.PIPE,
+                               stderr=subprocess.PIPE)
+    ip_computes = [ip.strip() for ip in process.stdout.readlines()]
+
    iptable_cmd1 = ("sshpass -p r00tme ssh " + ssh_options + " root@10.20.0.2"
                    " ssh " + ip_server + " iptables -P INPUT ACCEPT ")
    iptable_cmd2 = ("sshpass -p r00tme ssh " + ssh_options + " root@10.20.0.2"
@@ -105,6 +123,20 @@ def main():
    neutron_client = os_utils.get_neutron_client()
    glance_client = os_utils.get_glance_client()
+    ovs_logger = ovs_utils.OVSLogger(
+        os.path.join(os.getcwd(), 'ovs-logs'),
+        FUNCTEST_RESULTS_DIR)
+
+    controller_clients = [ssh_utils.get_ssh_client(ip_server,
+                                                   'root',
+                                                   proxy=PROXY)]
+    compute_clients = []
+    for c_ip in ip_computes:
+        c_client = ssh_utils.get_ssh_client(c_ip,
+                                            'root',
+                                            proxy=PROXY)
+        compute_clients.append(c_client)
+
    # Download the image
    if not os.path.isfile(IMAGE_PATH):
@@ -404,8 +436,14 @@ def main():
            i = i + 1
            json_results.update({"Test 1: SSH Blocked": "Passed"})
        else:
-            logger.error('\033[91m' + "TEST 1 [FAILED] "
-                         "==> SSH NOT BLOCKED" + '\033[0m')
+            timestamp = time.strftime("%Y%m%d-%H%M%S")
+            error = ('\033[91m' + "TEST 1 [FAILED] "
+                     "==> SSH NOT BLOCKED" + '\033[0m')
+            logger.error(error)
+            ovs_logger.dump_ovs_logs(controller_clients,
+                                     compute_clients,
+                                     related_error=error,
+                                     timestamp=timestamp)
            status = "FAIL"
            json_results.update({"Test 1: SSH Blocked": "Failed"})
            failures += 1
@@ -427,8 +465,14 @@ def main():
            i = i + 1
            json_results.update({"Test 2: HTTP works": "Passed"})
        else:
-            logger.error('\033[91m' + "TEST 2 [FAILED] "
-                         "==> HTTP BLOCKED" + '\033[0m')
+            timestamp = time.strftime("%Y%m%d-%H%M%S")
+            error = ('\033[91m' + "TEST 2 [FAILED] "
+                     "==> HTTP BLOCKED" + '\033[0m')
+            logger.error(error)
+            ovs_logger.dump_ovs_logs(controller_clients,
+                                     compute_clients,
+                                     related_error=error,
+                                     timestamp=timestamp)
            status = "FAIL"
            json_results.update({"Test 2: HTTP works": "Failed"})
            failures += 1
@@ -460,8 +504,14 @@ def main():
            i = i + 1
            json_results.update({"Test 3: HTTP Blocked": "Passed"})
        else:
-            logger.error('\033[91m' + "TEST 3 [FAILED] "
-                         "==> HTTP NOT BLOCKED" + '\033[0m')
+            timestamp = time.strftime("%Y%m%d-%H%M%S")
+            error = ('\033[91m' + "TEST 3 [FAILED] "
+                     "==> HTTP NOT BLOCKED" + '\033[0m')
+            logger.error(error)
+            ovs_logger.dump_ovs_logs(controller_clients,
+                                     compute_clients,
+                                     related_error=error,
+                                     timestamp=timestamp)
            status = "FAIL"
            json_results.update({"Test 3: HTTP Blocked": "Failed"})
            failures += 1
@@ -483,8 +533,14 @@ def main():
            i = i + 1
            json_results.update({"Test 4: SSH works": "Passed"})
        else:
-            logger.error('\033[91m' + "TEST 4 [FAILED] "
-                         "==> SSH BLOCKED" + '\033[0m')
+            timestamp = time.strftime("%Y%m%d-%H%M%S")
+            error = ('\033[91m' + "TEST 4 [FAILED] "
+                     "==> SSH BLOCKED" + '\033[0m')
+            logger.error(error)
+            ovs_logger.dump_ovs_logs(controller_clients,
+                                     compute_clients,
+                                     related_error=error,
+                                     timestamp=timestamp)
            status = "FAIL"
            json_results.update({"Test 4: SSH works": "Failed"})
            failures += 1
@@ -493,6 +549,8 @@ def main():
            time.sleep(6)
            # timeout -= 1
+        ovs_logger.create_artifact_archive()
+
        iterator += 1
        if i == 4:
            for x in range(0, 5):
@@ -501,7 +559,8 @@ def main():
            break
        else:
            logger.info("Iterating again!")
-            delete = ("bash delete.sh")
+            delete = "bash %s/testcases/features/sfc/delete.sh" % \
+                (FUNCTEST_REPO)
            try:
                subprocess.call(delete, shell=True, stderr=subprocess.PIPE)
                time.sleep(10)