diff options
-rw-r--r-- | ansible/site.gate.yml | 1
-rw-r--r-- | ansible/site.yml | 1
-rw-r--r-- | docker/benchmarking/Dockerfile | 12
-rw-r--r-- | docker/benchmarking/testcases.yaml | 11
-rw-r--r-- | functest_kubernetes/netperf/__init__.py | 0
-rw-r--r-- | functest_kubernetes/netperf/netperf.py | 66
-rw-r--r-- | setup.cfg | 1
7 files changed, 92 insertions, 0 deletions
diff --git a/ansible/site.gate.yml b/ansible/site.gate.yml index 68c868c3..13197cf0 100644 --- a/ansible/site.gate.yml +++ b/ansible/site.gate.yml @@ -56,6 +56,7 @@ - container: functest-kubernetes-benchmarking tests: - xrally_kubernetes_full + - netperf - container: functest-kubernetes-cnf tests: - k8s_vims diff --git a/ansible/site.yml b/ansible/site.yml index 27c4e2f3..5acf6893 100644 --- a/ansible/site.yml +++ b/ansible/site.yml @@ -41,6 +41,7 @@ - container: functest-kubernetes-benchmarking tests: - xrally_kubernetes_full + - netperf - container: functest-kubernetes-cnf tests: - k8s_vims diff --git a/docker/benchmarking/Dockerfile b/docker/benchmarking/Dockerfile index 6625322c..d209b009 100644 --- a/docker/benchmarking/Dockerfile +++ b/docker/benchmarking/Dockerfile @@ -1,4 +1,16 @@ FROM opnfv/functest-kubernetes-smoke +COPY plotperf.py.patch /tmp/plotperf.py.patch +RUN apk --no-cache add --update py3-matplotlib && \ + apk --no-cache add --virtual .build-deps --update patch go && \ + ln -s /usr/bin/python3 /usr/bin/python && \ + git clone https://github.com/kubernetes/perf-tests && \ + (cd perf-tests/network/benchmarks/netperf && go build -o /usr/local/bin/launch launch.go) && \ + curl https://raw.githubusercontent.com/girishkalele/pyplot-docker/master/plotperf.py \ + --output /usr/local/bin/plotperf.py && \ + (cd /usr/local/bin && patch -p0 < /tmp/plotperf.py.patch && \ + mv plotperf.py plotperf && chmod a+x plotperf) && \ + rm -rf perf-tests /tmp/plotperf.py.patch && \ + apk del .build-deps COPY testcases.yaml /usr/lib/python3.8/site-packages/xtesting/ci/testcases.yaml CMD ["run_tests", "-t", "all"] diff --git a/docker/benchmarking/testcases.yaml b/docker/benchmarking/testcases.yaml index 1a850e22..bcf40d45 100644 --- a/docker/benchmarking/testcases.yaml +++ b/docker/benchmarking/testcases.yaml @@ -20,3 +20,14 @@ tiers: times: 10 concurrency: 4 namespaces_count: 3 + - case_name: netperf + project_name: functest + criteria: 100 + blocking: false + 
description: >- + A standardized benchmark to measure Kubernetes networking performance + on multiple host platforms and network stacks. + dependencies: + - DEPLOY_SCENARIO: "k8-*" + run: + name: netperf diff --git a/functest_kubernetes/netperf/__init__.py b/functest_kubernetes/netperf/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/functest_kubernetes/netperf/__init__.py diff --git a/functest_kubernetes/netperf/netperf.py b/functest_kubernetes/netperf/netperf.py new file mode 100644 index 00000000..3135a6c5 --- /dev/null +++ b/functest_kubernetes/netperf/netperf.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python + +# Copyright (c) 2021 Orange and others. +# +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 + +""" +Benchmarking Kubernetes Networking Performance +""" + +import glob +import logging +import os +import shutil +import subprocess +import time + +from xtesting.core import testcase + + +class Netperf(testcase.TestCase): + """Run Benchmarking Kubernetes Networking Performance""" + + __logger = logging.getLogger(__name__) + + def __init__(self, **kwargs): + super(Netperf, self).__init__(**kwargs) + self.output_log_name = 'functest-kubernetes.log' + self.output_debug_log_name = 'functest-kubernetes.debug.log' + + def check_requirements(self): + """Check if launch is in $PATH""" + self.is_skipped = not ( + shutil.which("launch") and shutil.which("plotperf")) + if self.is_skipped: + self.__logger.warning("launch or plotperf is missing") + + def run(self, **kwargs): + self.start_time = time.time() + try: + if not os.path.exists(self.res_dir): + os.makedirs(self.res_dir) + cmd = ['launch', '-iterations', '1', '-kubeConfig', + '/root/.kube/config'] + output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) +
self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8")) + lfiles = glob.glob(os.path.join( + 'results_netperf-latest', 'netperf-latest*.csv')) + results = max(lfiles, key=os.path.getmtime) + shutil.move(results, os.path.join(self.res_dir, 'netperf.csv')) + cmd = ['plotperf', '-c', + os.path.join(self.res_dir, 'netperf.csv'), + '-o', self.res_dir, '-s', 'netperf'] + output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) + self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8")) + self.result = 100 + status = testcase.TestCase.EX_OK + except Exception: # pylint: disable=broad-except + self.__logger.exception("Can not run Netperf") + self.result = 0 + status = testcase.TestCase.EX_RUN_ERROR + self.stop_time = time.time() + return status @@ -17,3 +17,4 @@ xtesting.testcase = kube_hunter = functest_kubernetes.security.security:KubeHunter kube_bench = functest_kubernetes.security.security:KubeBench cnf_testsuite = functest_kubernetes.cnf_conformance.conformance:CNFConformance + netperf = functest_kubernetes.netperf.netperf:Netperf