From af556fefdb137c92c8682fef1eee762bf7410d0c Mon Sep 17 00:00:00 2001
From: Cédric Ollivier
Date: Sun, 9 May 2021 17:15:33 +0200
Subject: Benchmark Kubernetes Networking Performance
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

https://github.com/kubernetes/perf-tests/tree/master/network/benchmarks/netperf

Change-Id: I6facd567f1c52c5949b53484a1fb107dcf34d622
Signed-off-by: Cédric Ollivier
(cherry picked from commit 3b5c1b115e234d636cb4f2a17d27ced872fee924)
---
 ansible/site.gate.yml                   |  1 +
 ansible/site.yml                        |  1 +
 docker/benchmarking/Dockerfile          | 16 ++++++++
 docker/benchmarking/plotperf.py.patch   | 42 +++++++++++++++++++++
 docker/benchmarking/testcases.yaml      | 11 ++++++
 functest_kubernetes/netperf/__init__.py |  0
 functest_kubernetes/netperf/netperf.py  | 66 +++++++++++++++++++++++++++++++++
 setup.cfg                               |  1 +
 8 files changed, 138 insertions(+)
 create mode 100644 docker/benchmarking/plotperf.py.patch
 create mode 100644 functest_kubernetes/netperf/__init__.py
 create mode 100644 functest_kubernetes/netperf/netperf.py

diff --git a/ansible/site.gate.yml b/ansible/site.gate.yml
index 6c3e4d19..6ca2b4c8 100644
--- a/ansible/site.gate.yml
+++ b/ansible/site.gate.yml
@@ -59,6 +59,7 @@
       - container: functest-kubernetes-benchmarking
         tests:
           - xrally_kubernetes_full
+          - netperf
       - container: functest-kubernetes-cnf
         tests:
           - k8s_vims
diff --git a/ansible/site.yml b/ansible/site.yml
index f2c23266..38b055da 100644
--- a/ansible/site.yml
+++ b/ansible/site.yml
@@ -44,6 +44,7 @@
       - container: functest-kubernetes-benchmarking
         tests:
           - xrally_kubernetes_full
+          - netperf
       - container: functest-kubernetes-cnf
         tests:
           - k8s_vims
diff --git a/docker/benchmarking/Dockerfile b/docker/benchmarking/Dockerfile
index 45e24b43..fdc0fac8 100644
--- a/docker/benchmarking/Dockerfile
+++ b/docker/benchmarking/Dockerfile
@@ -1,4 +1,20 @@
 FROM opnfv/functest-kubernetes-smoke:kali
+ARG NETPERF_TAG=8a5a7a23f2165b29e46b4d32aad7d5f85e4b9516
+ARG PLOTPERF_TAG=2455313f4b9581795a8f642243acaad472d91804
+
+COPY plotperf.py.patch /tmp/plotperf.py.patch
+RUN apk --no-cache add --update py3-matplotlib && \
+    apk --no-cache add --virtual .build-deps --update patch go && \
+    ln -s /usr/bin/python3 /usr/bin/python && \
+    git clone https://github.com/kubernetes/perf-tests && \
+    (cd perf-tests && git checkout $NETPERF_TAG) && \
+    (cd perf-tests/network/benchmarks/netperf && go build -o /usr/local/bin/launch launch.go) && \
+    curl https://raw.githubusercontent.com/girishkalele/pyplot-docker/$PLOTPERF_TAG/plotperf.py \
+        --output /usr/local/bin/plotperf.py && \
+    (cd /usr/local/bin && patch -p0 < /tmp/plotperf.py.patch && \
+        mv plotperf.py plotperf && chmod a+x plotperf) && \
+    rm -rf perf-tests /tmp/plotperf.py.patch && \
+    apk del .build-deps
 
 COPY testcases.yaml /usr/lib/python3.8/site-packages/xtesting/ci/testcases.yaml
 CMD ["run_tests", "-t", "all"]
diff --git a/docker/benchmarking/plotperf.py.patch b/docker/benchmarking/plotperf.py.patch
new file mode 100644
index 00000000..45a64e1e
--- /dev/null
+++ b/docker/benchmarking/plotperf.py.patch
@@ -0,0 +1,42 @@
+--- plotperf.py.orig	2021-05-09 10:42:17.858983226 +0200
++++ plotperf.py	2021-05-09 10:43:09.410934186 +0200
+@@ -18,11 +18,13 @@
+ # Generates matplotlib line and bar charts from the netperf.csv raw data file
+ #
+ 
++from __future__ import print_function
++from builtins import range
+ try:
+   import matplotlib.pyplot as plt
+-except Exception, e:
++except Exception as e:
+   # Translate the traceback to a more friendly error message
+-  print "Exception (%s) while importing matplotlib - install with apt-get install python-matplotlib (or equivalent package manager)" % e
++  print("Exception (%s) while importing matplotlib - install with apt-get install python-matplotlib (or equivalent package manager)" % e)
+   raise
+ 
+ import numpy
+@@ -93,7 +95,7 @@
+   for ext in [ "png", "svg" ]:
+     fname = os.path.join(options.outputdir, "{0}.{1}".format(options.suffix, ext))
+     plt.savefig(fname, dpi=100)
+-    print "Saved {0}".format(fname)
++    print("Saved {0}".format(fname))
+ 
+   barlabels = []
+   barvalues = []
+@@ -105,7 +107,7 @@
+       barvalues.append(float(data[n][1]))
+ 
+   plt.clf()
+-  plt.barh(bottom=range(0, len(data)-1),
++  plt.barh(list(range(0, len(data)-1)),
+            height=0.5,
+            width=barvalues,
+            align='center')
+@@ -117,4 +119,4 @@
+   for ext in [ "png", "svg" ]:
+     fname = os.path.join(options.outputdir, "{0}.bar.{1}".format(options.suffix, ext))
+     plt.savefig(fname, dpi=100)
+-    print "Saved {0}".format(fname)
++    print("Saved {0}".format(fname))
diff --git a/docker/benchmarking/testcases.yaml b/docker/benchmarking/testcases.yaml
index 1a850e22..bcf40d45 100644
--- a/docker/benchmarking/testcases.yaml
+++ b/docker/benchmarking/testcases.yaml
@@ -20,3 +20,14 @@ tiers:
                     times: 10
                     concurrency: 4
                     namespaces_count: 3
+            - case_name: netperf
+              project_name: functest
+              criteria: 100
+              blocking: false
+              description: >-
+                A standardized benchmark to measure Kubernetes networking performance
+                on multiple host platforms and network stacks.
+              dependencies:
+                - DEPLOY_SCENARIO: "k8-*"
+              run:
+                name: netperf
diff --git a/functest_kubernetes/netperf/__init__.py b/functest_kubernetes/netperf/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/functest_kubernetes/netperf/netperf.py b/functest_kubernetes/netperf/netperf.py
new file mode 100644
index 00000000..3135a6c5
--- /dev/null
+++ b/functest_kubernetes/netperf/netperf.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2021 Orange and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+"""
+Benchmarking Kubernetes Networking Performance
+"""
+
+import glob
+import logging
+import os
+import shutil
+import subprocess
+import time
+
+from xtesting.core import testcase
+
+
+class Netperf(testcase.TestCase):
+    """Run Benchmarking Kubernetes Networking Performance"""
+
+    __logger = logging.getLogger(__name__)
+
+    def __init__(self, **kwargs):
+        super(Netperf, self).__init__(**kwargs)
+        self.output_log_name = 'functest-kubernetes.log'
+        self.output_debug_log_name = 'functest-kubernetes.debug.log'
+
+    def check_requirements(self):
+        """Check if launch is in $PATH"""
+        self.is_skipped = not (
+            shutil.which("launch") and shutil.which("plotperf"))
+        if self.is_skipped:
+            self.__logger.warning("launch or plotperf is missing")
+
+    def run(self, **kwargs):
+        self.start_time = time.time()
+        try:
+            if not os.path.exists(self.res_dir):
+                os.makedirs(self.res_dir)
+            cmd = ['launch', '-iterations', '1', '-kubeConfig',
+                   '/root/.kube/config']
+            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+            self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
+            lfiles = glob.glob(os.path.join(
+                'results_netperf-latest', 'netperf-latest*.csv'))
+            results = max(lfiles, key=os.path.getmtime)
+            shutil.move(results, os.path.join(self.res_dir, 'netperf.csv'))
+            cmd = ['plotperf', '-c',
+                   os.path.join(self.res_dir, 'netperf.csv'),
+                   '-o', self.res_dir, '-s', 'netperf']
+            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+            self.__logger.info("%s\n%s", " ".join(cmd), output.decode("utf-8"))
+            self.result = 100
+            status = testcase.TestCase.EX_OK
+        except Exception:  # pylint: disable=broad-except
+            self.__logger.exception("Can not run Netperf")
+            self.result = 0
+            status = testcase.TestCase.EX_RUN_ERROR
+        self.stop_time = time.time()
+        return status
diff --git a/setup.cfg b/setup.cfg
index 52e19752..9721cef6 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -17,3 +17,4 @@ xtesting.testcase =
     kube_hunter = functest_kubernetes.security.security:KubeHunter
     kube_bench = functest_kubernetes.security.security:KubeBench
     cnf_testsuite = functest_kubernetes.cnf_conformance.conformance:CNFConformance
+    netperf = functest_kubernetes.netperf.netperf:Netperf
-- 
cgit 1.2.3-korg
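
Note (illustrative, not part of the patch): the new case is a plain xtesting TestCase, registered through the setup.cfg entry point above, so besides the usual container entry point (something like run_tests -t netperf inside the benchmarking image) it can also be driven directly from Python. The sketch below is a minimal, assumption-laden example: it presumes the benchmarking image built by this Dockerfile (so launch and plotperf are in $PATH) and a kubeconfig at /root/.kube/config, which is the path hard-coded in netperf.py; the constructor keywords simply mirror the testcases.yaml entry.

    #!/usr/bin/env python3
    """Minimal sketch: drive the Netperf case without the xtesting CLI."""

    from xtesting.core import testcase

    from functest_kubernetes.netperf.netperf import Netperf

    # Keyword arguments mirror the new testcases.yaml entry.
    netperf = Netperf(case_name="netperf", project_name="functest")

    # Sets is_skipped unless both 'launch' and 'plotperf' are in $PATH.
    netperf.check_requirements()
    if netperf.is_skipped:
        print("launch or plotperf is missing, skipping")
    elif netperf.run() == testcase.TestCase.EX_OK:
        # netperf.csv plus the png/svg plots are published under res_dir.
        print("netperf succeeded, results in %s" % netperf.res_dir)
    else:
        print("netperf failed")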