-rw-r--r--  ansible/site.yml                                 1
-rw-r--r--  docker/core/Dockerfile                           3
-rw-r--r--  docker/smoke/testcases.yaml                     12
-rw-r--r--  functest_kubernetes/rally/__init__.py            0
-rw-r--r--  functest_kubernetes/rally/all-in-one.yaml      386
-rw-r--r--  functest_kubernetes/rally/rally_kubernetes.py   85
-rw-r--r--  requirements.txt                                 2
-rw-r--r--  setup.cfg                                        1
-rw-r--r--  tox.ini                                          1
9 files changed, 491 insertions, 0 deletions
diff --git a/ansible/site.yml b/ansible/site.yml
index 8e4706ac..fe366664 100644
--- a/ansible/site.yml
+++ b/ansible/site.yml
@@ -38,4 +38,5 @@
         - repo: opnfv
           container: functest-kubernetes-smoke
           tests:
+            - xrally_kubernetes
             - k8s_conformance
diff --git a/docker/core/Dockerfile b/docker/core/Dockerfile
index f51be0ca..13447fed 100644
--- a/docker/core/Dockerfile
+++ b/docker/core/Dockerfile
@@ -18,6 +18,9 @@ RUN apk --no-cache add --update python3 py3-pip bash git grep libffi openssl mai
     rm -rf /src/functest-kubernetes && \
     bash -c "mkdir -p /var/lib/xtesting /home/opnfv" && \
     ln -s /var/lib/xtesting /home/opnfv/functest && \
+    mkdir -p /etc/rally && \
+    printf "[database]\nconnection = 'sqlite:////var/lib/rally/database/rally.sqlite'" > /etc/rally/rally.conf && \
+    mkdir -p /var/lib/rally/database && rally db create && \
     apk del .build-deps
 COPY logging.ini /usr/lib/python3.6/site-packages/xtesting/ci/logging.ini
 CMD ["run_tests", "-t", "all"]
diff --git a/docker/smoke/testcases.yaml b/docker/smoke/testcases.yaml
index 6144070b..012fed74 100644
--- a/docker/smoke/testcases.yaml
+++ b/docker/smoke/testcases.yaml
@@ -19,3 +19,15 @@ tiers:
                     - DEPLOY_SCENARIO: 'k8-*'
                 run:
                     name: k8s_conformance
+            -
+                case_name: xrally_kubernetes
+                project_name: functest
+                criteria: 100
+                blocking: false
+                description: >-
+                    All in one tasks for checking basic functionality of
+                    Kubernetes cluster.
+                dependencies:
+                    - DEPLOY_SCENARIO: 'k8-*'
+                run:
+                    name: xrally_kubernetes
diff --git a/functest_kubernetes/rally/__init__.py b/functest_kubernetes/rally/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/functest_kubernetes/rally/__init__.py
diff --git a/functest_kubernetes/rally/all-in-one.yaml b/functest_kubernetes/rally/all-in-one.yaml
new file mode 100644
index 00000000..134c9f5c
--- /dev/null
+++ b/functest_kubernetes/rally/all-in-one.yaml
@@ -0,0 +1,386 @@
+---
+version: 2
+title: All in one tasks for checking basic functionality of Kubernetes cluster
+subtasks:
+
+  - title: Run a single workload with listing existing kubernetes namespaces
+    scenario:
+      Kubernetes.list_namespaces: {}
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+
+  - title: Run a single workload with create/read/delete namespace
+    scenario:
+      Kubernetes.create_and_delete_namespace: {}
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+
+  - title: Run a single workload with create/read/delete pod
+    scenario:
+      Kubernetes.create_and_delete_pod:
+        image: kubernetes/pause
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title: Run a single workload with create/read/delete replication controller
+    scenario:
+      Kubernetes.create_and_delete_replication_controller:
+        image: kubernetes/pause
+        replicas: 2
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title: Run a single workload with create/scale/delete replication controller
+    scenario:
+      Kubernetes.create_scale_and_delete_replication_controller:
+        image: kubernetes/pause
+        replicas: 2
+        scale_replicas: 3
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title: Run a single workload with create/read/delete replicaset
+    scenario:
+      Kubernetes.create_and_delete_replicaset:
+        image: kubernetes/pause
+        replicas: 1
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title: Run a single workload with create/scale/delete replicaset
+    scenario:
+      Kubernetes.create_scale_and_delete_replicaset:
+        image: kubernetes/pause
+        replicas: 1
+        scale_replicas: 2
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title:
+      Run a single workload with create/read/delete pod with emptyDir volume
+    scenario:
+      Kubernetes.create_and_delete_pod_with_emptydir_volume:
+        image: kubernetes/pause
+        mount_path: /opt/check
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title: >-
+      Run a single workload with create/read/check/delete pod with emptyDir
+      volume
+    scenario:
+      Kubernetes.create_and_delete_pod_with_emptydir_volume:
+        image: busybox
+        command:
+          - sleep
+          - "3600"
+        mount_path: /opt/check
+        check_cmd:
+          - ls
+          - /opt/check
+        error_regexp: No such file
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title: Run a single workload with create/read/delete pod with secret volume
+    scenario:
+      Kubernetes.create_and_delete_pod_with_secret_volume:
+        image: busybox
+        command:
+          - sleep
+          - "3600"
+        mount_path: /opt/check
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title: Run a single workload with create/check/delete pod with secret volume
+    scenario:
+      Kubernetes.create_and_delete_pod_with_secret_volume:
+        image: busybox
+        command:
+          - sleep
+          - "3600"
+        mount_path: /opt/check
+        check_cmd:
+          - ls
+          - /opt/check
+        error_regexp: No such file
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title: >-
+      Run a single workload with create/read/check/delete pod with hostPath
+      volume
+    scenario:
+      Kubernetes.create_and_delete_pod_with_hostpath_volume:
+        image: busybox
+        command:
+          - sleep
+          - "3600"
+        mount_path: /opt/check
+        check_cmd:
+          - ls
+          - /opt/check
+        error_regexp: No such file
+        volume_type: Directory
+        volume_path: /tmp/
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title:
+      Run a single workload with create/read/delete pod with configMap volume
+    scenario:
+      Kubernetes.create_and_delete_pod_with_configmap_volume:
+        image: busybox
+        command:
+          - "sleep"
+          - "3600"
+        mount_path: /var/log/check.txt
+        subpath: check.txt
+        configmap_data:
+          check.txt: |
+            test
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title: >-
+      Run a single workload with create/read/check/delete pod with configMap
+      volume
+    scenario:
+      Kubernetes.create_and_delete_pod_with_configmap_volume:
+        image: busybox
+        command:
+          - "sleep"
+          - "3600"
+        mount_path: /var/log/check.txt
+        subpath: check.txt
+        configmap_data:
+          check.txt: |
+            test
+        check_cmd:
+          - cat
+          - /var/log/check.txt
+        error_regexp: No such file
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title: Run a single workload with create/read/delete deployment
+    scenario:
+      Kubernetes.create_and_delete_deployment:
+        image: kubernetes/pause
+        replicas: 2
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title: Run a single workload with create/rollout/delete deployment
+    scenario:
+      Kubernetes.create_rollout_and_delete_deployment:
+        image: busybox
+        replicas: 1
+        command:
+          - sleep
+          - "3600"
+        env:
+          - name: "UPD"
+            value: "false"
+        changes:
+          env:
+            - name: "UPD"
+              value: "true"
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title: Run a single workload with create/read/delete statefulset
+    scenario:
+      Kubernetes.create_and_delete_statefulset:
+        image: kubernetes/pause
+        replicas: 2
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title: Run a single workload with create/scale/delete statefulset
+    scenario:
+      Kubernetes.create_scale_and_delete_statefulset:
+        image: kubernetes/pause
+        replicas: 1
+        scale_replicas: 2
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title: Run a single workload with create/read/delete job
+    scenario:
+      Kubernetes.create_and_delete_job:
+        image: busybox
+        command:
+          - echo
+          - "SUCCESS"
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title: Run a single workload with create/check/delete clusterIP service
+    scenario:
+      Kubernetes.create_check_and_delete_pod_with_cluster_ip_service:
+        image: gcr.io/google-samples/hello-go-gke:1.0
+        port: 80
+        protocol: TCP
+    runner:
+      constant:
+        concurrency: 1
+        times: 2
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title: Run a single workload with create/check/delete daemonset
+    scenario:
+      Kubernetes.create_check_and_delete_daemonset:
+        image: kubernetes/pause
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title: >-
+      Run a single workload with create/check/delete clusterIP service with
+      custom endpoint
+    scenario:
+      Kubernetes.create_check_and_delete_pod_with_cluster_ip_service:
+        image: gcr.io/google-samples/hello-go-gke:1.0
+        port: 80
+        protocol: TCP
+        custom_endpoint: true
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
+
+  - title: Run a single workload with create/check/delete NodePort service
+    scenario:
+      Kubernetes.create_check_and_delete_pod_with_node_port_service:
+        image: gcr.io/google-samples/hello-go-gke:1.0
+        port: 80
+        protocol: TCP
+        request_timeout: 10
+    runner:
+      constant:
+        concurrency: 1
+        times: 1
+    contexts:
+      namespaces:
+        count: 3
+        with_serviceaccount: true
diff --git a/functest_kubernetes/rally/rally_kubernetes.py b/functest_kubernetes/rally/rally_kubernetes.py
new file mode 100644
index 00000000..5955b894
--- /dev/null
+++ b/functest_kubernetes/rally/rally_kubernetes.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+
+"""Run workloads via Rally against Kubernetes platform
+
+xrally/kubernetes_ provides xRally plugins for Kubernetes platform.
+
+.. _xrally/kubernetes: https://github.com/xrally/xrally-kubernetes/
+"""
+
+import logging
+import os
+import time
+
+import pkg_resources
+from rally import api
+from rally import exceptions
+from rally.common import yamlutils as yaml
+import rally.common.logging
+from rally.env import env_mgr
+
+from xtesting.core import testcase
+
+
+class RallyKubernetes(testcase.TestCase):
+    """Run tasks for checking basic functionality of Kubernetes cluster"""
+
+    __logger = logging.getLogger(__name__)
+
+    def __init__(self, **kwargs):
+        super(RallyKubernetes, self).__init__(**kwargs)
+        self.res_dir = "/home/opnfv/functest/results/{}".format(
+            self.case_name)
+
+    def run(self, **kwargs):
+        self.start_time = time.time()
+        if not os.path.exists(self.res_dir):
+            os.makedirs(self.res_dir)
+        rapi = api.API()
+        api.CONF.set_default("use_stderr", False)
+        api.CONF.set_default('log_dir', self.res_dir)
+        api.CONF.set_default('log_file', 'rally.log')
+        rally.common.logging.setup("rally")
+        spec = env_mgr.EnvManager.create_spec_from_sys_environ()["spec"]
+        try:
+            env_mgr.EnvManager.get('my-kubernetes').delete(force=True)
+        except exceptions.DBRecordNotFound:
+            pass
+        env = env_mgr.EnvManager.create('my-kubernetes', spec)
+        result = env.check_health()
+        self.__logger.debug("check health %s: %s", 'my-kubernetes', result)
+        if not result['existing@kubernetes']['available']:
+            self.__logger.error(
+                "Cannot check env health: %s",
+                result['existing@kubernetes']['message'])
+            return
+        input_task = open(
+            pkg_resources.resource_filename(
+                'functest_kubernetes', 'rally/all-in-one.yaml')).read()
+        task = yaml.safe_load(input_task)
+        rapi.task.validate(deployment='my-kubernetes', config=task)
+        task_instance = rapi.task.create(deployment='my-kubernetes')
+        rapi.task.start(
+            deployment='my-kubernetes', config=task,
+            task=task_instance["uuid"])
+        self.details = rapi.task.get(task_instance["uuid"], detailed=True)
+        self.__logger.debug("details: %s", self.details)
+        if self.details['pass_sla']:
+            self.result = 100
+        result = rapi.task.export(
+            [task_instance["uuid"]], "html",
+            output_dest=os.path.join(
+                self.res_dir, "{}.html".format(self.case_name)))
+        if "files" in result:
+            for path in result["files"]:
+                with open(path, "w+") as output:
+                    output.write(result["files"][path])
+        result = rapi.task.export(
+            [task_instance["uuid"]], "junit-xml",
+            output_dest=os.path.join(
+                self.res_dir, "{}.xml".format(self.case_name)))
+        if "files" in result:
+            for path in result["files"]:
+                with open(path, "w+") as output:
+                    output.write(result["files"][path])
+        self.stop_time = time.time()
diff --git a/requirements.txt b/requirements.txt
index 60ca9422..742dcbf3 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,4 +3,6 @@
 # process, which may cause wedges in the gate later.
 pbr!=2.1.0 # Apache-2.0
 xtesting # Apache-2.0
+rally
+xrally-kubernetes
 kubernetes # Apache-2.0
diff --git a/setup.cfg b/setup.cfg
index 5a17c043..eb2b87e7 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -10,6 +10,7 @@ packages = functest_kubernetes
 xtesting.testcase =
     k8s_smoke = functest_kubernetes.k8stest:K8sSmokeTest
     k8s_conformance = functest_kubernetes.k8stest:K8sConformanceTest
+    xrally_kubernetes = functest_kubernetes.rally.rally_kubernetes:RallyKubernetes
    k8s_vims = functest_kubernetes.ims.ims:Vims
     kube_hunter = functest_kubernetes.security.security:KubeHunter
     kube_bench = functest_kubernetes.security.security:KubeBench
diff --git a/tox.ini b/tox.ini
index 67aad411..f4631e00 100644
--- a/tox.ini
+++ b/tox.ini
@@ -30,6 +30,7 @@ basepython = python3.6
 files =
   .travis.yml
   docker
+  functest_kubernetes/rally/all-in-one.yaml
 commands =
   yamllint {[testenv:yamllint]files}
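For reference, a minimal sketch (not part of the change) of how the new xrally_kubernetes entry point added above can be driven directly, outside the xtesting runner. It assumes a reachable cluster and that the kubeconfig-related environment variables expected by rally.env.env_mgr are exported, since rally_kubernetes.py builds its spec via EnvManager.create_spec_from_sys_environ(); the constructor kwargs mirror docker/smoke/testcases.yaml.

+#!/usr/bin/env python
+# Sketch only: exercise RallyKubernetes directly (assumes kubeconfig env vars).
+from functest_kubernetes.rally.rally_kubernetes import RallyKubernetes
+
+test = RallyKubernetes(case_name="xrally_kubernetes",
+                       project_name="functest", criteria=100)
+test.run()          # runs all-in-one.yaml, exports HTML and JUnit reports
+print(test.result)  # 100 when the task met its SLA (raw data in test.details)
+print(test.res_dir)  # /home/opnfv/functest/results/xrally_kubernetes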