diff options
20 files changed, 710 insertions, 11 deletions
diff --git a/monitor/monitoring.sh b/monitor/monitoring.sh index 26b63fc5..758f77ec 100644 --- a/monitor/monitoring.sh +++ b/monitor/monitoring.sh @@ -52,7 +52,7 @@ while [[ $# > 0 ]] INSTALLER_TYPE="$2" shift ;; - -i|--openstack-env) + -o|--openstack-env) OPENSTACK_ENV="$2" shift ;; @@ -104,7 +104,6 @@ sudo docker run --name bottlenecks-node-exporter \ -v "/proc:/host/proc:ro" \ -v "/sys:/host/sys:ro" \ -v "/:/rootfs:ro" \ - --net="host" \ quay.io/prometheus/node-exporter:v0.14.0 \ -collector.procfs /host/proc \ -collector.sysfs /host/sys \ diff --git a/monitor/uninstall.py b/monitor/uninstall.py index 3a9cf0c7..26351722 100644 --- a/monitor/uninstall.py +++ b/monitor/uninstall.py @@ -50,3 +50,4 @@ local_del_docker('cadvisor') local_del_docker('barometer') local_del_docker('grafana') local_del_docker('collectd') +local_del_docker('openstack-exporter') diff --git a/requirements/requirements.txt b/requirements/requirements.txt index dacffcea..381cb5cd 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -18,3 +18,4 @@ nose==1.3.7 pyroute2==0.4.10 elasticsearch==5.0.1 docker==2.0.2 +kubernetes==6.0.0 diff --git a/testing-scheduler/server/conductorclient/__init__.py b/testing-scheduler/server/conductorclient/__init__.py new file mode 100644 index 00000000..bb02be17 --- /dev/null +++ b/testing-scheduler/server/conductorclient/__init__.py @@ -0,0 +1,8 @@ +##############################################################################
+# Copyright (c) 2018 Huawei Technologies Co.,Ltd. and others
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
diff --git a/testing-scheduler/server/conductorclient/mock_tasks.json b/testing-scheduler/server/conductorclient/mock_tasks.json new file mode 100644 index 00000000..4fea48bf --- /dev/null +++ b/testing-scheduler/server/conductorclient/mock_tasks.json @@ -0,0 +1,13 @@ +{
+ "task_group_1":[
+ {
+ "name": "http_yardstick_test",
+ "retryCount": 3,
+ "timeOutSeconds": 1200,
+ "timeOutPolicy": "TIME_OUT_WF",
+ "retryLogic": "FIXED",
+ "retryDelaySeconds": 600,
+ "responseTimeOutSeconds": 3600
+ }
+ ]
+}
\ No newline at end of file diff --git a/testing-scheduler/server/conductorclient/mock_workflow.json b/testing-scheduler/server/conductorclient/mock_workflow.json new file mode 100644 index 00000000..8f6251c0 --- /dev/null +++ b/testing-scheduler/server/conductorclient/mock_workflow.json @@ -0,0 +1,24 @@ +{
+ "name": "workflow_demo_05",
+ "description": "run a workflow of yardstick test service",
+ "version": 1,
+ "tasks": [
+ {
+ "name": "http_yardstick_test",
+ "taskReferenceName": "ping_test",
+ "inputParameters": {
+ "http_request": {
+ "uri": "http://192.168.199.105:8080/greet",
+ "method": "GET"
+ }
+ },
+ "type": "HTTP"
+ }
+ ],
+ "outputParameters": {
+ "header": "${ping_test.output.response.headers}",
+ "response": "${ping_test.output.response.body}",
+ "status": "${ping_test.output.response.statusCode}"
+ },
+ "schemaVersion": 2
+}
\ No newline at end of file diff --git a/testing-scheduler/server/conductorclient/run_new_workflow.py b/testing-scheduler/server/conductorclient/run_new_workflow.py new file mode 100644 index 00000000..0acb96a0 --- /dev/null +++ b/testing-scheduler/server/conductorclient/run_new_workflow.py @@ -0,0 +1,71 @@ +##############################################################################
+# Copyright (c) 2018 Huawei Technologies Co.,Ltd. and others
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from conductor import conductor
+import json
+
+
+class WorkflowMgr(object):
+ def __init__(self, serverAddr):
+ self._serverAddr = serverAddr + '/api'
+ self._metaDataClient = conductor.MetadataClient(self._serverAddr)
+ self._workflowClient = conductor.WorkflowClient(self._serverAddr)
+ self._tasksDefined = False
+ self._workflowDefined = False
+ self._workflowName = ""
+
+ def setTaskDef(self, taskJson):
+ jsonObj = json.loads(taskJson)
+ print "define tasks:\n", taskJson
+ for (k, v) in jsonObj.items():
+ self._metaDataClient.registerTaskDefs(v)
+ self._tasksDefined = True
+
+ def setWorkflowDef(self, workflowJson):
+ jsonObj = json.loads(workflowJson)
+ print "define workflow:\n", workflowJson
+ try:
+ self._metaDataClient.createWorkflowDef(jsonObj)
+ except Exception as e:
+ print e
+ self._workflowName = jsonObj['name']
+ self._workflowDefined = True
+
+ def startWorkflow(self, param={}):
+ workflowId = ''
+ if not self._tasksDefined:
+ print "error: please define the task at first\n"
+ elif not self._workflowDefined:
+ print "error: please define the workflow at first\n"
+ else:
+ workflowId = self._workflowClient.startWorkflow(
+ self._workflowName, param)
+ return workflowId
+
+ def setTaskDefFromFile(self, taskFilePath):
+ with open(taskFilePath, 'r') as f:
+ self.setTaskDef(f.read())
+
+ def setWorkflowFromFile(self, workflowFilePath):
+ with open(workflowFilePath, 'r') as f:
+ self.setWorkflowDef(f.read())
+
+
+# test demo
+def main():
+ serverAddr = "http://192.168.199.131:8080"
+ wfMgr = WorkflowMgr(serverAddr)
+ wfMgr.setTaskDefFromFile('mock_tasks.json')
+ wfMgr.setWorkflowFromFile('mock_workflow.json')
+ inputParam = {'input': 'fake'}
+ wfMgr.startWorkflow(inputParam)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/testing-scheduler/server/src/conductor_processor/__init__.py b/testing-scheduler/server/src/conductor_processor/__init__.py new file mode 100644 index 00000000..bb02be17 --- /dev/null +++ b/testing-scheduler/server/src/conductor_processor/__init__.py @@ -0,0 +1,8 @@ +##############################################################################
+# Copyright (c) 2018 Huawei Technologies Co.,Ltd. and others
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
diff --git a/testing-scheduler/server/src/conductor_processor/defaultTaskFile.json b/testing-scheduler/server/src/conductor_processor/defaultTaskFile.json new file mode 100644 index 00000000..a98a5819 --- /dev/null +++ b/testing-scheduler/server/src/conductor_processor/defaultTaskFile.json @@ -0,0 +1,9 @@ +{
+ "name": "",
+ "retryCount": 6,
+ "timeOutSeconds": 1200,
+ "timeOutPolicy": "TIME_OUT_WF",
+ "retryLogic": "FIXED",
+ "retryDelaySeconds": 3,
+ "responseTimeOutSeconds": 3600
+}
\ No newline at end of file diff --git a/testing-scheduler/server/src/conductor_processor/defaultWorkflowFile.json b/testing-scheduler/server/src/conductor_processor/defaultWorkflowFile.json new file mode 100644 index 00000000..8f6251c0 --- /dev/null +++ b/testing-scheduler/server/src/conductor_processor/defaultWorkflowFile.json @@ -0,0 +1,24 @@ +{
+ "name": "workflow_demo_05",
+ "description": "run a workflow of yardstick test service",
+ "version": 1,
+ "tasks": [
+ {
+ "name": "http_yardstick_test",
+ "taskReferenceName": "ping_test",
+ "inputParameters": {
+ "http_request": {
+ "uri": "http://192.168.199.105:8080/greet",
+ "method": "GET"
+ }
+ },
+ "type": "HTTP"
+ }
+ ],
+ "outputParameters": {
+ "header": "${ping_test.output.response.headers}",
+ "response": "${ping_test.output.response.body}",
+ "status": "${ping_test.output.response.statusCode}"
+ },
+ "schemaVersion": 2
+}
\ No newline at end of file diff --git a/testing-scheduler/server/src/conductor_processor/task.py b/testing-scheduler/server/src/conductor_processor/task.py new file mode 100644 index 00000000..6f25aef8 --- /dev/null +++ b/testing-scheduler/server/src/conductor_processor/task.py @@ -0,0 +1,28 @@ +##############################################################################
+# Copyright (c) 2018 Huawei Technologies Co.,Ltd. and others
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import json
+import os
+
+
+class TaskFile(object):
+ def __init__(self, taskName='task_0'):
+ self._defaultConfFile = self._getFilePath("defaultTaskFile.json")
+ with open(self._defaultConfFile) as defaultConf:
+ self._jsonObj = json.load(defaultConf)
+ self._jsonObj['name'] = taskName
+
+ def generateFromStep(self, stepObject):
+ self._jsonObj['name'] = stepObject.getName()
+ print "taskFile:", self._jsonObj['name']
+ return self._jsonObj
+
+ def _getFilePath(self, fileName):
+ dirPath = os.path.dirname(os.path.realpath(__file__))
+ return os.path.join(dirPath, fileName)
diff --git a/testing-scheduler/server/src/conductor_processor/workflow.py b/testing-scheduler/server/src/conductor_processor/workflow.py new file mode 100644 index 00000000..19f0896c --- /dev/null +++ b/testing-scheduler/server/src/conductor_processor/workflow.py @@ -0,0 +1,243 @@ +##############################################################################
+# Copyright (c) 2018 Huawei Technologies Co.,Ltd. and others
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import random
+import collections
+import re
+from src.conductor_processor.task import TaskFile
+
+
+class WorkflowFile(object):
+ def __init__(self, name):
+ self._name = "workflow_" + name + "(%s)" % getRandString(10)
+ self._description = ''
+ self._version = 1
+ self._schemaVersion = 2
+ self._tasks = []
+ self._outputParameters = {}
+
+ def getDict(self):
+ d = collections.OrderedDict()
+ d['name'] = self._name
+ d['description'] = self._description
+ d['version'] = self._version
+ d['schemaVersion'] = self._schemaVersion
+ d['tasks'] = self._tasks
+ d['outputParameters'] = self._outputParameters
+
+ return d
+
+ def generateMetaData(self, flowList, stepObjArr):
+ flowParser = FlowParser(flowList, stepObjArr)
+ self._tasks, taskMetaList = flowParser.parseMainFlow()
+ normalTasks = flowParser.getNormalTaskList()
+ for normalTask in normalTasks:
+ taskName = normalTask['name']
+ referenceName = normalTask['taskReferenceName']
+ self._outputParameters["%s(%s)" % (taskName, referenceName)] = \
+ "${%s.output.response.body}" % referenceName
+ return self.getDict(), taskMetaList
+
+
+class FlowParser(object):
+ def __init__(self, flowList, stepObjArr):
+ self._mainFlow = {}
+ self._subFlowDict = {}
+ self._stepObjArr = stepObjArr
+ self._normalTasks = []
+ for flow in flowList:
+ if flow['name'] == "main":
+ self._mainFlow = flow
+ else:
+ self._subFlowDict[flow['name']] = flow
+
+ def parseMainFlow(self):
+ return self.parseOrderList(self._mainFlow['orders'], self._stepObjArr)
+
+ def parse(self, obj, stepObjArr):
+ if isinstance(obj, str):
+ return self.parseFlow(obj, stepObjArr)
+ else:
+ return self.parseOrderList(obj, stepObjArr)
+
+ def parseFlow(self, flowName, stepObjArr):
+ orderList = self._subFlowDict[flowName]['orders']
+ return self.parseOrderList(orderList, stepObjArr)
+
+ def parseOrderList(self, orderList, stepObjArr):
+ tasks = []
+ taskMetaAllList = []
+ for order in orderList:
+ if order['type'] == "normal":
+ genTask = NormalTask(order, stepObjArr, self)
+ self._normalTasks.append(genTask)
+ elif order['type'] == "switch":
+ genTask = SwitchTask(order, stepObjArr, self)
+ elif order['type'] == "parallel":
+ genTask = ParallelTask(order, stepObjArr, self)
+ tasks.append(genTask.getDict())
+
+ if order['type'] == "parallel":
+ joinTask = genTask.getJoinTask()
+ tasks.append(joinTask.getDict())
+
+ taskMetaList = genTask.getTaskMetaList()
+ if taskMetaList is not None:
+ taskMetaAllList.extend(taskMetaList)
+ return tasks, taskMetaAllList
+
+ def getNormalTaskList(self):
+ normalTasksDict = []
+ for normalTask in self._normalTasks:
+ normalTasksDict.append(normalTask.getDict())
+ return normalTasksDict
+
+ def getNormalTask(self, stepId):
+ for normalTask in self._normalTasks:
+ if normalTask.getStepId() == stepId:
+ return normalTask
+ return None
+
+
+class BaseWorkflowTask(object):
+ def __init__(self, name):
+ self._name = name
+ self._taskReferenceName = self._name + "_task_%s" % getRandString(10)
+ self._type = ''
+ self._args = {}
+
+ def __str__(self):
+ dictObj = self.getDict()
+ return str(dictObj)
+
+ def getDict(self):
+ d1 = {
+ "name": self._name,
+ "taskReferenceName": self._taskReferenceName,
+ "type": self._type
+ }
+ return dict(d1, **self._args)
+
+ def getName(self):
+ return self._name
+
+ def getReferenceName(self):
+ return self._taskReferenceName
+
+ def getTaskMetaList(self):
+ taskFile = TaskFile()
+ return [taskFile.generateFromStep(self)]
+
+
+class NormalTask(BaseWorkflowTask):
+ def __init__(self, order, stepObjArr, flowParser):
+ relatedStepObj = stepObjArr[order['step'] - 1]
+ super(NormalTask, self).__init__(relatedStepObj.getName())
+ self._taskReferenceName = "task_%s" % getRandString(10)
+ self._stepId = relatedStepObj.getId()
+ self._type = "HTTP"
+ self._args['inputParameters'] = relatedStepObj.getArgs()
+ self._paramTransform(self._args['inputParameters'], flowParser)
+ print "NormalTask:----------------------\n", relatedStepObj.getArgs()
+
+ def _paramTransform(self, argsDict, flowParser):
+ for (k, v) in argsDict.items():
+ if isinstance(v, str):
+ if re.match("^\(\(\d+\..*\)\)", v):
+ v = v[2:-2]
+ stepId, outputParam = v.split(".")
+ stepId = int(stepId)
+ normalTask = flowParser.getNormalTask(stepId)
+ if normalTask is None:
+ continue
+ argsDict[k] = "${%s.output.response.body.%s}" % \
+ (normalTask.getReferenceName(), outputParam)
+ elif isinstance(v, dict):
+ self._paramTransform(v, flowParser)
+
+ def getStepId(self):
+ return self._stepId
+
+
+class SwitchTask(BaseWorkflowTask):
+ seqNumber = 0
+
+ def __init__(self, order, stepObjArr, flowParser):
+ super(SwitchTask, self).__init__("switch_" + str(SwitchTask.seqNumber))
+ SwitchTask.seqNumber = SwitchTask.seqNumber + 1
+ if 'name' in order:
+ self._name = order['name']
+ self._type = "DECISION"
+ caseValueParam = 'value'
+ order['value'] = order['value'][2:-2]
+ stepId, outputParam = order['value'].split(".")
+ stepId = int(stepId)
+ normalTask = flowParser.getNormalTask(stepId)
+ caseValue = "${%s.output.response.body.%s}" % \
+ (normalTask.getReferenceName(), outputParam)
+ self._args['inputParameters'] = {caseValueParam: caseValue}
+ self._args['caseValueParam'] = caseValueParam
+ self._args['decisionCases'] = {}
+ self._childTaskMetaList = []
+ for case, caseOrders in order['cases'].items():
+ self._args['decisionCases'][case], taskMetaList = \
+ flowParser.parse(caseOrders, stepObjArr)
+ if taskMetaList is not None:
+ self._childTaskMetaList.extend(taskMetaList)
+
+ def getTaskMetaList(self):
+ selfTaskMetaList = super(SwitchTask, self).getTaskMetaList()
+ selfTaskMetaList.extend(self._childTaskMetaList)
+ return selfTaskMetaList
+
+
+class ParallelTask(BaseWorkflowTask):
+ seqNumber = 0
+
+ def __init__(self, order, stepObjArr, flowParser):
+ InstSeqNumber = ParallelTask.seqNumber
+ super(ParallelTask, self).__init__("parallel_" + str(InstSeqNumber))
+ ParallelTask.seqNumber = ParallelTask.seqNumber + 1
+ if 'name' in order:
+ self._name = order['name']
+ self._type = "FORK_JOIN"
+ self._args['forkTasks'] = []
+ self._childTaskMetaList = []
+ lastTasksNameList = []
+ parallelList = order['parallel'].items()
+ parallelList.sort()
+ for key, orderList in parallelList:
+ print orderList
+ taskList, taskMetaList = flowParser.parse(orderList, stepObjArr)
+ self._args['forkTasks'].append(taskList)
+ lastTasksNameList.append(taskList[-1]['taskReferenceName'])
+ if taskMetaList is not None:
+ self._childTaskMetaList.extend(taskMetaList)
+ self._joinTaskObj = ParallelJoinTask(InstSeqNumber, lastTasksNameList)
+
+ def getTaskMetaList(self):
+ selfTaskMetaList = super(ParallelTask, self).getTaskMetaList()
+ selfTaskMetaList.extend(self._childTaskMetaList)
+ selfTaskMetaList.extend(self._joinTaskObj.getTaskMetaList())
+ return selfTaskMetaList
+
+ def getJoinTask(self):
+ return self._joinTaskObj
+
+
+class ParallelJoinTask(BaseWorkflowTask):
+ def __init__(self, seqNumber, joinOnList):
+ super(ParallelJoinTask, self).__init__(
+ "paralleljoin_" + str(seqNumber))
+ self._type = "JOIN"
+ self._args['joinOn'] = joinOnList
+
+
+def getRandString(length):
+ return "".join(random.choice(str("0123456789")) for i in range(length))
diff --git a/utils/env_prepare/config_prepare.sh b/utils/env_prepare/config_prepare.sh index b13b5a02..8de60438 100644 --- a/utils/env_prepare/config_prepare.sh +++ b/utils/env_prepare/config_prepare.sh @@ -155,7 +155,7 @@ if [[ ${INSTALLER_TYPE} != "" ]]; then echo "export OS_CACERT=${OS_CACERT}" >> ${OPENRC} cat ${OPENRC} else - error "Couldn't find openstack cacert file: ${OS_CACERT}, please check if the it's been properly provided." + info "Couldn't find openstack cacert file: ${OS_CACERT}, please check if the it's been properly provided." fi else error "Couldn't find openstack rc file: ${OPENRC}, please check if the it's been properly provided." @@ -166,6 +166,6 @@ if [[ ${INSTALLER_TYPE} != "" ]]; then if [[ -f "/tmp/id_rsa" ]]; then info "Path of ssh key file for openstack nodes is /tmp/id_rsa" else - error "Couldn't find the ssh key file for openstack nodes. If you are using user/pwd in pod.yaml, please ignore." + info "Couldn't find the ssh key file for openstack nodes. If you are using user/pwd in pod.yaml, please ignore." fi fi diff --git a/utils/infra_setup/heat/common.py b/utils/infra_setup/heat/common.py index a0d6d83c..f0512b0f 100755 --- a/utils/infra_setup/heat/common.py +++ b/utils/infra_setup/heat/common.py @@ -66,14 +66,16 @@ def get_session_auth(): def get_session(): auth = get_session_auth() - try: - cacert = os.environ['OS_CACERT'] - except KeyError: - return session.Session(auth=auth) - else: - insecure = os.getenv('OS_INSECURE', '').lower() == 'true' - cacert = False if insecure else cacert + if os.getenv('OS_INSECURE', '').lower() == 'true': + cacert = False return session.Session(auth=auth, verify=cacert) + else: + try: + cacert = os.environ['OS_CACERT'] + except KeyError: + return session.Session(auth=auth) + else: + return session.Session(auth=auth, verify=cacert) def get_endpoint(service_type, endpoint_type='publicURL'): diff --git a/utils/k8s_setup/__init__.py b/utils/k8s_setup/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/utils/k8s_setup/__init__.py diff --git a/utils/k8s_setup/golang_install.sh b/utils/k8s_setup/golang_install.sh new file mode 100644 index 00000000..06c54cee --- /dev/null +++ b/utils/k8s_setup/golang_install.sh @@ -0,0 +1,100 @@ +#!/bin/bash +############################################################################## +# Copyright (c) 2018 Huawei Technologies Co.,Ltd and others. +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## +usage="Script to install and config golang of specific version. + +usage: + bash $(basename "$0") [-h|--help] [-v|--version <version>] [--debug] + +where: + -h|--help show the help text + -v|--version input the version of golang + --debug debug option switch +examples: + $(basename "$0") -v 1.10.3" + +# Debug option +redirect="/dev/null" + +# Process input variables +while [[ $# > 0 ]] + do + key="$1" + case $key in + -h|--help) + echo "$usage" + exit 0 + shift + ;; + -v|--version) + GOLANG_VERSION="$2" + shift + ;; + --debug) + redirect="/dev/stdout" + shift + ;; + *) + echo "unkown option $1 $2" + exit 1 + ;; + esac + shift +done + +#set -e + +echo "=======Downloading golang of version: ${GOLANG_VERSION}========" + +if [[ -f go${GOLANG_VERSION}.linux-amd64.tar.gz ]]; then + rm go${GOLANG_VERSION}.linux-amd64.tar.gz +fi +curl -O https://storage.googleapis.com/golang/go${GOLANG_VERSION}.linux-amd64.tar.gz >${redirect} + +echo "Installing golang of version: ${GOLANG_VERSION}" +if [[ -d /usr/local/go ]]; then + rm -rf /usr/local/go +fi + +tar -C /usr/local -xzf go${GOLANG_VERSION}.linux-amd64.tar.gz >${redirect} + +if [[ -d $HOME/go ]]; then + rm -rf ${HOME}/go + mkdir ${HOME}/go + mkdir ${HOME}/go/bin + mkdir ${HOME}/go/src +else + mkdir ${HOME}/go + mkdir ${HOME}/go/bin + mkdir ${HOME}/go/src +fi + +echo "Adding golang env to ~/.bashrc" +GOROOT=/usr/local/go +GOPATH=${HOME}/go + +if [[ $(cat ${HOME}/.bashrc | grep GOROOT) ]]; then + echo "golang env alreay in ${HOME}/.bashrc" +else + cat <<EOF >> ${HOME}/.bashrc + +export GOROOT=/usr/local/go +export GOPATH=${HOME}/go +export PATH=${PATH}:${GOROOT}/bin:${GOPATH}/bin +EOF +fi + +export GOROOT=/usr/local/go +export GOPATH=${HOME}/go +export PATH=${PATH}:${GOROOT}/bin:${GOPATH}/bin + +echo "Running go version command:" +go version + +echo "=======Installation of golang-${GOLANG_VERSION} complete=======" + diff --git a/utils/k8s_setup/k8s_config_pre.sh b/utils/k8s_setup/k8s_config_pre.sh new file mode 100644 index 00000000..f41ba78d --- /dev/null +++ b/utils/k8s_setup/k8s_config_pre.sh @@ -0,0 +1,66 @@ +#!/bin/bash +############################################################################## +# Copyright (c) 2018 Huawei Tech and others. +# +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## +K8S_CONFIG="/tmp/k8s_conig" + +usage="Script to prepare kubenetes test configurations. + +usage: + bash $(basename "$0") [-h|--help] [-i|--installer <installer typer>] [-c|--config <k8s config>] + +where: + -h|--help show the help text + -i|--installer specify the installer for the system to be monitored + <installer type> + one of the following: + (compass) +examples: + $(basename "$0") -i compass" + + +info () { + logger -s -t "BOTTLENECKS INFO" "$*" +} + +error () { + logger -s -t "BOTTLENECKS ERROR" "$*" + exit 1 +} + +# Process input variables +while [[ $# > 0 ]] + do + key="$1" + case $key in + -h|--help) + echo "$usage" + exit 0 + shift + ;; + -i|--installer) + INSTALLER_TYPE="$2" + shift + ;; + -c|--config) + K8S_CONFIG="$2" + shift + ;; + *) + error "unkown input options $1 $2" + exit 1 + ;; + esac + shift +done + +if [[ ${INSTALLER_TYPE} == 'compass' ]]; then + sshpass -p root scp root@192.16.1.222:~/.kube/config ${K8S_CONFIG} +else + echo "BOTTLENECKS EROOR: unrecognized installer" +fi diff --git a/utils/k8s_setup/k8s_env.sh b/utils/k8s_setup/k8s_env.sh new file mode 100644 index 00000000..855dea2f --- /dev/null +++ b/utils/k8s_setup/k8s_env.sh @@ -0,0 +1,13 @@ +#!/bin/bash +############################################################################## +# Copyright (c) 2018 Huawei Technologies Co.,Ltd and others. +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## + +export GOROOT=/usr/local/go +export GOPATH=${HOME}/go +export PATH=${PATH}:${GOROOT}/bin:${GOPATH}/bin + diff --git a/utils/k8s_setup/k8s_utils.py b/utils/k8s_setup/k8s_utils.py new file mode 100644 index 00000000..7195bf23 --- /dev/null +++ b/utils/k8s_setup/k8s_utils.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python +############################################################################## +# Copyright (c) 2018 Huawei Technologies Co.,Ltd and others. +# +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## + +import os +import utils.logger as log +from kubernetes import client, watch + + +LOG = log.Logger(__name__).getLogger() +INSTALLER_TYPE = os.getenv("INSTALLER_TYPE") + + +def get_config_path(INSTALLER_TYPE=None, CONFIG_PATH="/tmp/k8s_config"): + if INSTALLER_TYPE: + CMD = "bash k8s_config_pre.sh -i " + INSTALLER_TYPE + \ + " -c " + CONFIG_PATH + LOG.info("Executing command: " + CMD) + CONFIG_PATH = os.popen(CMD) + else: + if not os.path.exists(CONFIG_PATH): + raise Exception("Must at least specify the path \ +of k8s config!") + return CONFIG_PATH + + +def get_core_api(version='v1'): + if version.lower() == 'v1': + API = client.CoreV1Api() + LOG.info(API) + else: + raise Exception("Must input a validate verison!") + return API + + +def watch_namespace(namespace, count=3, stop=None, request_timeout=0): + w = watch.Watch() + LOG.debug("Watch object generated: {}".format(w)) + LOG.info("Watch stream generated: {}".format( + w.stream(namespace, _request_timeout=request_timeout))) + for event in w.stream(namespace, _request_timeout=request_timeout): + LOG.info("Event: %s %s" % + (event['type'], event['object'].metadata.name)) + if event['object'].metadata.name == stop: + LOG.info("Namesapce successfully added.\n") + w.stop() + count -= 1 + if not count: + LOG.info("Ended.\n") + w.stop() diff --git a/utils/k8s_setup/kubectl_install.sh b/utils/k8s_setup/kubectl_install.sh new file mode 100644 index 00000000..14f97f2b --- /dev/null +++ b/utils/k8s_setup/kubectl_install.sh @@ -0,0 +1,33 @@ +#!/bin/bash +############################################################################## +# Copyright (c) 2018 Huawei Tech and others. +# +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## + +OS_TYPE=$(uname -a) +OS_UBUNTU=$(echo $OS_TYPE | grep ubuntu) + +if [[ $OS_UBUNTU ]]; then + apt-get update && apt-get install -y apt-transport-https + curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - + touch /etc/apt/sources.list.d/kubernetes.list + echo "deb http://apt.kubernetes.io/ kubernetes-xenial main" | tee -a /etc/apt/sources.list.d/kubernetes.list + apt-get update + apt-get install -y kubectl +else + cat <<EOF > /etc/yum.repos.d/kubernetes.repo +[kubernetes] +name=Kubernetes +baseurl=https://packages.cloud.google.com/yum/repos/kubernetes-el7-x86_64 +enabled=1 +gpgcheck=1 +repo_gpgcheck=1 +gpgkey=https://packages.cloud.google.com/yum/doc/yum-key.gpg https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg +EOF + yum install -y kubectl +fi + |