path: root/vstf/vstf/controller
Diffstat (limited to 'vstf/vstf/controller')
-rwxr-xr-x  vstf/vstf/controller/__init__.py  15
-rwxr-xr-x  vstf/vstf/controller/api_server.py  403
-rwxr-xr-x  vstf/vstf/controller/database/__init__.py  14
-rwxr-xr-x  vstf/vstf/controller/database/constants.py  53
-rwxr-xr-x  vstf/vstf/controller/database/dbinterface.py  567
-rwxr-xr-x  vstf/vstf/controller/database/tables.py  291
-rwxr-xr-x  vstf/vstf/controller/env_build/README  15
-rwxr-xr-x  vstf/vstf/controller/env_build/__init__.py  14
-rwxr-xr-x  vstf/vstf/controller/env_build/cfg_intent_parse.py  130
-rwxr-xr-x  vstf/vstf/controller/env_build/env_build.py  77
-rwxr-xr-x  vstf/vstf/controller/env_build/env_collect.py  30
-rwxr-xr-x  vstf/vstf/controller/fabricant.py  49
-rwxr-xr-x  vstf/vstf/controller/reporters/README  109
-rwxr-xr-x  vstf/vstf/controller/reporters/__init__.py  14
-rwxr-xr-x  vstf/vstf/controller/reporters/mail/__init__.py  14
-rwxr-xr-x  vstf/vstf/controller/reporters/mail/mail.py  117
-rwxr-xr-x  vstf/vstf/controller/reporters/mail/sendmail.py  64
-rwxr-xr-x  vstf/vstf/controller/reporters/report/__init__.py  14
-rwxr-xr-x  vstf/vstf/controller/reporters/report/data_factory.py  494
-rwxr-xr-x  vstf/vstf/controller/reporters/report/html/__init__.py  14
-rwxr-xr-x  vstf/vstf/controller/reporters/report/html/html_base.py  42
-rwxr-xr-x  vstf/vstf/controller/reporters/report/html/html_text.py  68
-rwxr-xr-x  vstf/vstf/controller/reporters/report/html/htmlcreator.py  117
-rwxr-xr-x  vstf/vstf/controller/reporters/report/pdf/__init__.py  14
-rwxr-xr-x  vstf/vstf/controller/reporters/report/pdf/element.py  781
-rwxr-xr-x  vstf/vstf/controller/reporters/report/pdf/pdfcreator.py  446
-rwxr-xr-x  vstf/vstf/controller/reporters/report/pdf/pdftemplate.py  107
-rwxr-xr-x  vstf/vstf/controller/reporters/report/pdf/story.py  191
-rwxr-xr-x  vstf/vstf/controller/reporters/report/pdf/styles.py  198
-rwxr-xr-x  vstf/vstf/controller/reporters/report/provider/__init__.py  14
-rwxr-xr-x  vstf/vstf/controller/reporters/report/provider/html_provider.py  63
-rwxr-xr-x  vstf/vstf/controller/reporters/reporter.py  110
-rwxr-xr-x  vstf/vstf/controller/res/Traffic-types.gif  bin 0 -> 160486 bytes
-rwxr-xr-x  vstf/vstf/controller/res/Traffic-types.jpg  bin 0 -> 541886 bytes
-rwxr-xr-x  vstf/vstf/controller/res/__init__.py  14
-rwxr-xr-x  vstf/vstf/controller/res/deployment/Ti-direct.gif  bin 0 -> 41247 bytes
-rwxr-xr-x  vstf/vstf/controller/res/deployment/Ti-direct.jpg  bin 0 -> 147185 bytes
-rwxr-xr-x  vstf/vstf/controller/res/deployment/Ti.gif  bin 0 -> 61062 bytes
-rwxr-xr-x  vstf/vstf/controller/res/deployment/Ti.jpg  bin 0 -> 221926 bytes
-rwxr-xr-x  vstf/vstf/controller/res/deployment/Tn.gif  bin 0 -> 57362 bytes
-rwxr-xr-x  vstf/vstf/controller/res/deployment/Tn.jpg  bin 0 -> 213515 bytes
-rwxr-xr-x  vstf/vstf/controller/res/deployment/Tnv.gif  bin 0 -> 89853 bytes
-rwxr-xr-x  vstf/vstf/controller/res/deployment/Tnv.jpg  bin 0 -> 308215 bytes
-rwxr-xr-x  vstf/vstf/controller/res/deployment/Tu.gif  bin 0 -> 56592 bytes
-rwxr-xr-x  vstf/vstf/controller/res/deployment/Tu.jpg  bin 0 -> 183475 bytes
-rwxr-xr-x  vstf/vstf/controller/res/iperf/Ti-3.gif  bin 0 -> 83575 bytes
-rwxr-xr-x  vstf/vstf/controller/res/iperf/Ti-3.jpg  bin 0 -> 277597 bytes
-rwxr-xr-x  vstf/vstf/controller/res/logo.jpg  bin 0 -> 544271 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Ti-1.gif  bin 0 -> 71843 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Ti-1.jpg  bin 0 -> 251078 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Ti-2.gif  bin 0 -> 73543 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Ti-2.jpg  bin 0 -> 249443 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Ti-direct-1.gif  bin 0 -> 53683 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Ti-direct-1.jpg  bin 0 -> 177600 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Ti-direct-2.gif  bin 0 -> 51763 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Ti-direct-2.jpg  bin 0 -> 183484 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Tn-1.gif  bin 0 -> 66762 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Tn-1.jpg  bin 0 -> 244190 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Tn-1v.gif  bin 0 -> 93153 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Tn-1v.jpg  bin 0 -> 327571 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Tn-2.gif  bin 0 -> 77273 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Tn-2.jpg  bin 0 -> 269108 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Tn-2v.gif  bin 0 -> 103475 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Tn-2v.jpg  bin 0 -> 353221 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Tu-1.gif  bin 0 -> 67571 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Tu-1.jpg  bin 0 -> 224490 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Tu-2.gif  bin 0 -> 67834 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Tu-2.jpg  bin 0 -> 224479 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Tu-3.gif  bin 0 -> 74698 bytes
-rwxr-xr-x  vstf/vstf/controller/res/pktgen/Tu-3.jpg  bin 0 -> 243850 bytes
-rwxr-xr-x  vstf/vstf/controller/res/spirent/Tn-1.gif  bin 0 -> 53330 bytes
-rwxr-xr-x  vstf/vstf/controller/res/spirent/Tn-1.jpg  bin 0 -> 207261 bytes
-rwxr-xr-x  vstf/vstf/controller/res/spirent/Tn-1v.gif  bin 0 -> 76052 bytes
-rwxr-xr-x  vstf/vstf/controller/res/spirent/Tn-1v.jpg  bin 0 -> 289977 bytes
-rwxr-xr-x  vstf/vstf/controller/res/spirent/Tn-2.gif  bin 0 -> 53711 bytes
-rwxr-xr-x  vstf/vstf/controller/res/spirent/Tn-2.jpg  bin 0 -> 210919 bytes
-rwxr-xr-x  vstf/vstf/controller/res/spirent/Tn-2v.gif  bin 0 -> 72834 bytes
-rwxr-xr-x  vstf/vstf/controller/res/spirent/Tn-2v.jpg  bin 0 -> 286486 bytes
-rwxr-xr-x  vstf/vstf/controller/settings/README  61
-rwxr-xr-x  vstf/vstf/controller/settings/__init__.py  14
-rwxr-xr-x  vstf/vstf/controller/settings/cpu_settings.py  63
-rwxr-xr-x  vstf/vstf/controller/settings/data_settings.py  31
-rwxr-xr-x  vstf/vstf/controller/settings/device_settings.py  18
-rwxr-xr-x  vstf/vstf/controller/settings/flows_settings.py  122
-rwxr-xr-x  vstf/vstf/controller/settings/forwarding_settings.py  18
-rwxr-xr-x  vstf/vstf/controller/settings/html_settings.py  51
-rwxr-xr-x  vstf/vstf/controller/settings/mail_settings.py  121
-rwxr-xr-x  vstf/vstf/controller/settings/perf_settings.py  102
-rwxr-xr-x  vstf/vstf/controller/settings/settings.py  286
-rwxr-xr-x  vstf/vstf/controller/settings/settings_input.py  44
-rwxr-xr-x  vstf/vstf/controller/settings/tester_settings.py  18
-rwxr-xr-x  vstf/vstf/controller/settings/tool_settings.py  84
-rwxr-xr-x  vstf/vstf/controller/spirent/__init__.py  14
-rwxr-xr-x  vstf/vstf/controller/spirent/appliance.py  92
-rwxr-xr-x  vstf/vstf/controller/spirent/common/__init__.py  14
-rwxr-xr-x  vstf/vstf/controller/spirent/common/model.py  462
-rwxr-xr-x  vstf/vstf/controller/spirent/common/result_analysis.py  172
-rwxr-xr-x  vstf/vstf/controller/sw_perf/README  39
-rwxr-xr-x  vstf/vstf/controller/sw_perf/__init__.py  14
-rwxr-xr-x  vstf/vstf/controller/sw_perf/flow_producer.py  137
-rwxr-xr-x  vstf/vstf/controller/sw_perf/model.py  190
-rwxr-xr-x  vstf/vstf/controller/sw_perf/perf_provider.py  209
-rwxr-xr-x  vstf/vstf/controller/sw_perf/performance.py  396
-rwxr-xr-x  vstf/vstf/controller/sw_perf/raw_data.py  124
-rwxr-xr-x  vstf/vstf/controller/unittest/README  49
-rwxr-xr-x  vstf/vstf/controller/unittest/__init__.py  14
-rwxr-xr-x  vstf/vstf/controller/unittest/configuration.py  17
-rwxr-xr-x  vstf/vstf/controller/unittest/model.py  27
-rwxr-xr-x  vstf/vstf/controller/unittest/test_cfg_intent_parse.py  32
-rwxr-xr-x  vstf/vstf/controller/unittest/test_collect.py  41
-rwxr-xr-x  vstf/vstf/controller/unittest/test_driver_function.py  27
-rwxr-xr-x  vstf/vstf/controller/unittest/test_env_build.py  55
-rwxr-xr-x  vstf/vstf/controller/unittest/test_perf.py  120
-rwxr-xr-x  vstf/vstf/controller/unittest/test_ssh.py  32
-rwxr-xr-x  vstf/vstf/controller/vstfadm.py  270
115 files changed, 8243 insertions, 0 deletions
diff --git a/vstf/vstf/controller/__init__.py b/vstf/vstf/controller/__init__.py
new file mode 100755
index 00000000..4dc8a6aa
--- /dev/null
+++ b/vstf/vstf/controller/__init__.py
@@ -0,0 +1,15 @@
+# Copyright Huawei Technologies Co., Ltd. 1998-2015.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the License); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an AS IS BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
diff --git a/vstf/vstf/controller/api_server.py b/vstf/vstf/controller/api_server.py
new file mode 100755
index 00000000..d3547011
--- /dev/null
+++ b/vstf/vstf/controller/api_server.py
@@ -0,0 +1,403 @@
+import uuid
+import time
+import os
+import sys
+import logging
+import signal
+import json
+
+from vstf.common import unix, message, cliutil, excepts
+from vstf.common.vstfcli import VstfParser
+from vstf.common.log import setup_logging
+from vstf.common import daemon
+from vstf.rpc_frame_work import rpc_producer
+from vstf.controller.fabricant import Fabricant
+from vstf.agent.env.basic.commandline import CommandLine
+from vstf.controller.env_build.env_build import EnvBuildApi as Builder
+from vstf.controller.env_build.env_collect import EnvCollectApi
+from vstf.controller.database.dbinterface import DbManage
+import vstf.controller.sw_perf.performance as pf
+from vstf.controller.settings.tester_settings import TesterSettings
+from vstf.controller.settings.device_settings import DeviceSettings
+from vstf.controller.settings.flows_settings import FlowsSettings
+from vstf.controller.settings.mail_settings import MailSettings
+from vstf.controller.settings.tool_settings import ToolSettings
+from vstf.controller.settings.perf_settings import PerfSettings
+from vstf.controller.sw_perf.perf_provider import PerfProvider
+from vstf.controller.sw_perf.flow_producer import FlowsProducer
+import vstf.controller.reporters.reporter as rp
+import vstf.common.constants as cst
+import vstf.common.check as chk
+
+LOG = logging.getLogger(__name__)
+cmd = CommandLine()
+
+
+class OpsChains(object):
+ def __init__(self, monitor, port):
+ """The ops chains will setup the proxy to rabbitmq
+ and setup a thread to watch the queues of rabbitmq
+
+ """
+ super(OpsChains, self).__init__()
+ if not os.path.exists(cst.VSTFCPATH):
+ os.mkdir(cst.VSTFCPATH)
+
+ LOG.info("VSTF Manager start to listen to %s", monitor)
+ self.chanl = rpc_producer.Server(host=monitor, port=port)
+ self.dbconn = DbManage()
+ self.collection = EnvCollectApi(self.chanl)
+
+ def list_devs(self, **kwargs):
+ target = kwargs.get('host')
+ if not target:
+ respond = "the target is empty, not support now."
+ else:
+ respond = self.chanl.call(self.chanl.make_msg("list_nic_devices"), target)
+ return respond
+
+ def src_install(self, host, config_file):
+ if not os.path.exists(config_file):
+ raise Exception("Can not found the config file.")
+ cfg = json.load(open(config_file))
+ msg = self.chanl.make_msg("src_install", cfg=cfg)
+ return self.chanl.call(msg, host, timeout=1000)
+
+ def create_images(self, host, config_file):
+ if not os.path.exists(config_file):
+ raise Exception("Can not found the config file.")
+ cfg = json.load(open(config_file))
+ msg = self.chanl.make_msg("create_images", cfg=cfg)
+ return self.chanl.call(msg, host, timeout=1000)
+
+ def clean_images(self, host, config_file):
+ if not os.path.exists(config_file):
+ raise Exception("Can not found the config file.")
+ cfg = json.load(open(config_file))
+ msg = self.chanl.make_msg("clean_images", cfg=cfg)
+ return self.chanl.call(msg, host, timeout=1000)
+
+ def apply_model(self, host, model=None, config_file=None):
+ if config_file is None:
+ config_file = "/etc/vstf/env/%s.json" % model
+ if not os.path.exists(config_file):
+ raise Exception("Can not found the config file.")
+ env = Builder(self.chanl, config_file)
+ ret = env.build()
+ return ret
+
+ def disapply_model(self, host, model=None, config_file=None):
+ if config_file is None:
+ config_file = "/etc/vstf/env/%s.json" % model
+ if not os.path.exists(config_file):
+ raise Exception("Can not found the config file.")
+ env = Builder(self.chanl, config_file)
+ ret = env.clean()
+ return ret
+
+ def list_tasks(self):
+ ret = self.dbconn.query_tasks()
+ head = [["Task ID", "Task Name", "Task Date", "Task Remarks"]]
+ if ret:
+ ret = head + ret
+ return ret
+
+ def affctl_list(self, host):
+ if not host:
+ return "Need input the host"
+ return Fabricant(host, self.chanl).affctl_list()
+
+ def _create_task(self, scenario):
+ taskid = self.dbconn.create_task(str(uuid.uuid4()), time.strftime(cst.TIME_FORMAT),
+ desc=scenario + "Test")
+ LOG.info("new Task id:%s" % taskid)
+ if -1 == taskid:
+ raise Exception("DB create task failed.")
+
+ device = DeviceSettings().settings
+ hosts = [device["host"], device["tester"]]
+ for host in hosts:
+ LOG.info(host)
+
+ devs = host["devs"][0]
+ keys = ["bdf", "iface", "mac"]
+ key = devs.keys()[0]
+ if key in keys:
+ name = devs[key]
+ else:
+ raise Exception("error devs :%s", devs)
+
+ query = Fabricant(host["agent"], self.chanl)
+ nic_info = query.get_device_detail(identity=name)
+
+ LOG.info(nic_info)
+
+ os_info, cpu_info, mem_info, hw_info = self.collection.collect_host_info(host["agent"])
+ LOG.info(os_info)
+ LOG.info(cpu_info)
+ LOG.info(mem_info)
+ LOG.info(hw_info)
+
+ self.dbconn.add_host_2task(taskid,
+ host["agent"],
+ json.dumps(hw_info[cst.HW_INFO]),
+ json.dumps(cpu_info[cst.CPU_INFO]),
+ json.dumps(mem_info[cst.MEMORY_INFO]),
+ nic_info["desc"],
+ json.dumps(os_info[cst.OS_INFO]))
+
+ self.dbconn.add_extent_2task(taskid, "CETH", "driver", "version 2.0")
+ self.dbconn.add_extent_2task(taskid, "EVS", "switch", "version 3.0")
+ return taskid
+
+ def settings(self, mail=False, perf=False):
+ LOG.info("mail:%s, perf:%s" % (mail, perf))
+ if mail:
+ MailSettings().input()
+ if perf:
+ PerfSettings().input()
+
+ def report(self, rpath='./', mail_off=False, taskid=-1):
+ report = rp.Report(self.dbconn, rpath)
+ if taskid == -1:
+ taskid = self.dbconn.get_last_taskid()
+ report.report(taskid, mail_off)
+ info_str = "do report over"
+ return info_str
+
+ def run_perf_cmd(self, case, rpath='./', affctl=False, build_on=False, save_on=False, report_on=False, mail_on=False):
+ LOG.info(case)
+ LOG.info("build_on:%s report_on:%s mail_on:%s" % (build_on, report_on, mail_on))
+ casetag = case['case']
+ tool = case['tool']
+ protocol = case['protocol']
+ profile = case['profile']
+ ttype = case['type']
+ sizes = case['sizes']
+
+ ret, ret_str = chk.check_case_params(protocol, ttype, tool)
+ if not ret:
+ return ret_str
+
+ scenario = self.dbconn.query_scenario(casetag)
+ LOG.info(scenario)
+ if not scenario:
+ LOG.warn("not support the case:%s", casetag)
+ return
+
+ config_file = os.path.join("/etc/vstf/env", scenario + ".json")
+
+ LOG.info(config_file)
+ env = Builder(self.chanl, config_file)
+ if build_on:
+ env.build()
+ flows_settings = FlowsSettings()
+ tool_settings = ToolSettings()
+ tester_settings = TesterSettings()
+ flow_producer = FlowsProducer(self.chanl, flows_settings)
+ provider = PerfProvider(flows_settings.settings, tool_settings.settings, tester_settings.settings)
+
+ perf = pf.Performance(self.chanl, provider)
+ flow_producer.create(scenario, casetag)
+ result = perf.run(tool, protocol, ttype, sizes, affctl)
+ LOG.info(flows_settings.settings)
+ LOG.info(result)
+ if save_on:
+ taskid = self._create_task(scenario)
+ testid = self.dbconn.add_test_2task(taskid, casetag, protocol, profile, ttype, tool)
+ LOG.info(testid)
+ self.dbconn.add_data_2test(testid, result)
+ if report_on:
+ self.report(rpath, not mail_on, taskid)
+ return result
+
+ def run_perf_file(self, rpath='./', affctl=False, report_on=True, mail_on=True):
+ perf_settings = PerfSettings()
+ flows_settings = FlowsSettings()
+ tool_settings = ToolSettings()
+ tester_settings = TesterSettings()
+ flow_producer = FlowsProducer(self.chanl, flows_settings)
+ provider = PerfProvider(flows_settings.settings, tool_settings.settings, tester_settings.settings)
+ perf = pf.Performance(self.chanl, provider)
+ tests = perf_settings.settings
+
+ for scenario, cases in tests.items():
+ LOG.info(scenario)
+ if not cases:
+ continue
+
+ config_file = os.path.join("/etc/vstf/env", scenario + ".json")
+
+ LOG.info(config_file)
+ env = Builder(self.chanl, config_file)
+ env.build()
+
+ taskid = self._create_task(scenario)
+
+ for case in cases:
+ LOG.info(case)
+ casetag = case['case']
+ tool = case['tool']
+ protocol = case['protocol']
+ profile = case['profile']
+ ttype = case['type']
+ sizes = case['sizes']
+
+ ret, ret_str = chk.check_case_params(protocol, ttype, tool)
+ if not ret:
+ LOG.warn(ret_str)
+ continue
+
+ flow_producer.create(scenario, casetag)
+ result = perf.run(tool, protocol, ttype, sizes, affctl)
+ LOG.info(result)
+
+ testid = self.dbconn.add_test_2task(taskid, casetag, protocol, profile, ttype, tool)
+ LOG.info(testid)
+
+ self.dbconn.add_data_2test(testid, result)
+
+ if report_on:
+ self.report(rpath, not mail_on, taskid)
+
+ info_str = "do batch perf test successfully"
+ return info_str
+
+ def collect_host_info(self, target):
+ if self.collection is not None:
+ return self.collection.collect_host_info(target)
+ else:
+ return "collection is None"
+
+
+class Manager(daemon.Daemon):
+ def __init__(self):
+ """
+        The manager creates a socket for vstfadm
+        and owns an OpsChains instance.
+ """
+ super(Manager, self).__init__(cst.vstf_pid)
+ # the connection of socket
+ self.conn = None
+ # the operations of manager
+ self.ops = None
+ # record the daemon run flag
+ self.run_flag = True
+
+ def deal_unknown_obj(self, obj):
+ return "unknown response %s" % obj
+
+ def run(self):
+ signal.signal(signal.SIGTERM, self.daemon_die)
+ # setup the socket server for communicating with vstfadm
+ try:
+ self.conn = unix.UdpServer()
+ self.conn.bind()
+ self.conn.listen()
+ except Exception as e:
+ raise e
+
+ # accept the connection of vstfadm and recv the command
+ # run the command from vstfadm and return the response
+ while self.run_flag:
+ conn, addr = self.conn.accept()
+ LOG.debug("accept the conn: %(conn)s", {'conn': conn})
+
+ # recv the msg until the conn break.
+
+ while True:
+ try:
+ data = message.recv(conn.recv)
+ LOG.debug("Manager recv the msg: %(msg)s", {'msg': data})
+ msg = message.decode(data)
+ body = message.get_body(msg)
+ context = message.get_context(msg)
+ except RuntimeError:
+ LOG.debug("manage catch the connection close!")
+ break
+ except Exception as e:
+ LOG.error("Manager recv message from socket failed.")
+ self.daemon_die()
+ raise e
+
+ try:
+ func = getattr(self.ops, body.get('method'))
+ LOG.info("Call function:%s, args:%s",
+ func.__name__, body.get('args'))
+ response = func(**body.get('args'))
+ LOG.info("response: %s", response)
+ except excepts.UnsolvableExit as e:
+ msg = "The manager opps, exit now"
+ LOG.error(msg)
+ # the manager has no need to be continue, just return
+ # this msg and exit normal
+ self.daemon_die()
+ raise e
+ except Exception as e:
+                    # only this function call failed, no need to exit; just return the msg
+ msg = "Run function failed. [ %s ]" % (e)
+ response = msg
+ LOG.error(msg)
+ try:
+ response = message.add_context(response, **context)
+ LOG.debug("Manager send the response: <%(r)s", {'r': response})
+ message.send(conn.send, message.encode(response))
+ except Exception as e:
+ self.daemon_die()
+ raise e
+ # close the connection when conn down
+ conn.close()
+
+    def daemon_die(self, signum=None, frame=None):
+        """Overwrite daemon.Daemon.daemon_die(self); also called internally with no arguments."""
+        LOG.info("manager caught signal %s, exiting." % signum)
+ if self.conn:
+            # we cannot close the conn directly; just tell the manager to stop accepting
+ self.run_flag = False
+
+ if self.ops:
+            # stop the ops proxy
+            # an AttributeError: 'BlockingConnection' object has no attribute 'disconnect' may occur;
+            # this is a known bug in pika, fixed in the 0.9.14 release
+ try:
+ self.ops.chanl.close()
+ except AttributeError:
+ LOG.warning("The connection close happens attribute error")
+
+ def start_manage(self, monitor="localhost", port=5672):
+ try:
+            # creating the manager's ops chains here sets up a proxy to rabbitmq
+ self.ops = OpsChains(monitor, port)
+ except Exception as e:
+ raise e
+ self.start()
+
+ def stop_manage(self):
+ self.stop()
+
+
+@cliutil.arg("--monitor",
+ dest="monitor",
+ default="localhost",
+ action="store",
+ help="which ip to be monitored")
+@cliutil.arg("--port",
+ dest="port",
+ default="5672",
+ action="store",
+ help="rabbitmq conn server")
+def do_start(args):
+ Manager().start_manage(args.monitor, args.port)
+
+
+def do_stop(args):
+ Manager().stop_manage()
+
+
+def main():
+ """this is for vstfctl"""
+ setup_logging(level=logging.INFO, log_file="/var/log/vstf/vstf-manager.log", clevel=logging.INFO)
+ parser = VstfParser(prog="vstf-manager", description="vstf manager command line")
+ parser.set_subcommand_parser(target=sys.modules[__name__])
+ args = parser.parse_args()
+ args.func(args)
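
Note: run_perf_cmd() above consumes a single-case dict whose keys ('case', 'tool', 'protocol', 'profile', 'type', 'sizes') are unpacked at the top of the method. Below is a minimal sketch of driving OpsChains directly with such a dict; the concrete values (the frame-size list, the 'rdp' profile) are illustrative assumptions borrowed from the database unit test later in this patch, not a documented invocation:

    # hypothetical example; normally vstfadm sends this dict to the manager daemon
    case = {
        "case": "Tn-1",                  # case tag defined in database/constants.py
        "tool": "netperf",               # traffic tool
        "protocol": "udp",
        "profile": "rdp",                # provider label stored with the test
        "type": "throughput",            # throughput | frameloss | latency
        "sizes": [64, 128, 512, 1024],   # assumed frame-size sweep
    }
    ops = OpsChains(monitor="localhost", port=5672)
    print ops.run_perf_cmd(case, rpath="./", build_on=True, save_on=True)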
diff --git a/vstf/vstf/controller/database/__init__.py b/vstf/vstf/controller/database/__init__.py
new file mode 100755
index 00000000..89dcd4e2
--- /dev/null
+++ b/vstf/vstf/controller/database/__init__.py
@@ -0,0 +1,14 @@
+# Copyright Huawei Technologies Co., Ltd. 1998-2015.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the License); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an AS IS BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
diff --git a/vstf/vstf/controller/database/constants.py b/vstf/vstf/controller/database/constants.py
new file mode 100755
index 00000000..d1aef398
--- /dev/null
+++ b/vstf/vstf/controller/database/constants.py
@@ -0,0 +1,53 @@
+SCENARIO_NAME_LEN = 16
+DESC_LEN = 256
+FIGURE_PATH_LEN = 128
+
+CASE_TAG_LEN = 16
+CASE_NAME_LEN = 128
+DIRECTION_LEN = 128
+CONF_LEN = 256
+
+HOST_NAME_LEN = 32
+
+CPU_INFO_LEN = 1024
+
+NORMAL_VAR_LEN = 32
+NORMAL_VAR_LEN1 = 64
+
+PROTOCOL_LEN = 16
+PROVIDER_LEN = 16
+
+TOOLS_LEN = 32
+TYPE_LEN = 16
+
+EXT_INFO_LEN = 256
+DBPATH = "/opt/vstf/vstf.db"
+# CaseTag, ScenarioName, CaseName, FigurePath, Description, Direction, Directiontag, Configure
+CASE_INFO_LIST = [
+ ['Ti-1', 'Ti', 'Ti_VM_RX_Tester-VM', 'res/', ' ', 'Tester->VM', 'tx', 'w/,wo VLAN'],
+ ['Ti-2', 'Ti', 'Ti_VM_TX_VM-Tester', 'res/', ' ', 'VM->Tester', 'rx', 'w/,wo VLAN'],
+ ['Ti-3', 'Ti', 'Ti_VM_RXTX_VM-Tester', 'res/', ' ', 'Tester<->VM', 'rxtx', 'w/,wo VLAN'],
+ ['Ti-4', 'Ti', 'Ti_VM_RX_Tester-VM_VXLAN', 'res/', ' ', 'Tester->VM', 'tx', 'VXLAN'],
+ ['Ti-5', 'Ti', 'Ti_VM_TX_VM-Tester_VXLAN', 'res/', ' ', 'VM->Tester', 'rx', 'VXLAN'],
+ ['Ti-6', 'Ti', 'Ti_VM_RXTX_VM-Tester_VXLAN', 'res/', ' ', 'Tester<->VM', 'rxtx', 'VXLAN'],
+ ['Tu-1', 'Tu', 'Tu_VM_RX_VM-VM', 'res/', ' ', 'Tester->VM', 'tx', 'w/,wo VLAN'],
+ ['Tu-2', 'Tu', 'Tu_VM_TX_VM-VM', 'res/', ' ', 'VM->Tester', 'rx', 'w/,wo VLAN'],
+ ['Tu-3', 'Tu', 'Tu_VM_RXTX_VM-VM', 'res/', ' ', 'Tester<->VM', 'rxtx', 'w/,wo VLAN'],
+ ['Tu-4', 'Tu', 'Tu_VM_RX_VM-VM_VXLAN', 'res/', ' ', 'Tester->VM', 'tx', 'VXLAN'],
+ ['Tu-5', 'Tu', 'Tu_VM_TX_VM-VM_VXLAN', 'res/', ' ', 'VM->Tester', 'rx', 'VXLAN'],
+ ['Tu-6', 'Tu', 'Tu_VM_RXTX_VM-VM_VXLAN', 'res/', ' ', 'VM<->Tester', 'rxtx', 'VXLAN'],
+ ['Tn-1', 'Tn', 'Tn_VSW_FWD_Tester-Tester', 'res/', ' ', 'Tester->Tester', 'tx', 'w/,wo VLAN'],
+ ['Tn-2', 'Tn', 'Tn_VSW_FWD-BI_Tester-Tester', 'res/', ' ', 'Tester<->Tester', 'rxtx', 'w/,wo VLAN'],
+ ['Tn-3', 'Tn', 'Tn_VSW_FWD_Tester-Tester_VXLAN', 'res/', ' ', 'Tester->Tester', 'tx', 'VXLAN'],
+ ['Tn-4', 'Tn', 'Tn_VSW_FWD-BI_Tester-Tester_VXLAN', 'res/', ' ', 'Tester<->Tester', 'rxtx', 'VXLAN'],
+ ['Tnv-1', 'Tnv', 'TnV_VSW_FWD_Tester-Tester', 'res/', ' ', 'Tester->Tester', 'tx', 'w/,wo VLAN'],
+ ['Tnv-2', 'Tnv', 'TnV_VSW_FWD-BI_Tester-Tester', 'res/', ' ', 'Tester<->Tester', 'rxtx', 'w/,wo VLAN'],
+ ['Tnv-3', 'Tnv', 'TnV_VSW_FWD_Tester-Tester_VXLAN', 'res/', ' ', 'Tester->Tester', 'tx', 'VXLAN'],
+ ['Tnv-4', 'Tnv', 'TnV_VSW_FWD-BI_Tester-Tester_VXLAN', 'res/', ' ', 'Tester<->Tester', 'rxtx', 'VXLAN']
+]
+SCENARIO_INFO_LIST = [
+ ['Ti', 'res/', ' '],
+ ['Tu', 'res/', ' '],
+ ['Tn', 'res/', ' '],
+ ['Tnv', 'res/', ' '],
+]
diff --git a/vstf/vstf/controller/database/dbinterface.py b/vstf/vstf/controller/database/dbinterface.py
new file mode 100755
index 00000000..ae34c861
--- /dev/null
+++ b/vstf/vstf/controller/database/dbinterface.py
@@ -0,0 +1,567 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-07-29
+# see license for license details
+__version__ = ''' '''
+import os
+import logging
+
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+from sqlalchemy import and_
+from vstf.controller.database.tables import *
+
+LOG = logging.getLogger(__name__)
+
+"""
+@event.listens_for(Engine, "before_cursor_execute")
+def before_cursor_execute(conn, cursor, statement,
+ parameters, context, executemany):
+ conn.info.setdefault('query_start_time', []).append(time.time())
+ logging.debug("Start Query: %s", statement)
+@event.listens_for(Engine, "after_cursor_execute")
+def after_cursor_execute(conn, cursor, statement,
+ parameters, context, executemany):
+ total = time.time() - conn.info['query_start_time'].pop(-1)
+ logging.debug("Query Complete!")
+ logging.debug("Total Time: %f", total)"""
+
+
+class DbManage(object):
+ def __init__(self, db_name=const.DBPATH):
+ db_exists = os.path.exists(db_name)
+ try:
+ self._engine = create_engine('sqlite:///%s' % db_name, echo=False)
+ db_session = sessionmaker(bind=self._engine)
+ self._session = db_session()
+ except Exception as e:
+ raise e
+
+        # if the db is new, create all tables and init the static tables
+ if not db_exists:
+ self.create_tables()
+ self.init_tables()
+
+    def __del__(self):
+        self._engine.dispose()
+
+ def create_tables(self):
+ Base.metadata.create_all(self._engine)
+ self._session.commit()
+
+ def drop_tables(self):
+ Base.metadata.drop_all(self._engine)
+ self._session.commit()
+
+ def init_tables(self):
+ self.init_casetable()
+ self.init_scenario_table()
+ self._session.commit()
+
+ def init_scenario_table(self):
+ items = []
+ for values in const.SCENARIO_INFO_LIST:
+ item = TblScenarioInfo(ScenarioName=values[0],
+ FigurePath=values[1],
+ Description=values[2])
+ items.append(item)
+ self._session.add_all(items)
+
+ # Single TblCaseInfo API
+ def init_casetable(self):
+ items = []
+ for values in const.CASE_INFO_LIST:
+ item = TblCaseInfo(CaseTag=values[0],
+ ScenarioName=values[1],
+ CaseName=values[2],
+ FigurePath=values[3],
+ Description=values[4],
+ Direction=values[5],
+ Directiontag=values[6],
+ Configure=values[7])
+ items.append(item)
+ self._session.add_all(items)
+
+ def query_caseinfo(self):
+ query = self._session.query(TblCaseInfo.ScenarioName,
+ TblCaseInfo.CaseTag,
+ TblCaseInfo.CaseName,
+ TblCaseInfo.Direction,
+ TblCaseInfo.Configure)
+ return query.all()
+
+    def query_case(self, casetag):
+        query = self._session.query(TblCaseInfo.ScenarioName,
+                                     TblCaseInfo.Directiontag).filter(TblCaseInfo.CaseTag == casetag)
+        return query.first()
+
+ # Single TblTaskList API
+ def get_last_taskid(self):
+ query = self._session.query(TblTaskList.TaskID)
+ if query:
+ return query.all()[-1][0]
+ else:
+ return 0
+
+ def query_tasklist(self):
+ query = self._session.query(TblTaskList)
+ return query.all()
+
+ def query_taskdate(self, taskid):
+ query = self._session.query(TblTaskList.Date).filter(and_(
+ TblTaskList.TaskID == taskid))
+ result = ""
+ if query:
+ result += query.first()[0]
+ return result
+
+ def query_taskname(self, taskid):
+ query = self._session.query(TblTaskList.TaskName).filter(and_(
+ TblTaskList.TaskID == taskid))
+ result = ""
+ if query:
+ result += query.first()[0]
+ return result
+
+ def create_task(self, name, date, desc):
+ try:
+ item = TblTaskList(name, date, desc)
+ self._session.add(item)
+ self._session.commit()
+ except Exception:
+ return -1
+
+ return self.get_last_taskid()
+
+ # Single TblHostInfo API
+ def add_host_2task(self, taskid, name, machine, cpu, men, nic, os):
+ """All var except task must be string"""
+ item = TblHostInfo(taskid, name, machine, cpu, men, nic, os)
+
+ self._session.add(item)
+ self._session.commit()
+
+ def query_task_host_list(self, taskid):
+ query = self._session.query(TblHostInfo.HostName,
+ TblHostInfo.Server,
+ TblHostInfo.CPU,
+ TblHostInfo.MEM,
+ TblHostInfo.NIC,
+ TblHostInfo.OS).filter(
+ TblHostInfo.TaskID == taskid)
+ return query.all()
+
+ # Single TblTestList API
+ def get_last_testid(self):
+ query = self._session.query(TblTestList.TestID)
+ if query:
+ return query.all()[-1][0]
+ else:
+ return 0
+
+ def add_test_2task(self, task, case, protocol, provider, typ, tool):
+ try:
+ item = TblTestList(task, case, protocol, provider, typ, tool)
+ self._session.add(item)
+ self._session.commit()
+ except Exception:
+ return -1
+
+ return self.get_last_testid()
+
+ def get_test_type(self, testid):
+ query = self._session.query(TblTestList.Type).filter(
+ TblTestList.TestID == testid)
+ return query.first()
+
+ def add_extent_2task(self, task, name, content, description):
+ item = TblEXTInfo(task, name, content, description)
+ self._session.add(item)
+ self._session.commit()
+
+ def add_data_2test(self, testid, data):
+ """
+ :data example {'64':{
+ 'AvgFrameSize':0
+ 'OfferedLoad':0
+ 'PercentLoss':0
+ 'Bandwidth':0
+ 'MinimumLatency':0
+ 'MaximumLatency':0
+ 'AverageLatency':0
+ 'TxFrameCount':0
+ 'RxFrameCount':0
+ 'Duration':0
+ 'CPU':0
+ 'MppspGhz':0
+ }}
+ """
+ ptype = self.get_test_type(testid)
+ instance_map = {
+ 'throughput': TblThroughput,
+ 'frameloss': TblFrameloss,
+ 'latency': TblLatency
+ }
+
+        if not ptype or ptype[0] not in instance_map:
+            print "cannot find this test(id=%d)" % (testid)
+            return False
+
+ test_table_instance = instance_map[ptype[0]]
+ for pktlen in data.iterkeys():
+ args = data.get(pktlen)
+ query = self._session.query(test_table_instance).filter(and_(
+ test_table_instance.TestID == testid,
+ test_table_instance.AvgFrameSize == pktlen))
+ if query.all():
+ data_dict = {}
+                for key, value in args.items():
+ if key in test_table_instance.__dict__:
+ data_dict[test_table_instance.__dict__[key]] = value
+ query.update(data_dict)
+ else:
+ print args
+ tester = test_table_instance(testid, pktlen, **args)
+ self._session.add(tester)
+ self._session.commit()
+
+ def query_tasks(self):
+ result = []
+ ret = self._session.query(TblTaskList)
+ if ret:
+ for tmp in ret.all():
+ result.append([tmp.TaskID, tmp.TaskName, tmp.Date, tmp.EXTInfo])
+ return result
+
+ def query_all_task_id(self):
+ query = self._session.query(TblTaskList.TaskID)
+ if query:
+ return query.all()
+ else:
+ return []
+
+ def get_caseinfo(self):
+ query = self._session.query(TblCaseInfo.ScenarioName,
+ TblCaseInfo.CaseTag,
+ TblCaseInfo.CaseName,
+ TblCaseInfo.Direction,
+ TblCaseInfo.Configure)
+ return query.all()
+
+ def query_scenario(self, casetag):
+ query = self._session.query(TblCaseInfo.ScenarioName).filter(TblCaseInfo.CaseTag == casetag)
+ ret = ""
+ if query and query.first():
+ ret = query.first()[0]
+ return ret
+
+ def query_casefigure(self, casetag, tools):
+ query = self._session.query(TblCaseInfo.FigurePath).filter(and_(
+ TblCaseInfo.CaseTag == casetag))
+ result = ""
+ if query:
+ result += query.first()[0]
+ print tools, casetag
+ result += tools + '/' + casetag + '.jpg'
+ return result
+
+ def query_casename(self, casetag):
+ query = self._session.query(TblCaseInfo.CaseName).filter(and_(
+ TblCaseInfo.CaseTag == casetag))
+ result = ""
+ if query:
+ result += query.first()[0]
+ return result
+
+ # Single TblScenarioInfo API
+
+ def query_caselist(self, taskid, scenario):
+ query = self._session.query(TblTestList.CaseTag).filter(and_(
+ TblTestList.CaseTag == TblCaseInfo.CaseTag,
+ TblCaseInfo.ScenarioName == scenario,
+ TblTestList.TaskID == taskid)).group_by(TblCaseInfo.CaseTag)
+ return query.all()
+
+ def query_casetool(self, taskid, casetag, provider, ptype):
+ query = self._session.query(TblTestList.Tools).filter(and_(
+ TblTestList.TaskID == taskid,
+ TblTestList.CaseTag == casetag,
+ TblTestList.Provider == provider,
+ TblTestList.Type == ptype))
+ return query.all()
+
+ def query_casetools(self, taskid, casetag):
+ query = self._session.query(TblTestList.Tools).filter(and_(
+ TblTestList.CaseTag == casetag,
+ TblTestList.TaskID == taskid)).group_by(TblTestList.Tools)
+ return query.all()
+
+ def query_scenariolist(self, taskid):
+ query = self._session.query(TblCaseInfo.ScenarioName).filter(and_(
+ TblTestList.CaseTag == TblCaseInfo.CaseTag,
+ TblTestList.TaskID == taskid)).group_by(TblCaseInfo.ScenarioName)
+ return query.all()
+
+ def query_throughput_load(self, taskid, casetag, provider):
+ ptype = 'throughput'
+ query = self._session.query(TblThroughput.AvgFrameSize, TblThroughput.OfferedLoad).filter(and_(
+ TblTestList.TaskID == taskid,
+ TblTestList.CaseTag == casetag,
+ TblTestList.Provider == provider, TblTestList.Type == ptype,
+ TblTestList.TestID == TblThroughput.TestID))
+ return query.all()
+
+ def query_throughput_bandwidth(self, taskid, casetag, provider):
+ ptype = 'throughput'
+ query = self._session.query(TblThroughput.AvgFrameSize, TblThroughput.Bandwidth).filter(and_(
+ TblTestList.TaskID == taskid,
+ TblTestList.CaseTag == casetag,
+ TblTestList.Provider == provider, TblTestList.Type == ptype,
+ TblTestList.TestID == TblThroughput.TestID))
+ return query.all()
+
+ def query_throughput_table(self, taskid, casetag, provider):
+ ptype = 'throughput'
+ query = self._session.query(TblThroughput.AvgFrameSize,
+ TblThroughput.Bandwidth,
+ TblThroughput.OfferedLoad,
+ TblThroughput.CPU,
+ TblThroughput.MppspGhz,
+ TblThroughput.MinimumLatency,
+ TblThroughput.MaximumLatency,
+ TblThroughput.AverageLatency,
+ ).filter(and_(
+ TblTestList.TaskID == taskid,
+ TblTestList.CaseTag == casetag,
+ TblTestList.Provider == provider, TblTestList.Type == ptype,
+ TblTestList.TestID == TblThroughput.TestID))
+ return query.all()
+
+ def query_throughput_simpletable(self, taskid, casetag, provider):
+ ptype = 'throughput'
+ query = self._session.query(TblThroughput.AvgFrameSize,
+ TblThroughput.Bandwidth,
+ TblThroughput.OfferedLoad,
+ TblThroughput.CPU,
+ TblThroughput.MppspGhz,
+ TblThroughput.AverageLatency,
+ ).filter(and_(
+ TblTestList.TaskID == taskid,
+ TblTestList.CaseTag == casetag,
+ TblTestList.Provider == provider, TblTestList.Type == ptype,
+ TblTestList.TestID == TblThroughput.TestID))
+ return query.all()
+
+ def query_throughput_avg(self, taskid, casetag, provider):
+ ptype = 'throughput'
+ query = self._session.query(TblThroughput.AvgFrameSize, TblThroughput.AverageLatency).filter(and_(
+ TblTestList.TaskID == taskid,
+ TblTestList.CaseTag == casetag,
+ TblTestList.Provider == provider, TblTestList.Type == ptype,
+ TblTestList.TestID == TblThroughput.TestID))
+ return query.all()
+
+ def query_frameloss_bandwidth(self, taskid, casetag, provider):
+ ptype = 'frameloss'
+ query = self._session.query(TblFrameloss.AvgFrameSize, TblFrameloss.Bandwidth).filter(and_(
+ TblTestList.TaskID == taskid,
+ TblTestList.CaseTag == casetag,
+ TblTestList.Provider == provider, TblTestList.Type == ptype,
+ TblTestList.TestID == TblFrameloss.TestID))
+ return query.all()
+
+ def query_frameloss_load(self, taskid, casetag, provider):
+ ptype = 'frameloss'
+ query = self._session.query(TblFrameloss.AvgFrameSize, TblFrameloss.OfferedLoad).filter(and_(
+ TblTestList.TaskID == taskid,
+ TblTestList.CaseTag == casetag,
+ TblTestList.Provider == provider, TblTestList.Type == ptype,
+ TblTestList.TestID == TblFrameloss.TestID))
+ return query.all()
+
+ def query_frameloss_table(self, taskid, casetag, provider):
+ ptype = 'frameloss'
+ query = self._session.query(TblFrameloss.AvgFrameSize,
+ TblFrameloss.Bandwidth,
+ TblFrameloss.OfferedLoad,
+ TblFrameloss.CPU,
+ TblFrameloss.MppspGhz,
+ TblFrameloss.MinimumLatency,
+ TblFrameloss.MaximumLatency,
+ TblFrameloss.AverageLatency,
+ ).filter(and_(
+ TblTestList.TaskID == taskid,
+ TblTestList.CaseTag == casetag,
+ TblTestList.Provider == provider, TblTestList.Type == ptype,
+ TblTestList.TestID == TblFrameloss.TestID))
+ return query.all()
+
+ def query_frameloss_simpletable(self, taskid, casetag, provider):
+ ptype = 'frameloss'
+ query = self._session.query(TblFrameloss.AvgFrameSize,
+ TblFrameloss.Bandwidth,
+ TblFrameloss.OfferedLoad,
+ TblFrameloss.CPU,
+ TblFrameloss.MppspGhz,
+ TblFrameloss.AverageLatency,
+ ).filter(and_(
+ TblTestList.TaskID == taskid,
+ TblTestList.CaseTag == casetag,
+ TblTestList.Provider == provider, TblTestList.Type == ptype,
+ TblTestList.TestID == TblFrameloss.TestID))
+ return query.all()
+
+ def query_frameloss_avg(self, taskid, casetag, provider):
+ ptype = 'frameloss'
+ query = self._session.query(TblFrameloss.AvgFrameSize, TblFrameloss.AverageLatency).filter(and_(
+ TblTestList.TaskID == taskid,
+ TblTestList.CaseTag == casetag,
+ TblTestList.Provider == provider, TblTestList.Type == ptype,
+ TblTestList.TestID == TblFrameloss.TestID))
+ return query.all()
+
+ def query_latency_avg(self, taskid, casetag, provider):
+ ptype = 'latency'
+ query = self._session.query(TblLatency.AvgFrameSize, TblLatency.AverageLatency).filter(and_(
+ TblTestList.TaskID == taskid,
+ TblTestList.CaseTag == casetag,
+ TblTestList.Provider == provider, TblTestList.Type == ptype,
+ TblTestList.TestID == TblLatency.TestID))
+ return query.all()
+
+ def query_summary_table(self, taskid, casetag, provider, ptype):
+ if ptype in ['throughput', 'frameloss']:
+ qfunc = getattr(self, "query_%s_table" % (ptype))
+ return qfunc(taskid, casetag, provider)
+ return []
+
+ def query_summary_simpletable(self, taskid, casetag, provider, ptype):
+ if ptype in ['throughput', 'frameloss']:
+ qfunc = getattr(self, "query_%s_simpletable" % (ptype))
+ return qfunc(taskid, casetag, provider)
+ return []
+
+ def query_bandwidth(self, taskid, casetag, provider, ptype):
+ if ptype in ['throughput', 'frameloss']:
+ qfunc = getattr(self, "query_%s_bandwidth" % (ptype))
+ return qfunc(taskid, casetag, provider)
+ return []
+
+ def query_load(self, taskid, casetag, provider, ptype):
+ if ptype in ['throughput', 'frameloss']:
+ qfunc = getattr(self, "query_%s_load" % (ptype))
+ return qfunc(taskid, casetag, provider)
+ return []
+
+ def query_avglatency(self, taskid, casetag, provider, ptype):
+ if ptype in ['throughput', 'frameloss', 'latency']:
+ qfunc = getattr(self, "query_%s_avg" % (ptype))
+ return qfunc(taskid, casetag, provider)
+ return []
+
+ def query_throughput_provider(self, taskid, casetag, provider):
+ query = self._session.query(TblThroughput).filter(and_(TblTestList.CaseTag == casetag,
+ TblTestList.Provider == provider,
+ TblTestList.TaskID == taskid,
+ TblTestList.TestID == TblThroughput.TestID))
+ return query.all()
+
+ def query_frameloss_provider(self, taskid, casetag, provider):
+ query = self._session.query(TblFrameloss).filter(and_(TblTestList.CaseTag == casetag,
+ TblTestList.Provider == provider,
+ TblTestList.TaskID == taskid,
+ TblTestList.TestID == TblFrameloss.TestID))
+ return query.all()
+
+ def query_latency_provider(self, taskid, casetag, provider):
+ query = self._session.query(TblLatency).filter(and_(TblTestList.CaseTag == casetag,
+ TblTestList.Provider == provider,
+ TblTestList.TaskID == taskid,
+ TblTestList.TestID == TblLatency.TestID))
+ return query.all()
+
+ def query_case_type_count(self, taskid, casetag, ptype):
+ query = self._session.query(TblTestList).filter(and_(TblTestList.CaseTag == casetag,
+ TblTestList.Type == ptype, TblTestList.TaskID == taskid))
+
+ return query.count()
+
+ def query_case_provider_count(self, taskid, casetag, provider):
+ query = self._session.query(TblTestList).filter(and_(TblTestList.CaseTag == casetag,
+ TblTestList.Provider == provider,
+ TblTestList.TaskID == taskid))
+ return query.count()
+
+ def query_case_type_provider_count(self, taskid, casetag, provider, ptype):
+ query = self._session.query(TblTestList).filter(and_(TblTestList.CaseTag == casetag,
+ TblTestList.Type == ptype,
+ TblTestList.Provider == provider,
+ TblTestList.TaskID == taskid))
+
+ return query.count()
+
+ def query_exten_info(self, taskid):
+ query = self._session.query(TblEXTInfo.EXTName,
+ TblEXTInfo.EXTContent,
+ TblEXTInfo.Description).filter(TblEXTInfo.TaskID == taskid)
+ return query.all()
+
+
+def unit_test():
+ import time
+ dbase = DbManage()
+
+ taskid = dbase.create_task("test", str(time.ctime()), "this is a unit test")
+ dbase.add_host_2task(taskid, "hosta", "hw82576", "xxx", "x", "82599", "ubuntu")
+ dbase.add_extent_2task(taskid, "CETH", "driver", "version 2.0")
+ dbase.add_extent_2task(taskid, "EVS", "switch", "version 3.0")
+
+ testid = dbase.add_test_2task(taskid, "Tn-1", 'udp', "rdp", "throughput", "netperf")
+ data = {
+ '64': {
+ 'OfferedLoad': 2,
+ 'PercentLoss': 3,
+ 'Bandwidth': 4,
+ 'MinimumLatency': 5,
+ 'MaximumLatency': 6,
+ 'AverageLatency': 7,
+ 'TxFrameCount': 8,
+ 'RxFrameCount': 9,
+ 'Duration': 10,
+ 'CPU': 11,
+ 'MppspGhz': 12,
+ }
+ }
+ dbase.add_data_2test(testid, data)
+
+ testid = dbase.add_test_2task(taskid, "Tn-1", 'udp', "rdp", "frameloss", "netperf")
+ data = {
+ '64': {
+ 'OfferedLoad': 2,
+ 'PercentLoss': 3,
+ 'Bandwidth': 4,
+ 'MinimumLatency': 5,
+ 'MaximumLatency': 6,
+ 'AverageLatency': 7,
+ 'TxFrameCount': 8,
+ 'RxFrameCount': 9,
+ 'Duration': 10,
+ 'CPU': 11,
+ 'MppspGhz': 12,
+ }
+ }
+ dbase.add_data_2test(testid, data)
+
+ testid = dbase.add_test_2task(taskid, "Tn-1", 'udp', "rdp", "latency", "netperf")
+ data = {
+ 64: {'MaximumLatency': 0.0, 'AverageLatency': 0.0, 'MinimumLatency': 0.0, 'OfferedLoad': 0.0},
+ 128: {'MaximumLatency': 0.0, 'AverageLatency': 0.0, 'MinimumLatency': 0.0, 'OfferedLoad': 0.0},
+ 512: {'MaximumLatency': 0.0, 'AverageLatency': 0.0, 'MinimumLatency': 0.0, 'OfferedLoad': 0.0},
+ 1024: {'MaximumLatency': 0.0, 'AverageLatency': 0.0, 'MinimumLatency': 0.0, 'OfferedLoad': 0.0}
+ }
+ dbase.add_data_2test(testid, data)
+
+
+if __name__ == '__main__':
+ unit_test()
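
Note: a minimal sketch of walking the query API above for the most recent task, for example to feed a report. The 'rdp' provider and 'throughput' type are illustrative values taken from unit_test() above; real callers pass whatever provider/type combinations were stored:

    from vstf.controller.database.dbinterface import DbManage

    db = DbManage()
    taskid = db.get_last_taskid()
    for (scenario,) in db.query_scenariolist(taskid):
        for (casetag,) in db.query_caselist(taskid, scenario):
            # summary rows for one case: frame size, bandwidth, load, cpu, latency ...
            rows = db.query_summary_table(taskid, casetag, "rdp", "throughput")
            print casetag, rows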
diff --git a/vstf/vstf/controller/database/tables.py b/vstf/vstf/controller/database/tables.py
new file mode 100755
index 00000000..a7658f49
--- /dev/null
+++ b/vstf/vstf/controller/database/tables.py
@@ -0,0 +1,291 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-07-29
+# see license for license details
+__version__ = ''' '''
+from sqlalchemy import Column, Integer, String, Float, ForeignKey
+from sqlalchemy.ext.declarative import declarative_base
+from vstf.controller.database import constants as const
+
+Base = declarative_base()
+
+
+class TblScenarioInfo(Base):
+ __tablename__ = "TblScenarioInfo"
+ ScenarioID = Column(Integer, primary_key=True)
+ ScenarioName = Column(String(const.SCENARIO_NAME_LEN), unique=True)
+ FigurePath = Column(String(const.FIGURE_PATH_LEN))
+ Description = Column(String(const.DESC_LEN))
+
+ def __init__(self, ScenarioName, FigurePath, Description, **kwargs):
+ """
+ :param ScenarioName: name of the scenario, like Tn
+ :param FigurePath: ??
+ :param Description: desc of scenario table
+ """
+ self.ScenarioName = ScenarioName
+ self.FigurePath = FigurePath
+ self.Description = Description
+
+ def __repr__(self):
+ return "<User(ScenarioName='%s', FigurePath='%s', Description='%s')>" % (
+ self.ScenarioName, self.FigurePath, self.Description)
+
+
+class TblCaseInfo(Base):
+ __tablename__ = "TblCaseInfo"
+ CaseID = Column(Integer, primary_key=True)
+ CaseTag = Column(String(const.CASE_TAG_LEN), unique=True)
+ CaseName = Column(String(const.CASE_NAME_LEN), unique=True)
+ ScenarioName = Column(String(const.SCENARIO_NAME_LEN))
+ FigurePath = Column(String(const.FIGURE_PATH_LEN))
+ Direction = Column(String(const.DIRECTION_LEN))
+ Directiontag = Column(String(const.DIRECTION_LEN))
+ Configure = Column(String(const.CONF_LEN))
+ Description = Column(String(const.DESC_LEN))
+
+ def __init__(self, CaseTag, CaseName,
+ ScenarioName, FigurePath, Direction, Directiontag,
+ Configure, Description, **kwargs):
+ """
+ :param CaseID:
+ :param CaseTag: ??
+ :param CaseName: name of case, like tester-vm
+ :param ScenarioName: name of scenario, like Tn
+ :param FigurePath:
+ :param Direction: the test direction, Tx or Rx
+ :param Configure:
+ :param Description: desc of table case info
+ """
+        # CaseID is auto-generated by the db
+ self.CaseTag = CaseTag
+ self.CaseName = CaseName
+ self.ScenarioName = ScenarioName
+ self.FigurePath = FigurePath
+ self.Direction = Direction
+ self.Directiontag = Directiontag
+ self.Configure = Configure
+ self.Description = Description
+
+ def __repr__(self):
+ return "<User(CaseTag='%s', CaseName='%s',ScenarioName='%s',FigurePath='%s', Direction='%s', \
+ Directiontag='%s', Configure='%s', Description='%s')>" % (self.CaseTag, self.CaseName,
+ self.ScenarioName, self.FigurePath,
+ self.Direction, self.Directiontag, self.Configure,
+ self.Description)
+
+
+class TblHostInfo(Base):
+ __tablename__ = "TblHostInfo"
+ Index = Column(Integer, primary_key=True)
+ TaskID = Column(Integer, ForeignKey('TblTaskList.TaskID'))
+ HostName = Column(String(const.HOST_NAME_LEN))
+ Server = Column(String(const.NORMAL_VAR_LEN1))
+ CPU = Column(String(const.CPU_INFO_LEN))
+ MEM = Column(String(const.NORMAL_VAR_LEN))
+ NIC = Column(String(const.NORMAL_VAR_LEN))
+ OS = Column(String(const.NORMAL_VAR_LEN))
+
+ def __init__(self, TaskID, HostName, Server, CPU, MEM, NIC, OS, **kwargs):
+ """table of host info
+ """
+ self.TaskID = TaskID
+ self.HostName = HostName
+ self.Server = Server
+ self.CPU = CPU
+ self.MEM = MEM
+ self.NIC = NIC
+ self.OS = OS
+
+ def __repr__(self):
+ return "<User(HostName='%s', Server='%s', CPU='%s', MEM='%s', NIC='%s',\
+ OS='%s')>" % (self.HostName, self.Server, self.CPU, self.MEM, self.NIC, self.OS)
+
+
+class TblTaskList(Base):
+ __tablename__ = "TblTaskList"
+ TaskID = Column(Integer, primary_key=True)
+ TaskName = Column(String(const.NORMAL_VAR_LEN1))
+ Date = Column(String(const.NORMAL_VAR_LEN1))
+ EXTInfo = Column(String(const.EXT_INFO_LEN))
+
+ def __init__(self, TaskName, Date, EXTInfo="", **kwargs):
+ """Table of task"""
+ self.TaskName = TaskName
+ self.Date = Date
+ self.EXTInfo = EXTInfo
+
+ def __repr__(self):
+ return "<User(TaskID='%s', TaskName='%s', Date='%s', EXTInfo='%s')>" % (
+ self.TaskID, self.TaskName, self.Date, self.EXTInfo)
+
+
+class TblTestList(Base):
+ __tablename__ = "TblTestList"
+ TestID = Column(Integer, primary_key=True)
+ TaskID = Column(Integer, ForeignKey('TblTaskList.TaskID'))
+ CaseTag = Column(String(const.CASE_TAG_LEN))
+ Protocol = Column(String(const.PROTOCOL_LEN))
+ Provider = Column(String(const.PROVIDER_LEN))
+ Type = Column(String(const.TYPE_LEN))
+ Tools = Column(String(const.TOOLS_LEN))
+
+ def __init__(self, taskid, casetag, protocol, provider, typ, tools, **kwargs):
+ """Table of test"""
+ self.TaskID = taskid
+ self.CaseTag = casetag
+ self.Protocol = protocol
+ self.Provider = provider
+ self.Type = typ
+ self.Tools = tools
+
+ def __repr__(self):
+ return "<User(TaskID='%d', CaseTag='%s', Protocol='%s', Provider=%s, Type='%s', Tools='%s')>" % (
+ self.TaskID, self.CaseTag, self.Protocol, self.Provider, self.Type, self.Tools)
+
+
+class TblThroughput(Base):
+ __tablename__ = "TblThroughput"
+ Index = Column(Integer, primary_key=True)
+ TestID = Column(Integer, ForeignKey('TblTestList.TestID'))
+ AvgFrameSize = Column(Integer)
+ OfferedLoad = Column(Float)
+ PercentLoss = Column(Float)
+ Bandwidth = Column(Float)
+ MinimumLatency = Column(Float)
+ MaximumLatency = Column(Float)
+ AverageLatency = Column(Float)
+ TxFrameCount = Column(Float)
+ RxFrameCount = Column(Float)
+ Duration = Column(Float)
+ CPU = Column(Float)
+ MppspGhz = Column(Float)
+
+ def __init__(self, TestID, AvgFrameSize,
+ OfferedLoad, PercentLoss, Bandwidth,
+ MinimumLatency, MaximumLatency, AverageLatency,
+ TxFrameCount, RxFrameCount, Duration,
+ CPU, MppspGhz, **kwargs):
+ """table of throughput"""
+ self.TestID = TestID
+ self.AvgFrameSize = AvgFrameSize
+ self.OfferedLoad = OfferedLoad
+ self.PercentLoss = PercentLoss
+ self.Bandwidth = Bandwidth
+ self.MinimumLatency = MinimumLatency
+ self.MaximumLatency = MaximumLatency
+ self.AverageLatency = AverageLatency
+ self.TxFrameCount = TxFrameCount
+ self.RxFrameCount = RxFrameCount
+ self.Duration = Duration
+ self.CPU = CPU
+ self.MppspGhz = MppspGhz
+
+ def __repr__(self):
+ return "<User(TestID='%d', AvgFrameSize='%d', OfferedLoad='%f', \
+ PercentLoss='%f', MinimumLatency='%f', AverageLatency='%f', MaximumLatency='%f',\
+ TxFrameCount='%f', RxFrameCount='%f', Duration='%f', CPU='%f', MppspGhz='%f', \
+ Bandwidth='%f')>" % (self.TestID,
+ self.AvgFrameSize, self.OfferedLoad, self.PercentLoss,
+ self.MinimumLatency, self.AverageLatency, self.MaximumLatency,
+ self.TxFrameCount,
+ self.RxFrameCount, self.Duration, self.CPU, self.MppspGhz, self.Bandwidth)
+
+
+class TblFrameloss(Base):
+ __tablename__ = "TblFrameloss"
+ Index = Column(Integer, primary_key=True)
+ TestID = Column(Integer, ForeignKey('TblTestList.TestID'))
+ AvgFrameSize = Column(Integer)
+ OfferedLoad = Column(Float)
+ PercentLoss = Column(Float)
+ Bandwidth = Column(Float)
+ MinimumLatency = Column(Float)
+ MaximumLatency = Column(Float)
+ AverageLatency = Column(Float)
+ TxFrameCount = Column(Float)
+ RxFrameCount = Column(Float)
+ Duration = Column(Float)
+ CPU = Column(Float)
+ MppspGhz = Column(Float)
+
+ def __init__(self, TestID, AvgFrameSize,
+ OfferedLoad, PercentLoss, Bandwidth,
+ MinimumLatency, MaximumLatency, AverageLatency,
+ TxFrameCount, RxFrameCount, Duration,
+ CPU, MppspGhz, **kwargs):
+ """table of frameloss"""
+ self.TestID = TestID
+ self.AvgFrameSize = AvgFrameSize
+ self.OfferedLoad = OfferedLoad
+ self.PercentLoss = PercentLoss
+ self.Bandwidth = Bandwidth
+ self.MinimumLatency = MinimumLatency
+ self.MaximumLatency = MaximumLatency
+ self.AverageLatency = AverageLatency
+ self.TxFrameCount = TxFrameCount
+ self.RxFrameCount = RxFrameCount
+ self.Duration = Duration
+ self.CPU = CPU
+ self.MppspGhz = MppspGhz
+
+ def __repr__(self):
+ return "<User(TestID='%d', AvgFrameSize='%d', OfferedLoad='%f', \
+ PercentLoss='%f', MinimumLatency='%f', AverageLatency='%f', MaximumLatency='%f',\
+ TxFrameCount='%f', RxFrameCount='%f', Duration='%f', CPU='%f', MppspGhz='%f', \
+ Bandwidth='%f')>" % (self.TestID,
+ self.AvgFrameSize, self.OfferedLoad, self.PercentLoss,
+ self.MinimumLatency, self.AverageLatency, self.MaximumLatency,
+ self.TxFrameCount,
+ self.RxFrameCount, self.Duration, self.CPU, self.MppspGhz, self.Bandwidth)
+
+
+class TblLatency(Base):
+ __tablename__ = "TblLatency"
+ Index = Column(Integer, primary_key=True)
+ TestID = Column(Integer, ForeignKey('TblTestList.TestID'))
+ AvgFrameSize = Column(Integer)
+ OfferedLoad = Column(Float)
+ MinimumLatency = Column(Float)
+ MaximumLatency = Column(Float)
+ AverageLatency = Column(Float)
+
+ def __init__(self, TestID, AvgFrameSize, OfferedLoad,
+ MinimumLatency, MaximumLatency, AverageLatency, **kwargs):
+ """table of latency"""
+ self.TestID = TestID
+ self.AvgFrameSize = AvgFrameSize
+ self.OfferedLoad = OfferedLoad
+ self.MinimumLatency = MinimumLatency
+ self.MaximumLatency = MaximumLatency
+ self.AverageLatency = AverageLatency
+
+ def __repr__(self):
+ return "<User(TestID='%d', AvgFrameSize='%d', OfferedLoad='%f', \
+ MinimumLatency='%f', AverageLatency='%f', MaximumLatency='%f')>" % (self.TestID,
+ self.AvgFrameSize,
+ self.OfferedLoad,
+ self.MinimumLatency,
+ self.AverageLatency,
+ self.MaximumLatency)
+
+
+class TblEXTInfo(Base):
+ __tablename__ = "TblEXTInfo"
+ Index = Column(Integer, primary_key=True)
+ TaskID = Column(Integer)
+ EXTName = Column(String(const.NORMAL_VAR_LEN))
+ EXTContent = Column(String(const.DESC_LEN))
+ Description = Column(String(const.NORMAL_VAR_LEN1))
+
+ def __init__(self, TaskID, EXTName, EXTContent, Description, **kwargs):
+ """table extern info"""
+ self.TaskID = TaskID
+ self.EXTName = EXTName
+ self.EXTContent = EXTContent
+ self.Description = Description
+
+ def __repr__(self):
+ return "<User(TaskID='%d', CodeType='%s', EXTContent='%s',Version='%s')>" % (
+ self.TaskID, self.EXTName, self.EXTContent, self.Version)
diff --git a/vstf/vstf/controller/env_build/README b/vstf/vstf/controller/env_build/README
new file mode 100755
index 00000000..ecb4e118
--- /dev/null
+++ b/vstf/vstf/controller/env_build/README
@@ -0,0 +1,15 @@
+ env_build.py contains quick test code for creating a virtual network on a remote host.
+
+ usage:
+
+ python env_build.py --rpc_server 192.168.188.10 --config /etc/vstf/env/Tn.json
+
+ --rpc_server RPC_SERVER
+ the rabbitmq server for deliver messages.
+ --config CONFIG
+ env-build config file to parse
+
+The above command builds a 'Tn-type' network according to the config file /etc/vstf/env/Tn.json.
+
+
+
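
For orientation, a sketch of what such an env-build config might contain, written as Python that dumps the JSON file. The field names follow cfg_intent_parse.py further down in this patch; every address, MAC, driver name, the 'ovs' bridge type and the mix of bridges and vms shown are purely illustrative assumptions:

    import json

    # hypothetical config; only the key names are taken from the parser code
    cfg = {
        "env-build": [
            {
                "ip": "192.168.188.14",        # agent host to build on
                "scheme": "libvirt",           # default applied by the parser
                "drivers": ["ixgbe"],          # assumed NIC driver
                "bridges": [{"type": "ovs"}],  # only 'type' is read by the parser
                "vms": [
                    {
                        "ctrl_br": "br0",
                        "taps": [
                            {"br_type": "ovs",
                             "tap_name": "tap01",
                             "tap_mac": "56:6f:44:a5:3f:a2"}
                        ],
                        "init_config": {
                            "ctrl_ip_setting": "192.168.188.200/23",
                            "ctrl_gw": "192.168.188.1",
                            "amqp_server": "192.168.188.10"
                        }
                    }
                ]
            }
        ]
    }

    with open("/etc/vstf/env/Tn.json", "w") as fp:
        json.dump(cfg, fp, indent=4)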
diff --git a/vstf/vstf/controller/env_build/__init__.py b/vstf/vstf/controller/env_build/__init__.py
new file mode 100755
index 00000000..89dcd4e2
--- /dev/null
+++ b/vstf/vstf/controller/env_build/__init__.py
@@ -0,0 +1,14 @@
+# Copyright Huawei Technologies Co., Ltd. 1998-2015.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the License); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an AS IS BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
diff --git a/vstf/vstf/controller/env_build/cfg_intent_parse.py b/vstf/vstf/controller/env_build/cfg_intent_parse.py
new file mode 100755
index 00000000..8c7c10b8
--- /dev/null
+++ b/vstf/vstf/controller/env_build/cfg_intent_parse.py
@@ -0,0 +1,130 @@
+"""
+Created on 2015-10-13
+
+@author: y00228926
+"""
+import json
+import logging
+from vstf.common.utils import randomMAC
+
+LOG = logging.getLogger(__name__)
+
+
+class IntentParser(object):
+ def __init__(self, cfg_file):
+ self.cfg_file = cfg_file
+        with open(cfg_file) as fp:
+ self.cfg_intent = json.load(fp)
+
+ def parse_cfg_file(self):
+ self.set_default()
+ self.parse_br_type()
+ self.parse_vms_cfg()
+ return self.cfg_intent
+
+ def set_default(self):
+ for host_cfg in self.cfg_intent['env-build']:
+ host_cfg.setdefault("scheme", 'libvirt')
+ host_cfg.setdefault("drivers", [])
+ host_cfg.setdefault("vms", [])
+ host_cfg.setdefault("bridges", [])
+ for vm_cfg in host_cfg["vms"]:
+ vm_cfg.setdefault("init_config", {})
+ vm_cfg["init_config"].setdefault('amqp_port', 5672)
+ vm_cfg["init_config"].setdefault('amqp_user', "guest")
+ vm_cfg["init_config"].setdefault('amqp_passwd', "guest")
+ vm_cfg["init_config"].setdefault('amqp_id', "")
+
+ def _nomornize_boolean(self, flag):
+ if isinstance(flag, bool):
+ return flag
+ lflag = flag.lower()
+ if lflag == 'true':
+ return True
+ if lflag == 'false':
+ return False
+ raise Exception("flag %s cannot be nomonized to bool value" % flag)
+
+ def parse_br_type(self):
+ for host_cfg in self.cfg_intent['env-build']:
+ br_cfgs = host_cfg['bridges']
+ br_type_set = set()
+ for br_cfg in br_cfgs:
+ br_type_set.add(br_cfg["type"])
+ for vm_cfg in host_cfg['vms']:
+ for tap_cfg in vm_cfg['taps']:
+ br_type_set.add(tap_cfg["br_type"])
+ if len(br_type_set) > 1:
+ raise Exception("specified more than one type of vswitchfor host:%s" % host_cfg['ip'])
+ if len(br_type_set) > 0:
+ br_type = br_type_set.pop()
+ host_cfg['br_type'] = br_type
+
+ def parse_vms_cfg(self):
+ for host_cfg in self.cfg_intent['env-build']:
+ vm_cfgs = host_cfg["vms"]
+ self._parse_vm_init_cfg(vm_cfgs)
+ self._parse_vm_ctrl_cfg(vm_cfgs)
+ for vm_cfg in vm_cfgs:
+ self._parse_taps_cfg(vm_cfg['taps'])
+
+ def _parse_taps_cfg(self, tap_cfgs):
+ tap_name_set = set()
+ tap_mac_set = set()
+ count = 0
+ for tap_cfg in tap_cfgs:
+ count += 1
+ tap_name_set.add(tap_cfg["tap_mac"])
+ tap_mac_set.add(tap_cfg["tap_name"])
+        if len(tap_mac_set) != count or len(tap_name_set) != count:
+ raise Exception('config same tap_mac/tap_name for different taps')
+ LOG.info("tap_name_set: %s", tap_name_set)
+ LOG.info("tap_mac_set: %s", tap_mac_set)
+
+ def _parse_vm_init_cfg(self, vm_cfgs):
+ count = 0
+ ip_set = set()
+ gw_set = set()
+ required_options = {"ctrl_ip_setting", "ctrl_gw", "amqp_server"}
+ for vm_cfg in vm_cfgs:
+ init_cfg = vm_cfg["init_config"]
+ sub = required_options - set(init_cfg.keys())
+ if sub:
+ raise Exception("unset required options:%s" % sub)
+ count += 1
+ ip_set.add(init_cfg["ctrl_ip_setting"])
+ gw_set.add(init_cfg["ctrl_gw"])
+ if len(gw_set) > 1:
+ raise Exception("cannot config more than one gw for vm")
+ if len(ip_set) < count:
+ raise Exception("config same ip for different vm")
+ LOG.info("ip_set: %s", ip_set)
+ LOG.info("gw_set: %s", gw_set)
+
+ def _parse_vm_ctrl_cfg(self, vm_cfgs):
+ count = 0
+ ctrl_mac_set = set()
+ ctrl_br_set = set()
+ for vm_cfg in vm_cfgs:
+ count += 1
+ vm_cfg.setdefault("ctrl_mac", randomMAC())
+ vm_cfg.setdefault("ctrl_br", 'br0')
+ ctrl_mac_set.add(vm_cfg['ctrl_mac'])
+ ctrl_br_set.add(vm_cfg['ctrl_br'])
+ if len(ctrl_br_set) > 1:
+ raise Exception("cannot config more than one ctrl_br_set.")
+ if len(ctrl_mac_set) < count:
+ raise Exception("config same ctrl_mac_set for different vm.")
+ LOG.info("ctrl_mac_set: %s", ctrl_mac_set)
+ LOG.info("ctrl_br_set: %s", ctrl_br_set)
+
+
+if __name__ == '__main__':
+ import argparse
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--config', help='config file to parse')
+ args = parser.parse_args()
+ logging.basicConfig(level=logging.INFO)
+ p = IntentParser(args.config)
+ LOG.info(json.dumps(p.parse_cfg_file(), indent=4))
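+
+# A minimal cfg_file sketch accepted by IntentParser (the keys are taken from
+# the parsing code above; every value below is illustrative only):
+#
+# {
+#     "env-build": [
+#         {
+#             "ip": "host-1",
+#             "bridges": [{"type": "ovs"}],
+#             "vms": [
+#                 {
+#                     "taps": [{"tap_name": "tap_in", "tap_mac": "56:6f:44:a5:3f:a2", "br_type": "ovs"}],
+#                     "init_config": {
+#                         "ctrl_ip_setting": "192.168.188.200/23",
+#                         "ctrl_gw": "192.168.188.1",
+#                         "amqp_server": "192.168.188.10"
+#                     }
+#                 }
+#             ]
+#         }
+#     ]
+# }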
diff --git a/vstf/vstf/controller/env_build/env_build.py b/vstf/vstf/controller/env_build/env_build.py
new file mode 100755
index 00000000..85ad5d29
--- /dev/null
+++ b/vstf/vstf/controller/env_build/env_build.py
@@ -0,0 +1,77 @@
+"""
+Created on 2015-8-27
+
+@author: y00228926
+"""
+import logging
+
+from vstf.controller.fabricant import Fabricant
+from vstf.rpc_frame_work.rpc_producer import Server
+from vstf.controller.env_build.cfg_intent_parse import IntentParser
+
+LOG = logging.getLogger(__name__)
+
+
+class EnvBuildApi(object):
+ def __init__(self, conn, config_file):
+ LOG.info("welcome to EnvBuilder")
+ self.conn = conn
+ intent_parser = IntentParser(config_file)
+ self.cfg_intent = intent_parser.parse_cfg_file()
+
+ def build(self):
+ LOG.info("start build")
+ for host_cfg in self.cfg_intent['env-build']:
+ rpc = Fabricant(host_cfg['ip'], self.conn)
+ rpc.build_env(timeout=1800, cfg_intent=host_cfg)
+ return True
+
+ def clean(self):
+ for host_cfg in self.cfg_intent['env-build']:
+ rpc = Fabricant(host_cfg['ip'], self.conn)
+ rpc.clean_env(timeout=120)
+ return True
+
+ def get_hosts(self):
+ result = []
+ for host_cfg in self.cfg_intent['env-build']:
+ host = {
+ 'name': host_cfg['ip'],
+ 'nic': "82599ES 10-Gigabit"
+ }
+ result.append(host)
+ return result
+
+
+class TransmitterBuild(object):
+ def __init__(self, conn, config_file):
+ LOG.info("welcome to TransmitterBuild")
+ self.conn = conn
+ self.cfg_intent = config_file["transmitter-build"]
+
+ def build(self):
+ LOG.info("start build")
+ for cfg in self.cfg_intent:
+ rpc = Fabricant(cfg['ip'], self.conn)
+ cfg.setdefault("scheme", 'transmitter')
+ rpc.build_env(timeout=1800, cfg_intent=cfg)
+ return True
+
+ def clean(self):
+ for cfg in self.cfg_intent:
+ rpc = Fabricant(cfg['ip'], self.conn)
+ rpc.clean_env(timeout=10)
+ return True
+
+
+if __name__ == "__main__":
+ import argparse
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--rpc_server', help='rabbitmq server for deliver messages.')
+ parser.add_argument('--config', help='config file to parse')
+ args = parser.parse_args()
+ logging.basicConfig(level=logging.INFO)
+ conn = Server(args.rpc_server)
+ tn = EnvBuildApi(conn, args.config)
+ tn.build()
diff --git a/vstf/vstf/controller/env_build/env_collect.py b/vstf/vstf/controller/env_build/env_collect.py
new file mode 100755
index 00000000..888f71c7
--- /dev/null
+++ b/vstf/vstf/controller/env_build/env_collect.py
@@ -0,0 +1,30 @@
+from vstf.rpc_frame_work import rpc_producer
+
+
+class EnvCollectApi(object):
+ def __init__(self, rb_mq_server):
+ """
+ To use collect, a rabbitmq connection is needed.
+ """
+ super(EnvCollectApi, self).__init__()
+ if rb_mq_server is None:
+ raise Exception("The connection of rabbitmq is None.")
+ self.conn = rb_mq_server
+
+ def collect_host_info(self, host):
+ msg = self.conn.make_msg("collect_host_info")
+ return self.conn.call(msg, host, timeout=2)
+
+ def get_device_detail(self, host, nic_identity):
+ msg = self.conn.make_msg("get_device_detail", identity=nic_identity)
+ return self.conn.call(msg, host, timeout=2)
+
+ def list_nic_devices(self, host):
+ msg = self.conn.make_msg("list_nic_devices")
+ return self.conn.call(msg, host, timeout=2)
+
+
+if __name__ == "__main__":
+ conn = rpc_producer.Server("192.168.188.10")
+ c = EnvCollectApi(conn)
+ print c.collect_host_info("local")
diff --git a/vstf/vstf/controller/fabricant.py b/vstf/vstf/controller/fabricant.py
new file mode 100755
index 00000000..c67bfa19
--- /dev/null
+++ b/vstf/vstf/controller/fabricant.py
@@ -0,0 +1,49 @@
+from vstf.rpc_frame_work import constant as const
+import vstf.common.constants as cst
+
+
+class Fabricant(object):
+ def __init__(self, target, conn):
+ self.conn = conn
+ self.target = target
+
+ self.all_commands = self.declare_commands
+ self.instance_commands()
+
+ @property
+ def declare_commands(self):
+ driver = {"install_drivers", "clean_drivers", "autoneg_on", "autoneg_off", "autoneg_query"}
+
+ builder = {"build_env", "clean_env"}
+
+ cpu = {"affctl_load", "affctl_list", "run_cpuwatch", "kill_cpuwatch"}
+
+ perf = {"perf_run", "run_vnstat", "kill_vnstat", "force_clean"}
+
+ device_mgr = {"get_device_detail", "list_nic_devices", "get_device_verbose"}
+
+ netns = {"clean_all_namespace", "config_dev", "recover_dev", "ping"}
+
+ collect = {"collect_host_info"}
+
+ cmdline = {"execute"}
+
+ spirent = {"send_packet", "stop_flow", "mac_learning", "run_rfc2544suite", "run_rfc2544_throughput",
+ "run_rfc2544_frameloss", "run_rfc2544_latency"}
+
+ equalizer = {"get_numa_core", "get_nic_numa", "get_nic_interrupt_proc", "get_vm_info", "bind_cpu",
+ "catch_thread_info"}
+
+ return driver | cpu | builder | perf | device_mgr | netns | cmdline | collect | spirent | equalizer
+
+ def instance_commands(self):
+ for command in self.all_commands:
+ setattr(self, command, self.__transfer_msg(command))
+
+ def __transfer_msg(self, command):
+ def infunc(timeout=cst.TIMEOUT, **kwargs):
+ msg = self.conn.make_msg(command, **kwargs)
+ return self.conn.call(msg, self.target, timeout)
+
+ infunc.__name__ = command
+ return infunc
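+
+# Example usage (sketch only; the host names, rabbitmq server and timeout below
+# are assumptions, not values taken from this module):
+#
+#     conn = Server("rabbitmq-host")          # vstf.rpc_frame_work.rpc_producer.Server
+#     agent = Fabricant("target-host", conn)
+#     # every name in declare_commands becomes a proxy method that wraps the
+#     # command into an rpc message and calls it on the target agent:
+#     info = agent.collect_host_info(timeout=10)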
diff --git a/vstf/vstf/controller/reporters/README b/vstf/vstf/controller/reporters/README
new file mode 100755
index 00000000..1ed65360
--- /dev/null
+++ b/vstf/vstf/controller/reporters/README
@@ -0,0 +1,109 @@
+Tree
+
+├── __init__.py
+├── mail
+│ ├── __init__.py
+│ ├── mail.py
+│ └── sendmail.py
+├── report
+│ ├── data_factory.py
+│ ├── html
+│ │ ├── html_base.py
+│ │ ├── htmlcreator.py
+│ │ ├── html_text.py
+│ │ └── __init__.py
+│ ├── __init__.py
+│ ├── pdf
+│ │ ├── element.py
+│ │ ├── __init__.py
+│ │ ├── pdfcreator.py
+│ │ ├── pdftemplate.py
+│ │ ├── story.py
+│ │ └── styles.py
+│ └── provider
+│ ├── html_provider.py
+│ └── __init__.py
+└── reporter.py
+
+
+Entry:
+ reporter.py
+
+ usage: reporter.py [-h] [-rpath RPATH] [-mail_off] [--taskid TASKID]
+
+ optional arguments:
+ -h, --help show this help message and exit
+ -rpath RPATH the path name of test results
+ -mail_off do not send the report by mail
+ --taskid TASKID generate the report from the history task with this id
+
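+ Example (the report path and task id below are illustrative only):
+
+ python reporter.py -rpath /tmp/vstf-report --taskid 1
+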
+Settings:
+ mail_settings
+
+ {
+ "server":
+ {
+ "host": "localhost",
+ "username": null,
+ "password": null
+ },
+ "body":
+ {
+ "from": ["vstf_from@vstf.com"],
+ "to": ["vstf_to@vstf.com"],
+ "cc": ["vstf_cc@vstf.com"],
+ "bcc": ["vstf_bcc@vstf.com"],
+ "subject": "Elastic Virtual Switching Performance Test Report"
+ }
+ }
+
+ html_settings
+
+ {
+ "style":{
+ "table":{
+ "font-family":"\"Trebuchet MS\", Arial, Helvetica, sans-serif",
+ "border":"1px solid green",
+ "border-collapse":"collapse",
+ "padding":"8px",
+ "text-align":"center"
+ },
+ "td":{
+ "border":"1px solid green",
+ "padding":"8px",
+ "word-wrap":"break-all"
+ },
+ "th":{
+ "background-color":"#EAF2D3",
+ "border":"1px solid green",
+ "padding":"8px"
+ }
+ }
+ }
+
+ data_settings
+
+ {
+ "ovs":{
+ "content":{
+ "version":3.0
+ },
+ "title":"Ovs info"
+ },
+ "result":{
+ "content":{},
+ "title":"Performance Result"
+ },
+ "subject":"ATF Performance Test Tnv Model"
+ }
+Module:
+ mail
+ html
+ pdf
+
+
+Others:
+ report generation depends on the python package "reportlab"; install it with pip:
+
+ pip install reportlab
+
diff --git a/vstf/vstf/controller/reporters/__init__.py b/vstf/vstf/controller/reporters/__init__.py
new file mode 100755
index 00000000..89dcd4e2
--- /dev/null
+++ b/vstf/vstf/controller/reporters/__init__.py
@@ -0,0 +1,14 @@
+# Copyright Huawei Technologies Co., Ltd. 1998-2015.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the License); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an AS IS BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
diff --git a/vstf/vstf/controller/reporters/mail/__init__.py b/vstf/vstf/controller/reporters/mail/__init__.py
new file mode 100755
index 00000000..89dcd4e2
--- /dev/null
+++ b/vstf/vstf/controller/reporters/mail/__init__.py
@@ -0,0 +1,14 @@
+# Copyright Huawei Technologies Co., Ltd. 1998-2015.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the License); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an AS IS BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
diff --git a/vstf/vstf/controller/reporters/mail/mail.py b/vstf/vstf/controller/reporters/mail/mail.py
new file mode 100755
index 00000000..42d60b1a
--- /dev/null
+++ b/vstf/vstf/controller/reporters/mail/mail.py
@@ -0,0 +1,117 @@
+import smtplib
+import logging
+import os
+from email.mime.application import MIMEApplication
+from email.mime.text import MIMEText
+from email.mime.multipart import MIMEMultipart
+
+LOG = logging.getLogger(__name__)
+SRV = 'localhost'
+USER = None
+PASSWD = None
+
+
+class Mail(object):
+ def __init__(self, srv=SRV, user=USER, passwd=PASSWD):
+ self.srv = srv
+ self.user = user
+ self.passwd = passwd
+ self._msg = MIMEMultipart('mixed')
+
+ # addr type
+ self.TO = "To"
+ self.FROM = "From"
+ self.CC = "Cc"
+ self.BCC = "Bcc"
+ self.__addr_choice = [self.TO, self.FROM, self.CC, self.BCC]
+
+ # text mode
+ self.HTML = "html"
+ self.PLAIN = "plain"
+ self.__mode = [self.HTML, self.PLAIN]
+ # self._charset = 'gb2312'
+
+ # timeout
+ self.timeout = 10
+
+ def attach_addr(self, addr, addr_type):
+ """
+ :param addr: a list of email address.
+ :param addr_type: must be one of [to, from, cc, bcc]
+ """
+ if not addr or not isinstance(addr, list):
+ LOG.error("The addr must be a list")
+ return False
+
+ if addr_type not in self.__addr_choice:
+ LOG.error("Not support addr type")
+ return False
+
+ if not self._msg[addr_type]:
+ self._msg[addr_type] = ','.join(addr)
+ else:
+ # append to the already attached addresses instead of discarding the new ones
+ self._msg.replace_header(addr_type, ','.join([self._msg[addr_type]] + addr))
+
+ def attach_title(self, title):
+ """Notice:
+ each time attach title, the old title will be covered.
+ """
+ if title:
+ self._msg["Subject"] = str(title)
+
+ def attach_text(self, text, mode):
+ if mode not in self.__mode:
+ LOG.error("The text mode not support.")
+ return False
+
+ msg_alternative = MIMEMultipart('alternative')
+ msg_text = MIMEText(text, mode)
+ msg_alternative.attach(msg_text)
+
+ return self._msg.attach(msg_alternative)
+
+ def attach_files(self, files):
+ for _file in files:
+ part = MIMEApplication(open(_file, "rb").read())
+ part.add_header('Content-Disposition', 'attachment', filename=os.path.basename(_file))
+ self._msg.attach(part)
+
+ def send(self):
+ server = smtplib.SMTP(self.srv, timeout=self.timeout)
+ if self.user:
+ server.ehlo()
+ server.starttls()
+ server.ehlo()
+ server.login(self.user, self.passwd)
+ maillist = []
+ if self._msg[self.TO]:
+ maillist += self._msg[self.TO].split(',')
+ if self._msg[self.CC]:
+ maillist += self._msg[self.CC].split(',')
+ if self._msg[self.BCC]:
+ maillist += self._msg[self.BCC].split(',')
+ # smtplib expects a single envelope sender address, not a list
+ ret = server.sendmail(self._msg[self.FROM].split(',')[0],
+ maillist, self._msg.as_string())
+ LOG.info("send mail ret:%s", ret)
+ server.close()
+
+
+if __name__ == "__main__":
+ m = Mail()
+ m.attach_addr(["vstf_server@vstf.com"], m.FROM)
+ m.attach_addr(["wangli11@huawei.com"], m.TO)
+ context = """
+ <!DOCTYPE html>
+ <html>
+ <head>
+ <title>vstf</title>
+ </head>
+
+ <body>
+ hello vstf
+ </body>
+
+ </html>
+ """
+ m.attach_text(context, m.HTML)
+ m.attach_title("Email from xeson Check")
+ m.send()
diff --git a/vstf/vstf/controller/reporters/mail/sendmail.py b/vstf/vstf/controller/reporters/mail/sendmail.py
new file mode 100755
index 00000000..ecc6fe93
--- /dev/null
+++ b/vstf/vstf/controller/reporters/mail/sendmail.py
@@ -0,0 +1,64 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-09-07
+# see license for license details
+__version__ = ''' '''
+
+import logging
+from vstf.controller.reporters.mail.mail import Mail
+from vstf.controller.settings.mail_settings import MailSettings
+LOG = logging.getLogger(__name__)
+
+
+class SendMail(object):
+ def __init__(self, mail_info):
+ self._mail_info = mail_info
+
+ def send(self):
+ send = Mail(self._mail_info['server']['host'],
+ self._mail_info['server']['username'],
+ self._mail_info['server']['password']
+ )
+ send.attach_addr(self._mail_info['body']['from'], send.FROM)
+ send.attach_addr(self._mail_info['body']['to'], send.TO)
+ send.attach_addr(self._mail_info['body']['cc'], send.CC)
+ send.attach_addr(self._mail_info['body']['bcc'], send.BCC)
+
+ LOG.info(self._mail_info['body'])
+
+ if 'attach' in self._mail_info['body']:
+ send.attach_files(self._mail_info['body']['attach'])
+ send.attach_text(self._mail_info['body']['content'], self._mail_info['body']['subtype'])
+ send.attach_title(self._mail_info['body']['subject'])
+ send.send()
+
+
+def unit_test():
+ mail_settings = MailSettings()
+ mail = SendMail(mail_settings.settings)
+
+ attach_list = ['1', '2']
+ mail_settings.set_attach(attach_list)
+
+ context = """
+ <!DOCTYPE html>
+ <html>
+ <head>
+ <title>vstf</title>
+ </head>
+
+ <body>
+ hello vstf
+ </body>
+
+ </html>
+ """
+ mail_settings.set_subtype('html')
+ mail_settings.set_content(context)
+
+ mail.send()
+
+
+if __name__ == '__main__':
+ unit_test()
diff --git a/vstf/vstf/controller/reporters/report/__init__.py b/vstf/vstf/controller/reporters/report/__init__.py
new file mode 100755
index 00000000..89dcd4e2
--- /dev/null
+++ b/vstf/vstf/controller/reporters/report/__init__.py
@@ -0,0 +1,14 @@
+# Copyright Huawei Technologies Co., Ltd. 1998-2015.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the License); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an AS IS BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
diff --git a/vstf/vstf/controller/reporters/report/data_factory.py b/vstf/vstf/controller/reporters/report/data_factory.py
new file mode 100755
index 00000000..39c534b6
--- /dev/null
+++ b/vstf/vstf/controller/reporters/report/data_factory.py
@@ -0,0 +1,494 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-07-29
+# see license for license details
+__version__ = ''' '''
+
+from vstf.controller.database.dbinterface import DbManage
+
+
+class DataProvider(object):
+ def __init__(self, taskid, dbase):
+ self._dbase = dbase
+ self._taskid = taskid
+
+
+class CommonData(DataProvider):
+ def get_components(self):
+ result = []
+ query = self._dbase.query_exten_info(self._taskid)
+ print "CommonData", query
+ for item in query:
+ if item[2]:
+ context = "%s:%s(%s)" % (item[0], item[1], item[2])
+ else:
+ context = "%s:%s" % (item[0], item[1])
+ result.append(context)
+ return result
+
+ def get_software(self):
+ result = [
+ " Host OS: ubuntu 14.04.2",
+ " Guest OS: ubuntu 12.04.4"
+ ]
+ return result
+
+ def get_hardware(self):
+ result = [
+ " Server: Dell R920",
+ " CPU: E7-8893/2P/3.4GHz/10-Cores/37.5M-L3C",
+ " MEM: 128G",
+ " NIC: Intel 82599"
+ ]
+ return result
+
+ def get_taskname(self):
+ return self._dbase.query_taskname(self._taskid)
+
+ def get_gitinfo_tabledata(self):
+ result = []
+ return result
+
+ def get_profileparameters_tabledData(self):
+ result = [
+ ]
+ return result
+
+ def get_testingoptions_tabledata(self):
+ result = [
+ ]
+ return result
+
+ def get_systeminfo_tabledata(self):
+ result = [
+ ]
+ return result
+
+ def get_systeminfo(self):
+ systable = [
+ ['host', 'Server', 'CPU', 'MEM', 'NIC', 'OS'],
+ ]
+ query = self._dbase.query_task_host_list(self._taskid)
+ query = map(lambda x: list(x), query)
+ # rows = len(query)
+ # cols = len(zip(*query))
+ # for i in range(rows):
+ # for j in range(cols):
+ # query[i][j] = query[i][j].replace('\",','\"\n')
+ systable += query
+ systable = map(lambda x: list(x), zip(*systable))
+ return systable
+
+ def get_introduct_tabledata(self):
+ result = [
+ ["Type", "Case", "Name", "Direction", "Configure"]
+ ]
+ query = self._dbase.query_caseinfo()
+ result += map(lambda x: list(x), query)
+ return result
+
+ def get_scenariolist(self):
+ query = self._dbase.query_scenariolist(self._taskid)
+ result = map(lambda x: list(x), zip(*query))
+ if result:
+ return result[0]
+ else:
+ return result
+
+ def is_scenario_start(self):
+ scenarioList = self.get_scenariolist()
+ print "scenarioList: ", scenarioList
+ if scenarioList:
+ return True
+ return False
+
+ def get_contact(self):
+ result = [
+ "Name: xxx",
+ "ID: xxxxxxxx",
+ "Email: xxxx@xxx.com"
+ ]
+ return result
+
+ def get_casename(self, case):
+ return self._dbase.query_casename(case)
+
+ def get_casefigure(self, case, tools):
+ return self._dbase.query_casefigure(case, tools)
+
+
+class ScenarioData(DataProvider):
+ def __init__(self, taskid, dbase, scenario):
+ print "ScenarioData in"
+ DataProvider.__init__(self, taskid, dbase)
+ self._scenario = scenario
+
+ def get_covertitle(self):
+ result = [
+ "",
+ "",
+ "Elastic Virtual Switching Performance "
+ "Test Report",
+ "Scenario %s" % (self._scenario)
+ ]
+ return result
+
+ def get_test(self):
+ result = [
+ "Scenario: %s" % (self._scenario),
+ "Configuration: without VLAN",
+ ]
+ return result
+
+ def get_test_tools(self, case):
+ query = self._dbase.query_casetools(self._taskid, case)
+ result = map(lambda x: list(x), query)
+ if result:
+ return result[0][0]
+ else:
+ return result
+
+ def get_caselist(self):
+ query = self._dbase.query_caselist(self._taskid, self._scenario)
+ result = map(lambda x: list(x), zip(*query))
+ if result:
+ return result[0]
+ else:
+ return result
+
+ def is_provider_start(self, case, provider):
+ count = self._dbase.query_case_provider_count(self._taskid, case, provider)
+ if count:
+ return True
+ return False
+
+ def is_type_provider_start(self, case, provider, ptype):
+ count = self._dbase.query_case_type_provider_count(self._taskid, case, provider, ptype)
+ if count:
+ return True
+ return False
+
+ def is_type_start(self, case, ptype):
+ count = self._dbase.query_case_type_count(self._taskid, case, ptype)
+ if count:
+ return True
+ return False
+
+ def is_throughput_start(self, case):
+ test_type = "throughput"
+ return self.is_type_start(case, test_type)
+
+ def is_frameloss_start(self, case):
+ test_type = "frameloss"
+ return self.is_type_start(case, test_type)
+
+ def is_latency_start(self, case):
+ test_type = "latency"
+ return self.is_type_start(case, test_type)
+
+ def get_summary_throughput_data(self, case, provider):
+ test_type = "throughput"
+ return self.get_summary_tabledata(case, provider, test_type)
+
+ def get_summary_frameLoss_data(self, case, provider):
+ test_type = "frameloss"
+ return self.get_summary_tabledata(case, provider, test_type)
+
+ def get_summary_tabledata(self, case, provider, test_type, table_type='pdf'):
+ table_head = []
+ table_body = []
+ type_title = {
+ "frameloss": "Load",
+ "throughput": "Load"
+ }
+ tools = self.get_test_tools(case)
+ if "spirent" in tools:
+ table_body = self._dbase.query_summary_table(self._taskid, case, provider, test_type)
+ if 'pdf' == table_type:
+ table_head = [
+ ["FrameSize (byte)", test_type, "", "", "", "Latency(uSec)", "", ""],
+ ["", " Mpps ", " " + type_title[test_type] + " (%) ", "CPU Used (%)", " Mpps/Ghz ",
+ " Min ", " Max ", " Avg "]
+ ]
+ else:
+ table_head = [
+ ["FrameSize (byte)", " Mpps ", " " + type_title[test_type] + " (%) ", "CPU Used (%)",
+ " Mpps/Ghz ", "MinLatency(uSec)", "MaxLatency(uSec)", "AvgLatency(uSec)"],
+ ]
+ else:
+ table_body = self._dbase.query_summary_simpletable(self._taskid, case, provider, test_type)
+ if 'pdf' == table_type:
+ table_head = [
+ ["FrameSize (byte)", test_type, "", "", "", "Latency(uSec)"],
+ ["", " Mpps ", " " + type_title[test_type] + " (%)", "CPU Used (%)", " Mpps/Ghz ",
+ " Avg "]
+ ]
+ else:
+ table_head = [
+ ["FrameSize (byte)", " Mpps ", " " + type_title[test_type] + " (%) ", "CPU Used (%)",
+ " Mpps/Ghz ", "AvgLatency(uSec)"],
+ ]
+ return table_head + table_body
+
+ def get_tabledata(self, case, test_type, item):
+ type_dict = {
+ "FrameSize": "FrameSize (byte)",
+ "fastlink": "fastlink",
+ "l2switch": "l2switch",
+ "rdp": "kernel rdp",
+ "line": "line speed"
+ }
+ item_dict = {
+ "Percent": " ",
+ "Mpps": " ",
+ "Avg": " ",
+ }
+ provider_list = ["fastlink", "rdp", "l2switch"]
+ table = []
+ line_speed = 20.0 if case in ["Tn-2v", "Tn-2"] else 10.0
+
+ for provider in provider_list:
+ if self.is_provider_start(case, provider):
+ if item == 'Percent':
+ query = self._dbase.query_load(self._taskid, case, provider, test_type)
+ elif item == 'Mpps':
+ query = self._dbase.query_bandwidth(self._taskid, case, provider, test_type)
+ else:
+ query = self._dbase.query_avglatency(self._taskid, case, provider, test_type)
+ query = map(lambda x: list(x), zip(*query))
+ if query:
+ table_head = [[type_dict["FrameSize"]] + map(lambda x: " %4d " % (x), query[0])]
+ if item == "Avg":
+ data = map(lambda x: item_dict[item] + "%.1f" % (x) + item_dict[item], query[1])
+ else:
+ data = map(lambda x: item_dict[item] + "%.2f" % (x) + item_dict[item], query[1])
+ if item == "Mpps":
+ line_table = map(lambda x: "%.2f" % (line_speed * 1000 / (8 * (x + 20))), query[0])
+ table.append([type_dict[provider]] + data)
+ if table:
+ if item == "Mpps":
+ table.append([type_dict["line"]] + line_table)
+ table = table_head + table
+ return table
+
+ def get_frameloss_tabledata(self, case, test_type):
+ item = "Percent"
+ table = self.get_tabledata(case, test_type, item)
+ return table
+
+ def get_frameloss_chartdata(self, case, test_type):
+ result = self.get_frameloss_tabledata(case, test_type)
+ result = map(list, zip(*result))
+ return result
+
+ def get_framerate_tabledata(self, case, test_type):
+ item = "Mpps"
+ table = self.get_tabledata(case, test_type, item)
+ return table
+
+ def get_framerate_chartdata(self, case, test_type):
+ result = self.get_framerate_tabledata(case, test_type)
+ result = map(list, zip(*result))
+ return result
+
+ def get_latency_tabledata(self, case):
+ test_type = "latency"
+ item = "Avg"
+ table = self.get_tabledata(case, test_type, item)
+ return table
+
+ def get_latency_chartdata(self, case):
+ result = self.get_latency_tabledata(case)
+ result = map(list, zip(*result))
+ return result
+
+ def get_latency_bardata(self, case):
+ table_data = self.get_latency_tabledata(case)
+ result = []
+ if table_data:
+ ytitle = "Average Latency (uSec)"
+ category_names = map(lambda x: "FS:%4d" % int(float(x)) + "LOAD:50", table_data[0][1:])
+ bar_ = map(lambda x: x[0], table_data[1:])
+ data = map(lambda x: x[1:], table_data[1:])
+ result = [ytitle, category_names, bar_, data]
+ return result
+
+ def get_bardata(self, case, provider, test_type):
+ if test_type == "latency":
+ query = self._dbase.query_avglatency(self._taskid, case, provider, test_type)
+ item = "Avg"
+ else:
+ query = self._dbase.query_load(self._taskid, case, provider, test_type)
+ item = "Percent"
+
+ title_dict = {
+ "Avg": "Latency (uSec)",
+ "Percent": test_type + " (%)"
+ }
+ name_dict = {
+ "Avg": " LOAD:50",
+ "Percent": " OF:100 "
+ }
+ color_dict = {
+ "Avg": "latency",
+ "Percent": "loss"
+ }
+ ytitle = title_dict[item]
+ query = map(lambda x: list(x), zip(*query))
+ result = []
+ if query:
+ category_names = map(lambda x: "FS:%4d" % x + name_dict[item], query[0])
+ data = query[1:]
+ bar_ = [color_dict[item]]
+ result = [ytitle, category_names, bar_, data]
+ return result
+
+
+class TaskData(object):
+ def __init__(self, taskid, dbase):
+ self.__common = CommonData(taskid, dbase)
+ scenario_list = self.__common.get_scenariolist()
+ scenario_dic = {}
+ for scenario in scenario_list:
+ scenario_dic[scenario] = ScenarioData(taskid, dbase, scenario)
+ self.__dict__.update(scenario_dic)
+
+ @property
+ def common(self):
+ return self.__common
+
+
+class HistoryData(DataProvider):
+ def get_data(self, task_list, case, provider, ttype, item):
+ """
+ @provider in ["fastlink", "rdp", "l2switch"]
+ @ttype in ["throughput", "frameloss", "latency"]
+ @item in ["avg", "ratep", "load"]
+ """
+ table = []
+ table_head = []
+ datas = []
+ sizes = []
+ for taskid in task_list:
+ if item == 'ratep':
+ query = self._dbase.query_bandwidth(taskid, case, provider, ttype)
+ else:
+ query = self._dbase.query_avglatency(taskid, case, provider, ttype)
+
+ if query:
+ data = {}
+ for size, value in query:
+ data[size] = value
+ sizes.extend(data.keys())
+ sizes = {}.fromkeys(sizes).keys()
+ sizes.sort()
+ datas.append({taskid: data})
+
+ result = []
+ for data in datas:
+ print data
+ taskid = data.keys()[0]
+ data_th = self._dbase.query_taskdate(taskid)
+ testdata = data[taskid]
+ item = [data_th]
+ for size in sizes:
+ item.append(str(testdata.get(size, '')))
+ result.append(item)
+
+ if result:
+ head_th = "FrameSize (byte)"
+ table_head = [[head_th] + map(lambda x: " %4d " % (x), sizes)]
+ table = table_head + result
+
+ return table
+
+ def get_tasklist(self, count=5):
+ task_list = []
+ query = self._dbase.query_tasklist()
+ if query:
+ for item in query:
+ if item.TaskID <= self._taskid:
+ task_list.append(item.TaskID)
+
+ task_list = task_list[-count:]
+ return task_list
+
+ def get_history_info(self, case):
+ providers = ["fastlink", "rdp", "l2switch"]
+ provider_dict = {"fastlink": "Fast Link ", "l2switch": "L2Switch ", "rdp": "Kernel RDP "}
+ ttype_dict = {
+ "throughput": "Throughput Testing ",
+ "frameloss": "Frame Loss Testing ",
+ "latency": "Latency Testing "
+ }
+
+ items_dict = {
+ "ratep": "RX Frame Rate(Mpps) ",
+ "avg": "Average Latency (uSec) "
+ }
+
+ task_list = self.get_tasklist()
+ result = []
+
+ ttypes = ["throughput", "frameloss", "latency"]
+ for ttype in ttypes:
+ content = {}
+ if ttype == "latency":
+ item = "avg"
+ else:
+ item = "ratep"
+
+ for provider in providers:
+ table_data = self.get_data(task_list, case, provider, ttype, item)
+ if table_data:
+ data = {
+ "title": provider_dict[provider] + items_dict[item],
+ "data": table_data
+ }
+ content["title"] = ttype_dict[ttype]
+ content.setdefault("data", [])
+ content["data"].append(data)
+ if content:
+ result.append(content)
+ print "xxxxxxxxxxxxxx"
+ print result
+ print "xxxxxxxxxxxxxx"
+ return result
+
+
+def unit_test():
+ dbase = DbManage()
+ taskid = dbase.get_last_taskid()
+ hdata = HistoryData(taskid, dbase)
+ task_list = hdata.get_tasklist()
+
+ cdata = CommonData(taskid, dbase)
+ scenario_list = cdata.get_scenariolist()
+ print scenario_list
+
+ scenario = "Tn"
+ sdata = ScenarioData(taskid, dbase, scenario)
+
+ case_list = sdata.get_caselist()
+ print case_list
+
+ case = "Tn-1"
+
+ providers = ["fastlink", "rdp", "l2switch"]
+ ttypes = ["throughput", "frameloss"]
+ items = ["ratep", "load"]
+
+ for provider in providers:
+ for ttype in ttypes:
+ for item in items:
+ print provider
+ print ttype
+ print item
+ print hdata.get_data(task_list, case, provider, ttype, item)
+
+ hdata.get_history_info(case)
+
+
+if __name__ == '__main__':
+ unit_test()
diff --git a/vstf/vstf/controller/reporters/report/html/__init__.py b/vstf/vstf/controller/reporters/report/html/__init__.py
new file mode 100755
index 00000000..89dcd4e2
--- /dev/null
+++ b/vstf/vstf/controller/reporters/report/html/__init__.py
@@ -0,0 +1,14 @@
+# Copyright Huawei Technologies Co., Ltd. 1998-2015.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the License); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an AS IS BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
diff --git a/vstf/vstf/controller/reporters/report/html/html_base.py b/vstf/vstf/controller/reporters/report/html/html_base.py
new file mode 100755
index 00000000..270ef394
--- /dev/null
+++ b/vstf/vstf/controller/reporters/report/html/html_base.py
@@ -0,0 +1,42 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-09.25
+# see license for license details
+__version__ = ''' '''
+
+import os
+from vstf.common.pyhtml import *
+
+
+class HtmlBase(object):
+ def __init__(self, provider, ofile='text.html'):
+ self._page = PyHtml('HtmlBase Text')
+ self._ofile = ofile
+ self._provider = provider
+ self._chapter = 1
+
+ def save(self):
+ if self._ofile:
+ os.system('rm -rf %s' % self._ofile)
+ self._page.output(self._ofile)
+
+ def as_string(self):
+ return self._page.as_string()
+
+ def add_table(self, data):
+ self._page.add_table(data)
+
+ def add_style(self):
+ style = self._provider.get_style()
+ self._page.add_style(style)
+
+ def create(self, is_save=True):
+ self.add_style()
+ self.create_story()
+ if is_save:
+ self.save()
+ return self.as_string()
+
+ def create_story(self):
+ raise NotImplementedError("abstract HtmlBase")
diff --git a/vstf/vstf/controller/reporters/report/html/html_text.py b/vstf/vstf/controller/reporters/report/html/html_text.py
new file mode 100755
index 00000000..86505b8f
--- /dev/null
+++ b/vstf/vstf/controller/reporters/report/html/html_text.py
@@ -0,0 +1,68 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-09-24
+# see license for license details
+__version__ = ''' '''
+
+import logging
+
+import vstf.common.constants as cst
+from vstf.controller.reporters.report.html.html_base import *
+
+LOG = logging.getLogger(__name__)
+
+
+class HtmlCreator(HtmlBase):
+ def add_subject(self):
+ title = self._provider.get_subject()
+ self._page << H1(title)
+
+ def add_ovs(self):
+ title = "%s %s" % (self._chapter, self._provider.get_ovs_title())
+ self._page << H2(title)
+ data = self._provider.get_ovs_table()
+ self.add_table(data)
+ self._chapter += 1
+
+ def add_result(self):
+ title = "%s %s" % (self._chapter, self._provider.get_result_title())
+ self._page << H2(title)
+
+ section = 1
+ for ttype in cst.TTYPES:
+ data = self._provider.get_result_table(ttype)
+ if data:
+ title = "%s.%s %s" % (self._chapter, section, ttype.title())
+ self._page << H3(title)
+ self.add_table(data)
+ section += 1
+ self._chapter += 1
+
+ def create_story(self):
+ self.add_subject()
+ self.add_ovs()
+ self.add_result()
+
+
+def unit_test():
+ from vstf.common.log import setup_logging
+ setup_logging(level=logging.DEBUG, log_file="/var/log/html-test.log", clevel=logging.INFO)
+
+ from vstf.controller.settings.html_settings import HtmlSettings
+ from vstf.controller.settings.data_settings import DataSettings
+
+ html_settings = HtmlSettings()
+ LOG.info(html_settings.settings)
+ data_settings = DataSettings()
+ LOG.info(data_settings.settings)
+
+ from vstf.controller.reporters.report.provider.html_provider import HtmlProvider
+ provider = HtmlProvider(data_settings.settings, html_settings.settings)
+ html = HtmlCreator(provider)
+
+ result = html.create()
+ print result
+
+
+if __name__ == '__main__':
+ unit_test()
diff --git a/vstf/vstf/controller/reporters/report/html/htmlcreator.py b/vstf/vstf/controller/reporters/report/html/htmlcreator.py
new file mode 100755
index 00000000..e6c75caf
--- /dev/null
+++ b/vstf/vstf/controller/reporters/report/html/htmlcreator.py
@@ -0,0 +1,117 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-08-04
+# see license for license details
+__version__ = ''' '''
+
+import logging
+
+from vstf.controller.reporters.report.data_factory import TaskData
+from vstf.controller.database.dbinterface import DbManage
+from vstf.controller.reporters.report.html.html_base import *
+
+LOG = logging.getLogger(__name__)
+
+
+class HtmlvSwitchCreator(HtmlBase):
+ def __init__(self, task_data, provider, ofile='creator.html'):
+ HtmlBase.__init__(self, provider, ofile)
+ self._task = task_data
+ self._table_type = 'html'
+
+ def create_story(self):
+ self.add_subject()
+ self.add_gitinfo()
+ self.add_envinfo()
+ self.add_scenarios()
+
+ def add_subject(self):
+ job_name = "JOB_NAME: " + self._task.common.get_taskname()
+ self._page << H2(job_name)
+
+ def add_gitinfo(self):
+ self._page << H2("Trigger and Repository Info")
+
+ git_table = self._task.common.get_gitinfo_tabledata()
+ if git_table:
+ self.add_table(git_table)
+
+ def add_envinfo(self):
+ self._page << H2("System Environment Information")
+ env_table = self._task.common.get_systeminfo()
+ LOG.info(env_table)
+ if env_table:
+ self.add_table(env_table)
+
+ def add_scenarios(self):
+ scenario_list = self._task.common.get_scenariolist()
+ self._page << H2("Scenario List: " + ', '.join(scenario_list))
+ for scenario in scenario_list:
+ self._page << H2("Scenario: " + scenario)
+ data = getattr(self._task, scenario)
+ self.add_scenario(data)
+
+ def add_scenario(self, scenario_data):
+ case_list = scenario_data.get_caselist()
+ for case in case_list:
+ self.add_case(scenario_data, case)
+
+ def add_case(self, scenario_data, case):
+ case_name = self._task.common.get_casename(case)
+ title = "Case : %s (%s)" % (case, case_name)
+ self._page << H2(title)
+
+ provider_list = ["fastlink", "rdp", "l2switch"]
+ provider_dict = {"fastlink": "Fast Link", "l2switch": "L2Switch", "rdp": "Kernel RDP"}
+
+ for provider in provider_list:
+ if scenario_data.is_provider_start(case, provider):
+ title = " %s (%s_%s)" % (provider_dict[provider], case_name, provider)
+ self._page << H3(title)
+ test_types = ["throughput", "frameloss"]
+ for test_type in test_types:
+ if scenario_data.is_type_provider_start(case, provider, test_type):
+ self.add_casedata(scenario_data, case, provider, test_type)
+
+ if scenario_data.is_latency_start(case):
+ self.add_latency_result(scenario_data, case)
+
+ def add_casedata(self, scenario_data, case, provider, test_type):
+ table_content = scenario_data.get_summary_tabledata(case, provider, test_type, self._table_type)
+ if table_content:
+ title = "Test type:%s" % (test_type)
+ self._page << H4(title)
+ self.add_table(table_content)
+
+ def add_latency_result(self, scenario_data, case):
+ title = "Average Latency Summary"
+ table_content = scenario_data.get_latency_tabledata(case)
+ if table_content:
+ self._page << H2(title)
+ self.add_table(table_content)
+
+
+def unit_test():
+ from vstf.common.log import setup_logging
+ setup_logging(level=logging.DEBUG, log_file="/var/log/html-creator.log", clevel=logging.INFO)
+
+ dbase = DbManage()
+ taskid = dbase.get_last_taskid()
+ task_data = TaskData(taskid, dbase)
+
+ from vstf.controller.settings.html_settings import HtmlSettings
+ from vstf.controller.reporters.report.provider.html_provider import StyleProvider
+
+ html_settings = HtmlSettings()
+ LOG.info(html_settings.settings)
+
+ provider = StyleProvider(html_settings.settings)
+ html = HtmlvSwitchCreator(task_data, provider)
+
+ result = html.create(True)
+ print result
+
+
+if __name__ == '__main__':
+ unit_test()
diff --git a/vstf/vstf/controller/reporters/report/pdf/__init__.py b/vstf/vstf/controller/reporters/report/pdf/__init__.py
new file mode 100755
index 00000000..89dcd4e2
--- /dev/null
+++ b/vstf/vstf/controller/reporters/report/pdf/__init__.py
@@ -0,0 +1,14 @@
+# Copyright Huawei Technologies Co., Ltd. 1998-2015.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the License); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an AS IS BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
diff --git a/vstf/vstf/controller/reporters/report/pdf/element.py b/vstf/vstf/controller/reporters/report/pdf/element.py
new file mode 100755
index 00000000..2528f2c5
--- /dev/null
+++ b/vstf/vstf/controller/reporters/report/pdf/element.py
@@ -0,0 +1,781 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-05-04
+# see license for license details
+__version__ = ''' '''
+__doc__ = """
+this module contains the base elements for the pdf report:
+eImage is used to draw pictures on the pdf document
+eDataTable is used to draw tables on the pdf document
+eGraphicsTable is used to draw plots on the pdf document
+eParagraph is used to draw text on the pdf document
+"""
+from reportlab.platypus import Image, Table
+from reportlab.graphics.shapes import Drawing
+from reportlab.graphics.charts.lineplots import LinePlot
+from reportlab.graphics.charts.linecharts import HorizontalLineChart
+from reportlab.platypus.paragraph import Paragraph
+from reportlab.graphics.widgets.markers import makeMarker
+from reportlab.graphics.charts.legends import Legend
+from reportlab.graphics.charts.textlabels import Label
+from reportlab.graphics.charts.axes import XValueAxis, TickLabeller
+from reportlab.lib.utils import isSeq
+from reportlab.graphics.shapes import Group
+from reportlab.graphics.charts.barcharts import VerticalBarChart
+from vstf.controller.reporters.report.pdf.styles import *
+
+
+class eImage(Image):
+ """ an image(digital picture)which contains the function of auto zoom picture """
+
+ def __init__(self, filename, width=None, height=None, kind='direct', mask="auto", lazy=1, hAlign='CENTRE',
+ vAlign='BOTTOM'):
+ Image.__init__(self, filename, None, None, kind, mask, lazy)
+ print height, width
+ print self.drawHeight, self.drawWidth
+ if self.drawWidth * height > self.drawHeight * width:
+ self.drawHeight = width * self.drawHeight / self.drawWidth
+ self.drawWidth = width
+ else:
+ self.drawWidth = height * self.drawWidth / self.drawHeight
+ self.drawHeight = height
+ self.hAlign = hAlign
+ self.vAlign = vAlign
+ print self.drawHeight, self.drawWidth
+
+
+class eTable(object):
+ """ an abstract table class, which is contains the base functions to create table """
+
+ def __init__(self, data, style=TableStyle(name="default")):
+ self._tablestyle = style
+ self._table = []
+ self._spin = False
+ self._colWidths = None
+ self._data = self.analysisData(data)
+ if self._data:
+ self.create()
+
+ def analysisData(self, data):
+ raise NotImplementedError("abstract eTable")
+
+ def create(self):
+ self._table = Table(self._data, style=self._style, splitByRow=1)
+ self._table.hAlign = self._tablestyle.table_hAlign
+ self._table.vAlign = self._tablestyle.table_vAlign
+ self._table.colWidths = self._tablestyle.table_colWidths
+ if self._spin or self._colWidths:
+ self._table.colWidths = self._colWidths
+ self._table.rowHeights = self._tablestyle.table_rowHeights
+
+ @property
+ def table(self):
+ return self._table
+
+
+class eCommonTable(eTable):
+ def analysisData(self, data):
+ self._style = [
+ ('ALIGN', (0, 0), (-1, -1), 'CENTER'),
+ ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
+ ('GRID', (0, 0), (-1, -1), 0.5, colors.grey),
+ ('BOX', (0, 0), (-1, -1), 1.2, colors.black)
+ ]
+ return data
+
+
+class eConfigTable(eTable):
+ def analysisData(self, data):
+ self._style = [
+ ('ALIGN', (0, 0), (-1, -1), 'CENTER'),
+ ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
+ ('GRID', (0, 0), (-1, -1), 0.5, colors.grey),
+ ('BOX', (0, 0), (-1, -1), 1, colors.black),
+ ('SPAN', (2, 0), (3, 0)),
+ ('SPAN', (2, 1), (3, 1)),
+ ('SPAN', (2, 8), (3, 8)),
+ ('SPAN', (2, 9), (3, 9)),
+ ('SPAN', (2, 10), (3, 10)),
+ ('SPAN', (0, 0), (0, 7)),
+ ('SPAN', (0, 8), (0, 10)),
+ ('SPAN', (0, 11), (0, 19)),
+ ('SPAN', (1, 2), (1, 6)),
+ ('SPAN', (1, 12), (1, 13)),
+ ('SPAN', (1, 14), (1, 16)),
+ ('SPAN', (1, 17), (1, 19)),
+ ('SPAN', (2, 3), (2, 6))
+ ]
+ return data
+
+
+class eSummaryTable(eTable):
+ def analysisData(self, data):
+ self._style = [
+ ('ALIGN', (0, 0), (-1, -1), 'CENTER'),
+ ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
+ ('GRID', (0, 0), (-1, -1), 0.5, colors.grey),
+ ('BOX', (0, 0), (-1, -1), 1, colors.black),
+ ('SPAN', (0, 0), (0, 1)),
+ ('SPAN', (1, 0), (4, 0)),
+ ('SPAN', (5, 0), (-1, 0))
+ ]
+ return data
+
+
+class eGitInfoTable(eTable):
+ def analysisData(self, data):
+ self._style = [
+ ('ALIGN', (0, 0), (-1, -1), 'CENTER'),
+ ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
+ ('GRID', (0, 0), (-1, -1), 0.5, colors.grey),
+ ('BOX', (0, 0), (-1, -1), 1, colors.black),
+ ('SPAN', (0, 0), (0, 2)),
+ ('SPAN', (0, 3), (0, 5)),
+ ('SPAN', (0, 6), (0, 8))
+ ]
+ return data
+
+
+class eScenarioTable(eTable):
+ def analysisData(self, data):
+ self._style = [
+ ('ALIGN', (0, 0), (-1, -1), 'CENTER'),
+ ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
+ ('GRID', (0, 0), (-1, -1), 0.5, colors.grey),
+ ('BOX', (0, 0), (-1, -1), 1, colors.black),
+ ('ALIGN', (2, 1), (-1, -1), 'LEFT'),
+ ('SPAN', (0, 1), (0, 6)),
+ ('SPAN', (0, 7), (0, 12)),
+ ('SPAN', (0, 13), (0, 16)),
+ ('SPAN', (0, 17), (0, 20))
+ ]
+ return data
+
+
+class eOptionsTable(eTable):
+ def analysisData(self, data):
+ self._style = [
+ ('ALIGN', (0, 0), (-1, -1), 'CENTER'),
+ ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
+ ('GRID', (0, 0), (-1, -1), 0.5, colors.grey),
+ ('BOX', (0, 0), (-1, -1), 1, colors.black),
+ ('SPAN', (2, 0), (4, 0)),
+ ('SPAN', (2, 1), (4, 1)),
+ ('SPAN', (0, 0), (0, -1)),
+ ('SPAN', (1, 2), (1, 16)),
+ ('SPAN', (1, 17), (1, 19)),
+ ('SPAN', (1, 20), (1, 22)),
+ ('SPAN', (1, 23), (1, 24)),
+ ('SPAN', (2, 2), (2, 4)),
+ ('SPAN', (2, 5), (2, 12)),
+ ('SPAN', (2, 13), (2, 16)),
+ ('SPAN', (2, 17), (2, 19)),
+ ('SPAN', (2, 20), (2, 22)),
+ ('SPAN', (2, 23), (2, 24))
+ ]
+ return data
+
+
+class eProfileTable(eTable):
+ def analysisData(self, data):
+ self._style = [
+ ('ALIGN', (0, 0), (-1, -1), 'CENTER'),
+ ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
+ ('GRID', (0, 0), (-1, -1), 0.5, colors.grey),
+ ('BOX', (0, 0), (-1, -1), 1, colors.black),
+ ('SPAN', (0, 1), (0, -1)),
+ ('SPAN', (1, 0), (2, 0)),
+ ]
+ return data
+
+
+class eDataTable(eTable):
+ def analysisData(self, data):
+ result = data
+ self._style = [
+ ('ALIGN', (0, 0), (-1, -1), 'CENTER'),
+ ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
+ ('LEADING', (0, 0), (-1, -1), 18),
+ ('GRID', (0, 0), (-1, -1), 0.5, colors.grey),
+ ('BOX', (0, 0), (-1, -1), 1, colors.black),
+ ('LINEBEFORE', (1, 0), (1, -1), 0.8, colors.black),
+ # ('LINEBEFORE', (3, 0), (3, -1), 1, colors.black),
+ # ('LINEBEFORE', (5, 0), (5, -1), 1, colors.black),
+ ('LINEBELOW', (0, 0), (-1, 0), 0.8, colors.black),
+ # ('SPAN', (0, 0), (0, 1)),
+ # ('SPAN', (1, 0), (2, 0)),
+ # ('SPAN', (3, 0), (4, 0))
+ ]
+ if self._spin is True:
+ print "start spin"
+ result = map(list, zip(*result))
+ style = []
+ for value in self._style:
+ value = list(value)
+ value[1] = (value[1][1], value[1][0])
+ value[2] = (value[2][1], value[2][0])
+ if value[0] == 'LINEBELOW':
+ value[0] = 'LINEAFTER'
+ elif value[0] == 'LINEBEFORE':
+ value[0] = 'LINEABOVE'
+ value = tuple(value)
+ style.append(value)
+ self._style = style
+ return result
+
+
+class eGraphicsTable(eTable):
+ def analysisData(self, data):
+ self._style = [
+ ('ALIGN', (0, 0), (-1, -1), 'CENTER'),
+ ('VALIGN', (0, 0), (-1, -1), 'MIDDLE')
+ ]
+ return data
+
+
+class noScaleXValueAxis(XValueAxis):
+ def __init__(self):
+ XValueAxis.__init__(self)
+
+ def makeTickLabels(self):
+ g = Group()
+ if not self.visibleLabels: return g
+
+ f = self._labelTextFormat # perhaps someone already set it
+ if f is None:
+ f = self.labelTextFormat or (self._allIntTicks() and '%.0f' or str)
+ elif f is str and self._allIntTicks():
+ f = '%.0f'
+ elif hasattr(f, 'calcPlaces'):
+ f.calcPlaces(self._tickValues)
+ post = self.labelTextPostFormat
+ scl = self.labelTextScale
+ pos = [self._x, self._y]
+ d = self._dataIndex
+ pos[1 - d] = self._labelAxisPos()
+ labels = self.labels
+ if self.skipEndL != 'none':
+ if self.isXAxis:
+ sk = self._x
+ else:
+ sk = self._y
+ if self.skipEndL == 'start':
+ sk = [sk]
+ else:
+ sk = [sk, sk + self._length]
+ if self.skipEndL == 'end':
+ del sk[0]
+ else:
+ sk = []
+
+ nticks = len(self._tickValues)
+ nticks1 = nticks - 1
+ for i, tick in enumerate(self._tickValues):
+ label = i - nticks
+ if label in labels:
+ label = labels[label]
+ else:
+ label = labels[i]
+ if f and label.visible:
+ v = self.scale(i)
+ if sk:
+ for skv in sk:
+ if abs(skv - v) < 1e-6:
+ v = None
+ break
+ if v is not None:
+ if scl is not None:
+ t = tick * scl
+ else:
+ t = tick
+ if isinstance(f, str):
+ txt = f % t
+ elif isSeq(f):
+ # it's a list, use as many items as we get
+ if i < len(f):
+ txt = f[i]
+ else:
+ txt = ''
+ elif hasattr(f, '__call__'):
+ if isinstance(f, TickLabeller):
+ txt = f(self, t)
+ else:
+ txt = f(t)
+ else:
+ raise ValueError('Invalid labelTextFormat %s' % f)
+ if post: txt = post % txt
+ pos[d] = v
+ label.setOrigin(*pos)
+ label.setText(txt)
+
+ # special property to ensure a label doesn't project beyond the bounds of an x-axis
+ if self.keepTickLabelsInside:
+ if isinstance(self, XValueAxis): # not done yet for y axes
+ a_x = self._x
+ if not i: # first one
+ x0, y0, x1, y1 = label.getBounds()
+ if x0 < a_x:
+ label = label.clone(dx=label.dx + a_x - x0)
+ if i == nticks1: # final one
+ a_x1 = a_x + self._length
+ x0, y0, x1, y1 = label.getBounds()
+ if x1 > a_x1:
+ label = label.clone(dx=label.dx - x1 + a_x1)
+ g.add(label)
+
+ return g
+
+ def ___calcScaleFactor(self):
+ """Calculate the axis' scale factor.
+ This should be called only *after* the axis' range is set.
+ Returns a number.
+ """
+ self._scaleFactor = self._length / (len(self._tickValues) + 1)
+ return self._scaleFactor
+
+ def scale(self, value):
+ """Converts a numeric value to a plotarea position.
+ The chart first configures the axis, then asks it to
+ """
+ assert self._configured, "Axis cannot scale numbers before it is configured"
+ if value is None: value = 0
+ # this could be made more efficient by moving the definition of org and sf into the configuration
+ org = (self._x, self._y)[self._dataIndex]
+ sf = self._length / (len(self._tickValues) + 1)
+ if self.reverseDirection:
+ sf = -sf
+ org += self._length
+ return org + sf * (value + 1)
+
+
+class noScaleLinePlot(LinePlot):
+ def __init__(self):
+ LinePlot.__init__(self)
+ self.xValueAxis = noScaleXValueAxis()
+
+ def calcPositions(self):
+ """Works out where they go.
+
+ Sets an attribute _positions which is a list of
+ lists of (x, y) matching the data.
+ """
+ self._seriesCount = len(self.data)
+ self._rowLength = max(map(len, self.data))
+
+ self._positions = []
+ for rowNo in range(len(self.data)):
+ line = []
+ len_row = len(self.data[rowNo])
+ for colNo in range(len_row):
+ datum = self.data[rowNo][colNo] # x, y value
+ x = self.x + self.width / (len_row + 1) * (colNo + 1)
+ self.xValueAxis.labels[colNo].x = self.x + self.width / (len_row + 1) * (colNo + 1)
+ y = self.yValueAxis.scale(datum[1])
+ # print self.width, " ", x
+ line.append((x, y))
+ self._positions.append(line)
+
+
+# def _innerDrawLabel(self, rowNo, colNo, x, y):
+# return None
+class eLinePlot(object):
+ def __init__(self, data, style):
+ self._lpstyle = style
+ self._linename = data[0]
+ self._data = self.analysisData(data[1:])
+ if self._data:
+ self.create()
+
+ @property
+ def draw(self):
+ return self._draw
+
+ def analysisData(self, data):
+ columns = len(data)
+ # print data
+ data = map(list, zip(*data))
+ rows = len(data)
+
+ for i in range(rows):
+ for j in range(columns):
+ data[i][j] = float(data[i][j])
+ self._linename = self._linename[1:]
+ """
+ delcnt = 0
+ delrows = []
+ for i in range(columns):
+ delrows.append(0.0)
+ del_line = [self._linename[0]]
+ for i in range(rows):
+ for j in range(columns):
+ data[i][j] = float(data[i][j])
+ if data[i] == delrows:
+ delcnt += 1
+ del_line.append(self._linename[i])
+ for i in range(delcnt):
+ data.remove(delrows)
+ for name in del_line:
+ self._linename.remove(name)
+
+ rows = len(data)
+ """
+ # print rows
+ # print data
+ xvalueSteps = data[0]
+ xvalueMin = data[0][0]
+ xvalueMax = data[0][0]
+ yvalueMin = data[1][0]
+ yvalueMax = data[1][0]
+ yvalueSteps = []
+ result = []
+ for j in range(columns):
+ if xvalueMin > data[0][j]:
+ xvalueMin = data[0][j]
+ if xvalueMax < data[0][j]:
+ xvalueMax = data[0][j]
+
+ for i in range(rows - 1):
+ lst = []
+ for j in range(columns):
+ lst.append((data[0][j], data[i + 1][j]))
+ if yvalueMin > data[i + 1][j]:
+ yvalueMin = data[i + 1][j]
+ if yvalueMax < data[i + 1][j]:
+ yvalueMax = data[i + 1][j]
+ yvalueSteps.append(int(data[i + 1][j] * 2.5) / 2.5)
+ result.append(tuple(lst))
+ xvalueMin = int(xvalueMin) / 100 * 100
+ xvalueMax = int(xvalueMax) / 100 * 100 + 200
+ yvalueMin = int(yvalueMin) * 1.0 - 1
+ if yvalueMin < 0:
+ yvalueMin = 0.0
+ yvalueMax = int(yvalueMax) + 2.0
+ yvalueSteps.append(yvalueMin)
+ yvalueSteps.append(yvalueMax)
+ yvalueSteps = {}.fromkeys(yvalueSteps).keys()
+
+ self._xvalue = (xvalueMin, xvalueMax, xvalueSteps)
+ self._yvalue = (yvalueMin, yvalueMax, yvalueSteps)
+ print result
+ return result
+
+ def create(self):
+ lpw = self._lpstyle.width
+ lph = self._lpstyle.height
+ draw = Drawing(lpw, lph)
+ line_cnts = len(self._linename)
+ # lp = noScaleLinePlot()
+ lp = LinePlot()
+ lg_line = (line_cnts + 3) / 4
+ lp.x = self._lpstyle.left
+ lp.y = self._lpstyle.bottom
+
+ lp.height = lph - self._lpstyle.bottom * (lg_line + 1.5)
+ lp.width = lpw - lp.x * 2
+ lp.data = self._data
+ lp.joinedLines = 1
+ lp.strokeWidth = self._lpstyle.strokeWidth
+ line_cnts = len(self._data)
+ sytle_cnts = len(self._lpstyle.linestyle)
+ color_paris = []
+ for i in range(line_cnts):
+ styleIndex = i % sytle_cnts
+ lp.lines[i].strokeColor = self._lpstyle.linestyle[styleIndex][0]
+ lp.lines[i].symbol = makeMarker(self._lpstyle.linestyle[styleIndex][1])
+ lp.lines[i].strokeWidth = self._lpstyle.linestyle[styleIndex][2]
+ color_paris.append((self._lpstyle.linestyle[styleIndex][0], self._linename[i]))
+ # lp.lineLabels[i].strokeColor = self._lpstyle.linestyle[styleIndex][0]
+
+ lp.lineLabelFormat = self._lpstyle.format[0]
+
+ lp.strokeColor = self._lpstyle.strokeColor
+
+ lp.xValueAxis.valueMin, lp.xValueAxis.valueMax, lp.xValueAxis.valueSteps = self._xvalue
+ # valueMin, valueMax, xvalueSteps = self._xvalue
+ # lp.xValueAxis.valueStep = (lp.xValueAxis.valueMax - lp.xValueAxis.valueMin)/len(xvalueSteps)
+ # lp.xValueAxis.valueSteps = map(lambda x: str(x), xvalueSteps)
+
+ lp.yValueAxis.valueMin, lp.yValueAxis.valueMax, lp.yValueAxis.valueSteps = self._yvalue
+
+
+
+ # lp.xValueAxis.forceZero = 0
+ # lp.xValueAxis.avoidBoundFrac = 1
+ # lp.xValueAxis.tickDown = 3
+ # lp.xValueAxis.visibleGrid = 1
+ # lp.xValueAxis.categoryNames = '64 256 512 1400 1500 4096'.split(' ')
+
+ lp.xValueAxis.labelTextFormat = self._lpstyle.format[1]
+ lp.yValueAxis.labelTextFormat = self._lpstyle.format[2]
+
+ delsize = int(lp.xValueAxis.valueMax / 2000)
+ lp.xValueAxis.labels.fontSize = self._lpstyle.labelsfont
+ lp.xValueAxis.labels.angle = 25
+
+ lp.yValueAxis.labels.fontSize = self._lpstyle.labelsfont
+ lp.lineLabels.fontSize = self._lpstyle.labelsfont - delsize
+ draw.add(lp)
+
+ lg = Legend()
+ lg.colorNamePairs = color_paris
+ lg.fontName = 'Helvetica'
+ lg.fontSize = 7
+
+ lg.x = self._lpstyle.left * 3
+ lg.y = self._lpstyle.bottom * (1 + lg_line) + lp.height
+
+ lg.dxTextSpace = 5
+ lg.dy = 5
+ lg.dx = 20
+ lg.deltax = 60
+ lg.deltay = 0
+ lg.columnMaximum = 1
+ lg.alignment = 'right'
+ draw.add(lg)
+ self._draw = draw
+
+
+class eHorizontalLineChart(object):
+ def __init__(self, data, style):
+ self._lcstyle = style
+ if len(data) < 1:
+ return
+ self._linename = data[0]
+ self._data = self.analysisData(data[1:])
+ if self._data:
+ self.create()
+
+ @property
+ def draw(self):
+ return self._draw
+
+ def analysisData(self, data):
+ columns = len(data)
+ data = map(list, zip(*data))
+ self._catNames = data[0]
+ self._linename = self._linename[1:]
+ data = data[1:]
+ rows = len(data)
+
+ yvalueMin = float(data[0][0])
+ yvalueMax = float(data[0][0])
+ yvalueSteps = []
+ result = []
+
+ for rowNo in range(rows):
+ for columnNo in range(columns):
+ data[rowNo][columnNo] = float(data[rowNo][columnNo])
+ if yvalueMin > data[rowNo][columnNo]:
+ yvalueMin = data[rowNo][columnNo]
+ if yvalueMax < data[rowNo][columnNo]:
+ yvalueMax = data[rowNo][columnNo]
+ yvalueSteps.append(int(data[rowNo][columnNo] * 1.0) / 1.0)
+ result.append(tuple(data[rowNo]))
+
+ yvalueMin = int(yvalueMin) * 1.0 - 1
+ if yvalueMin < 0:
+ yvalueMin = 0.0
+ yvalueMax = int(yvalueMax) + 2.0
+ yvalueSteps.append(yvalueMin)
+ yvalueSteps.append(yvalueMax)
+ yvalueSteps = {}.fromkeys(yvalueSteps).keys()
+
+ self._value = (yvalueMin, yvalueMax, yvalueSteps)
+ print result
+ return result
+
+ def create(self):
+ dw = self._lcstyle.width
+ dh = self._lcstyle.height
+ draw = Drawing(dw, dh)
+
+ lc = HorizontalLineChart()
+ line_cnts = len(self._linename)
+
+ lg_line = (line_cnts + 3) / 4
+ lc.height = dh - self._lcstyle.bottom * (lg_line + 1.5)
+ lc.width = dw - lc.x * 2
+ lc.x = self._lcstyle.left
+ lc.y = self._lcstyle.bottom
+
+ lc.data = self._data
+
+ lc.strokeColor = self._lcstyle.strokeColor
+ lc.strokeWidth = self._lcstyle.strokeWidth
+ lc.useAbsolute = 1
+ lc.groupSpacing = lc.width * 2.0 / len(self._catNames)
+ lc.joinedLines = 1
+ lc.lineLabelFormat = self._lcstyle.format[0]
+
+ lc.valueAxis.valueMin, lc.valueAxis.valueMax, lc.valueAxis.valueSteps = self._value
+ lc.valueAxis.labelTextFormat = self._lcstyle.format[1]
+ lc.valueAxis.labels.fontSize = self._lcstyle.labelsfont
+
+ lc.categoryAxis.categoryNames = self._catNames
+ lc.categoryAxis.labels.boxAnchor = 'ne'
+ lc.categoryAxis.labels.dx = lc.width / 2.0 / len(self._catNames)
+ lc.categoryAxis.labels.dy = -6
+ lc.categoryAxis.labels.angle = 10
+ lc.categoryAxis.labels.fontSize = self._lcstyle.labelsfont
+ # lc.categoryAxis.visibleGrid = 1
+ # lc.categoryAxis.tickUp = 100
+ # lc.categoryAxis.tickDown = 50
+ # lc.categoryAxis.gridEnd = dh
+ sytle_cnts = len(self._lcstyle.linestyle)
+ color_paris = []
+ for i in range(line_cnts):
+ styleIndex = i % sytle_cnts
+ lc.lines[i].strokeColor = self._lcstyle.linestyle[styleIndex][0]
+ lc.lines[i].symbol = makeMarker(self._lcstyle.linestyle[styleIndex][1])
+ lc.lines[i].strokeWidth = self._lcstyle.linestyle[styleIndex][2]
+ color_paris.append((self._lcstyle.linestyle[styleIndex][0], self._linename[i]))
+
+ lc.lineLabels.fontSize = self._lcstyle.labelsfont - 2
+
+ draw.add(lc)
+
+ lg = Legend()
+ lg.colorNamePairs = color_paris
+ lg.fontName = 'Helvetica'
+ lg.fontSize = 7
+ # lg.x = dw /2
+ # lg.y = self._lcstyle.bottom *(1.5 + lg_line)
+
+ lg.x = self._lcstyle.left * 3
+ lg.y = self._lcstyle.bottom * (1 + lg_line) + lc.height
+
+ lg.dxTextSpace = 5
+ lg.dy = 5
+ lg.dx = 20
+ lg.deltax = 60
+ lg.deltay = 0
+ lg.columnMaximum = 1
+ lg.alignment = 'right'
+ draw.add(lg)
+ self._draw = draw
+
+
+class eBarChartColumn(object):
+ def __init__(self, data, style):
+ self._bcstyle = style
+ if len(data) < 4:
+ return
+ self._data = self.analysisData(data)
+ if self._data:
+ self.create()
+
+ @property
+ def draw(self):
+ return self._draw
+
+ def analysisData(self, data):
+ self._ytitle = data[0]
+ self._name = data[1]
+ self._bar = data[2]
+ bar_data = data[3]
+ result = []
+ for bar in bar_data:
+ bar = map(lambda x: float(x), bar)
+ result.append(tuple(bar))
+ return result
+
+ def create(self):
+ dw = self._bcstyle.width
+ dh = self._bcstyle.height
+ draw = Drawing(dw, dh)
+
+ bc = VerticalBarChart()
+ bar_cnt = len(self._bar)
+ lg_line = (bar_cnt + 3) / 4
+
+ bc.width = dw - self._bcstyle.left - self._bcstyle.right
+ bc.height = dh - self._bcstyle.top - self._bcstyle.bottom
+ if bar_cnt > 1:
+ bc.height -= lg_line * 15
+
+ bc.x = self._bcstyle.left
+ bc.y = self._bcstyle.bottom
+ color_pairs = []
+ for i in range(bar_cnt):
+ bc.bars[i].fillColor = self._bcstyle.pillarstyle[self._bar[i]][0]
+ color_pairs.append((self._bcstyle.pillarstyle[self._bar[i]][0], self._bar[i]))
+
+ bc.fillColor = self._bcstyle.background
+ bc.barLabels.fontName = 'Helvetica'
+ bc.barLabelFormat = self._bcstyle.pillarstyle[self._bar[0]][1]
+ bc.barLabels.fontSize = self._bcstyle.labelsfont
+ bc.barLabels.dy = self._bcstyle.labelsfont
+ bc.valueAxis.labels.fontName = 'Helvetica'
+ bc.valueAxis.labels.fontSize = self._bcstyle.labelsfont
+ bc.valueAxis.forceZero = 1
+ bc.valueAxis.valueMin = 0
+
+ bc.data = self._data
+ bc.barSpacing = self._bcstyle.barSpacing
+ bc.groupSpacing = self._bcstyle.groupSpacing / bar_cnt
+ bc.valueAxis.avoidBoundFrac = 1
+ bc.valueAxis.gridEnd = dw - self._bcstyle.right
+ bc.valueAxis.tickLeft = self._bcstyle.tick
+ bc.valueAxis.visibleGrid = 1
+ bc.categoryAxis.categoryNames = self._name
+ bc.categoryAxis.tickDown = self._bcstyle.tick
+ bc.categoryAxis.labels.fontName = 'Helvetica'
+ bc.categoryAxis.labels.fontSize = self._bcstyle.labelsfont
+ bc.categoryAxis.labels.dy = -27
+ bc.categoryAxis.labels.angle = -90
+ draw.add(bc)
+ lb = Label()
+ lb.fontName = 'Helvetica'
+ lb.fontSize = 7
+ lb.x = 12
+ lb.y = 80
+ lb.angle = 90
+ lb.textAnchor = 'middle'
+ lb.maxWidth = 100
+ lb.height = 20
+ lb._text = self._ytitle
+ draw.add(lb)
+ if bar_cnt > 1:
+ lg = Legend()
+ lg.colorNamePairs = color_pairs
+ lg.fontName = 'Helvetica'
+ lg.fontSize = 7
+
+ lg.x = self._bcstyle.left + bc.width / (bar_cnt + 1)
+ lg.y = dh - self._bcstyle.top - lg_line * 5
+
+ lg.dxTextSpace = 5
+ lg.dy = 5
+ lg.dx = 25
+ lg.deltax = 80
+ lg.deltay = 0
+ lg.columnMaximum = 1
+ lg.alignment = 'right'
+ draw.add(lg)
+
+ self._draw = draw
+
+
+class eParagraph(object):
+ def __init__(self, data, style):
+ self._pstyle = style
+ self._data = self.analysisData(data)
+ self.create()
+
+ def analysisData(self, data):
+ result = ""
+ for dstr in data:
+ if self._pstyle.name == 'ps_body':
+ # dstr = "<i>" + dstr + "</i><br/>"
+ dstr = dstr + "<br/>"
+ else:
+ dstr = dstr + "<br/>"
+ result += dstr
+ return result
+
+ def create(self):
+ self._para = Paragraph(self._data, self._pstyle)
+
+ @property
+ def para(self):
+ return self._para
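A minimal sketch of the input layout eBarChartColumn expects, pieced together from its analysisData above; the numbers and frame sizes are illustrative only, the series names must be keys of BarChartStyle.pillarstyle, and bcs_default comes from styles.py:

    bar_data = [
        "Average Latency (uSec)",            # y-axis label drawn beside the chart
        ["64", "128", "256"],                # category names along the x-axis
        ["fastlink", "l2switch"],            # one series name per bar colour
        [[25.1, 30.2, 41.7],                 # one list of values per series,
         [55.0, 61.3, 72.9]],                # matching the number of categories
    ]
    drawing = eBarChartColumn(bar_data, bcs_default).draw   # a reportlab Drawing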
diff --git a/vstf/vstf/controller/reporters/report/pdf/pdfcreator.py b/vstf/vstf/controller/reporters/report/pdf/pdfcreator.py
new file mode 100755
index 00000000..50b3bc65
--- /dev/null
+++ b/vstf/vstf/controller/reporters/report/pdf/pdfcreator.py
@@ -0,0 +1,446 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-05-29
+# see license for license details
+__version__ = ''' '''
+
+import os
+
+from vstf.controller.reporters.report.pdf.styles import TemplateStyle
+from vstf.controller.reporters.report.pdf.pdftemplate import PdfVswitch
+from vstf.controller.reporters.report.pdf.story import TitleStory, SpaceStory, ImageStory, LineChartStory, \
+ LinePlotStory, uTableStory, Story, TableOfContentsStory, PageBreakStory, ParagraphStory, BarChartStory, cTableStory
+from vstf.controller.reporters.report.data_factory import CommonData, ScenarioData, HistoryData
+from vstf.controller.database.dbinterface import DbManage
+import vstf.controller
+
+
+class LetterOrder(object):
+ def __init__(self):
+ self.lettertable = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
+ self._cur = 0
+ self._len = len(self.lettertable)
+
+ def get(self):
+ return self.lettertable[self._cur]
+
+ def pre(self):
+ self._cur = (self._cur + self._len - 1) % self._len
+
+ def next(self):
+ self._cur = (self._cur + 1) % self._len
+
+
+class PdfBase(object):
+ def __init__(self):
+ self._case = ''
+ self._ofile = ''
+ self._title = []
+ self._story = []
+ self._rootdir = os.path.dirname(vstf.controller.__file__) + '/'
+ self._pdf = None
+
+ def create_pdf(self):
+ style = TemplateStyle(name='default')
+ title = self._title
+ logo = [self._rootdir + "res/logo.jpg"]
+ header = ['']
+ footer = [""]
+ note = ['', '']
+ output = [self._ofile]
+ self._pdf = PdfVswitch(style, title, logo, header, footer, output, note)
+
+ def save_pdf(self):
+ self._pdf.generate(self._story)
+
+ def add_coverpage(self):
+ story = Story()
+ story = PageBreakStory(story)
+ self._story += story.storylist
+
+ def create_story(self):
+ raise NotImplementedError("abstract PdfBase")
+
+ def create(self):
+ self.create_pdf()
+ self.create_story()
+ self.save_pdf()
+
+
+class PdfvSwitchCreator(PdfBase):
+ def __init__(self, ofile, common_data, scenario_data, history_data):
+ PdfBase.__init__(self)
+ self._common = common_data
+ self._result = scenario_data
+ self._history = history_data
+ self._ofile = ofile
+ self._chapterid = 0
+ self._appendixid = LetterOrder()
+
+ def create_pdf(self):
+ style = TemplateStyle(name='default')
+ title = self._result.get_covertitle()
+ logo = [self._rootdir + "res/logo.jpg"]
+ header = ['']
+ footer = [""]
+ note = ['', '']
+ output = [self._ofile]
+ self._pdf = PdfVswitch(style, title, logo, header, footer, output, note)
+
+ def get_chapterid(self):
+ self._chapterid = self._chapterid + 1
+ return self._chapterid
+
+ def create_story(self):
+ self.add_coverpage()
+ self.add_table_of_contents()
+ # self.add_contact()
+ # self.add_overview()
+ self.add_scenario()
+ # self.add_info()
+ # self.add_appendix()
+ self.add_historys()
+
+ def add_info(self):
+ self.add_systeminfo()
+ self.add_gitinfo()
+ self.add_profile_parameters()
+ self.add_testing_options()
+
+ def add_contact(self):
+ story = Story()
+ story = SpaceStory(story)
+ title = ["", "", "", "Reporter"]
+ body = self._common.get_contact()
+ story = TitleStory(story, data=title, style=7)
+ story = ParagraphStory(story, data=body)
+ self._story += story.storylist
+
+ def add_table_of_contents(self):
+ story = Story()
+ story = TableOfContentsStory(story)
+ self._story += story.storylist
+
+ def add_overview(self):
+ story = Story()
+ story = PageBreakStory(story)
+
+ chapterid = self.get_chapterid()
+ title = ["%d.Overview" % (chapterid)]
+ body = [""]
+ story = TitleStory(story, data=title, style=1)
+ story = ParagraphStory(story, data=body)
+
+ sectionid = 1
+ title = ["%d.%d Components under Test" % (chapterid, sectionid)]
+ body = self._common.get_components()
+ story = TitleStory(story, data=title, style=2)
+ story = ParagraphStory(story, data=body)
+
+ sectionid = sectionid + 1
+ title = ["%d.%d Test" % (chapterid, sectionid)]
+ body = self._result.get_test()
+ story = TitleStory(story, data=title, style=2)
+ story = ParagraphStory(story, data=body)
+
+ sectionid = sectionid + 1
+ title = ["%d.%d Configuration" % (chapterid, sectionid)]
+ story = TitleStory(story, data=title, style=2)
+
+ title = ["Software"]
+ body = self._common.get_software()
+ story = TitleStory(story, data=title, style=6)
+ story = ParagraphStory(story, data=body)
+
+ title = ["Hardware"]
+ body = self._common.get_hardware()
+ story = TitleStory(story, data=title, style=6)
+ story = ParagraphStory(story, data=body)
+ self._story += story.storylist
+
+ def add_scenario(self):
+ case_list = self._result.get_caselist()
+ for case in case_list:
+ self.add_case(case)
+
+ def add_case(self, case):
+ story = Story()
+ chapterid = self.get_chapterid()
+
+ title = ["%d. Case : %s (%s)" % (chapterid, case, self._common.get_casename(case))]
+
+ tools = self._result.get_test_tools(case)
+ pic = self._common.get_casefigure(case, tools)
+ print pic
+
+ story = TitleStory(story, data=title, style=1)
+ story = SpaceStory(story)
+ story = ImageStory(story, data=[self._rootdir + pic])
+ story = SpaceStory(story)
+
+ sectionid = 1
+ story = self.add_summary(story, chapterid, sectionid, case)
+ story = SpaceStory(story)
+
+ if self._result.is_throughput_start(case):
+ sectionid = sectionid + 1
+ story = self.add_throughput_result(story, chapterid, sectionid, case)
+
+ if self._result.is_frameloss_start(case):
+ sectionid = sectionid + 1
+ story = self.add_frameloss_result(story, chapterid, sectionid, case)
+
+ if self._result.is_latency_start(case):
+ sectionid = sectionid + 1
+ story = self.add_latency_result(story, chapterid, sectionid, case)
+
+ story = SpaceStory(story)
+ story = SpaceStory(story)
+ self._story += story.storylist
+
+ def add_summary(self, story, chapterid, sectionid, case):
+ title = ["%d.%d Summary" % (chapterid, sectionid)]
+ story = TitleStory(story, data=title, style=2)
+ provider_list = ["fastlink", "rdp", "l2switch"]
+ provider_dict = {"fastlink": "Fast Link", "l2switch": "L2Switch", "rdp": "Kernel RDP"}
+ unitid = 1
+ case_name = self._common.get_casename(case)
+ for provider in provider_list:
+ if self._result.is_provider_start(case, provider):
+ title = ["%d.%d.%d %s (%s_%s)" % (
+ chapterid, sectionid, unitid, provider_dict[provider], case_name, provider)]
+ unitid = unitid + 1
+ story = TitleStory(story, data=title, style=6)
+ test_types = ["throughput", "frameloss"]
+ for test_type in test_types:
+ if self._result.is_type_provider_start(case, provider, test_type):
+ story = self.add_summary_type(story, case, provider, test_type)
+ return story
+
+ def add_summary_type(self, story, case, provider, test_type):
+ bar_list = [test_type, "latency"]
+ for item in bar_list:
+ bar_data = self._result.get_bardata(case, provider, item)
+ story = SpaceStory(story)
+ story = BarChartStory(story, data=bar_data)
+
+ table_content = self._result.get_summary_tabledata(case, provider, test_type)
+ story = SpaceStory(story)
+ story = cTableStory(story, data=table_content, style=3)
+ story = SpaceStory(story)
+ return story
+
+ def add_throughput_result(self, story, chapterid, sectionid, case):
+ title = ["%d.%d Throughput " % (chapterid, sectionid)]
+ story = TitleStory(story, data=title, style=2)
+ unitid = 1
+ title = ["%d.%d.%d Summary" % (chapterid, sectionid, unitid)]
+ story = TitleStory(story, data=title, style=6)
+
+ test_type = "throughput"
+ unit = 'RX Frame Rate'
+ chart_data = self._result.get_frameloss_chartdata(case, test_type)
+ table_data = self._result.get_frameloss_tabledata(case, test_type)
+ title = [unit + ' (%)']
+ story = TitleStory(story, data=title, style=6)
+ # story = SpaceStory(story)
+ # story = LinePlotStory(story, data=chart_data)
+ story = SpaceStory(story)
+ story = uTableStory(story, data=table_data)
+ story = SpaceStory(story)
+
+ unit = 'Frame Loss Rate'
+ title = [unit + ' (Mpps)']
+
+ chart_data = self._result.get_framerate_chartdata(case, test_type)
+ table_data = self._result.get_framerate_tabledata(case, test_type)
+ story = TitleStory(story, data=title, style=6)
+ story = SpaceStory(story)
+ story = LinePlotStory(story, data=chart_data)
+ story = SpaceStory(story)
+ story = uTableStory(story, data=table_data)
+ story = SpaceStory(story)
+ return story
+
+ def add_frameloss_result(self, story, chapterid, sectionid, case):
+ title = ["%d.%d Frame Loss Rate " % (chapterid, sectionid)]
+ story = TitleStory(story, data=title, style=2)
+ unitid = 1
+ title = ["%d.%d.%d Summary" % (chapterid, sectionid, unitid)]
+ story = TitleStory(story, data=title, style=6)
+
+ test_type = "frameloss"
+ unit = 'RX Frame Rate'
+ chart_data = self._result.get_frameloss_chartdata(case, test_type)
+ table_data = self._result.get_frameloss_tabledata(case, test_type)
+ title = [unit + ' (%)']
+ story = TitleStory(story, data=title, style=6)
+ # story = SpaceStory(story)
+ # story = LineChartStory(story, data=chart_data)
+ story = SpaceStory(story)
+ story = uTableStory(story, data=table_data)
+ story = SpaceStory(story)
+
+ unit = 'Frame Loss Rate'
+ title = [unit + ' (Mpps)']
+
+ chart_data = self._result.get_framerate_chartdata(case, test_type)
+ table_data = self._result.get_framerate_tabledata(case, test_type)
+ story = TitleStory(story, data=title, style=6)
+ story = SpaceStory(story)
+ story = LineChartStory(story, data=chart_data)
+ story = SpaceStory(story)
+ story = uTableStory(story, data=table_data)
+ story = SpaceStory(story)
+ return story
+
+ def add_latency_result(self, story, chapterid, sectionid, case):
+ title = ["%d.%d Latency " % (chapterid, sectionid)]
+ story = TitleStory(story, data=title, style=2)
+ unitid = 1
+ title = ["%d.%d.%d Summary" % (chapterid, sectionid, unitid)]
+ story = TitleStory(story, data=title, style=6)
+
+ unit = 'Average Latency'
+ title = [unit + ' (uSec)']
+ # chart_data = self._result.get_latency_chartdata(case)
+ bar_data = self._result.get_latency_bardata(case)
+ table_data = self._result.get_latency_tabledata(case)
+ story = TitleStory(story, data=title, style=6)
+ story = SpaceStory(story)
+ # story = LineChartStory(story, data=chart_data)
+ story = BarChartStory(story, data=bar_data)
+
+ story = SpaceStory(story)
+ story = uTableStory(story, data=table_data)
+ story = SpaceStory(story)
+ return story
+
+ def add_systeminfo(self):
+ story = Story()
+ chapterid = self.get_chapterid()
+ story = SpaceStory(story)
+ title = ["%d. System Information " % (chapterid)]
+ story = PageBreakStory(story)
+ story = TitleStory(story, data=title, style=1)
+ table_content = self._common.get_systeminfo_tabledata()
+ story = SpaceStory(story)
+ story = cTableStory(story, data=table_content, style=0)
+ story = SpaceStory(story)
+ self._story += story.storylist
+
+ def add_gitinfo(self):
+ story = Story()
+ chapterid = self.get_chapterid()
+ title = ["%d. Git Repository Information " % (chapterid)]
+ story = TitleStory(story, data=title, style=1)
+
+ table_content = self._common.get_gitinfo_tabledata()
+ if table_content:
+ story = SpaceStory(story)
+ story = cTableStory(story, data=table_content, style=5)
+ story = SpaceStory(story)
+ self._story += story.storylist
+
+ def add_testing_options(self):
+ story = Story()
+ chapterid = self.get_chapterid()
+ story = SpaceStory(story)
+ title = ["%d. Testing Options" % (chapterid)]
+
+ story = TitleStory(story, data=title, style=1)
+ table_content = self._common.get_testingoptions_tabledata()
+ story = SpaceStory(story)
+ story = cTableStory(story, data=table_content, style=1)
+ story = SpaceStory(story)
+ self._story += story.storylist
+
+ def add_profile_parameters(self):
+ story = Story()
+ chapterid = self.get_chapterid()
+ story = PageBreakStory(story)
+ title = ["%d. " % (chapterid)]
+ story = TitleStory(story, data=title, style=1)
+ table_content = self._common.get_profileparameters_tabledData()
+ story = SpaceStory(story)
+ story = cTableStory(story, data=table_content, style=2)
+ story = SpaceStory(story)
+ self._story += story.storylist
+
+ def add_appendix(self):
+ story = Story()
+ story = PageBreakStory(story)
+
+ title = ["<b>Appendix %s: vSwitching Testing Methodology</b>" % (self._appendixid.get())]
+ self._appendixid.next()
+ story = TitleStory(story, data=title, style=1)
+ filename = "res/Traffic-types.jpg"
+ story = SpaceStory(story)
+ story = ImageStory(story, data=[self._rootdir + filename])
+ # story = SpaceStory(story)
+
+ title = ["Traffic Patterns: "]
+ story = TitleStory(story, data=title, style=6)
+
+ body = [
+ "<b>Ti</b> - South North Traffic",
+ "<b>Tu</b> - East Eest Traffic",
+ "<b>Tn</b> - Physical host or VM loop back",
+ "<b>Tnv</b> - Virtual Machine loop back",
+ ]
+ story = ParagraphStory(story, data=body)
+
+ title = ["<b>Performance Testing Coverage </b> (version 0.1):"]
+ story = TitleStory(story, data=title, style=6)
+
+ table_content = self._common.get_introduct_tabledata()
+ story = SpaceStory(story)
+ story = cTableStory(story, data=table_content, style=4)
+ self._story += story.storylist
+
+ def add_historys(self):
+ case_list = self._result.get_caselist()
+ for case in case_list:
+ history = self._history.get_history_info(case)
+ if history:
+ self.add_history(case, history)
+
+ def add_history(self, case, history):
+ story = Story()
+ story = PageBreakStory(story)
+
+ title = ["<b>Appendix %s : %s History Records</b>" % (self._appendixid.get(), case)]
+ story = TitleStory(story, data=title, style=1)
+
+ for i in range(len(history)):
+ title = ["%s.%s %s" % (self._appendixid.get(), i, history[i]["title"])]
+ story = TitleStory(story, data=title, style=2)
+
+ section = history[i]["data"]
+ for unit in section:
+ title = [unit['title']]
+ story = TitleStory(story, data=title, style=6)
+ content = unit['data']
+ story = uTableStory(story, data=content)
+
+ self._appendixid.next()
+ self._story += story.storylist
+
+
+def main():
+ dbase = DbManage()
+ taskid = dbase.get_last_taskid()
+ common_data = CommonData(taskid, dbase)
+ scenario_list = common_data.get_scenariolist()
+ history_data = HistoryData(taskid, dbase)
+ for scenario in scenario_list:
+ out_file = "vstf_report_%s.pdf" % (scenario)
+ scenario_data = ScenarioData(taskid, dbase, scenario)
+ reporter = PdfvSwitchCreator(out_file, common_data, scenario_data, history_data)
+ if reporter:
+ reporter.create()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/vstf/vstf/controller/reporters/report/pdf/pdftemplate.py b/vstf/vstf/controller/reporters/report/pdf/pdftemplate.py
new file mode 100755
index 00000000..819a5c57
--- /dev/null
+++ b/vstf/vstf/controller/reporters/report/pdf/pdftemplate.py
@@ -0,0 +1,107 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+import time
+
+from reportlab.platypus.doctemplate import SimpleDocTemplate
+from reportlab.platypus import PageBreak
+from vstf.controller.reporters.report.pdf.styles import TemplateStyle, ps_head_lv1, ps_head_lv2, ps_head_lv3
+
+
+class MyDocTemplate(SimpleDocTemplate):
+ def __init__(self, filename, **kw):
+ self.allowSplitting = 0
+ SimpleDocTemplate.__init__(self, filename, **kw)
+
+ def afterFlowable(self, flowable):
+ """Registers TOC entries."""
+ if flowable.__class__.__name__ == 'Paragraph':
+ text = flowable.getPlainText()
+ style = flowable.style.name
+ if style == ps_head_lv1.name:
+ self.notify('TOCEntry', (0, text, self.page - 1))
+ elif style == ps_head_lv2.name:
+ self.notify('TOCEntry', (1, text, self.page - 1))
+ elif style == ps_head_lv3.name:
+ self.notify('TOCEntry', (2, text, self.page - 1))
+
+
+class PdfTemplate:
+ def __init__(self, style, title, logo, header, footer, output, note=None):
+ self._style = style
+ self._title = title
+ self._logo = logo[0]
+ self._header = header[0]
+ self._footer = footer
+ self._output = output[0]
+ self._note = note
+ info = " Generated on %s " % time.strftime('%Y/%m/%d %H:%M:%S', time.localtime())
+ self._note[0] += info
+
+ def myFirstPage(self, canvas, doc):
+ raise NotImplementedError("abstract PdfTemplate")
+
+ def myLaterPages(self, canvas, doc):
+ raise NotImplementedError("abstract PdfTemplate")
+
+ def generate(self, story):
+ sizes = (self._style.page_wight, self._style.page_height)
+ doc = MyDocTemplate(self._output, pagesize=sizes)
+ # doc.build(story, onFirstPage=self.myFirstPage, onLaterPages=self.myLaterPages)
+ doc.multiBuild(story, onFirstPage=self.myFirstPage, onLaterPages=self.myLaterPages)
+
+
+class PdfVswitch(PdfTemplate):
+ def myFirstPage(self, canvas, doc):
+ canvas.saveState()
+ title_lines = len(self._title)
+ line_size = [self._style.title_size] * title_lines
+ line_size.append(0)
+
+ canvas.drawImage(self._logo,
+ (self._style.page_wight - self._style.logo_width) / 2.0,
+ self._style.page_height / 2.0 + (1 + self._style.title_leading) * reduce(lambda x, y: x + y,
+ line_size),
+ self._style.logo_width,
+ self._style.logo_height
+ )
+ for i in range(title_lines):
+ canvas.setFont(self._style.title_font, line_size[i])
+ canvas.drawCentredString(self._style.page_wight / 2.0,
+ self._style.page_height / 2.0 + (1 + self._style.title_leading) * reduce(
+ lambda x, y: x + y, line_size[i + 1:]),
+ self._title[i]
+ )
+ size = self._style.body_size
+ canvas.setFont(self._style.body_font, size)
+ note_line = len(self._note)
+
+ for i in range(note_line):
+ print self._note[i]
+ canvas.drawCentredString(self._style.page_wight / 2.0,
+ self._style.page_height / 5.0 + (1 + self._style.body_leading) * size * (
+ note_line - i - 1),
+ self._note[i]
+ )
+ size = self._style.body_size - 2
+ canvas.setFont(self._style.body_font, size)
+ canvas.drawCentredString(self._style.page_wight / 2.0,
+ self._style.page_bottom / 2.0 + (1 + self._style.body_leading) * size,
+ self._footer[0])
+ canvas.restoreState()
+
+ def myLaterPages(self, canvas, doc):
+ canvas.saveState()
+ canvas.setLineWidth(self._style.line_width)
+ canvas.line(self._style.page_left,
+ self._style.page_height - self._style.page_top,
+ self._style.page_wight - self._style.page_right,
+ self._style.page_height - self._style.page_top
+ )
+ size = self._style.body_size - 2
+ canvas.setFont(self._style.body_font, size)
+ canvas.drawCentredString(self._style.page_wight / 2.0,
+ self._style.page_bottom - 24,
+ "%s%s Page %2d " % (self._footer[0], " " * 8, doc.page - 1)
+ )
+ canvas.restoreState()
+
diff --git a/vstf/vstf/controller/reporters/report/pdf/story.py b/vstf/vstf/controller/reporters/report/pdf/story.py
new file mode 100755
index 00000000..3e56e185
--- /dev/null
+++ b/vstf/vstf/controller/reporters/report/pdf/story.py
@@ -0,0 +1,191 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+__doc__ = """
+Story Decorator contains ImageStory, HeaderStory, PageBreakStory,
+TableStory, LinePlotStory, TitleStory, ParagraphStory
+"""
+import sys
+import os
+from reportlab.platypus import PageBreak
+from reportlab.lib import colors
+from reportlab.platypus.tableofcontents import TableOfContents
+from styles import *
+from element import *
+
+
+class Story(object):
+ def __init__(self):
+ self._storylist = []
+
+ @property
+ def storylist(self):
+ return self._storylist
+
+
+class StoryDecorator(Story):
+ def __init__(self, story, data=None, style=None):
+ self._story = story
+ self._data = data
+ self._style = style
+ print self._data
+ self.new_story()
+
+ # print self._story.storylist
+ @property
+ def storylist(self):
+ return self._story.storylist
+
+ def new_story(self):
+ raise NotImplementedError("abstract StoryDecorator")
+
+
+class ImageStory(StoryDecorator):
+ def new_story(self):
+ print "Image Story"
+ for filename in self._data:
+ if not os.path.exists(filename):
+ print "cannot find %s" % filename
+ continue
+ if 'Traffic-types' in filename:
+ style = is_traffic
+ image_height = style.image_height
+ image_width = style.image_width
+ image_hAlign = style.image_hAlign
+ image_vAlign = style.image_vAlign
+ self._story.storylist.append(
+ eImage(filename, image_width, image_height, hAlign=image_hAlign, vAlign=image_vAlign))
+ else:
+ style = is_default
+ image_height = style.image_height
+ image_width = style.image_width
+ image_hAlign = style.image_hAlign
+ image_vAlign = style.image_vAlign
+ # self._story.storylist.append(eGraphicsTable([[' ' * 5, eImage(filename, image_width, image_height, hAlign=image_hAlign, vAlign=image_vAlign)]], ts_left).table)
+ self._story.storylist.append(
+ eImage(filename, image_width, image_height, hAlign=image_hAlign, vAlign=image_vAlign))
+
+
+class HeaderStory(StoryDecorator):
+ def new_story(self):
+ print "header story"
+ self._story.storylist.append(PageBreak())
+
+
+class PageBreakStory(StoryDecorator):
+ def new_story(self):
+ print "PageBreak story"
+ self._story.storylist.append(PageBreak())
+
+
+class TableOfContentsStory(StoryDecorator):
+ def new_story(self):
+ print "TableOfContents story"
+ self._data = [" ", " ", "Table Of Contents", ""]
+ style = ps_head_lv4
+ self._story.storylist.append(eParagraph(self._data, style).para)
+ toc = TableOfContents()
+ toc.levelStyles = [ps_head_lv7, ps_head_lv8, ps_head_lv9]
+ self._story.storylist.append(toc)
+
+
+class uTableStory(StoryDecorator):
+ def new_story(self):
+ print "utable story"
+ style = ts_left
+ if not self._data:
+ print "data error "
+ return
+ self._story.storylist.append(eCommonTable(self._data, style).table)
+
+
+class TableStory(StoryDecorator):
+ def new_story(self):
+ print "table story"
+ style = ts_default
+ self._story.storylist.append(eDataTable(self._data, style).table)
+
+
+class SpaceStory(StoryDecorator):
+ def new_story(self):
+ style = ps_space
+ self._story.storylist.append(eParagraph([" ", " "], style).para)
+
+
+class cTableStory(StoryDecorator):
+ def new_story(self):
+ print "table story"
+ style = ts_default
+ if self._style == 0:
+ self._story.storylist.append(eConfigTable(self._data, style).table)
+ elif self._style == 1:
+ self._story.storylist.append(eOptionsTable(self._data, style).table)
+ elif self._style == 2:
+ self._story.storylist.append(eProfileTable(self._data, style).table)
+ elif self._style == 3:
+ self._story.storylist.append(eSummaryTable(self._data, style).table)
+ elif self._style == 4:
+ self._story.storylist.append(eScenarioTable(self._data, style).table)
+ elif self._style == 5:
+ self._story.storylist.append(eGitInfoTable(self._data, style).table)
+
+
+class LinePlotStory(StoryDecorator):
+ def new_story(self):
+ print "LinePlot"
+ style = lps_default
+ if not self._data:
+ print "data error "
+ return
+ data = eGraphicsTable([[eLinePlot(self._data, style).draw]]).table
+ if data:
+ self._story.storylist.append(data)
+
+
+class LineChartStory(StoryDecorator):
+ def new_story(self):
+ print "LineChartStory: "
+ style = lcs_default
+ if not self._data:
+ print "data error "
+ return
+ data = eGraphicsTable([[eHorizontalLineChart(self._data, style).draw]]).table
+ if data:
+ self._story.storylist.append(data)
+
+
+class BarChartStory(StoryDecorator):
+ def new_story(self):
+ print "BarChartStory: "
+ style = bcs_default
+ if not self._data:
+ print "data error "
+ return
+
+ data = eGraphicsTable([[eBarChartColumn(self._data, style).draw]]).table
+ if data:
+ self._story.storylist.append(data)
+
+
+class ParagraphStory(StoryDecorator):
+ def new_story(self):
+ print "Paragraph Story"
+ style = ps_body
+ if not self._data:
+ print "data error "
+ return
+ data = eParagraph(self._data, style).para
+ if data:
+ self._story.storylist.append(data)
+
+
+class TitleStory(StoryDecorator):
+ def new_story(self):
+ print "Paragraph Story"
+ if self._style - 1 in range(9):
+ style = eval("ps_head_lv" + "%d" % self._style)
+ else:
+ style = ps_body
+ # print style
+ # print self._data
+
+ self._story.storylist.append(eParagraph(self._data, style).para)
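As a quick orientation, the decorators above compose by appending flowables to one shared list; the chain below is a minimal sketch using only names defined in this file, with illustrative title and body strings:

    story = Story()
    story = PageBreakStory(story)
    story = TitleStory(story, data=["1. Example Chapter"], style=1)    # style 1..9 selects ps_head_lvN
    story = ParagraphStory(story, data=["An illustrative paragraph."])
    story = SpaceStory(story)
    flowables = story.storylist    # hand this list to the pdf template's generate()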
diff --git a/vstf/vstf/controller/reporters/report/pdf/styles.py b/vstf/vstf/controller/reporters/report/pdf/styles.py
new file mode 100755
index 00000000..d54ee8ab
--- /dev/null
+++ b/vstf/vstf/controller/reporters/report/pdf/styles.py
@@ -0,0 +1,198 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+from reportlab.lib.styles import PropertySet
+from reportlab.lib.pagesizes import A4
+from reportlab.lib import colors
+from reportlab.lib.styles import ParagraphStyle
+from reportlab.lib.enums import TA_LEFT
+
+
+class TemplateStyle(PropertySet):
+ defaults = dict(
+ page_height=A4[1],
+ page_wight=A4[0],
+ page_left=78,
+ page_top=60,
+ page_bottom=70,
+ page_right=78,
+ title_size=16,
+ title_leading=1.25,
+ title_font='Courier-Bold',
+ body_size=10,
+ body_leading=0.8,
+ body_font='Courier',
+ line_width=1,
+ logo_width=131.2,
+ logo_height=127.7
+ )
+
+
+class ImageStyle(PropertySet):
+ defaults = dict(
+ image_height=165,
+ image_width=175,
+ image_hAlign='CENTRE', # LEFT,CENTRE or RIGHT
+ image_vAlign='MIDDLE' # BOTTOM,MIDDLE or TOP
+ )
+
+
+class TableStyle(PropertySet):
+ defaults = dict(
+ table_hAlign='CENTRE', # LEFT,CENTRE or RIGHT
+ table_vAlign='MIDDLE', # BOTTOM,MIDDLE or TOP
+ table_colWidths=None,
+ table_rowHeights=None
+ )
+
+
+class LinePlotStyle(PropertySet):
+ defaults = dict(
+ width=430,
+ height=400,
+ left=30,
+ bottom=20,
+ strokeColor=colors.black,
+ strokeWidth=1,
+ format=('%4.2f', '%4.0f', '%3.1f'),
+ labelsfont=7,
+ linestyle=[
+ (colors.red, 'Circle', 1.5),
+ (colors.blue, 'Diamond', 1.5),
+ (colors.gold, 'Square', 1.5),
+ (colors.green, 'Triangle', 1.5),
+ (colors.pink, 'FilledCircle', 1.5),
+ (colors.lightblue, 'FilledDiamond', 1.5),
+ (colors.lightgreen, 'FilledTriangle', 1.5)
+ ]
+ )
+
+
+class LineChartStyle(PropertySet):
+ defaults = dict(
+ width=430,
+ height=400,
+ left=30,
+ bottom=20,
+ strokeColor=colors.lightgrey,
+ strokeWidth=1,
+ format=('%4.2f', '%3.1f'),
+ labelsfont=8,
+ linestyle=[
+ (colors.red, 'Circle', 1.5),
+ (colors.blue, 'Diamond', 1.5),
+ (colors.gold, 'Square', 1.5),
+ (colors.green, 'Triangle', 1.5),
+ (colors.pink, 'FilledCircle', 1.5),
+ (colors.lightblue, 'FilledDiamond', 1.5),
+ (colors.lightgreen, 'FilledTriangle', 1.5)
+ ]
+ )
+
+
+class BarChartStyle(PropertySet):
+ defaults = dict(
+ width=430,
+ height=135,
+ left=30,
+ bottom=50,
+ top=0,
+ right=30,
+ groupSpacing=32,
+ barSpacing=4,
+ tick=3,
+ strokeColor=colors.lightgrey,
+ strokeWidth=1,
+ pillarstyle={
+ "loss": (colors.lightgreen, '%4.2f'),
+ "latency": (colors.indianred, '%4.1f'),
+ "fastlink": (colors.pink, '%4.1f'),
+ "l2switch": (colors.lightblue, '%4.1f'),
+ "kernel rdp": (colors.lightgreen, '%4.1f'),
+ },
+ background=colors.lightgrey,
+ labelsfont=6,
+ )
+
+
+ts_left = TableStyle(
+ name='left',
+ table_hAlign='LEFT', # LEFT,CENTRE or RIGHT
+ table_vAlign='BOTTOM', # BOTTOM,MIDDLE or TOP
+ table_colWidths=None,
+ table_rowHeights=None
+)
+
+is_default = ImageStyle(name='default')
+is_traffic = ImageStyle(name='traffic',
+ image_height=150,
+ image_width=360,
+ image_hAlign='CENTRE')
+
+ts_default = TableStyle(name='default')
+lps_default = LinePlotStyle(name='default')
+lcs_default = LineChartStyle(name='default')
+bcs_default = BarChartStyle(name='default')
+ps_head_lv1 = ParagraphStyle(name='ps_head_lv1',
+ fontName='Courier-Bold',
+ alignment=TA_LEFT, # TA_CENTRE,
+ fontSize=13,
+ leading=22,
+ leftIndent=0)
+
+ps_head_lv2 = ParagraphStyle(name='ps_head_lv2',
+ fontName='Courier',
+ fontSize=12,
+ leading=20,
+ leftIndent=16)
+
+ps_head_lv3 = ParagraphStyle(name='ps_head_lv3',
+ fontSize=11,
+ fontName='Courier',
+ leading=20,
+ leftIndent=16)
+
+ps_head_lv4 = ParagraphStyle(name='ps_head_lv4',
+ fontSize=13,
+ fontName='Courier-Bold',
+ leading=22,
+ leftIndent=0)
+
+ps_head_lv5 = ParagraphStyle(name='ps_head_lv5',
+ fontSize=12,
+ fontName='Courier',
+ leading=20,
+ leftIndent=16)
+
+ps_head_lv6 = ParagraphStyle(name='ps_head_lv6',
+ fontSize=11,
+ fontName='Courier',
+ leading=20,
+ leftIndent=16)
+
+ps_head_lv7 = ParagraphStyle(name='ps_head_lv7',
+ fontSize=11,
+ fontName='Courier',
+ leading=18,
+ leftIndent=0)
+
+ps_head_lv8 = ParagraphStyle(name='ps_head_lv8',
+ fontSize=11,
+ fontName='Courier',
+ leading=18,
+ leftIndent=16)
+
+ps_head_lv9 = ParagraphStyle(name='ps_head_lv9',
+ fontSize=11,
+ fontName='Courier',
+ leading=18,
+ leftIndent=32)
+
+ps_body = ParagraphStyle(name='ps_body',
+ fontSize=11,
+ fontName='Courier',
+ leading=18,
+ leftIndent=32)
+
+ps_space = ParagraphStyle(name='ps_space',
+ fontSize=5,
+ leading=5)
diff --git a/vstf/vstf/controller/reporters/report/provider/__init__.py b/vstf/vstf/controller/reporters/report/provider/__init__.py
new file mode 100755
index 00000000..89dcd4e2
--- /dev/null
+++ b/vstf/vstf/controller/reporters/report/provider/__init__.py
@@ -0,0 +1,14 @@
+# Copyright Huawei Technologies Co., Ltd. 1998-2015.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the License); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an AS IS BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
diff --git a/vstf/vstf/controller/reporters/report/provider/html_provider.py b/vstf/vstf/controller/reporters/report/provider/html_provider.py
new file mode 100755
index 00000000..b0b07432
--- /dev/null
+++ b/vstf/vstf/controller/reporters/report/provider/html_provider.py
@@ -0,0 +1,63 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-09-25
+# see license for license details
+__version__ = ''' '''
+import logging
+
+LOG = logging.getLogger(__name__)
+from vstf.controller.settings.html_settings import HtmlSettings
+from vstf.controller.settings.data_settings import DataSettings
+
+
+class HtmlProvider(object):
+ def __init__(self, content, style):
+ self._content = content
+ self._style = style
+
+ def get_style(self):
+ return self._style["style"]
+
+ def get_subject(self):
+ return self._content["subject"]
+
+ def get_ovs_title(self):
+ return self._content["ovs"]["title"]
+
+ def get_ovs_table(self):
+ return map(lambda x: list(x), self._content["ovs"]["content"].items())
+
+ def get_result_title(self):
+ return self._content["result"]["title"]
+
+ def get_result_table(self, ttype):
+ result = []
+ content = self._content["result"]["content"]
+ if ttype in content:
+ result.append(content[ttype]["columns"])
+ result.extend(content[ttype]["data"])
+
+ result = map(lambda x: list(x), zip(*result))
+ return result
+
+
+class StyleProvider(object):
+ def __init__(self, style):
+ self._style = style
+
+ def get_style(self):
+ return self._style["style"]
+
+
+def unit_test():
+ from vstf.common.log import setup_logging
+ setup_logging(level=logging.DEBUG, log_file="/var/log/html-provder.log", clevel=logging.INFO)
+
+ html_settings = HtmlSettings()
+ LOG.info(html_settings.settings)
+ data_settings = DataSettings()
+ LOG.info(data_settings.settings)
+
+ hprovider = HtmlProvider(data_settings.settings, html_settings.settings)
+ sprovider = StyleProvider(html_settings.settings)
diff --git a/vstf/vstf/controller/reporters/reporter.py b/vstf/vstf/controller/reporters/reporter.py
new file mode 100755
index 00000000..1c256c61
--- /dev/null
+++ b/vstf/vstf/controller/reporters/reporter.py
@@ -0,0 +1,110 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-05-29
+# see license for license details
+import os
+import argparse
+import logging
+import time
+
+from vstf.controller.reporters.report.pdf.pdfcreator import PdfvSwitchCreator
+from vstf.controller.reporters.report.html.htmlcreator import HtmlvSwitchCreator
+from vstf.controller.reporters.report.data_factory import CommonData, TaskData, ScenarioData, HistoryData
+from vstf.controller.database.dbinterface import DbManage
+from vstf.controller.settings.mail_settings import MailSettings
+from vstf.controller.reporters.mail.sendmail import SendMail
+from vstf.controller.settings.html_settings import HtmlSettings
+from vstf.controller.reporters.report.provider.html_provider import StyleProvider
+import vstf.common.constants as cst
+
+
+__version__ = ''' '''
+LOG = logging.getLogger(__name__)
+
+
+class Report(object):
+ def __init__(self, dbase, rpath):
+ """
+
+ :type dbase: object DbManage
+ """
+ self._dbase = dbase
+ self._rpath = "."
+ if os.path.exists(rpath):
+ self._rpath = rpath
+
+ def create_pdf(self, taskid):
+ common_data = CommonData(taskid, self._dbase)
+ scenario_list = common_data.get_scenariolist()
+ history_data = HistoryData(taskid, self._dbase)
+ attach_list = []
+ for scenario in scenario_list:
+ out_file = os.path.join(self._rpath, "vstf_report_%s_%s.pdf" % (scenario, time.strftime(cst.TIME_STR)))
+ LOG.info(out_file)
+ scenario_data = ScenarioData(taskid, self._dbase, scenario)
+ pdf = PdfvSwitchCreator(out_file, common_data, scenario_data, history_data)
+ if pdf:
+ pdf.create()
+ attach_list.append(out_file)
+ if attach_list:
+ self._mail_settings.mset_attach(attach_list)
+
+ def create_html(self, taskid):
+ task_data = TaskData(taskid, self._dbase)
+
+ html_settings = HtmlSettings()
+ LOG.info(html_settings.settings)
+
+ provider = StyleProvider(html_settings.settings)
+ out_file = os.path.join(self._rpath, "mail.html")
+ LOG.info(out_file)
+
+ html = HtmlvSwitchCreator(task_data, provider, out_file)
+ content = html.create()
+
+ self._mail_settings.mset_subtype('html')
+ self._mail_settings.mset_content(content)
+
+ def report(self, taskid, mail_off):
+ self._mail_settings = MailSettings()
+ mail = SendMail(self._mail_settings.settings)
+ self.create_pdf(taskid)
+ self.create_html(taskid)
+ if not mail_off:
+ mail.send()
+
+
+def main():
+ from vstf.common.log import setup_logging
+ setup_logging(level=logging.DEBUG, log_file="/var/log/vstf/vstf-reporter.log", clevel=logging.INFO)
+
+ parser = argparse.ArgumentParser(add_help=True)
+ parser.add_argument('-rpath',
+ action='store',
+ default='./',
+ type=str,
+ help=" the path name of test results "
+ )
+ parser.add_argument('-mail_off',
+ action='store_true',
+ help="is need send mail the for the report"
+ )
+ parser.add_argument('--taskid',
+ action='store',
+ default=-1,
+ help="report depand of a history task id."
+ )
+ args = parser.parse_args()
+ dbase = DbManage()
+
+ report = Report(dbase, args.rpath)
+ if args.taskid == -1:
+ taskid = dbase.get_last_taskid()
+ else:
+ taskid = args.taskid
+ report.report(taskid, args.mail_off)
+
+
+if __name__ == '__main__':
+ main()
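A typical invocation, assuming the vstf settings files under /etc/vstf are already in place; the output path below is illustrative, without --taskid the last task id in the database is used, and -mail_off skips sending the mail:

    python vstf/vstf/controller/reporters/reporter.py -rpath /tmp/reports -mail_off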
diff --git a/vstf/vstf/controller/res/Traffic-types.gif b/vstf/vstf/controller/res/Traffic-types.gif
new file mode 100755
index 00000000..4b1fc600
--- /dev/null
+++ b/vstf/vstf/controller/res/Traffic-types.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/Traffic-types.jpg b/vstf/vstf/controller/res/Traffic-types.jpg
new file mode 100755
index 00000000..07f23300
--- /dev/null
+++ b/vstf/vstf/controller/res/Traffic-types.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/__init__.py b/vstf/vstf/controller/res/__init__.py
new file mode 100755
index 00000000..89dcd4e2
--- /dev/null
+++ b/vstf/vstf/controller/res/__init__.py
@@ -0,0 +1,14 @@
+# Copyright Huawei Technologies Co., Ltd. 1998-2015.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the License); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an AS IS BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
diff --git a/vstf/vstf/controller/res/deployment/Ti-direct.gif b/vstf/vstf/controller/res/deployment/Ti-direct.gif
new file mode 100755
index 00000000..c06a222b
--- /dev/null
+++ b/vstf/vstf/controller/res/deployment/Ti-direct.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/deployment/Ti-direct.jpg b/vstf/vstf/controller/res/deployment/Ti-direct.jpg
new file mode 100755
index 00000000..edb25cad
--- /dev/null
+++ b/vstf/vstf/controller/res/deployment/Ti-direct.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/deployment/Ti.gif b/vstf/vstf/controller/res/deployment/Ti.gif
new file mode 100755
index 00000000..56ada3f6
--- /dev/null
+++ b/vstf/vstf/controller/res/deployment/Ti.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/deployment/Ti.jpg b/vstf/vstf/controller/res/deployment/Ti.jpg
new file mode 100755
index 00000000..951b1c09
--- /dev/null
+++ b/vstf/vstf/controller/res/deployment/Ti.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/deployment/Tn.gif b/vstf/vstf/controller/res/deployment/Tn.gif
new file mode 100755
index 00000000..4367ded0
--- /dev/null
+++ b/vstf/vstf/controller/res/deployment/Tn.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/deployment/Tn.jpg b/vstf/vstf/controller/res/deployment/Tn.jpg
new file mode 100755
index 00000000..45a17048
--- /dev/null
+++ b/vstf/vstf/controller/res/deployment/Tn.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/deployment/Tnv.gif b/vstf/vstf/controller/res/deployment/Tnv.gif
new file mode 100755
index 00000000..dfc0bd58
--- /dev/null
+++ b/vstf/vstf/controller/res/deployment/Tnv.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/deployment/Tnv.jpg b/vstf/vstf/controller/res/deployment/Tnv.jpg
new file mode 100755
index 00000000..fb47d8a3
--- /dev/null
+++ b/vstf/vstf/controller/res/deployment/Tnv.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/deployment/Tu.gif b/vstf/vstf/controller/res/deployment/Tu.gif
new file mode 100755
index 00000000..426667ed
--- /dev/null
+++ b/vstf/vstf/controller/res/deployment/Tu.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/deployment/Tu.jpg b/vstf/vstf/controller/res/deployment/Tu.jpg
new file mode 100755
index 00000000..be62df75
--- /dev/null
+++ b/vstf/vstf/controller/res/deployment/Tu.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/iperf/Ti-3.gif b/vstf/vstf/controller/res/iperf/Ti-3.gif
new file mode 100755
index 00000000..e09094a2
--- /dev/null
+++ b/vstf/vstf/controller/res/iperf/Ti-3.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/iperf/Ti-3.jpg b/vstf/vstf/controller/res/iperf/Ti-3.jpg
new file mode 100755
index 00000000..cdf75271
--- /dev/null
+++ b/vstf/vstf/controller/res/iperf/Ti-3.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/logo.jpg b/vstf/vstf/controller/res/logo.jpg
new file mode 100755
index 00000000..683acfe0
--- /dev/null
+++ b/vstf/vstf/controller/res/logo.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Ti-1.gif b/vstf/vstf/controller/res/pktgen/Ti-1.gif
new file mode 100755
index 00000000..ed9e44ac
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Ti-1.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Ti-1.jpg b/vstf/vstf/controller/res/pktgen/Ti-1.jpg
new file mode 100755
index 00000000..5898769f
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Ti-1.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Ti-2.gif b/vstf/vstf/controller/res/pktgen/Ti-2.gif
new file mode 100755
index 00000000..59359b13
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Ti-2.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Ti-2.jpg b/vstf/vstf/controller/res/pktgen/Ti-2.jpg
new file mode 100755
index 00000000..ea4b2620
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Ti-2.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Ti-direct-1.gif b/vstf/vstf/controller/res/pktgen/Ti-direct-1.gif
new file mode 100755
index 00000000..57b148c6
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Ti-direct-1.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Ti-direct-1.jpg b/vstf/vstf/controller/res/pktgen/Ti-direct-1.jpg
new file mode 100755
index 00000000..1255dc8e
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Ti-direct-1.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Ti-direct-2.gif b/vstf/vstf/controller/res/pktgen/Ti-direct-2.gif
new file mode 100755
index 00000000..43e58fbc
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Ti-direct-2.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Ti-direct-2.jpg b/vstf/vstf/controller/res/pktgen/Ti-direct-2.jpg
new file mode 100755
index 00000000..898d68ee
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Ti-direct-2.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Tn-1.gif b/vstf/vstf/controller/res/pktgen/Tn-1.gif
new file mode 100755
index 00000000..74407a94
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Tn-1.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Tn-1.jpg b/vstf/vstf/controller/res/pktgen/Tn-1.jpg
new file mode 100755
index 00000000..f3ea6e53
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Tn-1.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Tn-1v.gif b/vstf/vstf/controller/res/pktgen/Tn-1v.gif
new file mode 100755
index 00000000..18681bbe
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Tn-1v.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Tn-1v.jpg b/vstf/vstf/controller/res/pktgen/Tn-1v.jpg
new file mode 100755
index 00000000..59d4ed5c
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Tn-1v.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Tn-2.gif b/vstf/vstf/controller/res/pktgen/Tn-2.gif
new file mode 100755
index 00000000..e26db799
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Tn-2.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Tn-2.jpg b/vstf/vstf/controller/res/pktgen/Tn-2.jpg
new file mode 100755
index 00000000..15ed91e3
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Tn-2.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Tn-2v.gif b/vstf/vstf/controller/res/pktgen/Tn-2v.gif
new file mode 100755
index 00000000..9ec54578
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Tn-2v.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Tn-2v.jpg b/vstf/vstf/controller/res/pktgen/Tn-2v.jpg
new file mode 100755
index 00000000..2ff06ea6
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Tn-2v.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Tu-1.gif b/vstf/vstf/controller/res/pktgen/Tu-1.gif
new file mode 100755
index 00000000..9f2357ad
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Tu-1.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Tu-1.jpg b/vstf/vstf/controller/res/pktgen/Tu-1.jpg
new file mode 100755
index 00000000..ad2724a1
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Tu-1.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Tu-2.gif b/vstf/vstf/controller/res/pktgen/Tu-2.gif
new file mode 100755
index 00000000..b0a2cede
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Tu-2.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Tu-2.jpg b/vstf/vstf/controller/res/pktgen/Tu-2.jpg
new file mode 100755
index 00000000..d47089b7
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Tu-2.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Tu-3.gif b/vstf/vstf/controller/res/pktgen/Tu-3.gif
new file mode 100755
index 00000000..e0b15211
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Tu-3.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/pktgen/Tu-3.jpg b/vstf/vstf/controller/res/pktgen/Tu-3.jpg
new file mode 100755
index 00000000..579808c4
--- /dev/null
+++ b/vstf/vstf/controller/res/pktgen/Tu-3.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/spirent/Tn-1.gif b/vstf/vstf/controller/res/spirent/Tn-1.gif
new file mode 100755
index 00000000..97510da0
--- /dev/null
+++ b/vstf/vstf/controller/res/spirent/Tn-1.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/spirent/Tn-1.jpg b/vstf/vstf/controller/res/spirent/Tn-1.jpg
new file mode 100755
index 00000000..6886d316
--- /dev/null
+++ b/vstf/vstf/controller/res/spirent/Tn-1.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/spirent/Tn-1v.gif b/vstf/vstf/controller/res/spirent/Tn-1v.gif
new file mode 100755
index 00000000..cb7b668b
--- /dev/null
+++ b/vstf/vstf/controller/res/spirent/Tn-1v.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/spirent/Tn-1v.jpg b/vstf/vstf/controller/res/spirent/Tn-1v.jpg
new file mode 100755
index 00000000..3dec4382
--- /dev/null
+++ b/vstf/vstf/controller/res/spirent/Tn-1v.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/spirent/Tn-2.gif b/vstf/vstf/controller/res/spirent/Tn-2.gif
new file mode 100755
index 00000000..4eb6780a
--- /dev/null
+++ b/vstf/vstf/controller/res/spirent/Tn-2.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/spirent/Tn-2.jpg b/vstf/vstf/controller/res/spirent/Tn-2.jpg
new file mode 100755
index 00000000..9d425af2
--- /dev/null
+++ b/vstf/vstf/controller/res/spirent/Tn-2.jpg
Binary files differ
diff --git a/vstf/vstf/controller/res/spirent/Tn-2v.gif b/vstf/vstf/controller/res/spirent/Tn-2v.gif
new file mode 100755
index 00000000..21e31470
--- /dev/null
+++ b/vstf/vstf/controller/res/spirent/Tn-2v.gif
Binary files differ
diff --git a/vstf/vstf/controller/res/spirent/Tn-2v.jpg b/vstf/vstf/controller/res/spirent/Tn-2v.jpg
new file mode 100755
index 00000000..40a38292
--- /dev/null
+++ b/vstf/vstf/controller/res/spirent/Tn-2v.jpg
Binary files differ
diff --git a/vstf/vstf/controller/settings/README b/vstf/vstf/controller/settings/README
new file mode 100755
index 00000000..febac1c9
--- /dev/null
+++ b/vstf/vstf/controller/settings/README
@@ -0,0 +1,61 @@
+This module provides a profile (settings) management solution.
+File:
+ settings.py
+Interface:
+ Settings
+
+ 1. Settings is a base class that supports two modes, "Default" and "Single".
+ In "Default" mode it loads the json settings from both 'default' and 'user',
+ merges them, and saves changes only to the 'user' json file.
+ In "Single" mode it loads and saves a single json file.
+ 2. It keeps two copies of the settings: one in memory and one in the file.
+ 3. It provides two families of functions: "set_*"/"add_*" update both memory
+ and the file, while "mset_*"/"madd_*" update only the in-memory copy. These
+ functions are registered automatically.
+
+ 4. You can override _register_func to register the functions you need, built from
+ the helpers "_setting_file", "_adding_file", "_setting_memory" and "_adding_memory".
+
+ 5. The "settings" property shows the current in-memory result.
+ 6. "reset" reloads the settings from the file.
+
+Example:
+
+ 1. create your-settings file and paste the contents
+
+ {
+ "items1": "value1",
+ "items2": "value2"
+ }
+
+
+ 2. create a your_settings.py module and paste the code below
+
+ import vstf.controller.settings.settings as sets
+ class YourSettings(sets.Settings):
+ def __init__(self, path="./", filename="your-settings", mode=sets.SETS_SINGLE):
+ super(YourSettings, self).__init__(path, filename, mode)
+
+ def unit_test():
+ setting = YourSettings()
+ print setting.settings
+ value1 = "test_set_items1"
+ setting.set_items1(value1)
+ print setting.settings
+ value2 = "test_set_items2"
+ setting.mset_items2(value2)
+ print setting.settings
+ setting.reset()
+ print setting.settings
+
+ if __name__ == '__main__':
+ unit_test()
+
+Tree:
+
+ data_settings.py
+ flows_settings.py
+ perf_settings.py
+
+ mail_settings.py
+ tool_settings.py
+ html_settings.py \ No newline at end of file
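A short sketch of the practical difference between the set_*/mset_* families described in items 2 and 3 of the README above, using the YourSettings class from the example (values are illustrative):

    setting = YourSettings()            # loads ./your-settings
    setting.set_items1("persisted")     # updates memory and writes the file
    setting.mset_items2("volatile")     # updates the in-memory copy only
    setting.reset()                     # reload from file: items1 keeps "persisted",
                                        # the mset_ change to items2 is discarded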
diff --git a/vstf/vstf/controller/settings/__init__.py b/vstf/vstf/controller/settings/__init__.py
new file mode 100755
index 00000000..89dcd4e2
--- /dev/null
+++ b/vstf/vstf/controller/settings/__init__.py
@@ -0,0 +1,14 @@
+# Copyright Huawei Technologies Co., Ltd. 1998-2015.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the License); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an AS IS BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
diff --git a/vstf/vstf/controller/settings/cpu_settings.py b/vstf/vstf/controller/settings/cpu_settings.py
new file mode 100755
index 00000000..c69742ad
--- /dev/null
+++ b/vstf/vstf/controller/settings/cpu_settings.py
@@ -0,0 +1,63 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015/11/19
+# see license for license details
+
+import logging
+import pprint
+
+import vstf.controller.settings.settings as sets
+import vstf.common.decorator as deco
+from vstf.common.input import raw_choice
+
+LOG = logging.getLogger(__name__)
+
+
+class CpuSettings(sets.Settings):
+ def __init__(self, path="/etc/vstf/perf/",
+ filename="sw_perf.cpu-settings",
+ mode=sets.SETS_SINGLE):
+ super(CpuSettings, self).__init__(path, filename, mode)
+
+ def _register_func(self):
+ super(CpuSettings, self)._register_func()
+ body = set(
+ self._fset['affctl'].keys()
+ )
+ LOG.debug(body)
+ for item in body:
+ item = item.encode()
+ func_name = "set_%s" % item
+ setattr(self, func_name, self._setting_file(func_name, self._mset['affctl'], self._fset['affctl'], item))
+ func_name = "mset_%s" % item
+ setattr(self, func_name, self._setting_memory(func_name, self._mset['affctl'], item))
+
+ LOG.debug(self.__dict__)
+
+ def sinput(self, info=None):
+ if raw_choice("if set cpu affability by affctl"):
+ affctl = self.raw_affctl(info)
+ self.set_affctl(affctl)
+
+ print "%s set finish: " % self._filename
+ print "+++++++++++++++++++++++++++++++++++++++++"
+ pprint.pprint(self.settings, indent=4)
+ print "+++++++++++++++++++++++++++++++++++++++++"
+
+ @deco.vstf_input('policy', types=int)
+ def raw_affctl(self, info):
+ print info
+ print "---------------------------------------"
+ print "Please vstf set cpu affctl params like:"
+ print " 'policy': 2,"
+ print "---------------------------------------"
+
+
+def unit_test():
+ from vstf.common.log import setup_logging
+ setup_logging(level=logging.DEBUG, log_file="/var/log/vstf/vstf-cpu-settings.log", clevel=logging.INFO)
+
+if __name__ == '__main__':
+ unit_test()
+
diff --git a/vstf/vstf/controller/settings/data_settings.py b/vstf/vstf/controller/settings/data_settings.py
new file mode 100755
index 00000000..d9878bf2
--- /dev/null
+++ b/vstf/vstf/controller/settings/data_settings.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-09-25
+# see license for license details
+
+import logging
+
+import vstf.controller.settings.settings as sets
+
+LOG = logging.getLogger(__name__)
+
+
+class DataSettings(sets.Settings):
+ def __init__(self, path="/etc/vstf/reporter/",
+ filename="reporters.html.data-settings",
+ mode=sets.SETS_SINGLE):
+ super(DataSettings, self).__init__(path, filename, mode)
+
+ def _register_func(self):
+ super(DataSettings, self)._register_func()
+ items = {"ovs", "result"}
+ fkeys = {"title", "content"}
+ for item in items:
+ item = item.encode()
+ for key in fkeys:
+ key = key.encode()
+ func_name = "set_%s_%s" % (item, key)
+ setattr(self, func_name, self._setting_file(func_name, self._mset[item], self._fset[item], key))
+ func_name = "mset_%s_%s" % (item, key)
+ setattr(self, func_name, self._setting_memory(func_name, self._mset[item], key)) \ No newline at end of file
diff --git a/vstf/vstf/controller/settings/device_settings.py b/vstf/vstf/controller/settings/device_settings.py
new file mode 100755
index 00000000..45bc9eb1
--- /dev/null
+++ b/vstf/vstf/controller/settings/device_settings.py
@@ -0,0 +1,18 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015/11/19
+# see license for license details
+
+import logging
+
+import vstf.controller.settings.settings as sets
+
+LOG = logging.getLogger(__name__)
+
+
+class DeviceSettings(sets.Settings):
+ def __init__(self, path="/etc/vstf/perf/",
+ filename="sw_perf.device-settings",
+ mode=sets.SETS_SINGLE):
+ super(DeviceSettings, self).__init__(path, filename, mode)
diff --git a/vstf/vstf/controller/settings/flows_settings.py b/vstf/vstf/controller/settings/flows_settings.py
new file mode 100755
index 00000000..b2bec625
--- /dev/null
+++ b/vstf/vstf/controller/settings/flows_settings.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-09-18
+# see license for license details
+
+import logging
+
+import vstf.controller.settings.settings as sets
+
+LOG = logging.getLogger(__name__)
+
+
+class FlowsSettings(sets.Settings):
+ def __init__(self, path="/etc/vstf/perf/",
+ filename="sw_perf.flownodes-settings",
+ mode=sets.SETS_SINGLE):
+ self._check_actors = {'namespaces', 'senders', 'receivers', 'watchers'}
+ self._nocheck_actors = {"cpu_listens"}
+ super(FlowsSettings, self).__init__(path, filename, mode)
+
+ def _register_func(self):
+ super(FlowsSettings, self)._register_func()
+ for actor in self._check_actors:
+ actor = actor.encode()
+ func_name = "add_%s" % actor
+ setattr(self, func_name, self._adding_file(func_name, self._mset, self._fset, actor, self._check_add))
+ func_name = "madd_%s" % actor
+ setattr(self, func_name, self._adding_memory(func_name, self._mset, actor, self._check_add))
+
+ for actor in self._nocheck_actors:
+ actor = actor.encode()
+ func_name = "add_%s" % actor
+ setattr(self, func_name, self._adding_file(func_name, self._mset, self._fset, actor))
+ func_name = "madd_%s" % actor
+ setattr(self, func_name, self._adding_memory(func_name, self._mset, actor))
+
+ LOG.debug(self.__dict__.keys())
+
+ def clear_all(self):
+ actors = self._check_actors | self._nocheck_actors
+ for actor in actors:
+ func_name = "set_%s" % actor
+ func = getattr(self, func_name)
+ func([])
+
+ def mclear_all(self):
+ actors = self._check_actors | self._nocheck_actors
+ for actor in actors:
+ func_name = "mset_%s" % actor
+ func = getattr(self, func_name)
+ func([])
+
+ def _check_add(self, value):
+ flows = ['agent', 'dev']
+ if not isinstance(value, dict):
+ raise Exception("value must be a dict: %s" % (str(value)))
+ for flow in flows:
+ if flow not in value.keys():
+ raise Exception("keys[%s] is missing: %s" % (flow, str(value)))
+
+ items = ["ip", "namespace", "mac", "iface", "bdf"]
+ for item in items:
+ if item not in value['dev'].keys():
+ raise Exception("key [%s] is missing in 'dev': %s" % (item, str(value)))
+
+
+def unit_test():
+ from vstf.common.log import setup_logging
+ setup_logging(level=logging.DEBUG, log_file="/var/log/vstf/vstf-flows-settings.log", clevel=logging.INFO)
+
+ flows_settings = FlowsSettings()
+ LOG.info(flows_settings.settings)
+
+ flows_settings.clear_all()
+ flows_settings.set_flows(2)
+ LOG.info(flows_settings.settings)
+
+ flow_1 = {
+ "agent": "192.168.188.14",
+ "dev": {
+ "ip": "192.168.1.100",
+ "namespace": "vstf-space-1",
+ "mac": "90:e2:ba:20:1f:d8",
+ "iface": "eth4",
+ "bdf": "04:00.0"
+ }
+ }
+ flow_2 = {
+ "agent": "192.168.188.14",
+ "dev": {
+ "ip": "192.168.1.101",
+ "namespace": "vstf-space-2",
+ "mac": "90:e2:ba:20:1f:d9",
+ "iface": "p57p2",
+ "bdf": "04:00.1"
+ }
+ }
+
+ flows_settings.add_senders(flow_1)
+ flows_settings.add_senders(flow_2)
+ flows_settings.add_receivers(flow_2)
+ flows_settings.add_receivers(flow_1)
+
+ flows_settings.add_watchers(flow_1)
+ flows_settings.add_watchers(flow_2)
+
+ flows_settings.add_namespaces(flow_1)
+ flows_settings.add_namespaces(flow_2)
+
+ cpu = {
+ "agent": "192.168.188.16",
+ "affctl":{
+ "policy": 2
+ }
+ }
+ flows_settings.add_cpu_listens(cpu)
+ LOG.info(flows_settings.settings)
+
+
+if __name__ == '__main__':
+ unit_test()
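
For reference, after the unit test above runs, the flownodes settings resolve to a dict of the following shape (a sketch only; flow_1, flow_2 and cpu refer to the sample dicts built in unit_test()):

    {
        "flows": 2,
        "namespaces": [flow_1, flow_2],
        "senders": [flow_1, flow_2],
        "receivers": [flow_2, flow_1],
        "watchers": [flow_1, flow_2],
        "cpu_listens": [cpu]
    }
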
diff --git a/vstf/vstf/controller/settings/forwarding_settings.py b/vstf/vstf/controller/settings/forwarding_settings.py
new file mode 100755
index 00000000..67ec3f85
--- /dev/null
+++ b/vstf/vstf/controller/settings/forwarding_settings.py
@@ -0,0 +1,18 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015/11/19
+# see license for license details
+
+import logging
+
+import vstf.controller.settings.settings as sets
+
+LOG = logging.getLogger(__name__)
+
+
+class ForwardingSettings(sets.Settings):
+ def __init__(self, path="/etc/vstf/perf/",
+ filename="sw_perf.forwarding-settings",
+ mode=sets.SETS_SINGLE):
+ super(ForwardingSettings, self).__init__(path, filename, mode)
diff --git a/vstf/vstf/controller/settings/html_settings.py b/vstf/vstf/controller/settings/html_settings.py
new file mode 100755
index 00000000..7e715100
--- /dev/null
+++ b/vstf/vstf/controller/settings/html_settings.py
@@ -0,0 +1,51 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-09-25
+# see license for license details
+__version__ = ''' '''
+
+import logging
+
+import vstf.controller.settings.settings as sets
+
+LOG = logging.getLogger(__name__)
+
+
+class HtmlSettings(sets.Settings):
+ def __init__(self, path="/etc/vstf/", filename="reporters.html-settings", mode=sets.SETS_DEFAULT):
+ super(HtmlSettings, self).__init__(path, filename, mode)
+
+
+def unit_test():
+ from vstf.common.log import setup_logging
+ setup_logging(level=logging.DEBUG, log_file="/var/log/html-settings.log", clevel=logging.DEBUG)
+ html_settings = HtmlSettings()
+ style = {
+ 'table': {
+ 'font-family': '"Trebuchet MS", Arial, Helvetica, sans-serif',
+ 'border-collapse': 'collapse',
+ 'border': '1px solid green',
+ 'padding': '8px',
+ 'text-align': 'center'
+ },
+ 'td':
+ {
+ 'border': '1px solid green',
+ 'padding': '8px',
+ 'word-wrap': 'break-all'
+ },
+ 'th':
+ {
+ 'background-color': '#EAF2D3',
+ 'border': '1px solid green',
+ 'padding': '8px'
+ }
+ }
+
+ html_settings.set_style(style)
+ LOG.info(html_settings.settings)
+
+
+if __name__ == '__main__':
+ unit_test()
diff --git a/vstf/vstf/controller/settings/mail_settings.py b/vstf/vstf/controller/settings/mail_settings.py
new file mode 100755
index 00000000..fd66b5c2
--- /dev/null
+++ b/vstf/vstf/controller/settings/mail_settings.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-09-06
+# see license for license details
+
+import logging
+import pprint
+
+import vstf.controller.settings.settings as sets
+import vstf.common.decorator as deco
+from vstf.common.input import raw_choice
+
+LOG = logging.getLogger(__name__)
+
+
+class MailSettings(sets.Settings):
+ def __init__(self, path="/etc/vstf", filename="reporters.mail.mail-settings", mode=sets.SETS_DEFAULT):
+ super(MailSettings, self).__init__(path, filename, mode)
+
+ def _register_func(self):
+ super(MailSettings, self)._register_func()
+ body = set(
+ self._fset['body'].keys()
+ )
+ LOG.debug(body)
+ for item in body:
+ item = item.encode()
+ func_name = "set_%s" % item
+ setattr(self, func_name, self._setting_file(func_name, self._mset['body'], self._fset['body'], item))
+ other = {"attach", "content", "subtype"}
+ for item in other:
+ func_name = "mset_%s" % item
+ setattr(self, func_name, self._setting_memory(func_name, self._mset['body'], item))
+
+ LOG.debug(self.__dict__)
+
+ def sinput(self):
+ if raw_choice("if set mail server"):
+ server = self.raw_server()
+ self.set_server(server)
+
+ if raw_choice("if set mail body"):
+ body = self.raw_body()
+ self.set_body(body)
+ print "%s setup finished:" % (self._filename)
+ print "+++++++++++++++++++++++++++++++++++++++++"
+ pprint.pprint(self.settings, indent=4)
+ print "+++++++++++++++++++++++++++++++++++++++++"
+
+ @deco.vstf_input("password", types=str)
+ @deco.vstf_input("username", types=str)
+ @deco.vstf_input('host', types=str)
+ def raw_server(self):
+ print "---------------------------------------"
+ print "Please set the vstf mail server info like:"
+ print " 'host': 'localhost',"
+ print " 'username': 'user',['\\n' = None]"
+ print " 'password': '******',['\\n' = None]"
+ print "---------------------------------------"
+
+ @deco.vstf_input("subject", types=str, default='vstf mail')
+ @deco.vstf_input("bcc", types=list, default=[])
+ @deco.vstf_input("cc", types=list, default=[])
+ @deco.vstf_input("to", types=list, default=[])
+ @deco.vstf_input('from', types=list, default=['vstf_from@vstf.com'])
+ def raw_body(self):
+ print "----------------------------------------------------"
+ print "Please set the vstf mail body info like:"
+ print " 'from': ['vstf_from@vstf.com'],"
+ print " 'to': ['vstf_to@vstf.com'],"
+ print " 'cc': ['vstf_cc@vstf.com']"
+ print " 'bcc': ['vstf_bcc@vstf.com']"
+ print " 'subject': Vstf Performance Test Report"
+ print "----------------------------------------------------"
+
+
+def unit_test():
+ from vstf.common.log import setup_logging
+ setup_logging(level=logging.DEBUG, log_file="/var/log/vstf/vstf-mail-settings.log", clevel=logging.INFO)
+
+ mail_settings = MailSettings()
+ mail_settings.sinput()
+
+ # NOTE: the manual example below is disabled by default; drop this return to run it
+ return
+
+ mail_server = {
+ "host": "localhost",
+ "username": None,
+ "password": None
+ }
+ mail_settings.set_server(mail_server)
+
+ from_list = ['vstf_from@vstf.com']
+ mail_settings.set_from(from_list)
+ to_list = ['wangli11@huawei.com']
+ mail_settings.set_to(to_list)
+ cc_list = ['wangli11@huawei.com']
+ mail_settings.set_cc(cc_list)
+ bcc_list = ['wangli11@huawei.com']
+ mail_settings.set_bcc(bcc_list)
+
+ subject = "Virtual Switching Performance Test Report"
+ mail_settings.set_subject(subject)
+
+ subtype = "plain"
+ mail_settings.mset_subtype(subtype)
+
+ attach_list = []
+ mail_settings.mset_attach(attach_list)
+
+ content = "this is a test"
+ mail_settings.mset_content(content)
+
+ LOG.info(mail_settings.settings)
+
+
+if __name__ == '__main__':
+ unit_test()
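
As a sketch, the setters exercised above imply a mail-settings layout along these lines; 'attach', 'content' and 'subtype' are memory-only mset_* fields and are not persisted, and the values shown are illustrative:

    {
        "server": {"host": "localhost", "username": None, "password": None},
        "body": {
            "from": ["vstf_from@vstf.com"],
            "to": ["vstf_to@vstf.com"],
            "cc": [],
            "bcc": [],
            "subject": "Virtual Switching Performance Test Report"
        }
    }
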
diff --git a/vstf/vstf/controller/settings/perf_settings.py b/vstf/vstf/controller/settings/perf_settings.py
new file mode 100755
index 00000000..c0c8123b
--- /dev/null
+++ b/vstf/vstf/controller/settings/perf_settings.py
@@ -0,0 +1,102 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-09-28
+# see license for license details
+
+import pprint
+import logging
+
+import vstf.common.decorator as deco
+import vstf.common.constants as cst
+import vstf.controller.settings.settings as sets
+from vstf.common.input import raw_choice
+from vstf.controller.database.dbinterface import DbManage
+
+LOG = logging.getLogger(__name__)
+
+
+class PerfSettings(sets.Settings):
+ def __init__(self, path="/etc/vstf/perf/",
+ filename="sw_perf.batch-settings",
+ mode=sets.SETS_SINGLE):
+ self.dbconn = DbManage()
+ super(PerfSettings, self).__init__(path, filename, mode)
+
+ def clear(self):
+ for item in cst.SCENARIOS:
+ func = getattr(self, "set_" + item)
+ func([])
+
+ def mclear(self):
+ for item in cst.SCENARIOS:
+ func = getattr(self, "mset_" + item)
+ func([])
+
+ def add_case(self, value):
+ scenario = self.dbconn.query_scenario(value["case"])
+ LOG.info(scenario)
+ if not scenario:
+ LOG.warn("unsupported case: %s", value["case"])
+ return
+ self._adding_file("add", self._mset, self._fset, scenario, check=self._check_add)(value)
+
+ def madd_case(self, case):
+ scenario = self.dbconn.query_scenario(case)
+ if not scenario:
+ LOG.warn("unsupported case: %s", case)
+ return
+ self._adding_memory("madd", self._mset, scenario, check=self._check_add)(case)
+
+ @deco.dcheck('sizes')
+ @deco.dcheck("type", choices=cst.TTYPES)
+ @deco.dcheck("profile", choices=cst.PROFILES)
+ @deco.dcheck("protocol", choices=cst.TPROTOCOLS)
+ @deco.dcheck("tool", choices=cst.TOOLS)
+ @deco.dcheck('case')
+ def _check_add(self, value):
+ LOG.info("check successfully")
+
+ def sinput(self):
+ if raw_choice("if clean all Test case"):
+ self.clear()
+ while True:
+ if raw_choice("if add a new Test case"):
+ case = self.raw_addcase()
+ self.add_case(case)
+ else:
+ break
+ print "%s setup finished:" % (self._filename)
+ print "+++++++++++++++++++++++++++++++++++"
+ pprint.pprint(self.settings)
+ print "+++++++++++++++++++++++++++++++++++"
+ return True
+
+ @deco.vstf_input('sizes', types=list)
+ @deco.vstf_input("type", types=str, choices=cst.TTYPES)
+ @deco.vstf_input("profile", types=str, choices=cst.PROFILES)
+ @deco.vstf_input("protocol", types=str, choices=cst.TPROTOCOLS)
+ @deco.vstf_input("tool", types=str, choices=cst.TOOLS)
+ @deco.vstf_input('case')
+ def raw_addcase(self):
+ print "---------------------------------------"
+ print "Please add vstf case info like:"
+ print " 'case': 'Ti-1',"
+ print " 'tool': 'netperf',"
+ print " 'protocol': 'udp',"
+ print " 'profile': 'rdp',"
+ print " 'type': 'latency',"
+ print " 'sizes': [64, 128, 512, 1024]"
+ print "---------------------------------------"
+
+
+def unit_test():
+ perf_settings = PerfSettings()
+ perf_settings.sinput()
+
+ from vstf.common.log import setup_logging
+ setup_logging(level=logging.DEBUG, log_file="/var/log/vstf/vstf-perf-settings.log", clevel=logging.DEBUG)
+
+
+if __name__ == '__main__':
+ unit_test()
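
Putting the prompts above together, a single batch entry accepted by add_case() looks like the sketch below; the values mirror the example printed by raw_addcase and are validated by _check_add before the case is stored under the scenario returned by the database:

    case = {
        "case": "Tn-1",
        "tool": "netperf",
        "protocol": "udp",
        "profile": "rdp",
        "type": "latency",
        "sizes": [64, 128, 512, 1024]
    }
    PerfSettings().add_case(case)
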
diff --git a/vstf/vstf/controller/settings/settings.py b/vstf/vstf/controller/settings/settings.py
new file mode 100755
index 00000000..4730c8db
--- /dev/null
+++ b/vstf/vstf/controller/settings/settings.py
@@ -0,0 +1,286 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-09-06
+# see license for license details
+
+import json
+import re
+import os
+import copy
+import logging
+import sys
+
+LOG = logging.getLogger(__name__)
+
+
+def object2dict(obj):
+ # convert object to a dict
+ dic = {'__class__': obj.__class__.__name__, '__module__': obj.__module__}
+ dic.update(obj.__dict__)
+ return dic
+
+
+def dict2object(dic):
+ # convert dict to object
+ if '__class__' in dic:
+ class_name = dic.pop('__class__')
+ module_name = dic.pop('__module__')
+ module = __import__(module_name)
+ class_ = getattr(module, class_name)
+ args = dict((key.encode('ascii'), value) for key, value in dic.items()) # get args
+ inst = class_(**args) # create new instance
+ else:
+ inst = dic
+ return inst
+
+
+def filter_comments(filename, flags="//"):
+ result = []
+ with open(filename, "r") as ifile:
+ lines = ifile.readlines()
+ for data in lines:
+ data = re.sub("%s.*$" % (flags), '', data)
+ data = re.sub("^\s*$", '', data)
+ if data:
+ result.append(data)
+ LOG.debug(result)
+ return ''.join(result)
+
+
+class BaseSettings(object):
+ def _load(self, fullname):
+ data = filter_comments(fullname)
+ LOG.debug(fullname)
+ LOG.debug(data)
+ jparams = None
+ if data:
+ jparams = json.loads(data)
+ return jparams
+
+ def _sub(self, ldata, rdata):
+ if isinstance(ldata, list) and isinstance(rdata, list):
+ data = []
+ if ldata:
+ for litem in ldata:
+ if rdata:
+ for ritem in rdata:
+ if isinstance(litem, dict) or isinstance(litem, list):
+ tmp = self._sub(litem, ritem)
+ else:
+ tmp = ritem
+ if tmp and tmp not in data:
+ data.append(tmp)
+ else:
+ data.append(litem)
+
+ else:
+ data = rdata
+
+ elif isinstance(ldata, dict) and isinstance(rdata, dict):
+ data = {}
+ rdata_bak = copy.deepcopy(rdata)
+ for rkey, rvalue in rdata_bak.items():
+ if rkey not in ldata:
+ rdata_bak.pop(rkey)
+ for lkey, lvalue in ldata.items():
+ if lkey in rdata:
+ if isinstance(lvalue, dict) or isinstance(lvalue, list):
+ data[lkey] = self._sub(lvalue, rdata[lkey])
+ else:
+ data[lkey] = rdata[lkey]
+ else:
+ if rdata_bak:
+ data[lkey] = lvalue
+ else:
+ data = rdata
+
+ return data
+
+ def _save(self, data, filename):
+ if os.path.exists(filename):
+ os.remove(filename)
+ with open(filename, 'w') as ofile:
+ content = json.dumps(data, sort_keys=True, indent=4, separators=(',', ':'))
+ ofile.write(content)
+
+
+class DefaultSettings(BaseSettings):
+ def __init__(self, path):
+ self._default = os.path.join(path, 'default')
+ self._user = os.path.join(path, 'user')
+
+ def load(self, filename):
+ dfile = os.path.join(self._default, filename)
+ if os.path.exists(dfile):
+ ddata = self._load(dfile)
+ data = ddata
+ else:
+ err = "default file is missing : %s" % (dfile)
+ LOG.error(err)
+ raise Exception(err)
+ ufile = os.path.join(self._user, filename)
+ if os.path.exists(ufile):
+ udata = self._load(ufile)
+ if udata:
+ data = self._sub(ddata, udata)
+ else:
+ LOG.info("no user file :%s" % (ufile))
+ return data
+
+ def save(self, data, filename):
+ ufile = os.path.join(self._user, filename)
+ self._save(data, ufile)
+
+
+class SingleSettings(BaseSettings):
+ def __init__(self, path):
+ self._path = path
+
+ def load(self, filename):
+ pfile = os.path.join(self._path, filename)
+ if os.path.exists(pfile):
+ ddata = self._load(pfile)
+ data = ddata
+ else:
+ err = "settings file is missing : %s" % (pfile)
+ LOG.error(err)
+ raise Exception(err)
+ return data
+
+ def save(self, data, filename):
+ pfile = os.path.join(self._path, filename)
+ self._save(data, pfile)
+
+SETS_DEFAULT = "Default"
+SETS_SINGLE = "Single"
+SETTINGS = [SETS_SINGLE, SETS_DEFAULT]
+
+
+class Settings(object):
+ def __init__(self, path, filename, mode=SETS_SINGLE):
+ if mode not in SETTINGS:
+ raise Exception("error Settings mode : %s" % (mode))
+ cls_name = mode + "Settings"
+ thismodule = sys.modules[__name__]
+ cls = getattr(thismodule, cls_name)
+ self._settings = cls(path)
+ self._filename = filename
+ self._fset = self._settings.load(filename)
+ self._mset = copy.deepcopy(self._fset)
+ self._register_func()
+
+ def reset(self):
+ self._fset = self._settings.load(self._filename)
+ self._mset = copy.deepcopy(self._fset)
+
+ @property
+ def settings(self):
+ return self._mset
+
+ def _setting_file(self, func_name, mset, fset, key, check=None):
+ def infunc(value):
+ if hasattr(check, '__call__'):
+ check(value)
+ if isinstance(fset, dict):
+ mset[key] = copy.deepcopy(value)
+ fset[key] = copy.deepcopy(value)
+ elif isinstance(fset, list):
+ del (mset[:])
+ del (fset[:])
+ mset.extend(copy.deepcopy(value))
+ fset.extend(copy.deepcopy(value))
+ self._settings.save(self._fset, self._filename)
+ infunc.__name__ = func_name
+ LOG.debug(self._mset)
+ LOG.debug(self._fset)
+
+ return infunc
+
+ def _setting_memory(self, func_name, mset, key, check=None):
+ def infunc(value):
+ if hasattr(check, '__call__'):
+ check(value)
+ if isinstance(mset, dict):
+ mset[key] = copy.deepcopy(value)
+ elif isinstance(mset, list):
+ for i in range(len(mset)):
+ mset.pop()
+ mset.extend(copy.deepcopy(value))
+
+ infunc.__name__ = func_name
+ LOG.debug(self._mset)
+ LOG.debug(self._fset)
+
+ return infunc
+
+ def _adding_file(self, func_name, mset, fset, key, check=None):
+ def infunc(value):
+ if hasattr(check, '__call__'):
+ check(value)
+ if key:
+ mset[key].append(copy.deepcopy(value))
+ fset[key].append(copy.deepcopy(value))
+ else:
+ mset.append(copy.deepcopy(value))
+ fset.append(copy.deepcopy(value))
+
+ self._settings.save(self._fset, self._filename)
+ infunc.__name__ = func_name
+ LOG.debug(self._mset)
+ LOG.debug(self._fset)
+
+ return infunc
+
+ def _adding_memory(self, func_name, mset, key, check=None):
+ def infunc(value):
+ if hasattr(check, '__call__'):
+ check(value)
+ if key:
+ mset[key].append(copy.deepcopy(value))
+ else:
+ mset.append(copy.deepcopy(value))
+ infunc.__name__ = func_name
+ LOG.debug(self._mset)
+ LOG.debug(self._fset)
+
+ return infunc
+
+ def _register_func(self):
+ if isinstance(self._fset, dict):
+ items = set(
+ self._fset.keys()
+ )
+ for item in items:
+ item = item.encode()
+ func_name = "set_%s" % item
+ setattr(self, func_name, self._setting_file(func_name, self._mset, self._fset, item))
+ func_name = "mset_%s" % item
+ setattr(self, func_name, self._setting_memory(func_name, self._mset, item))
+ elif isinstance(self._fset, list):
+ func_name = "set"
+ setattr(self, func_name, self._setting_file(func_name, self._mset, self._fset, None))
+ func_name = "mset"
+ setattr(self, func_name, self._setting_memory(func_name, self._mset, None))
+ func_name = "add"
+ setattr(self, func_name, self._adding_file(func_name, self._mset, self._fset, None))
+ func_name = "madd"
+ setattr(self, func_name, self._adding_memory(func_name, self._mset, None))
+
+
+def unit_test():
+ from vstf.common.log import setup_logging
+ setup_logging(level=logging.DEBUG, log_file="/var/log/vstf-settings.log", clevel=logging.INFO)
+
+ path = '/etc/vstf'
+ setting = DefaultSettings(path)
+ filename = 'reporters.mail.mail-settings'
+ data = setting.load(filename)
+
+ setting.save(data, filename)
+ LOG.info(type(data))
+ LOG.info(data)
+
+
+if __name__ == '__main__':
+ unit_test()
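
A worked sketch of the Default mode: filter_comments() strips //-style comments before json.loads(), and _sub() overlays the user file on the default file key by key, so a user value wins wherever both files define the same key. The file contents below are illustrative:

    # /etc/vstf/default/reporters.html-settings
    #   { "style": { "table": { "border": "1px solid green" } } }   // shipped default
    # /etc/vstf/user/reporters.html-settings
    #   { "style": { "table": { "border": "1px solid black" } } }   // local override
    from vstf.controller.settings.settings import DefaultSettings
    sets = DefaultSettings("/etc/vstf")
    data = sets.load("reporters.html-settings")
    # data["style"]["table"]["border"] == "1px solid black"
    sets.save(data, "reporters.html-settings")   # always written back to the user/ directory
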
diff --git a/vstf/vstf/controller/settings/settings_input.py b/vstf/vstf/controller/settings/settings_input.py
new file mode 100755
index 00000000..2c262842
--- /dev/null
+++ b/vstf/vstf/controller/settings/settings_input.py
@@ -0,0 +1,44 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-10-16
+# see license for license details
+
+
+import clize
+from sigtools.modifiers import autokwoargs
+from vstf.controller.settings.mail_settings import MailSettings
+from vstf.controller.settings.perf_settings import PerfSettings
+from vstf.controller.settings.cpu_settings import CpuSettings
+from vstf.controller.settings.tool_settings import ToolSettings
+
+
+@autokwoargs
+def sinput(mail=False, perf=False, affctl=False, tool=False):
+ """Settings command line input
+
+ mail: whether to run the interactive mail settings
+
+ perf: whether to run the interactive perf (test case) settings
+
+ affctl: whether to run the cpu affinity settings
+
+ tool: whether to run the tool property settings
+
+ """
+
+ if mail:
+ MailSettings().sinput()
+ if perf:
+ PerfSettings().sinput()
+ if affctl:
+ CpuSettings().sinput()
+ if tool:
+ ToolSettings().sinput()
+
+
+def main():
+ clize.run(sinput)
+
+if __name__ == '__main__':
+ main()
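
Because sinput() is wrapped with @autokwoargs and run through clize, each boolean argument becomes a command-line flag; the lines below are a sketch of both entry points, with the flag spelling assumed from clize's defaults:

    # shell: python settings_input.py --mail --perf
    # or drive it directly from python:
    from vstf.controller.settings.settings_input import sinput
    sinput(mail=True, affctl=True)   # walks the interactive mail and cpu-affinity prompts
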
diff --git a/vstf/vstf/controller/settings/tester_settings.py b/vstf/vstf/controller/settings/tester_settings.py
new file mode 100755
index 00000000..fb116a8d
--- /dev/null
+++ b/vstf/vstf/controller/settings/tester_settings.py
@@ -0,0 +1,18 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015/11/17
+# see license for license details
+
+import logging
+
+import vstf.controller.settings.settings as sets
+
+LOG = logging.getLogger(__name__)
+
+
+class TesterSettings(sets.Settings):
+ def __init__(self, path="/etc/vstf/env/",
+ filename="tester.json",
+ mode=sets.SETS_SINGLE):
+ super(TesterSettings, self).__init__(path, filename, mode)
diff --git a/vstf/vstf/controller/settings/tool_settings.py b/vstf/vstf/controller/settings/tool_settings.py
new file mode 100755
index 00000000..1d543e6a
--- /dev/null
+++ b/vstf/vstf/controller/settings/tool_settings.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-09-18
+# see license for license details
+
+import logging
+import pprint
+import vstf.controller.settings.settings as sets
+import vstf.common.decorator as deco
+from vstf.common.input import raw_choice
+
+LOG = logging.getLogger(__name__)
+
+
+class ToolSettings(sets.Settings):
+ def __init__(self, path="/etc/vstf", filename="sw_perf.tool-settings", mode=sets.SETS_DEFAULT):
+ super(ToolSettings, self).__init__(path, filename, mode)
+
+ def _register_func(self):
+ body = set(
+ self._fset.keys()
+ )
+ LOG.debug(body)
+ for item in body:
+ item = item.encode()
+ func_name = "set_%s" % (item)
+ setattr(self, func_name,
+ self._setting_file(func_name, self._mset, self._fset, item, check=self._check_keys))
+
+ def _check_keys(self, value):
+ keys = ['threads', 'wait', 'time']
+ if not isinstance(value, dict):
+ raise Exception("value must be a dict: %s" % (str(value)))
+ for key in keys:
+ if key not in value.keys():
+ raise Exception("keys[%s] is missing: %s" % (key, str(value)))
+
+ def sinput(self):
+ body = set(
+ self._fset.keys()
+ )
+ for tool in body:
+ info = "if set %s properties" % tool
+ if raw_choice(info):
+ properties = self.raw_properties()
+ func = getattr(self, "set_%s" % tool)
+ func(properties)
+
+ print "%s setup finished:" % self._filename
+ print "+++++++++++++++++++++++++++++++++++++++++"
+ pprint.pprint(self.settings, indent=4)
+ print "+++++++++++++++++++++++++++++++++++++++++"
+
+ @deco.vstf_input("time", types=int)
+ @deco.vstf_input("wait", types=int)
+ @deco.vstf_input("threads", types=int)
+ def raw_properties(self):
+ print "---------------------------------------"
+ print "Please set the vstf tool properties like:"
+ print " 'threads': 2,"
+ print " 'wait': 2,"
+ print " 'time': 10,"
+ print "---------------------------------------"
+
+
+def unit_test():
+ from vstf.common.log import setup_logging
+ setup_logging(level=logging.DEBUG, log_file="/var/log/vstf/tool-settings.log", clevel=logging.INFO)
+ tool_settings = ToolSettings()
+ value = {
+ "time": 10,
+ "wait": 4,
+ "threads": 1
+ }
+ tool_settings.set_pktgen(value)
+ tool_settings.set_netperf(value)
+ tool_settings.set_iperf(value)
+ tool_settings.set_qperf(value)
+ LOG.info(tool_settings.settings)
+
+
+if __name__ == '__main__':
+ unit_test()
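
For reference, the tool-settings data validated by _check_keys() is a per-tool map of threads/wait/time; after the unit test above it looks like this sketch:

    {
        "pktgen":  {"threads": 1, "wait": 4, "time": 10},
        "netperf": {"threads": 1, "wait": 4, "time": 10},
        "iperf":   {"threads": 1, "wait": 4, "time": 10},
        "qperf":   {"threads": 1, "wait": 4, "time": 10}
    }
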
diff --git a/vstf/vstf/controller/spirent/__init__.py b/vstf/vstf/controller/spirent/__init__.py
new file mode 100755
index 00000000..89dcd4e2
--- /dev/null
+++ b/vstf/vstf/controller/spirent/__init__.py
@@ -0,0 +1,14 @@
+# Copyright Huawei Technologies Co., Ltd. 1998-2015.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the License); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an AS IS BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
diff --git a/vstf/vstf/controller/spirent/appliance.py b/vstf/vstf/controller/spirent/appliance.py
new file mode 100755
index 00000000..a06bf452
--- /dev/null
+++ b/vstf/vstf/controller/spirent/appliance.py
@@ -0,0 +1,92 @@
+#!/usr/bin/python
+"""
+ @author: l00190809
+ @group: Huawei Ltd
+"""
+import os
+import logging
+
+import stevedore
+from vstf.controller.spirent.common.result_analysis import analysis_instance, restrucData, getResult, getResultColumn
+LOG = logging.getLogger(__name__)
+
+
+class spirentSTC(object):
+ def __init__(self):
+ super(spirentSTC, self).__init__()
+ self.runmodel = None
+
+ def init(self, conner="", measurand="", model="", **kwargs):
+ """
+ :param str conner: the spirent tester, the agent id of spirent vm
+ :param list measurand: the tested host's agent id
+ :param str model: the model used of the tested host
+
+ """
+ mgr = stevedore.driver.DriverManager(namespace="spirent.model.plugins",
+ name=model,
+ invoke_on_load=False)
+ self.TempMod = mgr.driver(kwargs)
+ self.conner = conner
+ self.measurand = measurand
+
+ @property
+ def run(self):
+ LOG.info(vars(self.runmodel))
+ return True
+
+
+def run(config):
+ # test option parser
+ if not os.path.exists(config['configfile']):
+ LOG.error('The config file %s does not exist.', config.get("configfile"))
+ return False
+
+ runmodel = None # placeholder: a model instance, e.g. TnV(config=config), must be created here
+
+ # check parameter valid
+ flag = runmodel.check_parameter_invalid()
+ if not flag:
+ LOG.error("[ERROR]Check parameter invalid.")
+ return False
+
+ # check logical parameters against the tested host environment
+ flag = runmodel.check_logic_invalid
+ if not flag:
+ LOG.error("[ERROR]Check logic parameter with host invalid.")
+ return False
+
+ init_flows_tables = runmodel.read_flow_init
+ LOG.info(init_flows_tables)
+
+ # print init_flows_tables
+ update_flows = runmodel.flow_build
+ # print update_flows
+ LOG.info(update_flows)
+
+ flag = runmodel.affinity_bind(aff_strategy=1)
+ if not flag:
+ LOG.error("runmodel affinity bind failed.")
+ return False
+
+ # Get the result
+ result = {}
+ for suite in ["frameloss", "throughput"]:
+ ret, test_result = runmodel.Test_Run(suite)
+ if not ret:
+ LOG.error("[ERROR]Run rfc2544 %s test failed.", suite)
+ return False
+ try:
+ ret, result_dict = restrucData(test_result)
+ except:
+ LOG.error("[ERROR]Restructure the test data failed.")
+ return False
+ perfdata = getResult(result_dict)
+ columndata = getResultColumn(result_dict)
+ column_array, data_array = analysis_instance.analyseResult(suite, columndata, perfdata)
+ temp = {'columns': column_array, 'data': data_array}
+ result[suite] = temp
+ return result
+
+
+if __name__ == "__main__":
+ run(None)
diff --git a/vstf/vstf/controller/spirent/common/__init__.py b/vstf/vstf/controller/spirent/common/__init__.py
new file mode 100755
index 00000000..0e98d82e
--- /dev/null
+++ b/vstf/vstf/controller/spirent/common/__init__.py
@@ -0,0 +1,14 @@
+# Copyright Huawei Technologies Co., Ltd. 1998-2015.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the License); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an AS IS BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
\ No newline at end of file
diff --git a/vstf/vstf/controller/spirent/common/model.py b/vstf/vstf/controller/spirent/common/model.py
new file mode 100755
index 00000000..511eab40
--- /dev/null
+++ b/vstf/vstf/controller/spirent/common/model.py
@@ -0,0 +1,462 @@
+#!/usr/bin/python
+"""
+ @author: l00190809
+ @group: Huawei Ltd
+"""
+import re
+import copy
+import time
+import ConfigParser
+
+fwd = {'single': ['forward'],
+ 'double': ['forward', 'reverse']
+ }
+models = ['Tnv']
+direction = ['single', 'double']
+reverse_dict = {
+ 'forward': 'reverse',
+ 'reverse': 'forward'
+}
+
+
+class BaseModel(object):
+ def __init__(self, config):
+ self.config = config
+
+ def _check_model(self):
+ return self.config['model'] in models
+
+ def _check_virtenv(self):
+ try:
+ num = int(self.config['virtenv'])
+ return num in range(1, 9)
+ except:
+ print("[ERROR]The virtenv is not an integer.")
+
+ def _check_queues(self):
+ try:
+ num = int(self.config['queues'])
+ return num in range(1, 9)
+ except:
+ print("[ERROR]The virt queues value is not an integer.")
+
+ @property
+ def _check_flows(self):
+ try:
+ num = int(self.config['flows'])
+ return num in range(1, 9)
+ except:
+ print("[ERROR]The flows value is not an integer.")
+
+ def _check_direct(self):
+ return self.config['direct'] in direction
+
+ def _check_vlans(self):
+ return self.config['vlans'] in ['True', 'False']
+
+ def _check_bind(self):
+ return True
+
+ def check_parameter_invalid(self):
+ try:
+ if self._check_model() and \
+ self._check_virtenv() and \
+ self._check_queues() and \
+ self._check_flows and \
+ self._check_direct() and \
+ self._check_vlans() and \
+ self._check_bind():
+ return True
+ else:
+ print("[ERROR]Parameter check invalid")
+ return False
+ except:
+ print("[ERROR]Check parameter invalid with unknown reason.")
+ return False
+
+
+def _get_array_values(irq_array):
+ proc_list = []
+ for i in range(len(irq_array)):
+ proc_list.append(irq_array[i][1])
+ return sorted(proc_list)
+
+
+def check_dict(thread_info, flow):
+ if thread_info['src_recv_irq'] != flow['src_recv_irq']:
+ print("[WARN]Flow src_irq process %s does not match %s in the table."
+ % (thread_info['src_recv_irq'],
+ flow['src_recv_irq']))
+ return False
+ if thread_info['dst_send_irq'] != flow['dst_send_irq']:
+ print("[WARN]Flow dst_irq process %s does not match %s in the table."
+ % (thread_info['dst_send_irq'],
+ flow['dst_send_irq']))
+ return False
+ return True
+
+
+def dst_ip_update(flow):
+ try:
+ src_dst_ip = flow['dst_ip']
+ ip_section = '.'.join(src_dst_ip.split('.')[0:3]) + '.'
+ number = int(src_dst_ip.split('.')[3])
+ new_number = number + 1
+ new_dst_ip = ip_section + str(new_number)
+ flow['dst_ip'] = new_dst_ip
+ except:
+ print("[ERROR]dst ip update failed.")
+
+
+def _tranfer_array_to_range(array):
+ return str(array[0]) + '-' + str(array[-1])
+
+
+class TnV(BaseModel):
+ def __init__(self, config):
+ super(TnV, self).__init__(config)
+ self.config = config
+ self.host_instance = None
+ self.send_instace = None
+ self.vms = None
+ self.init_flows = {}
+ handle = ConfigParser.ConfigParser()
+ handle.read(self.config['configfile'])
+ self.handle = handle
+
+ def _get_vms(self):
+ return self.host_instance.get_libvirt_vms()
+
+ def flow_match(self):
+ _queues = int(self.config['queues'])
+ _virtenv = int(self.config['virtenv'])
+ _flows = int(self.config['flows'])
+ return _flows == _queues * _virtenv
+
+ def match_virt_env(self):
+ try:
+ self.vms = self._get_vms()
+ return len(self.vms) == int(self.config['virtenv'])
+ except:
+ print("[ERROR]The number of vms or containers is not equal to virtenv.")
+ return False
+
+ @property
+ def match_flows_and_nic(self):
+ # get src_nic
+ for section in ['send', 'recv']:
+ nic = self._get_nic_from_file(section, 'nic')
+ try:
+ irq_proc = self.host_instance.get_nic_interrupt_proc(nic)
+ return int(self.config['flows']) == len(irq_proc)
+ except:
+ print("[ERROR]match flow with nic interrupt failed.")
+ return False
+
+ def _get_nic_irq_proc(self, nic):
+ return self.host_instance.get_nic_interrupt_proc(nic)
+
+ def _get_nic_from_file(self, section, column):
+ return self.handle.get(section, column)
+
+ def _get_range(self, section, column):
+ try:
+ info = self.handle.get(section, column)
+ return info.split(' ')
+ except:
+ print("[ERROR]Get mac failed.")
+ return False
+
+ def check_mac_valid(self):
+ flag = True
+ try:
+ for option in ['send', 'recv']:
+ info = self.handle.get(option, 'macs')
+ macs = info.split()
+ if len(macs) != int(self.config['virtenv']) or macs == []:
+ print("[ERROR]The number of macs is not equal to the number of vms or containers.")
+ return False
+ for mac in macs:
+ # check mac valid
+ if re.match(r'..:..:..:..:..:..', mac):
+ continue
+ else:
+ print("[ERROR]mac %s invalid" % mac)
+ flag = False
+ break
+ if not flag:
+ break
+ return flag
+ except:
+ print("[ERROR]parse macs failed.")
+ return False
+
+ def check_vlan_valid(self):
+ flag = True
+ for direct in ['send', 'recv']:
+ vlans = self.handle.get(direct, 'vlans').split()
+ if len(vlans) != int(self.config['virtenv']):
+ print("[ERROR]vlans are not configured for every vm.")
+ return False
+ for vlan in vlans:
+ if int(vlan) <= 1 or int(vlan) >= 4095:
+ flag = False
+ break
+ return flag
+
+ @property
+ def check_logic_invalid(self):
+ return self.flow_match() and self.match_virt_env() and \
+ self.match_flows_and_nic and self.check_mac_valid() and \
+ self.check_vlan_valid()
+
+ @property
+ def read_flow_init(self):
+ # build the per-flow dictionaries from the tester config file
+ temp_flow = {}
+ src_macs = self._get_range('send', 'macs')
+ dst_macs = self._get_range('recv', 'macs')
+ src_vlan = self._get_range('send', 'vlans')
+ dst_vlan = self._get_range('recv', 'vlans')
+ src_nic = self._get_nic_from_file('send', 'nic')
+ dst_nic = self._get_nic_from_file('recv', 'nic')
+ src_nic_irq = _get_array_values(self._get_nic_irq_proc(src_nic))
+ dst_nic_irq = _get_array_values(self._get_nic_irq_proc(dst_nic))
+ src_ip_sections = self._get_range('send', 'ip_sections')
+ dst_ip_sections = self._get_range('recv', 'ip_sections')
+ send_port = self._get_nic_from_file('send', 'port')
+ recv_port = self._get_nic_from_file('recv', 'port')
+ temp_flow['tester_ip'] = self._get_nic_from_file('common', 'tester_ip')
+ vlan = src_vlan
+ avg_flow = int(self.config['flows']) / int(self.config['virtenv'])
+ # build the main dictionary
+ for _direct in sorted(fwd[self.config['direct']]):
+ i = 0
+ j = 0
+ temp_flow['direct'] = _direct
+ temp_flow['send_port'] = send_port
+ temp_flow['recv_port'] = recv_port
+
+ for _vm in sorted(self.vms):
+ vlan_id = {
+ 'True': vlan[i],
+ 'False': None}
+ temp_flow['virt'] = _vm
+ _vm_info = self.host_instance.get_vm_info(_vm)
+ temp_flow['qemu_proc'] = _vm_info['main_pid']
+ # temp_flow['qemu_thread'] = _vm_info['qemu_thread']
+ temp_flow['mem_numa'] = _vm_info['mem_numa']
+ # temp_flow['vhost_thread'] = _vm_info['vhost_thread']
+
+ temp_flow['src_mac'] = src_macs[i]
+ temp_flow['dst_mac'] = dst_macs[i]
+ temp_flow['vlan'] = vlan_id[self.config['vlans']]
+ src_ip = src_ip_sections[i]
+ dst_ip = dst_ip_sections[i]
+ temp_flow['src_ip'] = src_ip
+ temp_flow['dst_ip'] = dst_ip
+ vm_index = sorted(self.vms).index(_vm)
+ for _queue in range(1, int(self.config['queues']) + 1):
+ # flow info
+ temp_flow['queue'] = _queue
+ # fwd thread
+
+ temp_flow['qemu_thread_list'] = _vm_info['qemu_thread']
+ forward_core = {
+ "forward": _vm_info['qemu_thread'][_queue + avg_flow * vm_index],
+ "reverse": _vm_info['qemu_thread'][_queue + avg_flow * vm_index + int(self.config['flows'])]
+ }
+ temp_flow['fwd_thread'] = forward_core[_direct]
+
+ temp_flow['fwd_vhost'] = None
+ # nic interrupts info
+ temp_flow['src_recv_irq'] = src_nic_irq[j]
+ temp_flow['src_nic'] = src_nic
+ temp_flow['dst_send_irq'] = dst_nic_irq[j]
+ temp_flow['dst_nic'] = dst_nic
+ # above all
+ j += 1
+ self.init_flows[_direct + '_' + _vm + '_' + str(_queue)] = copy.deepcopy(temp_flow)
+ i += 1
+ src_nic_irq, dst_nic_irq = dst_nic_irq, src_nic_irq
+ vlan = dst_vlan
+ send_port, recv_port = recv_port, send_port
+ src_nic, dst_nic = dst_nic, src_nic
+ src_macs, dst_macs = dst_macs, src_macs
+ src_ip_sections, dst_ip_sections = dst_ip_sections, src_ip_sections
+ # return sorted(self.init_flows.iteritems(), key=lambda d:d[0])
+ return self.init_flows
+
+ def mac_learning(self, flowa, flowb):
+ flowa = str(flowa)
+ flowb = str(flowb)
+ ret = self.send_instace.mac_learning(flowa, flowb)
+ return ret
+
+ def send_packet(self, flow):
+ flow = str(flow)
+ # return a stream block handle
+ return self.send_instace.send_packet(flow)
+
+ def stop_flow(self, streamblock, flow):
+ flow = str(flow)
+ return self.send_instace.stop_flow(streamblock, flow)
+
+ def catch_thread_info(self):
+ return self.host_instance.catch_thread_info()
+
+ def set_thread2flow(self, thread_info, flow):
+ flow['fwd_vhost'] = thread_info['fwd_vhost']
+ return True
+
+ @property
+ def flow_build(self):
+ for _direct in fwd[self.config['direct']]:
+ for _vm in self.vms:
+ for _queue in range(1, int(self.config['queues']) + 1):
+ i = 0
+ while i < 50:
+ try:
+ i += 1
+ thread_info = None
+ self.mac_learning(self.init_flows[_direct + '_' + _vm + '_' + str(_queue)],
+ self.init_flows[reverse_dict[_direct] + '_' + _vm + '_' + str(_queue)])
+ streamblock = self.send_packet(self.init_flows[_direct + '_' + _vm + '_' + str(_queue)])
+ time.sleep(1)
+ result, thread_info = self.catch_thread_info()
+ thread_info = eval(thread_info)
+ self.stop_flow(streamblock, self.init_flows[_direct + '_' + _vm + '_' + str(_queue)])
+ time.sleep(1)
+ if not result:
+ print("[ERROR]Catch the thread info failed.")
+ break
+ except:
+ print("[ERROR]Sending the flow or getting the host thread info failed.")
+
+ # compare the captured thread info with the expected flow entry
+ if check_dict(thread_info, self.init_flows[_direct + '_' + _vm + '_' + str(_queue)]):
+ self.set_thread2flow(thread_info, self.init_flows[_direct + '_' + _vm + '_' + str(_queue)])
+ print("[INFO]Flow %s_%s_%s : fwd_vhost %s src_recv_irq %s dst_send_irq %s"
+ % (_direct, _vm, _queue, thread_info['fwd_vhost'], thread_info['src_recv_irq'],
+ thread_info['dst_send_irq']))
+ print("%s" % (self.init_flows[_direct + '_' + _vm + '_' + str(_queue)]))
+ break
+ else:
+ dst_ip_update(self.init_flows[_direct + '_' + _vm + '_' + str(_queue)])
+ return self.init_flows
+
+ def affinity_bind(self, aff_strategy):
+ # get the forward cores
+ qemu_list = []
+ qemu_other = []
+ src_vhost = []
+ dst_vhost = []
+ src_irq = []
+ dst_irq = []
+
+ # recognize the thread id
+ for flowname in sorted(self.init_flows.keys()):
+ tmp_thread = self.init_flows[flowname]['fwd_thread']
+ qemu_other = qemu_other + copy.deepcopy(self.init_flows[flowname]['qemu_thread_list'])
+ qemu_list.append(tmp_thread)
+ if self.init_flows[flowname]['direct'] == 'forward':
+ dst_vhost.append(self.init_flows[flowname]['fwd_vhost'])
+ src_irq.append(self.init_flows[flowname]['src_recv_irq'])
+ dst_irq.append(self.init_flows[flowname]['dst_send_irq'])
+ elif self.init_flows[flowname]['direct'] == 'reverse':
+ src_vhost.append(self.init_flows[flowname]['fwd_vhost'])
+ dst_irq.append(self.init_flows[flowname]['src_recv_irq'])
+ src_irq.append(self.init_flows[flowname]['dst_send_irq'])
+
+ qemu_list = sorted({}.fromkeys(qemu_list).keys())
+ src_vhost = sorted({}.fromkeys(src_vhost).keys())
+ dst_vhost = sorted({}.fromkeys(dst_vhost).keys())
+ src_irq = sorted({}.fromkeys(src_irq).keys())
+ dst_irq = sorted({}.fromkeys(dst_irq).keys())
+
+ # get the qemu thread except the forward core
+ qemu_other = sorted({}.fromkeys(qemu_other).keys())
+ for i in qemu_list:
+ qemu_other.remove(i)
+ # get the bind strategy
+ handle = ConfigParser.ConfigParser()
+ handle.read(self.config['strategyfile'])
+ try:
+ qemu_numa = handle.get('strategy' + self.config['strategy'], 'qemu_numa')
+ src_vhost_numa = handle.get('strategy' + self.config['strategy'], 'src_vhost_numa')
+ dst_vhost_numa = handle.get('strategy' + self.config['strategy'], 'dst_vhost_numa')
+ src_irq_numa = handle.get('strategy' + self.config['strategy'], 'src_irq_numa')
+ dst_irq_numa = handle.get('strategy' + self.config['strategy'], 'dst_irq_numa')
+ loan_numa = handle.get('strategy' + self.config['strategy'], 'loan_numa')
+ except:
+ print("[ERROR]Parse the strategy file failed or get the options failed.")
+
+ for value in [qemu_numa, src_vhost_numa, dst_vhost_numa, src_irq_numa, dst_irq_numa, loan_numa]:
+ if value is None or value == '':
+ raise ValueError('some option in the strategy file is none.')
+ # cores mapping thread
+ numa_topo = self.host_instance.get_numa_core()
+ numa_topo = eval(numa_topo)
+ # first check the cores number
+
+ # order src_irq dst_irq src_vhost dst_vhost qemu_list
+ for node in numa_topo.keys():
+ numa_topo[node]['process'] = []
+ if 'node' + src_irq_numa == node:
+ numa_topo[node]['process'] = numa_topo[node]['process'] + src_irq
+ if 'node' + dst_irq_numa == node:
+ numa_topo[node]['process'] = numa_topo[node]['process'] + dst_irq
+ if 'node' + src_vhost_numa == node:
+ numa_topo[node]['process'] = numa_topo[node]['process'] + src_vhost
+ if 'node' + dst_vhost_numa == node:
+ numa_topo[node]['process'] = numa_topo[node]['process'] + dst_vhost
+ if 'node' + qemu_numa == node:
+ numa_topo[node]['process'] = numa_topo[node]['process'] + qemu_list
+ loan_cores = ''
+ for node in numa_topo.keys():
+ if len(numa_topo[node]['process']) > len(numa_topo[node]['phy_cores']):
+ # length distance
+ diff = len(numa_topo[node]['process']) - len(numa_topo[node]['phy_cores'])
+ # first deep copy
+ numa_topo['node' + loan_numa]['process'] = numa_topo['node' + loan_numa]['process'] + copy.deepcopy(
+ numa_topo[node]['process'][-diff:])
+ cores_str = _tranfer_array_to_range(numa_topo['node' + loan_numa]['phy_cores'][diff:])
+ loan_cores = ','.join([loan_cores, cores_str])
+ numa_topo[node]['process'] = numa_topo[node]['process'][0:-diff]
+ loan_cores = loan_cores[1:]
+ loan_bind_list = {}
+ for proc_loan in qemu_other:
+ loan_bind_list[proc_loan] = loan_cores
+
+ bind_list = {}
+ for node in numa_topo.keys():
+ for i in range(len(numa_topo[node]['process'])):
+ bind_list[numa_topo[node]['process'][i]] = str(numa_topo[node]['phy_cores'][i])
+ bind_list.update(loan_bind_list)
+ for key in bind_list.keys():
+ self.host_instance.bind_cpu(bind_list[key], key)
+ print bind_list
+ return True
+
+ def testrun(self, suite):
+ global forward_init_flows, reverse_init_flows
+ try:
+ forward_init_flows = {}
+ reverse_init_flows = {}
+ for key in self.init_flows.keys():
+ if self.init_flows[key]['direct'] == "forward":
+ forward_init_flows[key] = self.init_flows[key]
+ elif self.init_flows[key]['direct'] == "reverse":
+ reverse_init_flows[key] = self.init_flows[key]
+ forward_init_flows = str(forward_init_flows)
+ reverse_init_flows = str(reverse_init_flows)
+ except:
+ print("[ERROR]init the forward and reverse flow failed.")
+
+ if suite == "throughput":
+ print("[INFO]!!!!!!!!!!!!!!!Now begin to throughput test")
+ ret, result = self.send_instace.run_rfc2544_throughput(forward_init_flows, reverse_init_flows)
+ elif suite == "frameloss":
+ print("[INFO]!!!!!!!!!!!!!!!Now begin to frameloss test")
+ ret, result = self.send_instace.run_rfc2544_frameloss(forward_init_flows, reverse_init_flows)
+ return ret, result
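
For orientation, TnV reads two ConfigParser files: the tester config (sections send/recv/common, with the option names taken from the handle.get() calls above) and the bind strategy file used by affinity_bind(). A sketch with purely illustrative values:

    # tester config (config['configfile'])
    [common]
    tester_ip = 192.168.188.200
    [send]
    nic = eth4
    port = 1/1
    macs = 90:e2:ba:20:1f:d8 90:e2:ba:20:1f:da
    vlans = 100 101
    ip_sections = 192.168.1.1 192.168.2.1
    [recv]
    nic = eth5
    port = 1/2
    macs = 90:e2:ba:20:1f:d9 90:e2:ba:20:1f:db
    vlans = 100 101
    ip_sections = 192.168.3.1 192.168.4.1

    # bind strategy file (config['strategyfile'])
    [strategy1]
    qemu_numa = 0
    src_vhost_numa = 0
    dst_vhost_numa = 1
    src_irq_numa = 0
    dst_irq_numa = 1
    loan_numa = 1
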
diff --git a/vstf/vstf/controller/spirent/common/result_analysis.py b/vstf/vstf/controller/spirent/common/result_analysis.py
new file mode 100755
index 00000000..162e3888
--- /dev/null
+++ b/vstf/vstf/controller/spirent/common/result_analysis.py
@@ -0,0 +1,172 @@
+#!/usr/bin/python
+
+import re
+
+
+def getResultColumn(data_dict):
+ column_string = data_dict['Columns']
+ return column_string.strip('{}').split()
+
+
+def getResult(data_dict):
+ result_string = data_dict['Output']
+ result_array = result_string.split('} {')
+ result = []
+ for line in result_array:
+ result.append(line.split())
+ return result
+
+
+def restrucData(data_string):
+ try:
+ data_dict = {}
+ p = re.compile('-Columns.*-Output')
+ data_dict['Columns'] = p.findall(data_string)[0].strip('-Columns {} -Output')
+ p = re.compile('-Output.*-State')
+ data_dict['Output'] = p.findall(data_string)[0].strip('-Output {} -State')
+ if data_dict['Columns'] is None or data_dict['Output'] is None:
+ return False, None
+ return True, data_dict
+ except:
+ print("[ERROR]Find the column name or the output result failed.")
+ return False, None
+
+
+def framelossData(column, perfdata):
+ column_name_dict = {
+ 'TrialNumber': 0,
+ 'Id': 1,
+ 'FrameSize': 3,
+ 'TxFrameCount': 9,
+ 'RxFrameCount': 10,
+ 'PercentLoss(%s)': 12,
+ 'MinimumLatency(us)': 17,
+ 'MaximumLatency(us)': 18,
+ 'AverageLatency(us)': 19,
+ 'MinimumJitter(us)': 20,
+ 'MaximumJitter(us)': 21,
+ 'AverageJitter(us)': 22,
+ }
+ # get the column array
+ column_array = [
+ column[column_name_dict['FrameSize']],
+ 'ForwardingRate(Mpps)',
+ column[column_name_dict['TxFrameCount']],
+ column[column_name_dict['RxFrameCount']],
+ column[column_name_dict['PercentLoss(%s)']],
+ column[column_name_dict['AverageLatency(us)']],
+ column[column_name_dict['MinimumLatency(us)']],
+ column[column_name_dict['MaximumLatency(us)']],
+ column[column_name_dict['AverageJitter(us)']],
+ column[column_name_dict['MinimumJitter(us)']],
+ column[column_name_dict['MaximumJitter(us)']]
+ ]
+ data_array = []
+ for line in perfdata:
+ line_options = [
+ # line[column_name_dict['TrialNumber']],
+ # line[column_name_dict['Id']],
+ line[column_name_dict['FrameSize']],
+ str(float(line[column_name_dict['RxFrameCount']]) / 60 / 1000000),
+ line[column_name_dict['TxFrameCount']],
+ line[column_name_dict['RxFrameCount']],
+ line[column_name_dict['PercentLoss(%s)']],
+ line[column_name_dict['AverageLatency(us)']],
+ line[column_name_dict['MinimumLatency(us)']],
+ line[column_name_dict['MaximumLatency(us)']],
+ line[column_name_dict['AverageJitter(us)']],
+ line[column_name_dict['MinimumJitter(us)']],
+ line[column_name_dict['MaximumJitter(us)']]
+ ]
+ data_array.append(line_options)
+ return [column_array, data_array]
+
+
+class analysis(object):
+ def __init__(self):
+ pass
+
+ def analyseResult(self, suite, column, perfdata):
+ """
+ :type self: object
+ """
+ global data_array, column_array
+ if suite == 'throughput':
+ [column_array, data_array] = self.throughputData(column, perfdata)
+ elif suite == 'frameloss':
+ [column_array, data_array] = self.framelossData(column, perfdata)
+ elif suite == 'latency':
+ self.latencyData(column, perfdata)
+ else:
+ return None
+ for line in data_array:
+ print line
+ return [column_array, data_array]
+
+ def throughputData(self, column, perfdata):
+ column_name_dict = {
+ 'TrialNumber': 0,
+ 'Id': 1,
+ 'FrameSize': 3,
+ 'Load(%)': 6,
+ 'Result': 8,
+ 'TxFrameCount': 12,
+ 'RxFrameCount': 13,
+ 'ForwardingRate(mpps)': 17,
+ 'MinimumLatency(us)': 18,
+ 'MaximumLatency(us)': 19,
+ 'AverageLatency(us)': 20,
+ 'MinimumJitter(us)': 21,
+ 'MaximumJitter(us)': 22,
+ 'AverageJitter(us)': 23
+ }
+ column_array = [column[column_name_dict['FrameSize']],
+ column[column_name_dict['Load(%)']],
+ column[column_name_dict['Result']],
+ 'ForwardingRate(mpps)',
+ column[column_name_dict['TxFrameCount']],
+ column[column_name_dict['RxFrameCount']],
+ column[column_name_dict['AverageLatency(us)']],
+ column[column_name_dict['MinimumLatency(us)']],
+ column[column_name_dict['MaximumLatency(us)']],
+ column[column_name_dict['AverageJitter(us)']],
+ column[column_name_dict['MinimumJitter(us)']],
+ column[column_name_dict['MaximumJitter(us)']]]
+ data_array = []
+ for line in perfdata:
+ if line[column_name_dict['Result']] == 'Passed':
+ line_options = [
+ # line[column_name_dict['TrialNumber']],
+ # line[column_name_dict['Id']],
+ line[column_name_dict['FrameSize']],
+ line[column_name_dict['Load(%)']],
+ line[column_name_dict['Result']],
+ str(float(line[column_name_dict['ForwardingRate(mpps)']]) / 1000000),
+ line[column_name_dict['TxFrameCount']],
+ line[column_name_dict['RxFrameCount']],
+ line[column_name_dict['AverageLatency(us)']],
+ line[column_name_dict['MinimumLatency(us)']],
+ line[column_name_dict['MaximumLatency(us)']],
+ line[column_name_dict['AverageJitter(us)']],
+ line[column_name_dict['MinimumJitter(us)']],
+ line[column_name_dict['MaximumJitter(us)']]]
+ else:
+ continue
+ data_array.append(line_options)
+ # delete the redundant test data
+ delete_index = []
+ new_data_array = []
+ for ele in range(len(data_array) - 1):
+ if data_array[ele][0] == data_array[ele + 1][0]:
+ delete_index.append(ele)
+
+ for num in range(len(data_array)):
+ if num not in delete_index:
+ new_data_array.append(data_array[num])
+
+ return column_array, new_data_array
+
+ def latencyData(self, column, perfdata):
+ pass
+
+
+analysis_instance = analysis()
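
A minimal sanity sketch for the two helpers at the top of this module; the strings mimic the shape restrucData() produces after the outer braces have already been stripped, and the values are made up:

    data_dict = {
        "Columns": "TrialNumber Id FrameSize",
        "Output": "1 1 64} {1 2 128"
    }
    getResultColumn(data_dict)   # -> ['TrialNumber', 'Id', 'FrameSize']
    getResult(data_dict)         # -> [['1', '1', '64'], ['1', '2', '128']]
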
diff --git a/vstf/vstf/controller/sw_perf/README b/vstf/vstf/controller/sw_perf/README
new file mode 100755
index 00000000..02844a3e
--- /dev/null
+++ b/vstf/vstf/controller/sw_perf/README
@@ -0,0 +1,39 @@
+Tree
+
+|--- flow_producer.py
+|--- model.py
+|--- performance.py
+|--- perf_provider.py
+|--- raw_data.py
+
+Entry:
+ performance.py
+ usage: performance.py [-h] [-case CASE]
+ [-tool {pktgen,netperf,qperf,iperf,netmap}]
+ [-protocol {tcp,udp}] [-profile {rdp,fastlink,l2switch}]
+ [-type {throughput,latency,frameloss}] [-sizes SIZES]
+ [--monitor MONITOR]
+
+ optional arguments:
+ -h, --help show this help message and exit
+ -case CASE test case like Ti-1, Tn-1, Tnv-1, Tu-1...
+ -tool {pktgen,netperf,qperf,iperf,netmap}
+ -protocol {tcp,udp}
+ -profile {rdp,fastlink,l2switch}
+ -type {throughput,latency,frameloss}
+ -sizes SIZES test size list "64 128"
+ --monitor MONITOR which ip to be monitored
+
+Interface:
+ usage:
+ conn = Server(host=args.monitor)
+ flows_settings = FlowsSettings()
+ tool_settings = ToolSettings()
+ tester_settings = TesterSettings()
+ flow_producer = FlowsProducer(conn, flows_settings)
+ provider = PerfProvider(flows_settings.settings, tool_settings.settings, tester_settings.settings)
+ perf = Performance(conn, provider)
+ flow_producer.create(scenario, case)
+ LOG.info(flows_settings.settings)
+ result = perf.run(tool, protocol, type, sizes)
+
diff --git a/vstf/vstf/controller/sw_perf/__init__.py b/vstf/vstf/controller/sw_perf/__init__.py
new file mode 100755
index 00000000..89dcd4e2
--- /dev/null
+++ b/vstf/vstf/controller/sw_perf/__init__.py
@@ -0,0 +1,14 @@
+# Copyright Huawei Technologies Co., Ltd. 1998-2015.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the License); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an AS IS BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
diff --git a/vstf/vstf/controller/sw_perf/flow_producer.py b/vstf/vstf/controller/sw_perf/flow_producer.py
new file mode 100755
index 00000000..1de4161c
--- /dev/null
+++ b/vstf/vstf/controller/sw_perf/flow_producer.py
@@ -0,0 +1,137 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-11-19
+# see license for license details
+
+import logging
+
+from vstf.controller.settings.device_settings import DeviceSettings
+from vstf.controller.settings.forwarding_settings import ForwardingSettings
+from vstf.controller.settings.cpu_settings import CpuSettings
+from vstf.controller.fabricant import Fabricant
+from vstf.controller.settings.flows_settings import FlowsSettings
+import vstf.common.constants as cst
+
+LOG = logging.getLogger(__name__)
+
+
+class FlowsProducer(object):
+ def __init__(self, conn, flows_settings):
+ self._perf = flows_settings
+ self._forwarding = ForwardingSettings().settings
+ self._device = DeviceSettings().settings
+ self._cpu = CpuSettings().settings
+ self._conn = conn
+ self._devs_map = {}
+
+ def get_dev(self, item):
+ agent = self._device[item[0]]["agent"]
+ devs = self._device[item[0]]["devs"][item[1]]
+
+ keys = ["bdf", "iface", "mac"]
+
+ key = devs.keys()[0]
+
+ if key in keys:
+ name = devs[key]
+ else:
+ raise Exception("error devs: %s" % devs)
+ LOG.info(agent)
+ LOG.info(name)
+ if not self._devs_map.has_key((agent, name)):
+ query = Fabricant(agent, self._conn)
+ query.clean_all_namespace()
+ dev_info = query.get_device_verbose(identity=name)
+ if not isinstance(dev_info, dict):
+ err = "get device detail failed, agent:%s net:%s" % (agent, name)
+ raise Exception(err)
+ dev = {
+ "agent": agent,
+ "dev": {
+ "bdf": dev_info["bdf"],
+ "iface": dev_info["iface"],
+ "mac": dev_info["mac"],
+ "ip": None,
+ "namespace": None
+ }
+ }
+
+ self._devs_map[(agent, name)] = dev
+ LOG.info(dev)
+
+ return self._devs_map[(agent, name)]
+
+ def get_host(self):
+ result = {
+ "agent": self._device["host"]["agent"],
+ "affctl": self._cpu["affctl"]
+ }
+ return result
+
+ def create(self, scenario, case):
+ self._devs_map = {}
+ flows_indexes = self._forwarding[scenario]["flows"]
+ flows_infos = []
+ for index in flows_indexes:
+ if not index:
+ raise Exception("error flows %s" % flows_indexes)
+ dev = self.get_dev(index)
+ flows_infos.append(dev)
+
+ flows_infos[0]['dev'].update(self._forwarding["head"])
+ flows_infos[-1]['dev'].update(self._forwarding["tail"])
+
+ LOG.info(flows_infos)
+
+ actor_info = cst.CASE_ACTOR_MAP[case]
+
+ self._perf.clear_all()
+ senders = actor_info["senders"]
+ LOG.info(senders)
+ for sender in senders:
+ dev = flows_infos[sender]
+ if dev:
+ self._perf.add_senders(dev)
+
+ receivers = actor_info["receivers"]
+ for receiver in receivers:
+ dev = flows_infos[receiver]
+ if dev:
+ self._perf.add_receivers(dev)
+
+ watchers = self._forwarding[scenario]["watchers"]
+ for watcher in watchers:
+ dev = flows_infos[watcher]
+ if dev:
+ self._perf.add_watchers(dev)
+
+ namespaces = [0, -1]
+ for namespace in namespaces:
+ dev = flows_infos[namespace]
+ if dev:
+ self._perf.add_namespaces(dev)
+
+ host = self.get_host()
+ if host:
+ self._perf.add_cpu_listens(host)
+
+ self._perf.set_flows(actor_info["flows"])
+ return True
+
+
+def unit_test():
+ from vstf.rpc_frame_work.rpc_producer import Server
+ from vstf.common.log import setup_logging
+ setup_logging(level=logging.INFO, log_file="/var/log/vstf/vstf-producer.log", clevel=logging.INFO)
+
+ conn = Server("192.168.188.10")
+ flow_settings = FlowsSettings()
+ flow_producer = FlowsProducer(conn, flow_settings)
+ scenario = "Tn"
+ case = "Tn-1"
+ flow_producer.create(scenario, case)
+
+
+if __name__ == '__main__':
+ unit_test()
diff --git a/vstf/vstf/controller/sw_perf/model.py b/vstf/vstf/controller/sw_perf/model.py
new file mode 100755
index 00000000..672daade
--- /dev/null
+++ b/vstf/vstf/controller/sw_perf/model.py
@@ -0,0 +1,190 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author:
+# date:
+# see license for license details
+
+import logging
+
+from vstf.controller.fabricant import Fabricant
+from vstf.controller.sw_perf.raw_data import RawDataProcess
+from vstf.common import perfmark as mark
+
+LOG = logging.getLogger(__name__)
+
+
+class NetDeviceMgr(Fabricant):
+ @classmethod
+ def add(cls, dst, conn, dev):
+ self = cls(dst, conn)
+ LOG.info(dev)
+ ret = self.config_dev(netdev=dev)
+ LOG.info(ret)
+
+ @classmethod
+ def remove(cls, dst, conn, dev):
+ self = cls(dst, conn)
+ LOG.info(dev)
+ ret = self.recover_dev(netdev=dev)
+ LOG.info(ret)
+
+ @classmethod
+ def clear(cls, dst, conn):
+ self = cls(dst, conn)
+ self.clean_all_namespace()
+
+
+class Actor(Fabricant):
+ def __init__(self, dst, conn, tool, params):
+ super(Actor, self).__init__(dst, conn)
+ self._tool = tool
+ self._params = params
+ self._data = {}
+
+ def __repr__(self):
+ repr_dict = self.__dict__
+ repr_keys = list(repr_dict.keys())
+ repr_keys.sort()
+ return '%s(%s)' % (self.__class__.__name__, ', '.join(['%s=%r' % (k, repr_dict[k]) for k in repr_keys]))
+
+
+class Sender(Actor):
+ def start(self, pktsize, **kwargs):
+ LOG.info("Sender.start")
+ if 'ratep' in kwargs and kwargs['ratep']:
+ self._params['ratep'] = kwargs['ratep']
+ self._params['size'] = pktsize
+
+ ret, info = self.perf_run(
+ operation="start",
+ action="send",
+ tool=self._tool,
+ params=self._params
+ )
+ LOG.info(ret)
+ if ret:
+ raise Exception(info)
+ LOG.info(info)
+ print ret
+
+ def stop(self):
+ LOG.info(self._params)
+ rets = self.perf_run(
+ operation="stop",
+ action="send",
+ tool=self._tool,
+ params={}
+ )
+ LOG.info(rets)
+ minlatency, avglatency, maxlatency = 0, 0, 0
+ count = 0
+ for (ret, info) in rets:
+ if ret:
+ raise Exception(info)
+ if self.is_data() and ret == 0:
+ count += 1
+ minlatency += info[mark.minLatency]
+ avglatency += info[mark.avgLatency]
+ maxlatency += info[mark.maxLatency]
+ count = 1 if not count else count
+ self._data[mark.minLatency] = minlatency / count
+ self._data[mark.avgLatency] = avglatency / count
+ self._data[mark.maxLatency] = maxlatency / count
+
+ print rets
+
+ def is_data(self):
+ if '_lat' in self._params['protocol']:
+ return True
+ return False
+
+ def result(self):
+ return self._data
+
+
+class Receiver(Actor):
+ def start(self, **kwargs):
+ LOG.info("Receiver.start")
+ ret, info = self.perf_run(
+ operation="start",
+ action="receive",
+ tool=self._tool,
+ params=self._params
+ )
+ LOG.info(ret)
+ if ret:
+ raise Exception(info)
+ LOG.info(info)
+ return ret
+
+ def stop(self):
+ LOG.info("Receiver.stop")
+ ret, info = self.perf_run(
+ operation="stop",
+ action="receive",
+ tool=self._tool,
+ params=self._params
+ )
+ LOG.info(ret)
+ if ret:
+ raise Exception(info)
+ LOG.info(info)
+ return ret
+
+
+class NicWatcher(Fabricant):
+ def __init__(self, dst, conn, params):
+ super(NicWatcher, self).__init__(dst, conn)
+ self._params = params
+ self._pid = None
+ self._data = {}
+
+ def start(self):
+ print "NicWatcher.start"
+ self._pid = self.run_vnstat(device=self._params["iface"], namespace=self._params["namespace"])
+ print self._pid
+
+ def stop(self):
+ print "NicWatcher.stop"
+ if self._pid:
+ data = self.kill_vnstat(pid=self._pid)
+ self._data = RawDataProcess.process(data)
+ print "---------------------------------"
+ print self._data
+ print "---------------------------------"
+
+ def result(self, **kwargs):
+ return self._data
+
+
+class CpuWatcher(Fabricant):
+ def __init__(self, dst, conn):
+ super(CpuWatcher, self).__init__(dst, conn)
+ self._pid = None
+ self._data = {}
+
+ def start(self):
+ print "CpuWatcher.start"
+ self._pid = self.run_cpuwatch()
+ print self._pid
+
+ def stop(self):
+ print "CpuWatcher.stop"
+ if self._pid:
+ print self._pid
+ data = self.kill_cpuwatch(pid=self._pid)
+ self._data = RawDataProcess.process(data)
+ print "---------------------------------"
+ print self._data
+ print "---------------------------------"
+
+ def result(self, **kwargs):
+ return self._data
+
+
+def unit_test():
+ pass
+
+
+if __name__ == '__main__':
+ unit_test()
diff --git a/vstf/vstf/controller/sw_perf/perf_provider.py b/vstf/vstf/controller/sw_perf/perf_provider.py
new file mode 100755
index 00000000..bd1027ad
--- /dev/null
+++ b/vstf/vstf/controller/sw_perf/perf_provider.py
@@ -0,0 +1,209 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-09-21
+# see license for license details
+
+import logging
+
+LOG = logging.getLogger(__name__)
+
+
+def get_agent_dict(nodes):
+ """
+ :param:
+ nodes: list of flow info
+            and every element must be a dict with the key "agent"
+    :return: dict keyed by agent, each value initialized to False
+    :rtype: dict
+ """
+ agent_list = map(lambda x: x["agent"], nodes)
+ return {}.fromkeys(agent_list, False)
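+# Illustrative example (added comment, not from the original code): with
+#     nodes = [{"agent": "host-a"}, {"agent": "host-b"}, {"agent": "host-a"}]
+# get_agent_dict(nodes) returns {"host-a": False, "host-b": False}, i.e. a
+# de-duplicated agent -> "already handled" flag map used by the callers below.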
+
+
+class PerfProvider(object):
+ def __init__(self, flows_info, tool_info, tester_info):
+ self._flows_info = flows_info
+ self._tool_info = tool_info
+ self._tester_info = tester_info
+
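+    # Added comment: returns True when the two flows share the same sender
+    # agent (the "isolation" layout); in that case get_senders merges them
+    # into a single pktgen sender carrying both src/dst device pairs.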
+ def _islation(self):
+ flows = self._flows_info["flows"]
+ if flows == 2 and self._flows_info["senders"][0]["agent"] == self._flows_info["senders"][1]["agent"]:
+ return True
+ return False
+
+ def get_senders(self, tool, protocol):
+ result = []
+ flows = self._flows_info["flows"]
+ if self._islation() and "pktgen" == tool:
+ sender = {
+ "agent": self._flows_info["senders"][0]["agent"],
+ "params": {
+ "protocol": protocol,
+ "namespace": None,
+ "src": [],
+ "dst": [],
+ "time": self._tool_info[tool]["time"],
+ "threads": self._tool_info[tool]["threads"]
+ }
+ }
+ for i in range(flows):
+ sender['params']['src'].append(self._flows_info["senders"][i]['dev'])
+ sender['params']['dst'].append(self._flows_info["receivers"][i]['dev'])
+ result.append(sender)
+ else:
+ for i in range(flows):
+ sender = {
+ "agent": self._flows_info["senders"][i]["agent"],
+ "params": {
+ "protocol": protocol,
+ "namespace": None if "netmap" == tool else self._flows_info["senders"][i]['dev']['namespace'],
+ "src": [self._flows_info["senders"][i]['dev']],
+ "dst": [self._flows_info["receivers"][i]['dev']],
+ "time": self._tool_info[tool]["time"],
+ "threads": self._tool_info[tool]["threads"]
+ }
+ }
+ result.append(sender)
+ return result
+
+ def get_receivers(self, tool, protocol):
+ result = []
+ flows = self._flows_info["flows"]
+ if self._islation() and "pktgen" == tool:
+ receiver = {
+ "agent": self._flows_info["receivers"][0]["agent"],
+ "params": {
+ "namespace": None,
+ "protocol": protocol,
+ }
+ }
+ result.append(receiver)
+ else:
+ for i in range(flows):
+ receiver = {
+ "agent": self._flows_info["receivers"][i]["agent"],
+ "params": {
+ "namespace": None if "netmap" == tool else self._flows_info["receivers"][i]['dev']['namespace'],
+ "protocol": protocol,
+ "dst": [self._flows_info["receivers"][i]['dev']]
+ }
+ }
+ result.append(receiver)
+ return result
+
+ def get_watchers(self, tool):
+ result = []
+ for watcher in self._flows_info["watchers"]:
+ node = {
+ "agent": watcher["agent"],
+ "params": {
+ "iface": watcher['dev']["iface"],
+ "namespace": None if tool in ["pktgen", "netmap"] else watcher['dev']["namespace"],
+ }
+ }
+ result.append(node)
+ return result
+
+ def get_namespaces(self, tool):
+ result = []
+
+ for watcher in self._flows_info["namespaces"]:
+ node = {
+ "agent": watcher["agent"],
+ "params": {
+ "iface": watcher['dev']["iface"],
+ "namespace": watcher['dev']["namespace"] if tool not in ["pktgen", "netmap"] else None,
+ "ip": watcher['dev']["ip"] + '/24',
+ }
+ }
+ result.append(node)
+ return result
+
+ @property
+ def get_cpuwatcher(self):
+ LOG.info(self._flows_info["cpu_listens"])
+ result = {
+ "agent": self._flows_info["cpu_listens"][0]["agent"],
+ "params": {
+ }
+ }
+ return result
+
+ @property
+ def get_cpu_affctl(self):
+ LOG.info(self._flows_info["cpu_listens"])
+ result = {
+ "agent": self._flows_info["cpu_listens"][0]["agent"],
+ "params": {
+ "policy": self._flows_info["cpu_listens"][0]["affctl"]["policy"]
+ }
+ }
+ return result
+
+ def get_cleaners(self, tool, protocol):
+ nodes = self.get_senders(tool, protocol) + \
+ self.get_receivers(tool, protocol) + \
+ self.get_watchers(tool) + \
+ [self.get_cpuwatcher]
+ return get_agent_dict(nodes).keys()
+
+ @property
+ def get_testers(self):
+ agents = get_agent_dict(self._flows_info["namespaces"]).keys()
+ result = []
+ for agent in agents:
+ node = {
+ "agent": agent,
+ "params": {
+ "drivers": self._tester_info["drivers"]
+ }
+ }
+ result.append(node)
+ return result
+
+ def duration(self, tool):
+ return self._tool_info[tool]["time"]
+
+ def wait_balance(self, tool):
+ return self._tool_info[tool]["wait"]
+
+
+def unit_test():
+ from vstf.common.log import setup_logging
+ setup_logging(level=logging.DEBUG, log_file="/var/log/vstf/vstf-perf-provider.log", clevel=logging.INFO)
+
+ from vstf.controller.settings.flows_settings import FlowsSettings
+ from vstf.controller.settings.tool_settings import ToolSettings
+ from vstf.controller.settings.tester_settings import TesterSettings
+
+ flows_settings = FlowsSettings()
+ tool_settings = ToolSettings()
+ tester_settings = TesterSettings()
+
+ provider = PerfProvider(flows_settings.settings, tool_settings.settings, tester_settings.settings)
+
+ tools = ['pktgen']
+ protocols = ['udp_bw', 'udp_lat']
+
+ for tool in tools:
+ LOG.info(tool)
+ for protocol in protocols:
+ LOG.info(protocol)
+            senders = provider.get_senders(tool, protocol)
+ LOG.info(len(senders))
+ LOG.info(senders)
+
+            receivers = provider.get_receivers(tool, protocol)
+ LOG.info(len(receivers))
+ LOG.info(receivers)
+
+ LOG.info(provider.get_cpuwatcher)
+ LOG.info(provider.get_watchers(tool))
+ LOG.info(provider.get_namespaces(tool))
+ LOG.info(provider.duration(tool))
+
+
+if __name__ == '__main__':
+ unit_test()
diff --git a/vstf/vstf/controller/sw_perf/performance.py b/vstf/vstf/controller/sw_perf/performance.py
new file mode 100755
index 00000000..6ca8160e
--- /dev/null
+++ b/vstf/vstf/controller/sw_perf/performance.py
@@ -0,0 +1,396 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-09-19
+# see license for license details
+
+import time
+import argparse
+import logging
+
+from vstf.controller.sw_perf import model
+from vstf.common import perfmark as mark
+import vstf.common.constants as cst
+from vstf.rpc_frame_work.rpc_producer import Server
+from vstf.controller.settings.flows_settings import FlowsSettings
+from vstf.controller.settings.tool_settings import ToolSettings
+from vstf.controller.settings.perf_settings import PerfSettings
+from vstf.controller.sw_perf.perf_provider import PerfProvider, get_agent_dict
+from vstf.controller.sw_perf.flow_producer import FlowsProducer
+from vstf.controller.settings.tester_settings import TesterSettings
+from vstf.controller.fabricant import Fabricant
+
+LOG = logging.getLogger(__name__)
+
+
+class Performance(object):
+ def __init__(self, conn, provider):
+ self._provider = provider
+ self._conn = conn
+ self._init()
+
+ def _init(self):
+ self._senders = []
+ self._receivers = []
+ self._watchers = []
+ self._cpuwatcher = None
+
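+    # Added comment: create() prepares one test round: clean namespaces on
+    # every agent that takes part, install the required drivers on the
+    # testers, then build the namespaces, senders, receivers, NIC watchers
+    # and the CPU watcher.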
+ def create(self, tool, tpro):
+ self._init()
+ agents = self._provider.get_cleaners(tool, tpro)
+ LOG.info(agents)
+ for agent in agents:
+ cleaner = Fabricant(agent, self._conn)
+ cleaner.clean_all_namespace()
+
+ for tester_info in self._provider.get_testers:
+ dst = tester_info["agent"]
+ params = tester_info["params"]
+ LOG.info(tester_info)
+ driver_mgr = Fabricant(dst, self._conn)
+ ret = driver_mgr.install_drivers(drivers=params["drivers"])
+ LOG.info(ret)
+
+ self.create_namespace(tool)
+ self.create_senders(tool, tpro)
+ self.create_receivers(tool, tpro)
+ self.create_watchers(tool)
+ self.create_cpuwatcher()
+
+ def destory(self, tool):
+ self.clear_namespace(tool)
+
+ def create_namespace(self, tool):
+ devices = self._provider.get_namespaces(tool)
+ agents = get_agent_dict(devices)
+ LOG.info(agents)
+ for device in devices:
+ dst = device["agent"]
+ params = device["params"]
+ if not agents[dst]:
+ model.NetDeviceMgr.clear(dst, self._conn)
+ agents[dst] = True
+
+ model.NetDeviceMgr.add(dst, self._conn, params)
+
+ def clear_namespace(self, tool):
+ devices = self._provider.get_namespaces(tool)
+ for device in devices:
+ dst = device["agent"]
+ params = device["params"]
+ model.NetDeviceMgr.remove(dst, self._conn, params)
+
+ def create_senders(self, tool, tpro):
+ sender_infos = self._provider.get_senders(tool, tpro)
+ LOG.info(sender_infos)
+ for sender_info in sender_infos:
+ dst = sender_info["agent"]
+ params = sender_info["params"]
+ send = model.Sender(dst, self._conn, tool, params)
+ self._senders.append(send)
+
+ def create_receivers(self, tool, tpro):
+ receiver_infos = self._provider.get_receivers(tool, tpro)
+ LOG.info(receiver_infos)
+ for receiver_info in receiver_infos:
+ dst = receiver_info["agent"]
+ params = receiver_info["params"]
+ receive = model.Receiver(dst, self._conn, tool, params)
+ self._receivers.append(receive)
+
+ def create_watchers(self, tool):
+ watcher_infos = self._provider.get_watchers(tool)
+ LOG.info(watcher_infos)
+ for watcher_info in watcher_infos:
+ dst = watcher_info["agent"]
+ params = watcher_info["params"]
+ watch = model.NicWatcher(dst, self._conn, params)
+ self._watchers.append(watch)
+
+ def create_cpuwatcher(self):
+ watcher_info = self._provider.get_cpuwatcher
+ LOG.info(watcher_info)
+ dst = watcher_info["agent"]
+ self._cpuwatcher = model.CpuWatcher(dst, self._conn)
+
+ def start_receivers(self, **kwargs):
+ for receiver in self._receivers:
+ receiver.start(**kwargs)
+
+ def start_senders(self, pktsize, **kwargs):
+ for sender in self._senders:
+ sender.start(pktsize, **kwargs)
+
+ def start_watchers(self):
+ for watcher in self._watchers:
+ watcher.start()
+
+ def stop_receivers(self):
+ for receiver in self._receivers:
+ receiver.stop()
+
+ def stop_senders(self):
+ for sender in self._senders:
+ sender.stop()
+
+ def stop_watchers(self):
+ for watcher in self._watchers:
+ watcher.stop()
+
+ def start_cpuwatcher(self):
+ if self._cpuwatcher:
+ self._cpuwatcher.start()
+
+ def stop_cpuwatcher(self):
+ if self._cpuwatcher:
+ self._cpuwatcher.stop()
+
+ def getlimitspeed(self, ptype, size):
+ return 0
+
+ def affctl(self):
+ ctl = self._provider.get_cpu_affctl
+ LOG.info(ctl)
+ driver_mgr = Fabricant(ctl["agent"], self._conn)
+ ret = driver_mgr.affctl_load(policy=ctl["params"]["policy"])
+ LOG.info(ret)
+
+ def run_pre_affability_settings(self, tool, tpro, pktsize, **kwargs):
+ LOG.info("run_pre_affability_settings start")
+ self.create(tool, tpro)
+ self.start_receivers()
+ self.start_senders(pktsize, **kwargs)
+ self.affctl()
+ time.sleep(2)
+ self.stop_senders()
+ self.stop_receivers()
+ self.destory(tool)
+ LOG.info("run_pre_affability_settings end")
+
+ def run_bandwidth_test(self, tool, tpro, pktsize, **kwargs):
+ LOG.info("run_bandwidth_test ")
+ self.create(tool, tpro)
+ self.start_receivers()
+ self.start_senders(pktsize, **kwargs)
+ time.sleep(self._provider.wait_balance(tool))
+ self.start_watchers()
+ self.start_cpuwatcher()
+ time.sleep(self._provider.duration(tool))
+ self.stop_watchers()
+ self.stop_cpuwatcher()
+ self.stop_senders()
+ self.stop_receivers()
+ self.destory(tool)
+ LOG.info("run_bandwidth_test end")
+
+ def run_latency_test(self, tool, tpro, pktsize, **kwargs):
+ LOG.info("run_latency_test start")
+ self.create(tool, tpro)
+ self.start_receivers()
+ self.start_senders(pktsize, **kwargs)
+ time.sleep(self._provider.duration(tool))
+ self.stop_senders()
+ self.stop_receivers()
+ self.destory(tool)
+ LOG.info("run_latency_test end")
+
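+    # Added comment: run() drives a complete case: an optional CPU-affinity
+    # warm-up pass, then per packet size either a bandwidth test followed by
+    # a qperf latency test (throughput/frameloss) or a standalone latency test.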
+ def run(self, tool, protocol, ttype, sizes, affctl=False):
+ result = {}
+ if affctl:
+ pre_tpro = protocol + "_bw"
+ size = sizes[0]
+ self.run_pre_affability_settings(tool, pre_tpro, size, ratep=0)
+
+ for size in sizes:
+ if ttype in ['throughput', 'frameloss']:
+ realspeed = self.getlimitspeed(ttype, size)
+ bw_tpro = protocol + "_bw"
+ bw_type = ttype
+ self.run_bandwidth_test(tool, bw_tpro, size, ratep=realspeed)
+ bw_result = self.result(tool, bw_type)
+
+ lat_tool = "qperf"
+ lat_type = 'latency'
+ lat_tpro = protocol + '_lat'
+ self.run_latency_test(lat_tool, lat_tpro, size, ratep=realspeed)
+ lat_result = self.result(tool, lat_type)
+ LOG.info(bw_result)
+ LOG.info(lat_result)
+ lat_result.pop('OfferedLoad')
+ bw_result.update(lat_result)
+ result[size] = bw_result
+
+ elif ttype in ['latency']:
+ lat_tpro = protocol + '_lat'
+ lat_type = ttype
+ self.run_latency_test(tool, lat_tpro, size, ratep=None)
+ lat_result = self.result(tool, lat_type)
+ result[size] = lat_result
+ else:
+ raise Exception("error:protocol type:%s" % (ttype))
+ return result
+
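+    # Added comment: result() folds the raw watcher data into one record:
+    # bandwidth-type tests sum the per-NIC packet and byte counters and derive
+    # the loss ratio and Mpps/GHz from the CPU sample; latency tests average
+    # the min/avg/max values reported by the senders.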
+ def result(self, tool, ttype):
+ if ttype in {'throughput', 'frameloss'}:
+ record = {
+ mark.rxCount: 0,
+ mark.txCount: 0,
+ mark.bandwidth: 0,
+ mark.offLoad: 100.0,
+ mark.mppsGhz: 0,
+ mark.percentLoss: 0,
+ mark.avgLatency: 0,
+ mark.maxLatency: 0,
+ mark.minLatency: 0,
+ mark.rxMbps:0,
+ mark.txMbps:0
+ }
+
+ cpu_data = self._cpuwatcher.result()
+ print self._cpuwatcher, cpu_data
+        cpu_mhz = 0
+        if cpu_data:
+ cpu_usage = cpu_data['cpu_num'] * (100 - cpu_data['idle'])
+ cpu_mhz = cpu_data['cpu_mhz']
+ record[mark.cpu] = round(cpu_usage, cst.CPU_USAGE_ROUND)
+ record[mark.duration] = self._provider.duration(tool)
+
+ for watcher in self._watchers:
+ nic_data = watcher.result()
+ record[mark.rxCount] += nic_data['rxpck']
+ record[mark.txCount] += nic_data['txpck']
+ record[mark.bandwidth] += nic_data['rxpck/s']
+ record[mark.rxMbps] += nic_data['rxmB/s']
+ record[mark.txMbps] += nic_data['txmB/s']
+
+ if record[mark.txCount]:
+ record[mark.percentLoss] = round(100 * (1 - record[mark.rxCount] / record[mark.txCount]),
+ cst.PKTLOSS_ROUND)
+ else:
+ record[mark.percentLoss] = 100
+
+ record[mark.bandwidth] /= 1000000.0
+ if cpu_mhz and record[mark.cpu]:
+ record[mark.mppsGhz] = round(record[mark.bandwidth] / (record[mark.cpu] * cpu_mhz / 100000),
+ cst.CPU_USAGE_ROUND)
+
+ record[mark.bandwidth] = round(record[mark.bandwidth], cst.RATEP_ROUND)
+
+ elif ttype in {'latency'}:
+ record = {
+ mark.offLoad: 0.0,
+ mark.avgLatency: 0,
+ mark.maxLatency: 0,
+ mark.minLatency: 0
+ }
+ minlatency, avglatency, maxlatency = 0, 0, 0
+ count = 0
+ for sender in self._senders:
+ info = sender.result()
+ LOG.info(info)
+ minlatency += info[mark.minLatency]
+ avglatency += info[mark.avgLatency]
+                maxlatency += info[mark.maxLatency]
+                count += 1
+ count = 1 if not count else count
+ record[mark.minLatency] = round(minlatency / count, cst.TIME_ROUND)
+ record[mark.avgLatency] = round(avglatency / count, cst.TIME_ROUND)
+ record[mark.maxLatency] = round(maxlatency / count, cst.TIME_ROUND)
+
+ else:
+            raise Exception('unsupported test type: %s' % ttype)
+
+ LOG.info('record:%s' % record)
+ return record
+
+
+def unit_test():
+ from vstf.common.log import setup_logging
+ setup_logging(level=logging.DEBUG, log_file="/var/log/vstf/vstf-sw_perf.log", clevel=logging.INFO)
+
+ conn = Server("192.168.188.10")
+ perf_settings = PerfSettings()
+ flows_settings = FlowsSettings()
+ tool_settings = ToolSettings()
+ tester_settings = TesterSettings()
+ flow_producer = FlowsProducer(conn, flows_settings)
+ provider = PerfProvider(flows_settings.settings, tool_settings.settings, tester_settings.settings)
+ perf = Performance(conn, provider)
+ tests = perf_settings.settings
+ for scenario, cases in tests.items():
+ if not cases:
+ continue
+ for case in cases:
+ casetag = case['case']
+ tool = case['tool']
+ protocol = case['protocol']
+ profile = case['profile']
+ ttype = case['type']
+ sizes = case['sizes']
+
+ flow_producer.create(scenario, casetag)
+ result = perf.run(tool, protocol, ttype, sizes)
+ LOG.info(result)
+
+
+def main():
+ from vstf.common.log import setup_logging
+ setup_logging(level=logging.DEBUG, log_file="/var/log/vstf/vstf-performance.log", clevel=logging.INFO)
+ from vstf.controller.database.dbinterface import DbManage
+ parser = argparse.ArgumentParser(add_help=True)
+ parser.add_argument("case",
+ action="store",
+ help="test case like Ti-1, Tn-1, Tnv-1, Tu-1...")
+ parser.add_argument("tool",
+ action="store",
+ choices=cst.TOOLS,
+ )
+ parser.add_argument("protocol",
+ action="store",
+ choices=cst.TPROTOCOLS,
+ )
+ parser.add_argument("profile",
+ action="store",
+ choices=cst.PROFILES,
+ )
+ parser.add_argument("type",
+ action="store",
+ choices=cst.TTYPES,
+ )
+ parser.add_argument("sizes",
+ action="store",
+ default="64",
+ help='test size list "64 128"')
+ parser.add_argument("--affctl",
+ action="store_true",
+ help="when input '--affctl', the performance will do affctl before testing")
+ parser.add_argument("--monitor",
+ dest="monitor",
+ default="localhost",
+ action="store",
+ help="which ip to be monitored")
+ args = parser.parse_args()
+
+ LOG.info(args.monitor)
+ conn = Server(host=args.monitor)
+ db_mgr = DbManage()
+
+ casetag = args.case
+ tool = args.tool
+ protocol = args.protocol
+ profile = args.profile
+ ttype = args.type
+ sizes = map(lambda x: int(x), args.sizes.strip().split())
+
+ flows_settings = FlowsSettings()
+ tool_settings = ToolSettings()
+ tester_settings = TesterSettings()
+ flow_producer = FlowsProducer(conn, flows_settings)
+ provider = PerfProvider(flows_settings.settings, tool_settings.settings, tester_settings.settings)
+ perf = Performance(conn, provider)
+ scenario = db_mgr.query_scenario(casetag)
+ flow_producer.create(scenario, casetag)
+ LOG.info(flows_settings.settings)
+    result = perf.run(tool, protocol, ttype, sizes, args.affctl)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/vstf/vstf/controller/sw_perf/raw_data.py b/vstf/vstf/controller/sw_perf/raw_data.py
new file mode 100755
index 00000000..dab749eb
--- /dev/null
+++ b/vstf/vstf/controller/sw_perf/raw_data.py
@@ -0,0 +1,124 @@
+import subprocess
+import re
+import logging
+
+LOG = logging.getLogger(__name__)
+
+
+class RawDataProcess(object):
+ def __init__(self):
+ pass
+
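+    # Added comment: parses the summary block printed by `vnstat -i <dev> -l`
+    # when it is interrupted, normalising byte counters towards MiB
+    # (GiB*1024, KiB/1024) and rates towards Mbit/s (Gbit*1000, Kbit/1000);
+    # packet rates stay in p/s.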
+ def process_vnstat(self, data):
+ buf = data.splitlines()
+ buf = buf[9:]
+ buf = ' '.join(buf)
+ m = {}
+ digits = re.compile(r"\d{1,}\.?\d*")
+ units = re.compile(r"(?:gib|mib|kib|kbit/s|gbit/s|mbit/s|p/s)", re.IGNORECASE | re.MULTILINE)
+ units_arr = units.findall(buf)
+ LOG.debug(units_arr)
+ digits_arr = digits.findall(buf)
+
+ for i in range(len(digits_arr)):
+ digits_arr[i] = round(float(digits_arr[i]), 2)
+
+ LOG.info("-------------digit_arr------------------")
+ LOG.info(digits_arr)
+ LOG.info(units_arr)
+ LOG.info("-----------------------------------------")
+ m['rxpck'], m['txpck'] = digits_arr[8], digits_arr[9]
+ m['time'] = digits_arr[-1]
+ digits_arr = digits_arr[:8] + digits_arr[10:-1]
+ index = 0
+ for unit in units_arr:
+ unit = unit.lower()
+ if unit == 'gib':
+ digits_arr[index] *= 1024
+ elif unit == 'kib':
+ digits_arr[index] /= 1024
+ elif unit == 'gbit/s':
+ digits_arr[index] *= 1000
+ elif unit == 'kbit/s':
+ digits_arr[index] /= 1000
+ else:
+ pass
+ index += 1
+
+ for i in range(len(digits_arr)):
+ digits_arr[i] = round(digits_arr[i], 2)
+
+ m['rxmB'], m['txmB'] = digits_arr[0:2]
+ m['rxmB_max/s'], m['txmB_max/s'] = digits_arr[2:4]
+ m['rxmB/s'], m['txmB/s'] = digits_arr[4:6]
+ m['rxmB_min/s'], m['txmB_min/s'] = digits_arr[6:8]
+ m['rxpck_max/s'], m['txpck_max/s'] = digits_arr[8:10]
+ m['rxpck/s'], m['txpck/s'] = digits_arr[10:12]
+ m['rxpck_min/s'], m['txpck_min/s'] = digits_arr[12:14]
+ LOG.info("---------------vnstat data start-------------")
+ LOG.info(m)
+ LOG.info("---------------vnstat data end---------------")
+ return m
+
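+    # Added comment: pairs the column headers of the `sar -u <interval>`
+    # output with the trailing "Average:" row, returning e.g.
+    # {'user': 0.5, 'idle': 98.7, ...}.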
+ def process_sar_cpu(self, raw):
+ lines = raw.splitlines()
+ # print lines
+ head = lines[2].split()[3:]
+ average = lines[-1].split()[2:]
+ data = {}
+ for h, d in zip(head, average):
+ data[h.strip('%')] = float(d)
+ return data
+
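+    # Added comment: expects the qperf result line "<metric> = <value> <unit>"
+    # as the second line of the raw output and returns
+    # {<metric>: <value>, 'unit': <unit>}.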
+ def process_qperf(self, raw):
+ buf = raw.splitlines()
+ data = buf[1].strip().split()
+ key = data[0]
+ value = float(data[2])
+ unit = data[3]
+ return {key: value, 'unit': unit}
+
+ @classmethod
+ def process(cls, raw):
+ self = cls()
+ tool, data_type, data = raw['tool'], raw['type'], raw['raw_data']
+ m = {}
+ if tool == 'vnstat' and data_type == 'nic':
+ m = self.process_vnstat(data)
+ if tool == 'sar' and data_type == 'cpu':
+ m = self.process_sar_cpu(data)
+            if 'cpu_num' in raw:
+                m['cpu_num'] = raw['cpu_num']
+            if 'cpu_mhz' in raw:
+                m['cpu_mhz'] = raw['cpu_mhz']
+ if tool == 'qperf':
+ m = self.process_qperf(data)
+ return m
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG)
+ p = RawDataProcess()
+ cmd = "vnstat -i eth0 -l"
+ child = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE)
+ import time
+ import os
+ from signal import SIGINT
+
+ time.sleep(20)
+ os.kill(child.pid, SIGINT)
+ data = child.stdout.read()
+ print data
+ print p.process_vnstat(data)
+
+ cmd = "sar -u 2"
+ child = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+
+ time.sleep(20)
+ os.kill(child.pid, SIGINT)
+ data = child.stdout.read()
+ print data
+ print p.process_sar_cpu(data)
diff --git a/vstf/vstf/controller/unittest/README b/vstf/vstf/controller/unittest/README
new file mode 100755
index 00000000..f9414e95
--- /dev/null
+++ b/vstf/vstf/controller/unittest/README
@@ -0,0 +1,49 @@
+"""
+Created on 2015-9-28
+
+@author: y00228926
+"""
+
+The procedure to integrate a module's unit tests into the unit testing framework:
+
+1. Create your own unit test module; its name should start with 'test', for example test_env.py.
+
+2. Create the test cases inside the module by inheriting unittest.TestCase, for example:
+  class TestNetnsManager(unittest.TestCase):
+      def setUp(self):      # prepare the testing
+          pass
+      def tearDown(self):   # clean up after testing
+          pass
+      def testCase1(self):  # test cases
+          pass
+
+3. For single-module testing, append the code below at the end of the module and execute 'python test_env.py'.
+
+if __name__ == "__main__":
+ import logging
+ logging.getLogger(__name__)
+ logging.basicConfig(level = logging.DEBUG)
+ unittest.main()
+
+4. For multi-module integration, create run_test.py; example code as below:
+
+import unittest
+import importlib
+
+test_order_list = [
+ "vstf.services.agent.unittest.perf.test_utils",
+ "vstf.services.agent.unittest.perf.test_netns",
+ "vstf.services.agent.unittest.perf.test_netperf",
+ "vstf.services.agent.unittest.perf.test_qperf",
+ "vstf.services.agent.unittest.perf.test_pktgen",
+]
+
+if __name__ == '__main__':
+ import logging
+ logging.getLogger(__name__)
+ logging.basicConfig(level = logging.DEBUG)
+ for mod_name in test_order_list:
+ mod = importlib.import_module(mod_name)
+ suit = unittest.TestLoader().loadTestsFromModule(mod)
+ unittest.TextTestRunner().run(suit)
+
diff --git a/vstf/vstf/controller/unittest/__init__.py b/vstf/vstf/controller/unittest/__init__.py
new file mode 100755
index 00000000..89dcd4e2
--- /dev/null
+++ b/vstf/vstf/controller/unittest/__init__.py
@@ -0,0 +1,14 @@
+# Copyright Huawei Technologies Co., Ltd. 1998-2015.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the License); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an AS IS BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
diff --git a/vstf/vstf/controller/unittest/configuration.py b/vstf/vstf/controller/unittest/configuration.py
new file mode 100755
index 00000000..9364bbbf
--- /dev/null
+++ b/vstf/vstf/controller/unittest/configuration.py
@@ -0,0 +1,17 @@
+"""
+Created on 2015-9-24
+
+@author: y00228926
+"""
+
+rabbit_mq_server = '192.168.188.10'
+
+tester_host = '192.168.188.14'
+
+target_host = '192.168.188.16'
+
+source_repo = {
+ "passwd": "root",
+ "ip": "192.168.188.10",
+ "user": "root"
+} \ No newline at end of file
diff --git a/vstf/vstf/controller/unittest/model.py b/vstf/vstf/controller/unittest/model.py
new file mode 100755
index 00000000..c4e992ce
--- /dev/null
+++ b/vstf/vstf/controller/unittest/model.py
@@ -0,0 +1,27 @@
+"""
+Created on 2015-9-28
+
+@author: y00228926
+"""
+import unittest
+
+from vstf.rpc_frame_work import rpc_producer
+from vstf.controller.unittest import configuration
+
+
+class Test(unittest.TestCase):
+
+ def setUp(self):
+ self.controller = configuration.rabbit_mq_server
+ self.tester_host = configuration.tester_host
+ self.target_host = configuration.target_host
+ self.source_repo = configuration.source_repo
+ self.conn = rpc_producer.Server(self.controller)
+
+ def tearDown(self):
+ self.conn.close()
+
+
+if __name__ == "__main__":
+ #import sys;sys.argv = ['', 'Test.testName']
+ unittest.main() \ No newline at end of file
diff --git a/vstf/vstf/controller/unittest/test_cfg_intent_parse.py b/vstf/vstf/controller/unittest/test_cfg_intent_parse.py
new file mode 100755
index 00000000..665732aa
--- /dev/null
+++ b/vstf/vstf/controller/unittest/test_cfg_intent_parse.py
@@ -0,0 +1,32 @@
+"""
+Created on 2015-10-14
+
+@author: y00228926
+"""
+import os
+import unittest
+
+from vstf.controller.unittest import model
+from vstf.controller.env_build.cfg_intent_parse import IntentParser
+
+
+class Test(model.Test):
+ def setUp(self):
+ super(Test, self).setUp()
+ self.dir = os.path.dirname(__file__)
+
+ def tearDown(self):
+ super(Test, self).tearDown()
+
+ def test_parse_cfg_file(self):
+ for m in ['Ti', 'Tu', 'Tn', 'Tnv']:
+ filepath = os.path.join(self.dir, 'configuration/env/%s.json' % m)
+ parser = IntentParser(filepath)
+ parser.parse_cfg_file()
+
+
+if __name__ == "__main__":
+ import logging
+
+ logging.basicConfig(level=logging.INFO)
+ unittest.main() \ No newline at end of file
diff --git a/vstf/vstf/controller/unittest/test_collect.py b/vstf/vstf/controller/unittest/test_collect.py
new file mode 100755
index 00000000..f1d54a95
--- /dev/null
+++ b/vstf/vstf/controller/unittest/test_collect.py
@@ -0,0 +1,41 @@
+"""
+Created on 2015-9-28
+
+@author: y00228926
+"""
+import unittest
+import json
+
+from vstf.controller.env_build import env_collect
+from vstf.controller.unittest import model
+
+
+class TestCollect(model.Test):
+
+ def setUp(self):
+ super(TestCollect, self).setUp()
+ self.obj = env_collect.EnvCollectApi(self.conn)
+
+ def test_collect_host_info(self):
+ ret_str = json.dumps(self.obj.collect_host_info(self.tester_host), indent = 4)
+ for key in ("CPU INFO","MEMORY INFO","HW_INFO","OS INFO"):
+ self.assertTrue(key in ret_str, "collect_host_info failed, ret_str = %s" % ret_str)
+
+ def test_list_nic_devices(self):
+ ret_str = json.dumps(self.obj.list_nic_devices(self.tester_host), indent = 4)
+ for key in ("device","mac","bdf","desc"):
+ self.assertTrue(key in ret_str, "list_nic_devices failed, ret_str = %s" % ret_str)
+ print ret_str
+
+ def test_get_device_detail(self):
+ identity = "01:00.0"
+ ret = self.obj.get_device_detail(self.tester_host, "01:00.0")
+ for key in ("device","mac","bdf","desc"):
+ self.assertTrue(key in ret)
+ self.assertTrue(ret['bdf'] == identity)
+
+
+if __name__ == "__main__":
+ import logging
+ logging.basicConfig(level = logging.INFO)
+ unittest.main() \ No newline at end of file
diff --git a/vstf/vstf/controller/unittest/test_driver_function.py b/vstf/vstf/controller/unittest/test_driver_function.py
new file mode 100755
index 00000000..05804421
--- /dev/null
+++ b/vstf/vstf/controller/unittest/test_driver_function.py
@@ -0,0 +1,27 @@
+"""
+Created on 2015-10-27
+@author: l00190809
+"""
+import logging
+import unittest
+import json
+
+from vstf.controller.functiontest.driver.drivertest import config_setup
+from vstf.controller.unittest import model
+
+
+class TestDriverFunction(model.Test):
+ def setUp(self):
+ logging.info("start driver function test unit test.")
+
+ def test_config_setup(self):
+        config, _ = config_setup()
+ for key in ("test_scene","bond_flag","switch_module"):
+ self.assertTrue(key in config.keys(), "config_setup function failure.")
+
+    def tearDown(self):
+ logging.info("stop driver function test unit test.")
+
+if __name__ == "__main__":
+ import logging
+ logging.basicConfig(level = logging.INFO)
+ unittest.main() \ No newline at end of file
diff --git a/vstf/vstf/controller/unittest/test_env_build.py b/vstf/vstf/controller/unittest/test_env_build.py
new file mode 100755
index 00000000..2d2d882b
--- /dev/null
+++ b/vstf/vstf/controller/unittest/test_env_build.py
@@ -0,0 +1,55 @@
+'''
+Created on 2015-9-28
+
+@author: y00228926
+'''
+import unittest
+import os
+
+from vstf.controller.unittest import model
+from vstf.controller.env_build import env_build
+
+
+class TestEnvBuilder(model.Test):
+ def setUp(self):
+ super(TestEnvBuilder, self).setUp()
+ self.dir = os.path.dirname(__file__)
+
+ @unittest.skip('for now')
+ def test_build_tn(self):
+ filepath = os.path.join(self.dir,'../../../etc/vstf/env/Tn.json')
+ self.mgr = env_build.EnvBuildApi(self.conn, filepath)
+ ret = self.mgr.build()
+ self.assertTrue(ret, "build_tn failed,ret = %s" % ret)
+
+ @unittest.skip('for now')
+ def test_build_tn1v(self):
+ filepath = os.path.join(self.dir,'../../../etc/vstf/env/Tnv.json')
+ self.mgr = env_build.EnvBuildApi(self.conn, filepath)
+ ret = self.mgr.build()
+ self.assertTrue(ret, "build_tn1v failed,ret = %s" % ret)
+
+ @unittest.skip('for now')
+ def test_build_ti(self):
+ filepath = os.path.join(self.dir,'../../../etc/vstf/env/Ti.json')
+ self.mgr = env_build.EnvBuildApi(self.conn, filepath)
+ ret = self.mgr.build()
+ self.assertTrue(ret, "build_ti failed,ret = %s" % ret)
+
+ @unittest.skip('for now')
+ def test_build_tu(self):
+ filepath = os.path.join(self.dir,'../../../etc/vstf/env/Tu.json')
+ self.mgr = env_build.EnvBuildApi(self.conn, filepath)
+ ret = self.mgr.build()
+ self.assertTrue(ret, "build_tu failed,ret = %s" % ret)
+
+ def test_build_tu_bridge(self):
+ filepath = os.path.join(self.dir,'../../../etc/vstf/env/Tu_br.json')
+ self.mgr = env_build.EnvBuildApi(self.conn, filepath)
+ ret = self.mgr.build()
+ self.assertTrue(ret, "build_tu failed,ret = %s" % ret)
+
+if __name__ == "__main__":
+ import logging
+ logging.basicConfig(level = logging.INFO)
+ unittest.main() \ No newline at end of file
diff --git a/vstf/vstf/controller/unittest/test_perf.py b/vstf/vstf/controller/unittest/test_perf.py
new file mode 100755
index 00000000..5a54b826
--- /dev/null
+++ b/vstf/vstf/controller/unittest/test_perf.py
@@ -0,0 +1,120 @@
+#!/usr/bin/python
+# -*- coding: utf8 -*-
+# author: wly
+# date: 2015-10-30
+# see license for license details
+
+import unittest
+import os
+import logging
+
+from vstf.controller.unittest import model
+from vstf.controller.settings.flows_settings import FlowsSettings
+from vstf.controller.settings.tool_settings import ToolSettings
+from vstf.controller.settings.perf_settings import PerfSettings
+from vstf.controller.sw_perf.perf_provider import PerfProvider
+from vstf.controller.sw_perf.flow_producer import FlowsProducer
+from vstf.controller.settings.tester_settings import TesterSettings
+from vstf.controller.env_build.env_build import EnvBuildApi as Builder
+from vstf.common.log import setup_logging
+import vstf.controller.sw_perf.performance as pf
+
+LOG = logging.getLogger(__name__)
+
+
+class TestPerf(model.Test):
+
+ def setUp(self):
+ LOG.info("start performance unit test.")
+ super(TestPerf, self).setUp()
+ self.dir = os.path.dirname(__file__)
+ self.perf_path = os.path.join(self.dir, '../../../etc/vstf/perf')
+ self.base_path = os.path.join(self.dir, '../../../etc/vstf/env')
+
+    def tearDown(self):
+ LOG.info("stop performance unit test.")
+
+ @unittest.skip('for now')
+ def test_batch_perf(self):
+
+ LOG.info(self.perf_path)
+ LOG.info(self.base_path)
+ perf_settings = PerfSettings(path=self.perf_path)
+ flows_settings = FlowsSettings(path=self.perf_path)
+ tool_settings = ToolSettings(path=self.base_path)
+ tester_settings = TesterSettings(path=self.base_path)
+ flow_producer = FlowsProducer(self.conn, flows_settings)
+ provider = PerfProvider(flows_settings.settings, tool_settings.settings, tester_settings.settings)
+ perf = pf.Performance(self.conn, provider)
+ tests = perf_settings.settings
+ for scenario, cases in tests.items():
+ if not cases:
+ continue
+
+ config_file = os.path.join(self.base_path, scenario + '.json')
+
+ LOG.info(config_file)
+
+ env = Builder(self.conn, config_file)
+ env.build()
+
+ for case in cases:
+ casetag = case['case']
+ tool = case['tool']
+ protocol = case['protocol']
+ profile = case['profile']
+ ttype = case['type']
+ sizes = case['sizes']
+
+ flow_producer.create(scenario, casetag)
+ result = perf.run(tool, protocol, ttype, sizes)
+ self.assertEqual(True, isinstance(result, dict))
+ LOG.info(result)
+
+ @unittest.skip('for now')
+ def test_perf_settings(self):
+ perf_settings = PerfSettings()
+ self.assertEqual(True, perf_settings.input())
+
+ def test_tool_settings(self):
+ tool_settings = ToolSettings()
+ value = {
+ "time": 20,
+ "threads": 1
+ }
+ tool_settings.set_pktgen(value)
+ tool_settings.set_netperf(value)
+ tool_settings.set_iperf(value)
+ tool_settings.set_qperf(value)
+ LOG.info(tool_settings.settings)
+
+ def test_flow_settings(self):
+ tests = {
+ "Tn": ["Tn-1", "Tn-2", "Tn-3", "Tn-4"],
+ "Tnv": ["Tnv-1", "Tnv-2", "Tnv-3", "Tnv-4"],
+ "Ti": ["Ti-1", "Ti-2", "Ti-3", "Ti-4", "Ti-5", "Ti-6"],
+ "Tu": ["Tu-1", "Tu-2", "Tu-3", "Tu-4", "Tu-5", "Tu-6"]
+ }
+ flows_settings = FlowsSettings(path=self.perf_path)
+ flow_producer = FlowsProducer(self.conn, flows_settings)
+
+ for scenario, cases in tests.items():
+ if not cases:
+ continue
+
+ config_file = os.path.join(self.base_path, scenario + '.json')
+
+ LOG.info(config_file)
+
+ env = Builder(self.conn, config_file)
+ env.build()
+
+ for case in cases:
+ LOG.info(case)
+ flow_producer.create(scenario, case)
+ LOG.info(flows_settings.settings)
+
+
+if __name__ == "__main__":
+ setup_logging(level=logging.INFO, log_file="/var/log/vstf/vstf-unit-test.log", clevel=logging.INFO)
+ unittest.main() \ No newline at end of file
diff --git a/vstf/vstf/controller/unittest/test_ssh.py b/vstf/vstf/controller/unittest/test_ssh.py
new file mode 100755
index 00000000..844f8ff5
--- /dev/null
+++ b/vstf/vstf/controller/unittest/test_ssh.py
@@ -0,0 +1,32 @@
+"""
+Created on 2015-10-10
+
+@author: y00228926
+"""
+import unittest
+
+from vstf.common import ssh
+from vstf.controller.unittest import model
+
+
+class Test(model.Test):
+
+ def setUp(self):
+ super(Test, self).setUp()
+ self.host = self.source_repo["ip"]
+ self.user = self.source_repo["user"]
+ self.passwd = self.source_repo["passwd"]
+
+
+ def tearDown(self):
+ super(Test, self).tearDown()
+
+
+ def test_run_cmd(self):
+ ssh.run_cmd(self.host, self.user, self.passwd, 'ls')
+
+
+if __name__ == "__main__":
+ import logging
+ logging.basicConfig(level = logging.INFO)
+ unittest.main() \ No newline at end of file
diff --git a/vstf/vstf/controller/vstfadm.py b/vstf/vstf/controller/vstfadm.py
new file mode 100755
index 00000000..068ab2e0
--- /dev/null
+++ b/vstf/vstf/controller/vstfadm.py
@@ -0,0 +1,270 @@
+import sys
+import logging
+import json
+from vstf.common.vstfcli import VstfParser
+from vstf.common import cliutil, constants, unix, message
+from vstf.common.log import setup_logging
+import vstf.common.constants as cst
+import pprint
+
+CONN = None
+
+
+def print_stdout(msg):
+ # out = json.dumps(message.get_body(message.decode(msg)), indent=2)
+ out = message.get_body(message.decode(msg))
+ pprint.pprint(out, indent=2)
+
+
+def call(msg):
+ """msg must be a dict"""
+ msg = message.add_context(msg, corr=message.gen_corrid())
+ CONN.send(message.encode(msg))
+ return message.decode(CONN.recv())
+
+
+def make_msg(method, **kwargs):
+ return {"method": method, "args": kwargs}
+
+
+@cliutil.arg("--host", dest="host", default="", action="store", help="list nic devices of specified host")
+def do_list_devs(args):
+ """List the host's all netdev."""
+ ret = call(make_msg("list_devs", host=args.host))
+ print_stdout(ret)
+
+
+@cliutil.arg("--host", dest="host", action="store", default=None,
+ help="which host to run src_install.")
+@cliutil.arg("--config_file", dest="config_file", action="store", default=None,
+ help="the git repo config.")
+def do_src_install(args):
+ """work agent to pull source code and compile.
+ use git as underlying mechanism, please make sure the host has access to git repo.
+ """
+ ret = call(make_msg("src_install", host=args.host, config_file=args.config_file))
+ print_stdout(ret)
+
+
+@cliutil.arg("--host", dest="host", action="store", default=None,
+ help="which host to build, must exists in your config file, use default[None] value to build all hosts.")
+@cliutil.arg("--model", dest="model", action="store", choices=('Tn', 'Ti', 'Tu', 'Tnv'),
+ help="which model to build, if specified, the according config file /etc/vstf/env/{model}.json must exist.")
+@cliutil.arg("--config_file", dest="config_file", action="store", default=None,
+ help="if specified, the config file will replace the default config file from /etc/vstf/env.")
+def do_apply_model(args):
+ """Apply model to the host."""
+ ret = call(make_msg("apply_model", host=args.host, model=args.model, config_file=args.config_file))
+ print_stdout(ret)
+
+
+@cliutil.arg("--host", dest="host", action="store", default=None,
+ help="to which host you wish to create images")
+@cliutil.arg("--config_file", dest="config_file", action="store", default=None,
+ help="configuration file for image creation.")
+def do_create_images(args):
+ """create images on host, images are configed by configuration file."""
+ ret = call(make_msg("create_images", host=args.host, config_file=args.config_file))
+ print_stdout(ret)
+
+
+@cliutil.arg("--host", dest="host", action="store", default=None,
+ help="to which host you wish to clean images")
+@cliutil.arg("--config_file", dest="config_file", action="store", default=None,
+ help="configuration file for images.")
+def do_clean_images(args):
+ """clean images on host, images are configed by configuration file."""
+ ret = call(make_msg("clean_images", host=args.host, config_file=args.config_file))
+ print_stdout(ret)
+
+
+@cliutil.arg("--host", dest="host", action="store", default=None,
+ help="which host to clean, must exists in your config file, use default[None] value to clean all hosts.")
+@cliutil.arg("--model", dest="model", action="store", choices=('Tn', 'Ti', 'Tu', 'Tnv'),
+ help="if specified, the according config file /etc/vstf/env/{model}.json must exist.")
+@cliutil.arg("--config_file", dest="config_file", action="store", default=None,
+ help="if specified, the config file will replace the default config file from /etc/vstf/env.")
+def do_disapply_model(args):
+ """Apply model to the host."""
+ ret = call(make_msg("disapply_model", host=args.host, model=args.model, config_file=args.config_file))
+ print_stdout(ret)
+
+
+@cliutil.arg("--host", dest="host", action="store", help="collect host information about cpu/mem etc")
+def do_collect_host_info(args):
+ """Show the host's CPU/MEN info"""
+ ret = call(make_msg("collect_host_info", target=args.host))
+ print_stdout(ret)
+
+
+def do_show_tasks(args):
+ """List history performance test tasks. Can be used by report cmd to generate reports.
+ """
+ ret = call(make_msg("list_tasks"))
+ print_stdout(ret)
+
+
+@cliutil.arg("case", action="store", help="test case like Ti-1, Tn-1, Tnv-1, Tu-1, see case definition in documents")
+@cliutil.arg("tool", action="store", choices=cst.TOOLS, )
+@cliutil.arg("protocol", action="store", choices=cst.TPROTOCOLS, )
+@cliutil.arg("profile", action="store", choices=cst.PROFILES, )
+@cliutil.arg("type", action="store", choices=cst.TTYPES)
+@cliutil.arg("sizes", action="store", default="64", help='test size list "64 128"')
+@cliutil.arg("--affctl", action="store_true", help="when affctl is True, it will do affctl before testing")
+def do_perf_test(args):
+ """Runs a quick single software performance test without envbuild and generating reports.
+ Outputs the result to the stdout immediately."""
+ case_info = {
+ 'case': args.case,
+ 'tool': args.tool,
+ 'protocol': args.protocol,
+ 'profile': args.profile,
+ 'type': args.type,
+ 'sizes': map(lambda x: int(x), args.sizes.strip().split())
+ }
+ ret = call(make_msg("run_perf_cmd",
+ case=case_info,
+ rpath=cst.REPORT_DEFAULTS,
+ affctl=args.affctl,
+ build_on=False,
+ save_on=False,
+ report_on=False,
+ mail_on=False
+ ))
+ print_stdout(ret)
+
+
+@cliutil.arg("-rpath",
+ help="path of result",
+ default=cst.REPORT_DEFAULTS,
+ action="store")
+@cliutil.arg("--report_off",
+ help="when report_off is True, it will not generate the report",
+ action="store_true")
+@cliutil.arg("--mail_off",
+ help="when mail_off is True, it will not send mail",
+ action="store_true")
+@cliutil.arg("--affctl",
+ help="when affctl is True, it will do affctl before testing",
+ action="store_true")
+def do_batch_perf_test(args):
+ """run soft performance test cases defined in /etc/vstf/perf/sw_perf.batch-settings"""
+ ret = call(make_msg("run_perf_file",
+ affctl=args.affctl,
+ rpath=args.rpath,
+ report_on=not args.report_off,
+ mail_on=not args.mail_off
+ ))
+ print_stdout(ret)
+
+
+@cliutil.arg('-rpath',
+ action='store',
+ default=cst.REPORT_DEFAULTS,
+ help=" the path name of test results ")
+@cliutil.arg("--mail_off",
+ help="when mail_off is True, it will not send mail",
+ action="store_true")
+@cliutil.arg("--taskid",
+ help="report depend of a history task id",
+ default=-1,
+ action="store")
+def do_report(args):
+ """generate the report from the database"""
+ ret = call(make_msg("report",
+ rpath=args.rpath,
+ mail_off=args.mail_off,
+ taskid=args.taskid
+ ))
+ print_stdout(ret)
+
+
+@cliutil.arg("--conner",
+ dest="conner",
+ action="store",
+ help="tester")
+@cliutil.arg("--measurand",
+ dest="measurand",
+ action="store",
+ help="tested")
+@cliutil.arg("-m", "--model",
+ dest="model",
+ action="store",
+ help="Test scene name : Tnv")
+@cliutil.arg("-e", "--virtenv",
+ dest="virtenv",
+ action="store",
+ help="virt env_build number(s): [1-8]")
+@cliutil.arg("-q", "--queues",
+ dest="queues",
+ action="store",
+ help="VM nic queues.")
+@cliutil.arg("-f", "--flows",
+ dest="flows",
+ action="store",
+ help="Flow queue(s) : [1-8]")
+@cliutil.arg("-v", "--vlans",
+ dest="vlans",
+ action="store_true",
+ help="vlan setting : 100-150;200-250")
+@cliutil.arg("-d", "--direct",
+ dest="direct",
+ action="store",
+ choices=["single", "double"],
+ help="Flow Direction")
+@cliutil.arg("-b", "--bind",
+ dest="strategy",
+ action="store",
+ help="CPU bind strategy : 1 | 2 | 3 ")
+@cliutil.arg("--config_file",
+ dest="config_file",
+ default='/etc/vstf/spirent/optimize.ini',
+ action="store",
+ help="config file for optimize.")
+@cliutil.arg("--strategyfile",
+ dest="strategyfile",
+ default='/etc/vstf/spirent/strategy.ini',
+ action="store",
+ help="config file for strategy.")
+def do_spirent_test(args):
+ ret = call(make_msg("perf_test",
+ plugin="spirent",
+ conner=args.conner,
+ measurand=args.measurand,
+ virtenv=args.virtenv,
+ queues=args.queues,
+ direct=args.direct,
+ flows=args.flows,
+ strategy=args.strategy,
+ model=args.model,
+ vlans=args.vlans,
+ configfile=args.config_file,
+ strategyfile=args.strategyfile))
+ print_stdout(ret)
+
+
+@cliutil.arg("--host", dest="host", action="store", default=None,
+ help="which host to list affctl info")
+def do_affctl_list(args):
+ ret = call(make_msg("affctl_list", host=args.host))
+ print_stdout(ret)
+
+
+def main():
+ parser = VstfParser(prog="vstfadm", description="vstf administration")
+ parser.set_subcommand_parser(sys.modules[__name__], "functions")
+ args = parser.parse_args()
+ if args.func is None:
+ sys.exit(-1)
+ setup_logging(level=logging.DEBUG, log_file="/var/log/vstf/vstf-adm.log", clevel=logging.INFO)
+ # connect to manage
+ global CONN
+ try:
+ CONN = unix.UdpClient()
+ CONN.connect(constants.sockaddr)
+ except Exception as e:
+ raise e
+
+ args.func(args)
+ # call functions of manage
+ sys.exit(CONN.close())