author     Wojciech Dec <wdec@cisco.com>  2016-08-16 19:27:01 +0200
committer  Wojciech Dec <wdec@cisco.com>  2016-08-16 19:29:27 +0200
commit     c3b2c2a9a22bac5cf17813c589444d3abebaa23b
tree       68c2fc0cb8c32cbb8fabf69ac81e1e0ba50cff2a
parent     3285c8e93ea59d98b392591ef6dfa5b1de3bb92d
Adding Mitaka networking-odl module with the ODL topology based port
binding resolution mechanism from https://review.openstack.org/333186

Change-Id: I10d400aac9bb639c146527f0f93e6925cb74d9de
Signed-off-by: Wojciech Dec <wdec@cisco.com>

Diffstat (limited to 'networking-odl/tools')
-rw-r--r--  networking-odl/tools/check_bash.sh             31
-rw-r--r--  networking-odl/tools/check_i18n.py            153
-rw-r--r--  networking-odl/tools/check_i18n_test_case.txt  67
-rwxr-xr-x  networking-odl/tools/clean.sh                   5
-rw-r--r--  networking-odl/tools/i18n_cfg.py               97
-rw-r--r--  networking-odl/tools/install_venv.py           72
-rwxr-xr-x  networking-odl/tools/pretty_tox.sh              6
-rwxr-xr-x  networking-odl/tools/subunit-trace.py         307
-rwxr-xr-x  networking-odl/tools/with_venv.sh              19
9 files changed, 757 insertions, 0 deletions
diff --git a/networking-odl/tools/check_bash.sh b/networking-odl/tools/check_bash.sh
new file mode 100644
index 0000000..e9d178e
--- /dev/null
+++ b/networking-odl/tools/check_bash.sh
@@ -0,0 +1,31 @@
+#! /bin/sh
+
+# Copyright (C) 2014 VA Linux Systems Japan K.K.
+# Copyright (C) 2014 YAMAMOTO Takashi <yamamoto at valinux co jp>
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# The purpose of this script is to avoid casual introduction of more
+# bash dependency. Please consider alternatives before committing code
+# which uses bash specific features.
+
+# Ignore comments, but include shebangs
+OBSERVED=$(grep -E '^([^#]|#!).*bash' tox.ini tools/* | wc -l)
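+# EXPECTED is the count of legitimate bash references in tox.ini and tools/;
+# adjust it only when such a usage is deliberately added or removed.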
+EXPECTED=5
+if [ ${EXPECTED} -ne ${OBSERVED} ]; then
+    echo "Unexpected number of bash usages detected."
+    echo "Please read the comment in $0"
+ exit 1
+fi
+exit 0
diff --git a/networking-odl/tools/check_i18n.py b/networking-odl/tools/check_i18n.py
new file mode 100644
index 0000000..697ad18
--- /dev/null
+++ b/networking-odl/tools/check_i18n.py
@@ -0,0 +1,153 @@
+# Copyright 2012 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+from __future__ import print_function
+
+import compiler
+import imp
+import os.path
+import sys
+
+
+def is_localized(node):
+ """Check message wrapped by _()"""
+ if isinstance(node.parent, compiler.ast.CallFunc):
+ if isinstance(node.parent.node, compiler.ast.Name):
+ if node.parent.node.name == '_':
+ return True
+ return False
+
+
+class ASTWalker(compiler.visitor.ASTVisitor):
+
+ def default(self, node, *args):
+ for child in node.getChildNodes():
+ child.parent = node
+ compiler.visitor.ASTVisitor.default(self, node, *args)
+
+
+class Visitor(object):
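+    """Visit string constants and record i18n violations in self.error."""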
+
+ def __init__(self, filename, i18n_msg_predicates,
+ msg_format_checkers, debug):
+ self.filename = filename
+ self.debug = debug
+ self.error = 0
+ self.i18n_msg_predicates = i18n_msg_predicates
+ self.msg_format_checkers = msg_format_checkers
+ with open(filename) as f:
+ self.lines = f.readlines()
+
+ def visitConst(self, node):
+ if not isinstance(node.value, str):
+ return
+
+ if is_localized(node):
+ for (checker, msg) in self.msg_format_checkers:
+ if checker(node):
+ print('%s:%d %s: %s Error: %s' %
+ (self.filename, node.lineno,
+ self.lines[node.lineno - 1][:-1],
+ checker.__name__, msg),
+ file=sys.stderr)
+ self.error = 1
+ return
+            if self.debug:
+ print('%s:%d %s: %s' %
+ (self.filename, node.lineno,
+ self.lines[node.lineno - 1][:-1],
+ "Pass"))
+ else:
+ for (predicate, action, msg) in self.i18n_msg_predicates:
+ if predicate(node):
+ if action == 'skip':
+                        if self.debug:
+ print('%s:%d %s: %s' %
+ (self.filename, node.lineno,
+ self.lines[node.lineno - 1][:-1],
+ "Pass"))
+ return
+ elif action == 'error':
+ print('%s:%d %s: %s Error: %s' %
+ (self.filename, node.lineno,
+ self.lines[node.lineno - 1][:-1],
+ predicate.__name__, msg),
+ file=sys.stderr)
+ self.error = 1
+ return
+ elif action == 'warn':
+ print('%s:%d %s: %s' %
+ (self.filename, node.lineno,
+ self.lines[node.lineno - 1][:-1],
+ "Warn: %s" % msg))
+ return
+ print('Predicate with wrong action!', file=sys.stderr)
+
+
+def is_file_in_black_list(black_list, f):
+    for black in black_list:
+        if os.path.abspath(f).startswith(
+                os.path.abspath(black)):
+            return True
+    return False
+
+
+def check_i18n(input_file, i18n_msg_predicates, msg_format_checkers, debug):
+ input_mod = compiler.parseFile(input_file)
+ v = compiler.visitor.walk(input_mod,
+ Visitor(input_file,
+ i18n_msg_predicates,
+ msg_format_checkers,
+ debug),
+ ASTWalker())
+ return v.error
+
+
+if __name__ == '__main__':
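+    # Usage: python check_i18n.py <file or directory> <i18n cfg module> [-d]
+    # The optional -d flag also prints strings that pass the checks.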
+ input_path = sys.argv[1]
+ cfg_path = sys.argv[2]
+ try:
+ cfg_mod = imp.load_source('', cfg_path)
+ except Exception:
+ print("Load cfg module failed", file=sys.stderr)
+ sys.exit(1)
+
+ i18n_msg_predicates = cfg_mod.i18n_msg_predicates
+ msg_format_checkers = cfg_mod.msg_format_checkers
+ black_list = cfg_mod.file_black_list
+
+ debug = False
+ if len(sys.argv) > 3:
+ if sys.argv[3] == '-d':
+ debug = True
+
+ if os.path.isfile(input_path):
+ sys.exit(check_i18n(input_path,
+ i18n_msg_predicates,
+ msg_format_checkers,
+ debug))
+
+ error = 0
+ for dirpath, dirs, files in os.walk(input_path):
+ for f in files:
+ if not f.endswith('.py'):
+ continue
+ input_file = os.path.join(dirpath, f)
+ if is_file_in_black_list(black_list, input_file):
+ continue
+ if check_i18n(input_file,
+ i18n_msg_predicates,
+ msg_format_checkers,
+ debug):
+ error = 1
+ sys.exit(error)
diff --git a/networking-odl/tools/check_i18n_test_case.txt b/networking-odl/tools/check_i18n_test_case.txt
new file mode 100644
index 0000000..3d1391d
--- /dev/null
+++ b/networking-odl/tools/check_i18n_test_case.txt
@@ -0,0 +1,67 @@
+# test-case for check_i18n.py
+# python check_i18n.py check_i18n.txt -d
+
+# message format checking
+# capital checking
+msg = _("hello world, error")
+msg = _("hello world_var, error")
+msg = _('file_list xyz, pass')
+msg = _("Hello world, pass")
+
+# format specifier checking
+msg = _("Hello %s world %d, error")
+msg = _("Hello %s world, pass")
+msg = _("Hello %(var1)s world %(var2)s, pass")
+
+# message has been localized
+# is_localized
+msg = _("Hello world, pass")
+msg = _("Hello world, pass") % var
+LOG.debug(_('Hello world, pass'))
+LOG.info(_('Hello world, pass'))
+raise x.y.Exception(_('Hello world, pass'))
+raise Exception(_('Hello world, pass'))
+
+# message needs to be localized
+# is_log_callfunc
+LOG.debug('hello world, error')
+LOG.debug('hello world, error' % xyz)
+sys.append('hello world, warn')
+
+# is_log_i18n_msg_with_mod
+LOG.debug(_('Hello world, error') % xyz)
+
+# default warn
+msg = 'hello world, warn'
+msg = 'hello world, warn' % var
+
+# message needn't be localized
+# skip only one word
+msg = ''
+msg = "hello,pass"
+
+# skip dict
+msg = {'hello world, pass': 1}
+
+# skip list
+msg = ["hello world, pass"]
+
+# skip subscript
+msg['hello world, pass']
+
+# skip xml marker
+msg = "<test><t></t></test>, pass"
+
+# skip sql statement
+msg = "SELECT * FROM xyz WHERE hello=1, pass"
+msg = "select * from xyz, pass"
+
+# skip add statement
+msg = 'hello world' + e + 'world hello, pass'
+
+# skip doc string
+"""
+Hello world, pass
+"""
+class Msg:
+ pass
diff --git a/networking-odl/tools/clean.sh b/networking-odl/tools/clean.sh
new file mode 100755
index 0000000..27bc219
--- /dev/null
+++ b/networking-odl/tools/clean.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+rm -rf ./*.deb ./*.tar.gz ./*.dsc ./*.changes
+rm -rf */*.deb
+rm -rf ./plugins/**/build/ ./plugins/**/dist
+rm -rf ./plugins/**/lib/neutron_*_plugin.egg-info ./plugins/neutron-*
diff --git a/networking-odl/tools/i18n_cfg.py b/networking-odl/tools/i18n_cfg.py
new file mode 100644
index 0000000..5ad1a51
--- /dev/null
+++ b/networking-odl/tools/i18n_cfg.py
@@ -0,0 +1,97 @@
+import compiler
+import re
+
+
+def is_log_callfunc(n):
+ """LOG.xxx('hello %s' % xyz) and LOG('hello')"""
+ if isinstance(n.parent, compiler.ast.Mod):
+ n = n.parent
+ if isinstance(n.parent, compiler.ast.CallFunc):
+ if isinstance(n.parent.node, compiler.ast.Getattr):
+ if isinstance(n.parent.node.getChildNodes()[0],
+ compiler.ast.Name):
+ if n.parent.node.getChildNodes()[0].name == 'LOG':
+ return True
+ return False
+
+
+def is_log_i18n_msg_with_mod(n):
+ """LOG.xxx("Hello %s" % xyz) should be LOG.xxx("Hello %s", xyz)"""
+ if not isinstance(n.parent.parent, compiler.ast.Mod):
+ return False
+ n = n.parent.parent
+ if isinstance(n.parent, compiler.ast.CallFunc):
+ if isinstance(n.parent.node, compiler.ast.Getattr):
+ if isinstance(n.parent.node.getChildNodes()[0],
+ compiler.ast.Name):
+ if n.parent.node.getChildNodes()[0].name == 'LOG':
+ return True
+ return False
+
+
+def is_wrong_i18n_format(n):
+ """Check _('hello %s' % xyz)"""
+ if isinstance(n.parent, compiler.ast.Mod):
+ n = n.parent
+ if isinstance(n.parent, compiler.ast.CallFunc):
+ if isinstance(n.parent.node, compiler.ast.Name):
+ if n.parent.node.name == '_':
+ return True
+ return False
+
+
+"""
+Used for check message need be localized or not.
+(predicate_func, action, message)
+"""
+i18n_msg_predicates = [
+ # Skip ['hello world', 1]
+ (lambda n: isinstance(n.parent, compiler.ast.List), 'skip', ''),
+    # Skip {'hello world': 1}
+ (lambda n: isinstance(n.parent, compiler.ast.Dict), 'skip', ''),
+ # Skip msg['hello world']
+ (lambda n: isinstance(n.parent, compiler.ast.Subscript), 'skip', ''),
+ # Skip doc string
+ (lambda n: isinstance(n.parent, compiler.ast.Discard), 'skip', ''),
+    # Skip msg = "hello"; normally a message should contain more than one word
+ (lambda n: len(n.value.strip().split(' ')) <= 1, 'skip', ''),
+ # Skip msg = 'hello world' + vars + 'world hello'
+ (lambda n: isinstance(n.parent, compiler.ast.Add), 'skip', ''),
+ # Skip xml markers msg = "<test></test>"
+ (lambda n: len(re.compile("</.*>").findall(n.value)) > 0, 'skip', ''),
+ # Skip sql statement
+ (lambda n: len(
+ re.compile("^SELECT.*FROM", flags=re.I).findall(n.value)) > 0,
+ 'skip', ''),
+ # LOG.xxx()
+ (is_log_callfunc, 'error', 'Message must be localized'),
+ # _('hello %s' % xyz) should be _('hello %s') % xyz
+ (is_wrong_i18n_format, 'error',
+ ("Message format was wrong, _('hello %s' % xyz) "
+ "should be _('hello %s') % xyz")),
+ # default
+    (lambda n: True, 'warn', 'Message might need to be localized')
+]
+
+
+"""
+Used for checking message format. (checker_func, message)
+"""
+msg_format_checkers = [
+    # If a message contains more than one format specifier, it should use
+    # a mapping key
+ (lambda n: len(re.compile("%[bcdeEfFgGnosxX]").findall(n.value)) > 1,
+ "The message shouldn't contain more than one format specifier"),
+ # Check capital
+ (lambda n: n.value.split(' ')[0].count('_') == 0 and
+ n.value[0].isalpha() and
+ n.value[0].islower(),
+ "First letter must be capital"),
+ (is_log_i18n_msg_with_mod,
+ 'LOG.xxx("Hello %s" % xyz) should be LOG.xxx("Hello %s", xyz)')
+]
+
+
+file_black_list = ["./neutron/tests/unit",
+ "./neutron/openstack",
+ "./neutron/plugins/bigswitch/tests"]
diff --git a/networking-odl/tools/install_venv.py b/networking-odl/tools/install_venv.py
new file mode 100644
index 0000000..f8fb8fa
--- /dev/null
+++ b/networking-odl/tools/install_venv.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+# Copyright 2010 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# All Rights Reserved.
+#
+# Copyright 2010 OpenStack Foundation.
+# Copyright 2013 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Installation script for Neutron's development virtualenv
+"""
+from __future__ import print_function
+
+import os
+import sys
+
+import install_venv_common as install_venv
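+# Note: install_venv_common.py is expected to sit alongside this script; it is
+# not part of this diff.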
+
+
+def print_help():
+ help = """
+ Neutron development environment setup is complete.
+
+ Neutron development uses virtualenv to track and manage Python dependencies
+ while in development and testing.
+
+ To activate the Neutron virtualenv for the extent of your current shell
+ session you can run:
+
+ $ source .venv/bin/activate
+
+ Or, if you prefer, you can run commands in the virtualenv on a case by case
+ basis by running:
+
+ $ tools/with_venv.sh <your command>
+
+ Also, make test will automatically use the virtualenv.
+ """
+ print(help)
+
+
+def main(argv):
+ root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+ venv = os.path.join(root, '.venv')
+ pip_requires = os.path.join(root, 'requirements.txt')
+ test_requires = os.path.join(root, 'test-requirements.txt')
+ py_version = "python%s.%s" % (sys.version_info[0], sys.version_info[1])
+ project = 'Neutron'
+ install = install_venv.InstallVenv(root, venv, pip_requires, test_requires,
+ py_version, project)
+ options = install.parse_args(argv)
+ install.check_python_version()
+ install.check_dependencies()
+ install.create_virtualenv(no_site_packages=options.no_site_packages)
+ install.install_dependencies()
+ print_help()
+
+
+if __name__ == '__main__':
+ main(sys.argv)
diff --git a/networking-odl/tools/pretty_tox.sh b/networking-odl/tools/pretty_tox.sh
new file mode 100755
index 0000000..a40f248
--- /dev/null
+++ b/networking-odl/tools/pretty_tox.sh
@@ -0,0 +1,6 @@
+#! /bin/sh
+
+TESTRARGS=$1
+
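+# Keep a copy of the real stdout on fd 3 so subunit-trace output reaches the
+# console, while the command substitution below captures only testr's exit
+# code (written to fd 4) into $status.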
+exec 3>&1
+status=$(exec 4>&1 >&3; ( python setup.py testr --slowest --testr-args="--subunit $TESTRARGS"; echo $? >&4 ) | $(dirname $0)/subunit-trace.py -f) && exit $status
diff --git a/networking-odl/tools/subunit-trace.py b/networking-odl/tools/subunit-trace.py
new file mode 100755
index 0000000..73f2f10
--- /dev/null
+++ b/networking-odl/tools/subunit-trace.py
@@ -0,0 +1,307 @@
+#!/usr/bin/env python
+
+# Copyright 2014 Hewlett-Packard Development Company, L.P.
+# Copyright 2014 Samsung Electronics
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Trace a subunit stream in reasonable detail and high accuracy."""
+
+import argparse
+import functools
+import os
+import re
+import sys
+
+import mimeparse
+import subunit
+import testtools
+
+DAY_SECONDS = 60 * 60 * 24
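+# FAILS accumulates failed test dicts; RESULTS maps a worker number to the
+# list of test dicts that worker ran.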
+FAILS = []
+RESULTS = {}
+
+
+class Starts(testtools.StreamResult):
+
+ def __init__(self, output):
+ super(Starts, self).__init__()
+ self._output = output
+
+ def startTestRun(self):
+ self._neednewline = False
+ self._emitted = set()
+
+ def status(self, test_id=None, test_status=None, test_tags=None,
+ runnable=True, file_name=None, file_bytes=None, eof=False,
+ mime_type=None, route_code=None, timestamp=None):
+ super(Starts, self).status(
+ test_id, test_status,
+ test_tags=test_tags, runnable=runnable, file_name=file_name,
+ file_bytes=file_bytes, eof=eof, mime_type=mime_type,
+ route_code=route_code, timestamp=timestamp)
+ if not test_id:
+ if not file_bytes:
+ return
+ if not mime_type or mime_type == 'test/plain;charset=utf8':
+ mime_type = 'text/plain; charset=utf-8'
+ primary, sub, parameters = mimeparse.parse_mime_type(mime_type)
+ content_type = testtools.content_type.ContentType(
+ primary, sub, parameters)
+ content = testtools.content.Content(
+ content_type, lambda: [file_bytes])
+ text = content.as_text()
+ if text and text[-1] not in '\r\n':
+ self._neednewline = True
+ self._output.write(text)
+ elif test_status == 'inprogress' and test_id not in self._emitted:
+ if self._neednewline:
+ self._neednewline = False
+ self._output.write('\n')
+ worker = ''
+ for tag in test_tags or ():
+ if tag.startswith('worker-'):
+ worker = '(' + tag[7:] + ') '
+ if timestamp:
+ timestr = timestamp.isoformat()
+ else:
+ timestr = ''
+ self._output.write('%s: %s%s [start]\n' %
+ (timestr, worker, test_id))
+ self._emitted.add(test_id)
+
+
+def cleanup_test_name(name, strip_tags=True, strip_scenarios=False):
+ """Clean up the test name for display.
+
+    By default we strip out the tags in the test name because they don't
+    help in matching the test that was run to its result.
+
+    Make it possible to strip out the testscenarios information (not to
+    be confused with tempest scenarios); however, that is often needed to
+    identify generated negative tests.
+ """
+ if strip_tags:
+ tags_start = name.find('[')
+ tags_end = name.find(']')
+ if tags_start > 0 and tags_end > tags_start:
+ newname = name[:tags_start]
+ newname += name[tags_end + 1:]
+ name = newname
+
+ if strip_scenarios:
+ tags_start = name.find('(')
+ tags_end = name.find(')')
+ if tags_start > 0 and tags_end > tags_start:
+ newname = name[:tags_start]
+ newname += name[tags_end + 1:]
+ name = newname
+
+ return name
+
+
+def get_duration(timestamps):
+ start, end = timestamps
+ if not start or not end:
+ duration = ''
+ else:
+ delta = end - start
+ duration = '%d.%06ds' % (
+ delta.days * DAY_SECONDS + delta.seconds, delta.microseconds)
+ return duration
+
+
+def find_worker(test):
+ for tag in test['tags']:
+ if tag.startswith('worker-'):
+ return int(tag[7:])
+ return 'NaN'
+
+
+# Print out stdout/stderr if it exists, always
+def print_attachments(stream, test, all_channels=False):
+ """Print out subunit attachments.
+
+ Print out subunit attachments that contain content. This
+ runs in 2 modes, one for successes where we print out just stdout
+ and stderr, and an override that dumps all the attachments.
+ """
+ channels = ('stdout', 'stderr')
+ for name, detail in test['details'].items():
+ # NOTE(sdague): the subunit names are a little crazy, and actually
+ # are in the form pythonlogging:'' (with the colon and quotes)
+ name = name.split(':')[0]
+ if detail.content_type.type == 'test':
+ detail.content_type.type = 'text'
+ if (all_channels or name in channels) and detail.as_text():
+ title = "Captured %s:" % name
+ stream.write("\n%s\n%s\n" % (title, ('~' * len(title))))
+ # indent attachment lines 4 spaces to make them visually
+ # offset
+ for line in detail.as_text().split('\n'):
+ stream.write(" %s\n" % line)
+
+
+def show_outcome(stream, test, print_failures=False, failonly=False):
+ global RESULTS
+ status = test['status']
+ # TODO(sdague): ask lifeless why on this?
+ if status == 'exists':
+ return
+
+ worker = find_worker(test)
+ name = cleanup_test_name(test['id'])
+ duration = get_duration(test['timestamps'])
+
+ if worker not in RESULTS:
+ RESULTS[worker] = []
+ RESULTS[worker].append(test)
+
+ # don't count the end of the return code as a fail
+ if name == 'process-returncode':
+ return
+
+ if status == 'fail':
+ FAILS.append(test)
+ stream.write('{%s} %s [%s] ... FAILED\n' % (
+ worker, name, duration))
+ if not print_failures:
+ print_attachments(stream, test, all_channels=True)
+ elif not failonly:
+ if status == 'success':
+ stream.write('{%s} %s [%s] ... ok\n' % (
+ worker, name, duration))
+ print_attachments(stream, test)
+ elif status == 'skip':
+ stream.write('{%s} %s ... SKIPPED: %s\n' % (
+ worker, name, test['details']['reason'].as_text()))
+ else:
+ stream.write('{%s} %s [%s] ... %s\n' % (
+ worker, name, duration, test['status']))
+ if not print_failures:
+ print_attachments(stream, test, all_channels=True)
+
+ stream.flush()
+
+
+def print_fails(stream):
+ """Print summary failure report.
+
+    Only used when --fails is passed; there remains debate on inline vs.
+    at-end reporting, so this utility function is kept around.
+ """
+ if not FAILS:
+ return
+ stream.write("\n==============================\n")
+ stream.write("Failed %s tests - output below:" % len(FAILS))
+ stream.write("\n==============================\n")
+ for f in FAILS:
+ stream.write("\n%s\n" % f['id'])
+ stream.write("%s\n" % ('-' * len(f['id'])))
+ print_attachments(stream, f, all_channels=True)
+ stream.write('\n')
+
+
+def count_tests(key, value):
+ count = 0
+ for k, v in RESULTS.items():
+ for item in v:
+ if key in item:
+ if re.search(value, item[key]):
+ count += 1
+ return count
+
+
+def run_time():
+ runtime = 0.0
+ for k, v in RESULTS.items():
+ for test in v:
+ runtime += float(get_duration(test['timestamps']).strip('s'))
+ return runtime
+
+
+def worker_stats(worker):
+ tests = RESULTS[worker]
+ num_tests = len(tests)
+ delta = tests[-1]['timestamps'][1] - tests[0]['timestamps'][0]
+ return num_tests, delta
+
+
+def print_summary(stream):
+ stream.write("\n======\nTotals\n======\n")
+ stream.write("Run: %s in %s sec.\n" % (count_tests('status', '.*'),
+ run_time()))
+ stream.write(" - Passed: %s\n" % count_tests('status', 'success'))
+ stream.write(" - Skipped: %s\n" % count_tests('status', 'skip'))
+ stream.write(" - Failed: %s\n" % count_tests('status', 'fail'))
+
+ # we could have no results, especially as we filter out the process-codes
+ if RESULTS:
+ stream.write("\n==============\nWorker Balance\n==============\n")
+
+ for w in range(max(RESULTS.keys()) + 1):
+ if w not in RESULTS:
+ stream.write(
+ " - WARNING: missing Worker %s! "
+ "Race in testr accounting.\n" % w)
+ else:
+ num, time = worker_stats(w)
+ stream.write(" - Worker %s (%s tests) => %ss\n" %
+ (w, num, time))
+
+
+def parse_args():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--no-failure-debug', '-n', action='store_true',
+ dest='print_failures', help='Disable printing failure '
+ 'debug information in realtime')
+ parser.add_argument('--fails', '-f', action='store_true',
+ dest='post_fails', help='Print failure debug '
+                        'information after the stream is processed')
+ parser.add_argument('--failonly', action='store_true',
+ dest='failonly', help="Don't print success items",
+ default=(
+ os.environ.get('TRACE_FAILONLY', False)
+ is not False))
+ return parser.parse_args()
+
+
+def main():
+ args = parse_args()
+ stream = subunit.ByteStreamToStreamResult(
+ sys.stdin, non_subunit_name='stdout')
+ starts = Starts(sys.stdout)
+ outcomes = testtools.StreamToDict(
+ functools.partial(show_outcome, sys.stdout,
+ print_failures=args.print_failures,
+ failonly=args.failonly
+ ))
+ summary = testtools.StreamSummary()
+ result = testtools.CopyStreamResult([starts, outcomes, summary])
+ result.startTestRun()
+ try:
+ stream.run(result)
+ finally:
+ result.stopTestRun()
+ if count_tests('status', '.*') == 0:
+ print("The test run didn't actually run any tests")
+ return 1
+ if args.post_fails:
+ print_fails(sys.stdout)
+ print_summary(sys.stdout)
+ return (0 if summary.wasSuccessful() else 1)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/networking-odl/tools/with_venv.sh b/networking-odl/tools/with_venv.sh
new file mode 100755
index 0000000..dea5c5f
--- /dev/null
+++ b/networking-odl/tools/with_venv.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+# Copyright 2011 OpenStack Foundation.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
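+# Usage: tools/with_venv.sh <command>
+# Runs <command> inside the .venv created by tools/install_venv.py.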
+TOOLS=`dirname $0`
+VENV=$TOOLS/../.venv
+source $VENV/bin/activate && "$@"