-rw-r--r--   collector/timeline.json                                  39
-rw-r--r--   contrib/TOM/README                                        4
-rw-r--r--   qtip/cli/commands/cmd_metric.py                          10
-rw-r--r--   qtip/cli/commands/cmd_plan.py                             8
-rw-r--r--   qtip/cli/commands/cmd_qpi.py                              8
-rw-r--r--   qtip/cli/commands/cmd_report.py                           5
-rw-r--r--   qtip/reporter/console.py                                 12
-rw-r--r--   qtip/runner/runner.py                                     2
-rw-r--r--   tests/data/reporter/qtip-2017-03-16-20-07/result.json (renamed from collector/2017-03-16-20-07/result.json)   0
-rw-r--r--   tests/unit/cli/cmd_report_test.py                        32
-rw-r--r--   tests/unit/reporter/console_test.py                      32
11 files changed, 72 insertions(+), 80 deletions(-)
diff --git a/collector/timeline.json b/collector/timeline.json
deleted file mode 100644
index dea929e7..00000000
--- a/collector/timeline.json
+++ /dev/null
@@ -1,39 +0,0 @@
-{
- "phases": [{
- "name": "Monitor",
- "checkpoints": [{
- "name": "T00",
- "timestamp": "1"
- }]
- }, {
- "name": "Inspector",
- "checkpoints": [{
- "name": "T01",
- "timestamp": "2"
- }, {
- "name": "T02 ",
- "timestamp": "5"
- }, {
- "name": "T03 ",
- "timestamp": "8"
- }]
- }, {
- "name": "Controller",
- "checkpoints": [{
- "name": "T04",
- "timestamp": "11"
- }]
- }, {
- "name": "Notifier",
- "checkpoints": [{
- "name": "T05 ",
- "timestamp": "16"
- }]
- }, {
- "name": "Evaluator",
- "checkpoints": [{
- "name": "T06 ",
- "timestamp": "40"
- }]
- }]
-}
diff --git a/contrib/TOM/README b/contrib/TOM/README
new file mode 100644
index 00000000..706573ce
--- /dev/null
+++ b/contrib/TOM/README
@@ -0,0 +1,4 @@
+https://wiki.opnfv.org/display/testing/R+post-processing+of+the+Yardstick+results
+
+This folder was created for TOM, a performance dataset processing tool contributed by Alassane Samba <alassane.samba@orange.com>.
+
diff --git a/qtip/cli/commands/cmd_metric.py b/qtip/cli/commands/cmd_metric.py
index 31b7b702..a2208444 100644
--- a/qtip/cli/commands/cmd_metric.py
+++ b/qtip/cli/commands/cmd_metric.py
@@ -8,6 +8,7 @@
##############################################################################
import click
+import os
from qtip.cli import utils
from qtip.cli.entry import Context
@@ -41,8 +42,11 @@ def show(ctx, name):
click.echo(output)
-@cli.command('run', help='Run tests to run Performance Metrics')
+@cli.command('run', help='Run performance test')
@click.argument('name')
+@click.option('-p', '--path', help='Path to store results')
@pass_context
-def cmd_run(ctx, name):
- pass
+def run(ctx, name, path):
+    runner_path = os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir,
+                               'runner/runner.py')
+    os.system('python {0} -b {1} -d {2}'.format(runner_path, name, path))
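
The run command resolves qtip/runner/runner.py relative to the commands package and shells out with os.system. A minimal sketch of the same call using subprocess instead of a shell string, offered only as an equivalent form; the benchmark name and destination below are placeholders, and -b/-d are the flags runner.py accepts:

    import os
    import subprocess

    # Locate qtip/runner/runner.py relative to qtip/cli/commands/, as the command does.
    runner_path = os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir,
                               'runner', 'runner.py')
    # Placeholder arguments: 'dhrystone' for -b (benchmark), '/tmp/qtip/' for -d (destination).
    subprocess.call(['python', runner_path, '-b', 'dhrystone', '-d', '/tmp/qtip/'])
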
diff --git a/qtip/cli/commands/cmd_plan.py b/qtip/cli/commands/cmd_plan.py
index 90773491..beb61b0e 100644
--- a/qtip/cli/commands/cmd_plan.py
+++ b/qtip/cli/commands/cmd_plan.py
@@ -9,6 +9,7 @@
import click
+import os
from qtip.cli import utils
from qtip.cli.entry import Context
@@ -51,6 +52,9 @@ def show(ctx, name):
@cli.command('run', help='Execute a Plan')
@click.argument('name')
+@click.option('-p', '--path', help='Path to store results')
@pass_context
-def run(ctx, name):
- pass
+def run(ctx, name, path):
+    runner_path = os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir,
+                               'runner/runner.py')
+    os.system('python {0} -b all -d {1}'.format(runner_path, path))
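
For a quick check of the updated command, the CLI can be driven the same way the tests below do, with click's CliRunner; the plan name 'default' and the destination path here are illustrative:

    from click.testing import CliRunner
    from qtip.cli.entry import cli

    runner = CliRunner()
    # Equivalent to running: qtip plan run default -p /tmp/qtip
    result = runner.invoke(cli, ['plan', 'run', 'default', '-p', '/tmp/qtip'])
    print(result.exit_code, result.output)
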
diff --git a/qtip/cli/commands/cmd_qpi.py b/qtip/cli/commands/cmd_qpi.py
index 1f23211e..1e3671c5 100644
--- a/qtip/cli/commands/cmd_qpi.py
+++ b/qtip/cli/commands/cmd_qpi.py
@@ -9,6 +9,7 @@
import click
+import os
from qtip.cli import utils
from qtip.cli.entry import Context
@@ -44,6 +45,9 @@ def show(ctx, name):
@cli.command('run', help='Run performance tests for the specified QPI')
@click.argument('name')
+@click.option('-p', '--path', help='Path to store results')
@pass_context
-def run(ctx, name):
- pass
+def run(ctx, name, path):
+    runner_path = os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir,
+                               'runner/runner.py')
+    os.system('python {0} -b all -d {1}'.format(runner_path, path))
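
cmd_metric, cmd_plan and cmd_qpi now build the same runner invocation; a hypothetical shared helper (not something this patch adds) that the three commands could import might look like:

    import os


    def invoke_runner(benchmark, dest):
        """Run qtip/runner/runner.py with -b <benchmark> and -d <dest>."""
        runner_path = os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir,
                                   'runner', 'runner.py')
        os.system('python {0} -b {1} -d {2}'.format(runner_path, benchmark, dest))
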
diff --git a/qtip/cli/commands/cmd_report.py b/qtip/cli/commands/cmd_report.py
index cb9c70b6..c9f31f4a 100644
--- a/qtip/cli/commands/cmd_report.py
+++ b/qtip/cli/commands/cmd_report.py
@@ -24,8 +24,9 @@ def cli(ctx):
@cli.command('show')
@click.argument('metric')
+@click.option('-p', '--path', help='Path to result directory')
@pass_context
-def show(ctx, metric):
+def show(ctx, metric, path):
reporter = ConsoleReporter({})
- report = reporter.render(metric)
+ report = reporter.render(metric, path)
click.echo(report)
diff --git a/qtip/reporter/console.py b/qtip/reporter/console.py
index 64d677ba..cb51d9c9 100644
--- a/qtip/reporter/console.py
+++ b/qtip/reporter/console.py
@@ -28,19 +28,17 @@ class ConsoleReporter(BaseActor):
         tpl_path = path.join(path.dirname(__file__), 'templates')
         tpl_loader = FileSystemLoader(tpl_path)
         self._env = Environment(loader=tpl_loader)
-        self.result_path = path.join(ROOT_DIR, 'collector')

-    def load_result(self):
-        # TODO (taseer) change result directory format more suitable to filter out
-        result_dirs = glob.glob('{}/20*'.format(self.result_path))
+    def load_result(self, result_path):
+        result_dirs = glob.glob('{}/qtip-*'.format(result_path))
         # select the last (latest) directory for rendering report, result_dirs[-1]
-        with open(path.join(self.result_path, result_dirs[-1], 'result.json')) as sample:
+        with open(path.join(result_path, result_dirs[-1], 'result.json')) as sample:
             result = json.load(sample)
         return result

-    def render(self, metric):
+    def render(self, metric, result_path):
         template = self._env.get_template('base.j2')
-        var_dict = self.load_result()
+        var_dict = self.load_result(result_path)
         var_dict['metric_name'] = metric
         out = template.render(var_dict)
         return out
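
load_result takes result_dirs[-1] as the latest run, but glob.glob makes no ordering guarantee; a sketch that sorts the qtip-* directories explicitly, assuming the qtip-YYYY-MM-DD-HH-MM names produced by the runner (which sort chronologically as plain strings) and the layout used under tests/data/reporter:

    import glob
    import json
    from os import path


    def load_latest_result(result_path):
        # Sorting makes [-1] the most recent qtip-<timestamp> directory.
        result_dirs = sorted(glob.glob('{}/qtip-*'.format(result_path)))
        with open(path.join(result_dirs[-1], 'result.json')) as sample:
            return json.load(sample)
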
diff --git a/qtip/runner/runner.py b/qtip/runner/runner.py
index 8bdbfb78..9b09f0f8 100644
--- a/qtip/runner/runner.py
+++ b/qtip/runner/runner.py
@@ -93,7 +93,7 @@ def main(args=sys.argv[1:]):
logger.info("start_time: {0}".format(start_time))
if not args.dest.endswith('/'):
args.dest += '/'
- result_dir = args.dest + start_time
+ result_dir = args.dest + 'qtip-' + start_time
ansible_result = run_benchmark(result_dir, args.benchmark)
stop_time = time.strftime("%Y-%m-%d-%H-%M")
logger.info("stop_time: {0}".format(stop_time))
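
The result directory now carries a qtip- prefix, which is exactly what the reporter's qtip-* glob matches. A small illustration of the naming; the destination is a placeholder and the timestamp format is the one runner.py uses:

    import time

    dest = '/tmp/qtip/'  # placeholder for the -d argument
    start_time = time.strftime("%Y-%m-%d-%H-%M")
    result_dir = dest + 'qtip-' + start_time
    # e.g. /tmp/qtip/qtip-2017-03-16-20-07, matching tests/data/reporter/qtip-2017-03-16-20-07
    print(result_dir)
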
diff --git a/collector/2017-03-16-20-07/result.json b/tests/data/reporter/qtip-2017-03-16-20-07/result.json
index d26ad400..d26ad400 100644
--- a/collector/2017-03-16-20-07/result.json
+++ b/tests/data/reporter/qtip-2017-03-16-20-07/result.json
diff --git a/tests/unit/cli/cmd_report_test.py b/tests/unit/cli/cmd_report_test.py
index 963ce987..9263707f 100644
--- a/tests/unit/cli/cmd_report_test.py
+++ b/tests/unit/cli/cmd_report_test.py
@@ -8,6 +8,7 @@
##############################################################################
import pytest
+from os import path
from click.testing import CliRunner
from qtip.cli.entry import cli
@@ -18,10 +19,17 @@ def runner():
return CliRunner()
-def test_dhrystone(runner):
+@pytest.fixture(scope="module")
+def result_path():
+    result = path.join(path.dirname(__file__), path.pardir, path.pardir,
+                       'data/reporter')
+    return result
+
+
+def test_dhrystone(runner, result_path):
"""Test dhrystone report"""
- result = runner.invoke(cli, ['report', 'show', 'dhrystone'])
+ result = runner.invoke(cli, ['report', 'show', 'dhrystone', '-p', result_path])
assert "Benchmark: dhrystone" in result.output
assert "CPU Usage: 3%" in result.output
assert "Number: 40" in result.output
@@ -30,10 +38,10 @@ def test_dhrystone(runner):
assert "Total CPUs: 40" in result.output
-def test_whetstone(runner):
+def test_whetstone(runner, result_path):
""" Test whetstone output"""
- result = runner.invoke(cli, ['report', 'show', 'whetstone'])
+ result = runner.invoke(cli, ['report', 'show', 'whetstone', '-p', result_path])
assert "Benchmark: whetstone" in result.output
assert "CPU Usage: 3%" in result.output
assert "Results:" in result.output
@@ -43,9 +51,9 @@ def test_whetstone(runner):
assert "Single CPU:" in result.output
-def test_dpi(runner):
+def test_dpi(runner, result_path):
""" Test dpi report"""
- result = runner.invoke(cli, ['report', 'show', 'dpi'])
+ result = runner.invoke(cli, ['report', 'show', 'dpi', '-p', result_path])
assert "Benchmark: dpi" in result.output
assert "CPU Usage: 3%" in result.output
assert "Bits per Second: 3.638" in result.output
@@ -54,9 +62,9 @@ def test_dpi(runner):
assert "Packets per Second: 1.458" in result.output
-def test_ramspeed(runner):
+def test_ramspeed(runner, result_path):
""" Test ramspeed report """
- result = runner.invoke(cli, ['report', 'show', 'ramspeed'])
+ result = runner.invoke(cli, ['report', 'show', 'ramspeed', '-p', result_path])
assert "Benchmark: ramspeed" in result.output
assert "CPU Usage: 3%" in result.output
assert "Float Addition: 10217.62" in result.output
@@ -68,10 +76,10 @@ def test_ramspeed(runner):
assert "Integer Average: 11396.35" in result.output
-def test_ssl(runner):
+def test_ssl(runner, result_path):
""" Test ssl report"""
- result = runner.invoke(cli, ['report', 'show', 'ssl'])
+ result = runner.invoke(cli, ['report', 'show', 'ssl', '-p', result_path])
assert "Benchmark: ssl" in result.output
assert "CPU Usage: 3%" in result.output
assert "AES 128 CBC (bytes):" in result.output
@@ -82,10 +90,10 @@ def test_ssl(runner):
assert "4096: 7688.5" in result.output
-def test_sys(runner):
+def test_sys(runner, result_path):
""" Test sys_info """
- result = runner.invoke(cli, ['report', 'show', 'ssl'])
+ result = runner.invoke(cli, ['report', 'show', 'ssl', '-p', result_path])
assert "System Information:" in result.output
assert "Host Name: node-38.zte.com.cn" in result.output
assert "Memory: 4403.7/128524.1MB" in result.output
diff --git a/tests/unit/reporter/console_test.py b/tests/unit/reporter/console_test.py
index aa7f848b..037ef2fb 100644
--- a/tests/unit/reporter/console_test.py
+++ b/tests/unit/reporter/console_test.py
@@ -8,6 +8,7 @@
##############################################################################
import pytest
+from os import path
from qtip.reporter.console import ConsoleReporter
@@ -17,14 +18,21 @@ def console_reporter():
return ConsoleReporter({})
+@pytest.fixture
+def result_path():
+    result = path.join(path.dirname(__file__), path.pardir, path.pardir,
+                       'data/reporter')
+    return result
+
+
def test_constructor(console_reporter):
assert isinstance(console_reporter, ConsoleReporter)
-def test_dhrystone(console_reporter):
+def test_dhrystone(console_reporter, result_path):
""" Test dhrystone report"""
- result = console_reporter.render('dhrystone')
+ result = console_reporter.render('dhrystone', result_path)
assert "Benchmark: dhrystone" in result
assert "Number: 40" in result
assert "Score: 63529.6" in result
@@ -32,10 +40,10 @@ def test_dhrystone(console_reporter):
assert "Total CPUs: 40" in result
-def test_whetstone(console_reporter):
+def test_whetstone(console_reporter, result_path):
""" Test whetstone output"""
- result = console_reporter.render('whetstone')
+ result = console_reporter.render('whetstone', result_path)
assert "Benchmark: whetstone" in result
assert "Results:" in result
assert "Multi CPU:" in result
@@ -44,10 +52,10 @@ def test_whetstone(console_reporter):
assert "Single CPU:" in result
-def test_dpi(console_reporter):
+def test_dpi(console_reporter, result_path):
""" Test dpi report"""
- result = console_reporter.render('dpi')
+ result = console_reporter.render('dpi', result_path)
assert "Benchmark: dpi" in result
assert "Bits per Second: 3.638" in result
assert "Packets per Second: 1.45" in result
@@ -55,10 +63,10 @@ def test_dpi(console_reporter):
assert "Packets per Second: 1.458" in result
-def test_ramspeed(console_reporter):
+def test_ramspeed(console_reporter, result_path):
""" Test ramspeed report """
- result = console_reporter.render('ramspeed')
+ result = console_reporter.render('ramspeed', result_path)
assert "Float Addition: 10217.62" in result
assert "Float Average: 9176.88" in result
assert "Float Copy: 8127.13" in result
@@ -68,10 +76,10 @@ def test_ramspeed(console_reporter):
assert "Integer Average: 11396.35" in result
-def test_ssl(console_reporter):
+def test_ssl(console_reporter, result_path):
""" Test ssl report"""
- result = console_reporter.render('ssl')
+ result = console_reporter.render('ssl', result_path)
assert "AES 128 CBC (bytes):" in result
assert "256: 584951.30k" in result
assert "RSA SIGN:" in result
@@ -80,10 +88,10 @@ def test_ssl(console_reporter):
assert "4096: 7688.5" in result
-def test_sys(console_reporter):
+def test_sys(console_reporter, result_path):
""" Test sys_info """
- result = console_reporter.render('ssl')
+ result = console_reporter.render('ssl', result_path)
assert "System Information:" in result
assert "Host Name: node-38.zte.com.cn" in result
assert "Memory: 4403.7/128524.1MB" in result