Diffstat (limited to 'tests')
-rw-r--r--  tests/data/results/expected.json                              16
-rw-r--r--  tests/unit/ansible_library/plugins/action/calculate_test.py   22
2 files changed, 26 insertions(+), 12 deletions(-)
diff --git a/tests/data/results/expected.json b/tests/data/results/expected.json
index a495d999..e77200d4 100644
--- a/tests/data/results/expected.json
+++ b/tests/data/results/expected.json
@@ -1,7 +1,15 @@
{
"score": 150,
- "host_results": [
- {"host": "host1", "result": {"score": 100}},
- {"host": "host2", "result": {"score": 200}}
- ]
+ "children": [
+ {
+ "name": "host1",
+ "score": 100
+ },
+ {
+ "name": "host2",
+ "score": 200
+ }
+ ],
+ "description": "POD Compute QPI",
+ "name": "compute"
}
diff --git a/tests/unit/ansible_library/plugins/action/calculate_test.py b/tests/unit/ansible_library/plugins/action/calculate_test.py
index 68a03e2a..31d72120 100644
--- a/tests/unit/ansible_library/plugins/action/calculate_test.py
+++ b/tests/unit/ansible_library/plugins/action/calculate_test.py
@@ -45,8 +45,8 @@ def section_spec(metric_spec):
@pytest.fixture
def qpi_spec(section_spec):
return {
- "description": "QTIP Performance Index of compute",
"name": "compute",
+ "description": "QTIP Performance Index of compute",
"sections": [section_spec]
}

@@ -54,23 +54,29 @@ def qpi_spec(section_spec):
@pytest.fixture()
def metric_result():
return {'score': 1.0,
- 'workload_results': [
- {'name': 'rsa_sign', 'score': 1.0},
- {'name': 'rsa_verify', 'score': 1.0}]}
+ 'name': 'ssl_rsa',
+ 'description': 'metric',
+ 'children': [{'description': 'workload', 'name': 'rsa_sign', 'score': 1.0},
+ {'description': 'workload', 'name': 'rsa_verify', 'score': 1.0}]}


@pytest.fixture()
def section_result(metric_result):
return {'score': 1.0,
- 'metric_results': [{'name': 'ssl_rsa', 'result': metric_result}]}
+ 'name': 'ssl',
+ 'description': 'cryptography and SSL/TLS performance',
+ 'children': [metric_result]}


@pytest.fixture()
def qpi_result(qpi_spec, section_result, metrics):
return {'score': 2048,
- 'spec': qpi_spec,
- 'metrics': metrics,
- 'section_results': [{'name': 'ssl', 'result': section_result}]}
+ 'name': 'compute',
+ 'description': 'QTIP Performance Index of compute',
+ 'children': [section_result],
+ 'details': {
+ 'spec': qpi_spec,
+ 'metrics': metrics}}


def test_calc_metric(metric_spec, metrics, metric_result):
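
Note (not part of the patch): the change replaces the per-level keys (host_results, metric_results, section_results, workload_results) with a uniform node shape in which every result carries "name", "description", "score" and a nested "children" list. Below is a minimal sketch of how such a tree can be traversed; the walk() helper and the printing loop are hypothetical and only illustrate the expected.json shape shown above.

def walk(node, depth=0):
    # Yield (depth, name, score) for a result node and each of its descendants.
    yield depth, node.get('name'), node.get('score')
    for child in node.get('children', []):
        yield from walk(child, depth + 1)

expected = {
    "score": 150,
    "name": "compute",
    "description": "POD Compute QPI",
    "children": [
        {"name": "host1", "score": 100},
        {"name": "host2", "score": 200},
    ],
}

for depth, name, score in walk(expected):
    print('  ' * depth + '{}: {}'.format(name, score))
# compute: 150
#   host1: 100
#   host2: 200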