From 93d8fc2cd18822136a4d848c38d934203e53c034 Mon Sep 17 00:00:00 2001
From: Gwenael Lambrouin
Date: Mon, 28 Jun 2021 18:10:41 +0200
Subject: Compare the latency result with a fixed threshold of 1ms

Change-Id: I2b4ea4ee6e6442d4ceac268e7bf3c6bf9277ff54
Signed-off-by: Gwenael Lambrouin
---
 behave_tests/features/non-regression.feature |  3 +--
 behave_tests/features/steps/steps.py         | 30 ++++++++++++++++++++++++++++
 2 files changed, 31 insertions(+), 2 deletions(-)

diff --git a/behave_tests/features/non-regression.feature b/behave_tests/features/non-regression.feature
index 89c3c4d..62daafa 100644
--- a/behave_tests/features/non-regression.feature
+++ b/behave_tests/features/non-regression.feature
@@ -31,8 +31,7 @@ Feature: non-regression
     When NFVbench API is ready
     Then run is started and waiting for result
     And push result to database
-    And verify latency result is in same range as the previous result
-    And verify latency result is in same range as the characterization result
+    And verify latency result is lower than 1000 microseconds
 
     Examples: Frame sizes and throughput percentages
       | frame_size | throughput |
diff --git a/behave_tests/features/steps/steps.py b/behave_tests/features/steps/steps.py
index 8798280..965b0c8 100644
--- a/behave_tests/features/steps/steps.py
+++ b/behave_tests/features/steps/steps.py
@@ -226,6 +226,36 @@ def get_latency_result_from_database(context, threshold='90%'):
     if last_result:
         compare_latency_values(context, last_result, threshold)
 
+
+@then('verify latency result is lower than {max_avg_latency_usec:g} microseconds')
+def check_latency_result_against_fixed_threshold(context, max_avg_latency_usec: float):
+    """Check latency result against a fixed threshold.
+
+    Check that the average latency measured during the current scenario run is
+    lower than or equal to the provided fixed reference value.
+
+    Args:
+        context: The context data of the current scenario run.  It includes
+            the test results for that run.
+
+        max_avg_latency_usec: Reference value to be used as a threshold.  This
+            is a maximum average latency expressed in microseconds.
+
+    Raises:
+        AssertionError: The latency result is strictly greater than the reference value.
+
+    """
+    # Get the just measured average latency (a float):
+    new_avg_latency_usec = context.synthesis['avg_delay_usec']
+
+    # Compare measured value to reference:
+    if new_avg_latency_usec > max_avg_latency_usec:
+        raise AssertionError("Average latency higher than max threshold: "
+                             "{avg_latency} usec > {threshold} usec".format(
+                                 avg_latency=round(new_avg_latency_usec),
+                                 threshold=round(max_avg_latency_usec)))
+
+
 @then(
     'verify result is in [{min_reference_value}pps, {max_reference_value}pps] range for throughput')
 def compare_throughput_pps_result_with_range_values(context, min_reference_value,
-- 
cgit 1.2.3-korg
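
For illustration, a minimal stand-alone sketch (not part of the patch itself) of how the new step behaves: behave parses the feature line "And verify latency result is lower than 1000 microseconds" against the {max_avg_latency_usec:g} pattern and binds max_avg_latency_usec=1000.0. The stub context object and the sample latency values below are assumptions made only for this example; the step body is duplicated here, without the @then decorator, so the sketch runs on its own.

# Hypothetical usage sketch, assuming the step implementation added in
# behave_tests/features/steps/steps.py above.
from types import SimpleNamespace


def check_latency_result_against_fixed_threshold(context, max_avg_latency_usec: float):
    # Copy of the step body for illustration only (no @then registration here).
    new_avg_latency_usec = context.synthesis['avg_delay_usec']
    if new_avg_latency_usec > max_avg_latency_usec:
        raise AssertionError("Average latency higher than max threshold: "
                             "{avg_latency} usec > {threshold} usec".format(
                                 avg_latency=round(new_avg_latency_usec),
                                 threshold=round(max_avg_latency_usec)))


# Passing case: measured average latency is below the 1000 usec threshold.
context = SimpleNamespace(synthesis={'avg_delay_usec': 123.4})
check_latency_result_against_fixed_threshold(context, 1000.0)

# Failing case: the step raises AssertionError, which behave reports as a
# failed "Then" step.
context.synthesis['avg_delay_usec'] = 1500.0
try:
    check_latency_result_against_fixed_threshold(context, 1000.0)
except AssertionError as exc:
    print(exc)  # Average latency higher than max threshold: 1500 usec > 1000 usec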