-rwxr-xr-x  ci/daily.sh                                 | 24
-rwxr-xr-x  ci/start_job.sh                             | 20
-rw-r--r--  storperf/db/test_results_db.py              | 10
-rw-r--r--  storperf/utilities/data_handler.py          | 71
-rw-r--r--  tests/utilities_tests/data_handler_test.py  | 36
5 files changed, 110 insertions(+), 51 deletions(-)
diff --git a/ci/daily.sh b/ci/daily.sh
index 80263ae..11af7f4 100755
--- a/ci/daily.sh
+++ b/ci/daily.sh
@@ -14,11 +14,6 @@ then
WORKSPACE=`pwd`
fi
-if [ -d $WORKSPACE/ci/job ]
-then
- sudo rm -rf $WORKSPACE/ci/job
-fi
-
git clone --depth 1 https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/ci/job/releng
virtualenv $WORKSPACE/ci/job/storperf_daily_venv
@@ -39,9 +34,21 @@ then
fi
export POD_NAME=$NODE_NAME
+# Unless we get a job that automatically deploys Apex or other installers,
+# we have to rely on there being a value written into a file to tell us
+# what scenario was deployed. This file needs to tell us:
+# DEPLOYED_SCENARIO
+# DISK_TYPE
+if [ -f ~/jenkins-env.rc ]
+then
+ . ~/jenkins-env.rc
+fi
+export SCENARIO_NAME=$DEPLOYED_SCENARIO
+
sudo find $WORKSPACE/ -name '*.db' -exec rm -fv {} \;
$WORKSPACE/ci/generate-admin-rc.sh
+echo "TEST_DB_URL=http://testresults.opnfv.org/test/api/v1" >> $WORKSPACE/ci/job/admin.rc
$WORKSPACE/ci/generate-environment.sh
. $WORKSPACE/ci/job/environment.rc
@@ -51,7 +58,7 @@ do
export "$env"
done < $WORKSPACE/ci/job/admin.rc
-echo "TEST_DB_URL=http://testresults.opnfv.org/test/api/v1" >> $WORKSPACE/ci/job/admin.rc
+export VERSION=`echo ${BUILD_TAG#*daily-} | cut -d- -f1`
echo ==========================================================================
echo Environment
@@ -71,7 +78,6 @@ echo ==========================================================================
export QUEUE_DEPTH=8
export BLOCK_SIZE=16384
export WORKLOAD=_warm_up
-export SCENARIO_NAME="${CINDER_BACKEND}_${WORKLOAD}"
WARM_UP=`$WORKSPACE/ci/start_job.sh | awk '/job_id/ {print $2}' | sed 's/"//g'`
WARM_UP_STATUS=`curl -s -X GET "http://127.0.0.1:5000/api/v1.0/jobs?id=$WARM_UP&type=status" \
@@ -91,9 +97,7 @@ echo ==========================================================================
export WORKLOAD=ws,wr,rs,rr,rw
export BLOCK_SIZE=2048,8192,16384
export QUEUE_DEPTH=1,2,8
-export SCENARIO_NAME="${CINDER_BACKEND}_${WORKLOAD}"
-export VERSION
-export BUILD_TAG
+export TEST_CASE=snia_steady_state
JOB=`$WORKSPACE/ci/start_job.sh \
| awk '/job_id/ {print $2}' | sed 's/"//g'`
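The daily.sh change above derives VERSION from Jenkins' BUILD_TAG with shell parameter expansion: ${BUILD_TAG#*daily-} strips everything up to and including "daily-", and cut -d- -f1 keeps the first hyphen-delimited field of what remains. A minimal Python sketch of the same extraction follows; the example BUILD_TAG value is an assumption, not something taken from this change.

# Sketch of the VERSION extraction added to daily.sh; the BUILD_TAG value
# below is an assumed example supplied here only for illustration.
build_tag = "jenkins-storperf-daily-euphrates-42"

# Drop everything up to and including the first "daily-", then keep the
# first hyphen-delimited field of the remainder.
version = build_tag.split("daily-", 1)[-1].split("-")[0]
print(version)  # euphrates
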
diff --git a/ci/start_job.sh b/ci/start_job.sh
index 51f35cb..86b8fc8 100755
--- a/ci/start_job.sh
+++ b/ci/start_job.sh
@@ -13,17 +13,23 @@ cat << EOF > body.json
"block_sizes": "${BLOCK_SIZE}",
"nowarm": "string",
"nossd": "string",
- "deadline": 600,
+ "deadline": 1200,
"queue_depths": "${QUEUE_DEPTH}",
"workload": "${WORKLOAD}",
"metadata": {
- "disk_type": "SSD",
- "pod_name": "${POD_NAME}",
- "scenario_name": "${SCENARIO_NAME}",
- "storage_node_count": ${CINDER_NODES}
+ "disk_type": "${DISK_TYPE}",
+ "pod_name": "${POD_NAME}",
+ "scenario_name": "${SCENARIO_NAME}",
+ "storage_node_count": ${CINDER_NODES},
+ "version": "${VERSION}",
+ "build_tag": "${BUILD_TAG}",
+ "test_case": "${TEST_CASE}"
}
}
EOF
-curl -s -X POST --header 'Content-Type: application/json' --header 'Accept: application/json' \
-  -d @body.json http://127.0.0.1:5000/api/v1.0/jobs
\ No newline at end of file
+cat body.json
+
+curl -s -X POST --header 'Content-Type: application/json' \
+ --header 'Accept: application/json' \
+ -d @body.json http://127.0.0.1:5000/api/v1.0/jobs
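The rewritten start_job.sh expands the workload parameters and metadata into body.json, echoes it into the build log, and POSTs it to the local StorPerf API; the caller then parses job_id out of the response. Below is a rough Python equivalent of that request using the requests library the project already depends on; every field value is a placeholder standing in for the environment variable the script substitutes, and the scenario, pod, and build tag shown are assumed examples.

import requests

# Placeholder body mirroring the fields start_job.sh writes into body.json.
body = {
    "block_sizes": "2048,8192,16384",
    "queue_depths": "1,2,8",
    "workload": "ws,wr,rs,rr,rw",
    "deadline": 1200,
    "metadata": {
        "disk_type": "SSD",                        # DISK_TYPE from ~/jenkins-env.rc
        "scenario_name": "os-nosdn-nofeature-ha",  # DEPLOYED_SCENARIO (assumed example)
        "pod_name": "example-pod",                 # POD_NAME (assumed example)
        "build_tag": "jenkins-storperf-daily-42",  # BUILD_TAG (assumed example)
        "test_case": "snia_steady_state",
    },
}

response = requests.post("http://127.0.0.1:5000/api/v1.0/jobs", json=body)
print(response.json().get("job_id"))
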
diff --git a/storperf/db/test_results_db.py b/storperf/db/test_results_db.py
index 75cb05d..bb328db 100644
--- a/storperf/db/test_results_db.py
+++ b/storperf/db/test_results_db.py
@@ -9,7 +9,6 @@
import json
import os
-
import requests
@@ -29,17 +28,18 @@ def get_installer_type(logger=None):
def push_results_to_db(db_url, project, case_name,
test_start, test_stop, logger, pod_name,
- version, scenario, criteria, build_tag, payload):
+ version, scenario, criteria, build_tag, details):
"""
POST results to the Result target DB
"""
url = db_url + "/results"
installer = get_installer_type(logger)
+
params = {"project_name": project, "case_name": case_name,
- "start_date": test_start, "stop_date": test_stop,
"pod_name": pod_name, "installer": installer,
"version": version, "scenario": scenario, "criteria": criteria,
- "build_tag": build_tag, "details": payload}
+ "build_tag": build_tag, "start_date": test_start,
+ "stop_date": test_stop, "details": details}
headers = {'Content-Type': 'application/json'}
try:
@@ -56,5 +56,5 @@ def push_results_to_db(db_url, project, case_name,
logger.error("Error [push_results_to_db('%s', '%s', '%s', " +
"'%s', '%s', '%s', '%s', '%s', '%s')]:" %
(db_url, project, case_name, pod_name, version,
- scenario, criteria, build_tag, payload[:512]), e)
+ scenario, criteria, build_tag, details[:512]), e)
return False
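After this change push_results_to_db receives the result payload as details and groups start_date/stop_date next to it in the params dict; the positional signature is otherwise unchanged apart from the renamed last argument. A hedged, illustrative call with placeholder values is sketched below, showing the argument order that the new unit test further down indexes into (argument 3 is the start date, 4 the stop date, 9 the criteria).

import logging
from storperf.db import test_results_db

logger = logging.getLogger(__name__)

# Placeholder values; only the positional order matters for the test
# assertions, and the pod, version, and build tag are assumed examples.
test_results_db.push_results_to_db(
    "http://testresults.opnfv.org/test/api/v1",  # db_url
    "storperf",                                  # project
    "snia_steady_state",                         # case_name
    "2017-09-04 21:05:00",                       # test_start (argument index 3)
    "2017-09-04 21:20:00",                       # test_stop  (argument index 4)
    logger,
    "example-pod",                               # pod_name (assumed example)
    "euphrates",                                 # version (assumed example)
    "ceph_ws,wr,rs,rr,rw",                       # scenario
    "PASS",                                      # criteria   (argument index 9)
    "jenkins-storperf-daily-42",                 # build_tag (assumed example)
    {"metrics": {}},                             # details payload
)
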
diff --git a/storperf/utilities/data_handler.py b/storperf/utilities/data_handler.py
index 2d4194a..2f79054 100644
--- a/storperf/utilities/data_handler.py
+++ b/storperf/utilities/data_handler.py
@@ -124,42 +124,55 @@ class DataHandler(object):
return SteadyState.steady_state(data_series)
def _push_to_db(self, executor):
- test_db = os.environ.get('TEST_DB_URL')
- if test_db is not None:
- pod_name = dictionary.get_key_from_dict(executor.metadata,
- 'pod_name',
- 'Unknown')
- version = dictionary.get_key_from_dict(executor.metadata,
- 'version',
- 'Unknown')
- scenario = dictionary.get_key_from_dict(executor.metadata,
- 'scenario_name',
- 'Unknown')
- build_tag = dictionary.get_key_from_dict(executor.metadata,
- 'build_tag',
- 'Unknown')
- duration = executor.end_time - executor.start_time
+ pod_name = dictionary.get_key_from_dict(executor.metadata,
+ 'pod_name',
+ 'Unknown')
+ version = dictionary.get_key_from_dict(executor.metadata,
+ 'version',
+ 'Unknown')
+ scenario = dictionary.get_key_from_dict(executor.metadata,
+ 'scenario_name',
+ 'Unknown')
+ build_tag = dictionary.get_key_from_dict(executor.metadata,
+ 'build_tag',
+ 'Unknown')
+ test_case = dictionary.get_key_from_dict(executor.metadata,
+ 'test_case',
+ 'Unknown')
+ duration = executor.end_time - executor.start_time
+
+ payload = executor.metadata
+
+ steady_state = True
+ for _, value in executor.metadata['steady_state'].items():
+ steady_state = steady_state and value
+
+ payload['timestart'] = executor.start_time
+ payload['duration'] = duration
+ graphite_db = GraphiteDB()
+ payload['metrics'] = graphite_db.fetch_averages(
+ executor.job_db.job_id)
+ if steady_state:
+ criteria = 'PASS'
+ else:
+ criteria = 'FAIL'
- self.logger.info("Pushing results to %s" % (test_db))
+ start_time = time.strftime('%Y-%m-%d %H:%M:%S',
+ time.gmtime(executor.start_time))
- payload = executor.metadata
- payload['timestart'] = executor.start_time
- payload['duration'] = duration
- payload['status'] = 'OK'
- graphite_db = GraphiteDB()
- payload['metrics'] = graphite_db.fetch_averages(
- executor.job_db.job_id)
- criteria = {}
- criteria['block_sizes'] = executor.block_sizes
- criteria['queue_depths'] = executor.queue_depths
+ end_time = time.strftime('%Y-%m-%d %H:%M:%S',
+ time.gmtime(executor.end_time))
+ test_db = os.environ.get('TEST_DB_URL')
+ if test_db is not None:
+ self.logger.info("Pushing results to %s" % (test_db))
try:
test_results_db.push_results_to_db(test_db,
"storperf",
- "Latency Test",
- executor.start_time,
- executor.end_time,
+ test_case,
+ start_time,
+ end_time,
self.logger,
pod_name,
version,
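The reworked _push_to_db treats a run as steady only when every entry in executor.metadata['steady_state'] is True, reports PASS or FAIL accordingly, and formats the start and end times as UTC strings. A condensed, standalone sketch of that logic follows; the sample steady_state dict and epoch timestamps mirror the values used in the unit test below, and all() stands in for the explicit loop in the patch.

import time

steady_state_results = {
    "rr.queue-depth.8.block-size.16384": True,
    "rr.queue-depth.8.block-size.2048": False,
    "rr.queue-depth.8.block-size.8192": True,
}

# A single failing workload fails the whole run.
steady_state = all(steady_state_results.values())
criteria = 'PASS' if steady_state else 'FAIL'

# Times are rendered in UTC, which is what the expected strings in the
# unit test assume.
start_time = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(1504559100))
end_time = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(1504560000))

print(criteria, start_time, end_time)
# FAIL 2017-09-04 21:05:00 2017-09-04 21:20:00
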
diff --git a/tests/utilities_tests/data_handler_test.py b/tests/utilities_tests/data_handler_test.py
index 3813957..7963c9f 100644
--- a/tests/utilities_tests/data_handler_test.py
+++ b/tests/utilities_tests/data_handler_test.py
@@ -44,6 +44,7 @@ class DataHandlerTest(unittest.TestCase):
self.job_db = mock
self.pushed = False
self.current_workload = None
+ self.db_results = None
pass
@property
@@ -52,6 +53,7 @@ class DataHandlerTest(unittest.TestCase):
def push_results_to_db(self, *args):
self.pushed = True
+ self.db_results = args
pass
def terminate(self):
@@ -131,6 +133,13 @@ class DataHandlerTest(unittest.TestCase):
self._terminated = True
mock_results_db.side_effect = self.push_results_to_db
mock_graphite_db.side_effect = MockGraphiteDB
+ self.metadata = {
+ "steady_state": {
+ "rr.queue-depth.8.block-size.16384": True,
+ "rr.queue-depth.8.block-size.2048": False,
+ "rr.queue-depth.8.block-size.8192": True,
+ },
+ }
self.data_handler.data_event(self)
self.assertEqual(True, self.pushed)
@@ -248,3 +257,30 @@ class DataHandlerTest(unittest.TestCase):
self.assertEqual(True, self._terminated)
self.assertEqual(False, self.pushed)
+
+ @mock.patch.dict(os.environ, {'TEST_DB_URL': 'mock'})
+ @mock.patch("storperf.db.test_results_db.push_results_to_db")
+ def test_payload_report(self,
+ mock_results_db):
+ mock_results_db.side_effect = self.push_results_to_db
+ self.start_time = 1504559100
+ self.end_time = 1504560000
+ self.metadata = {
+ "scenario_name": "ceph_ws,wr,rs,rr,rw",
+ "status": "OK",
+ "steady_state": {
+ "rr.queue-depth.8.block-size.16384": True,
+ "rr.queue-depth.8.block-size.2048": False,
+ "rr.queue-depth.8.block-size.8192": True,
+ },
+ "storage_node_count": 5,
+ "volume_size": 10
+ }
+ self.data_handler._push_to_db(self)
+ self.assertEqual('FAIL', self.db_results[9],
+ 'Expected FAIL in criteria')
+ self.assertEqual('2017-09-04 21:05:00', self.db_results[3],
+ 'Start time')
+ self.assertEqual('2017-09-04 21:20:00', self.db_results[4],
+ 'End time')
+
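
The new test stubs out push_results_to_db with a method that simply records its positional arguments in self.db_results, so the assertions can index into them. A standalone illustration of that capture pattern is sketched below with placeholder values; it is not the project's test class itself.

# The stub stores whatever the code under test passed, positionally.
captured = {}

def fake_push_results_to_db(*args):
    captured['args'] = args

fake_push_results_to_db(
    'mock', 'storperf', 'snia_steady_state',
    '2017-09-04 21:05:00', '2017-09-04 21:20:00',
    None, 'example-pod', 'euphrates', 'ceph_ws,wr,rs,rr,rw',
    'FAIL', 'jenkins-storperf-daily-42', {})

assert captured['args'][3] == '2017-09-04 21:05:00'  # start_date
assert captured['args'][4] == '2017-09-04 21:20:00'  # stop_date
assert captured['args'][9] == 'FAIL'                 # criteria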