-rwxr-xr-x  ci/daily.sh                                  | 25
-rw-r--r--  rest_server.py                               | 19
-rw-r--r--  storperf/storperf_master.py                  | 10
-rw-r--r--  storperf/test_executor.py                    |  6
-rw-r--r--  storperf/utilities/data_handler.py           | 15
-rw-r--r--  tests/utilities_tests/data_handler_test.py   | 13
6 files changed, 46 insertions(+), 42 deletions(-)
diff --git a/ci/daily.sh b/ci/daily.sh
index 10a06c5..21fbc5c 100755
--- a/ci/daily.sh
+++ b/ci/daily.sh
@@ -14,20 +14,6 @@ then
WORKSPACE=`pwd`
fi
-export AGENT_COUNT=${AGENT_COUNT:-$CINDER_NODES}
-export VOLUME_SIZE=${VOLUME_SIZE:-2}
-export WORKLOADS=${WORKLOADS:-ws,wr,rs,rr,rw}
-export BLOCK_SIZES=${BLOCK_SIZES:-1024,16384}
-export QUEUE_DEPTHS=${QUEUE_DEPTHS:-1,4}
-export STEADY_STATE_SAMPLES=${STEADY_STATE_SAMPLES:-10}
-export DEADLINE=${DEADLINE:-`expr $STEADY_STATE_SAMPLES \* 3`}
-export TEST_CASE=${TEST_CASE:-snia_steady_state}
-export SCENARIO_NAME=${DEPLOY_SCENARIO:-none}
-export DISK_TYPE=${DISK_TYPE:-unspecified}
-
-# This is set by Jenkins, but if we are running manually, just use the
-# current hostname.
-export POD_NAME=${NODE_NAME:-`hostname`}
git clone --depth 1 https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/ci/job/releng
@@ -54,7 +40,18 @@ do
export "$env"
done < $WORKSPACE/ci/job/admin.rc
+export AGENT_COUNT=${AGENT_COUNT:-$CINDER_NODES}
+export BLOCK_SIZES=${BLOCK_SIZES:-1024,16384}
+export DEADLINE=${DEADLINE:-`expr $STEADY_STATE_SAMPLES \* 3`}
+export DISK_TYPE=${DISK_TYPE:-unspecified}
+export QUEUE_DEPTHS=${QUEUE_DEPTHS:-1,4}
+export POD_NAME=${NODE_NAME:-`hostname`}
+export SCENARIO_NAME=${DEPLOY_SCENARIO:-none}
+export STEADY_STATE_SAMPLES=${STEADY_STATE_SAMPLES:-10}
+export TEST_CASE=${TEST_CASE:-snia_steady_state}
export VERSION=`echo ${BUILD_TAG#*daily-} | cut -d- -f1`
+export VOLUME_SIZE=${VOLUME_SIZE:-2}
+export WORKLOADS=${WORKLOADS:-ws,wr,rs,rr,rw}
echo ==========================================================================
echo Environment
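Note: the exports above move so that they are applied after admin.rc has been sourced, and DEADLINE is still derived from STEADY_STATE_SAMPLES. A minimal Python sketch of that defaulting logic follows; the variable names come from the script, the printed values are purely illustrative.

import os

# Default the sample count first, then derive the deadline from it,
# mirroring `expr $STEADY_STATE_SAMPLES \* 3` in ci/daily.sh.
steady_state_samples = int(os.environ.get("STEADY_STATE_SAMPLES", "10"))
deadline = int(os.environ.get("DEADLINE", str(steady_state_samples * 3)))

print(steady_state_samples, deadline)  # 10 30 when neither variable is set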
diff --git a/rest_server.py b/rest_server.py
index f6075f9..1033ca8 100644
--- a/rest_server.py
+++ b/rest_server.py
@@ -10,6 +10,9 @@
import json
import logging.config
import os
+from storperf.db.job_db import JobDB
+from storperf.plot.barchart import Barchart
+from storperf.storperf_master import StorPerfMaster
import sys
from flask import abort, Flask, request, jsonify, send_from_directory
@@ -17,10 +20,6 @@ from flask_cors import CORS, cross_origin
from flask_restful import Resource, Api, fields
from flask_restful_swagger import swagger
-from storperf.db.job_db import JobDB
-from storperf.plot.barchart import Barchart
-from storperf.storperf_master import StorPerfMaster
-
app = Flask(__name__, static_url_path="")
CORS(app)
@@ -217,9 +216,8 @@ class Configure(Resource):
class WorkloadModel:
resource_fields = {
'target': fields.String,
- 'nossd': fields.String,
- 'nowarm': fields.String,
'deadline': fields.Integer,
+ "steady_state_samples": fields.Integer,
'workload': fields.String,
'queue_depths': fields.String,
'block_sizes': fields.String
@@ -302,12 +300,6 @@ following parameters:
"deadline": if specified, the maximum duration in minutes
for any single test iteration.
-"nossd": Do not fill the target with random
-data prior to running the test,
-
-"nowarm": Do not refill the target with data
-prior to running any further tests,
-
"workload":if specified, the workload to run. Defaults to all.
""",
"required": True,
@@ -338,6 +330,9 @@ prior to running any further tests,
storperf.filename = request.json['target']
if ('deadline' in request.json):
storperf.deadline = request.json['deadline']
+ if ('steady_state_samples' in request.json):
+ storperf.steady_state_samples = request.json[
+ 'steady_state_samples']
if ('queue_depths' in request.json):
storperf.queue_depths = request.json['queue_depths']
if ('block_sizes' in request.json):
diff --git a/storperf/storperf_master.py b/storperf/storperf_master.py
index 440f5b3..5432ece 100644
--- a/storperf/storperf_master.py
+++ b/storperf/storperf_master.py
@@ -174,6 +174,14 @@ class StorPerfMaster(object):
self._test_executor.deadline = value
@property
+ def steady_state_samples(self):
+ return self._test_executor.steady_state_samples
+
+ @steady_state_samples.setter
+ def steady_state_samples(self, value):
+ self._test_executor.steady_state_samples = value
+
+ @property
def queue_depths(self):
return self._test_executor.queue_depths
@@ -386,6 +394,8 @@ class StorPerfMaster(object):
"username": os.environ.get('OS_USERNAME'),
"password": os.environ.get('OS_PASSWORD'),
"auth_url": os.environ.get('OS_AUTH_URL'),
+ "project_domain_id":
+ os.environ.get('OS_PROJECT_DOMAIN_ID'),
"project_domain_name":
os.environ.get('OS_PROJECT_DOMAIN_NAME'),
"project_id": os.environ.get('OS_PROJECT_ID'),
diff --git a/storperf/test_executor.py b/storperf/test_executor.py
index 2fadc81..b2d5914 100644
--- a/storperf/test_executor.py
+++ b/storperf/test_executor.py
@@ -15,15 +15,14 @@ from os import listdir
import os
from os.path import isfile, join
import sched
-from threading import Thread
-import time
-
from storperf.carbon.converter import Converter
from storperf.carbon.emitter import CarbonMetricTransmitter
from storperf.db.job_db import JobDB
from storperf.fio.fio_invoker import FIOInvoker
from storperf.utilities.data_handler import DataHandler
from storperf.utilities.thread_gate import ThreadGate
+from threading import Thread
+import time
class UnknownWorkload(Exception):
@@ -37,6 +36,7 @@ class TestExecutor(object):
self.workload_modules = []
self.filename = None
self.deadline = None
+ self.steady_state_samples = 10
self.metadata = {}
self.start_time = None
self.end_time = None
diff --git a/storperf/utilities/data_handler.py b/storperf/utilities/data_handler.py
index d95d6fa..1da869c 100644
--- a/storperf/utilities/data_handler.py
+++ b/storperf/utilities/data_handler.py
@@ -9,9 +9,6 @@
import logging
import os
-from time import sleep
-import time
-
from storperf.db import test_results_db
from storperf.db.graphite_db import GraphiteDB
from storperf.db.job_db import JobDB
@@ -19,13 +16,14 @@ from storperf.utilities import data_treatment as DataTreatment
from storperf.utilities import dictionary
from storperf.utilities import math as math
from storperf.utilities import steady_state as SteadyState
+from time import sleep
+import time
class DataHandler(object):
def __init__(self):
self.logger = logging.getLogger(__name__)
- self.samples = 10
self.job_db = JobDB()
"""
@@ -51,7 +49,8 @@ class DataHandler(object):
series = self._lookup_prior_data(executor, metric, io_type)
series = self._convert_timestamps_to_samples(
executor, series)
- steady = self._evaluate_prior_data(series)
+ steady = self._evaluate_prior_data(
+ series, executor.steady_state_samples)
self.logger.debug("Steady state for %s %s: %s"
% (io_type, metric, steady))
@@ -94,7 +93,7 @@ class DataHandler(object):
# A bit of a hack here as Carbon might not be finished storing the
# data we just sent to it
now = int(time.time())
- backtime = 60 * (self.samples + 2)
+ backtime = 60 * (executor.steady_state_samples + 2)
data_series = graphite_db.fetch_series(workload,
metric,
io_type,
@@ -135,13 +134,13 @@ class DataHandler(object):
return normalized_series
- def _evaluate_prior_data(self, data_series):
+ def _evaluate_prior_data(self, data_series, samples):
self.logger.debug("Data series: %s" % data_series)
number_of_samples = len(data_series)
if number_of_samples == 0:
return False
- if (number_of_samples < self.samples):
+ if (number_of_samples < samples):
self.logger.debug("Only %s samples, ignoring" % number_of_samples)
return False
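The steady-state check now takes its sample count from the executor instead of the hard-coded self.samples. An illustrative call of the updated helper; the series values are made up, and constructing DataHandler assumes the local job database can be initialised.

from storperf.utilities.data_handler import DataHandler

handler = DataHandler()
series = [[4804560000, 219.37],
          [4804560100, 219.28],
          [4804560200, 217.75]]

# Only 3 samples against a requested 10, so steady state is not declared.
print(handler._evaluate_prior_data(series, 10))  # False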
diff --git a/tests/utilities_tests/data_handler_test.py b/tests/utilities_tests/data_handler_test.py
index 4630d54..6d57b0d 100644
--- a/tests/utilities_tests/data_handler_test.py
+++ b/tests/utilities_tests/data_handler_test.py
@@ -8,12 +8,11 @@
##############################################################################
import os
+from storperf.utilities.data_handler import DataHandler
import unittest
import mock
-from storperf.utilities.data_handler import DataHandler
-
class MockGraphiteDB(object):
@@ -32,6 +31,7 @@ class DataHandlerTest(unittest.TestCase):
self._terminated = False
self.args = None
self.start_time = 0
+ self.steady_state_samples = 10
self.end_time = 1
self.metadata = {}
self.block_sizes = "1"
@@ -96,7 +96,8 @@ class DataHandlerTest(unittest.TestCase):
[1480456040, 219.28],
[1480456050, 217.75]]
- actual = self.data_handler._evaluate_prior_data(series)
+ actual = self.data_handler._evaluate_prior_data(
+ series, self.steady_state_samples)
self.assertEqual(False, actual)
def test_long_not_steady_sample(self):
@@ -106,7 +107,8 @@ class DataHandlerTest(unittest.TestCase):
[4804560300, 21937],
[4804560400, 21928],
[4804560500, 21775]]
- actual = self.data_handler._evaluate_prior_data(series)
+ actual = self.data_handler._evaluate_prior_data(
+ series, self.steady_state_samples)
self.assertEqual(False, actual)
def test_long_steady_sample(self):
@@ -120,7 +122,8 @@ class DataHandlerTest(unittest.TestCase):
[4804560300, 219.37],
[4804560400, 219.28],
[4804560500, 217.75]]
- actual = self.data_handler._evaluate_prior_data(series)
+ actual = self.data_handler._evaluate_prior_data(
+ series, self.steady_state_samples)
self.assertEqual(True, actual)
@mock.patch.dict(os.environ, {'TEST_DB_URL': 'mock'})