author      mbeierl <mark.beierl@dell.com>          2017-06-07 21:11:55 -0400
committer   Mark Beierl <mark.beierl@dell.com>      2017-06-08 02:30:58 +0000
commit      752372ec7c50f9cd1e3f012f33e50ee88362ad81 (patch)
tree        ade229a4d80876036d5a356dd70039ef619015a0 /storperf
parent      a2de73c1d6d66bb9624e192d95c6a540cf31b6f7 (diff)
Allow User-Specified Sample Size
Adds the ability to accept steady_state_sample_size as a parameter.
Removes the deprecated nowarm and nossd options. Adds OS_PROJECT_DOMAIN_ID
to the OpenStack credentials read from the environment.

Change-Id: I00c01b05cd2cbf001bc5446faa30f36137350ccf
JIRA: STORPERF-139
Signed-off-by: mbeierl <mark.beierl@dell.com>
(cherry picked from commit c7806a0f08f6114d8b1f037a77af041a2b0364d5)
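For context, a minimal usage sketch of how a client might pass the new sample
size when submitting a run. The REST endpoint, port, and the surrounding job
fields are assumptions for illustration; only steady_state_sample_size comes
from this commit's subject.

# Hedged usage sketch: endpoint path, port, and the other job fields
# are assumed for illustration; steady_state_sample_size is the
# parameter this commit introduces.
import requests

job = {
    "block_sizes": "4096",
    "queue_depths": "8",
    "deadline": 120,                    # minutes (assumed field)
    "steady_state_sample_size": 10,     # new user-specified sample size
}
response = requests.post("http://storperf:5000/api/v1.0/jobs", json=job)
print(response.status_code, response.json())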
Diffstat (limited to 'storperf')
-rw-r--r--  storperf/storperf_master.py         10
-rw-r--r--  storperf/test_executor.py            6
-rw-r--r--  storperf/utilities/data_handler.py  15
3 files changed, 20 insertions(+), 11 deletions(-)
diff --git a/storperf/storperf_master.py b/storperf/storperf_master.py
index 440f5b3..5432ece 100644
--- a/storperf/storperf_master.py
+++ b/storperf/storperf_master.py
@@ -174,6 +174,14 @@ class StorPerfMaster(object):
self._test_executor.deadline = value
@property
+ def steady_state_samples(self):
+ return self._test_executor.steady_state_samples
+
+ @steady_state_samples.setter
+ def steady_state_samples(self, value):
+ self._test_executor.steady_state_samples = value
+
+ @property
def queue_depths(self):
return self._test_executor.queue_depths
@@ -386,6 +394,8 @@ class StorPerfMaster(object):
"username": os.environ.get('OS_USERNAME'),
"password": os.environ.get('OS_PASSWORD'),
"auth_url": os.environ.get('OS_AUTH_URL'),
+ "project_domain_id":
+ os.environ.get('OS_PROJECT_DOMAIN_ID'),
"project_domain_name":
os.environ.get('OS_PROJECT_DOMAIN_NAME'),
"project_id": os.environ.get('OS_PROJECT_ID'),
diff --git a/storperf/test_executor.py b/storperf/test_executor.py
index 2fadc81..b2d5914 100644
--- a/storperf/test_executor.py
+++ b/storperf/test_executor.py
@@ -15,15 +15,14 @@ from os import listdir
import os
from os.path import isfile, join
import sched
-from threading import Thread
-import time
-
from storperf.carbon.converter import Converter
from storperf.carbon.emitter import CarbonMetricTransmitter
from storperf.db.job_db import JobDB
from storperf.fio.fio_invoker import FIOInvoker
from storperf.utilities.data_handler import DataHandler
from storperf.utilities.thread_gate import ThreadGate
+from threading import Thread
+import time
class UnknownWorkload(Exception):
@@ -37,6 +36,7 @@ class TestExecutor(object):
self.workload_modules = []
self.filename = None
self.deadline = None
+ self.steady_state_samples = 10
self.metadata = {}
self.start_time = None
self.end_time = None
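Taken together with the storperf_master.py hunk, this change wires the sample
count through a simple property delegation: the master forwards
steady_state_samples to its executor, which now defaults to 10. A standalone
toy version of that pattern (illustrative only, not the real modules) looks
like this:

class TestExecutor(object):
    def __init__(self):
        self.steady_state_samples = 10   # new default from this change


class StorPerfMaster(object):
    def __init__(self):
        self._test_executor = TestExecutor()

    @property
    def steady_state_samples(self):
        return self._test_executor.steady_state_samples

    @steady_state_samples.setter
    def steady_state_samples(self, value):
        self._test_executor.steady_state_samples = value


master = StorPerfMaster()
master.steady_state_samples = 12   # overrides the executor default of 10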
diff --git a/storperf/utilities/data_handler.py b/storperf/utilities/data_handler.py
index d95d6fa..1da869c 100644
--- a/storperf/utilities/data_handler.py
+++ b/storperf/utilities/data_handler.py
@@ -9,9 +9,6 @@
import logging
import os
-from time import sleep
-import time
-
from storperf.db import test_results_db
from storperf.db.graphite_db import GraphiteDB
from storperf.db.job_db import JobDB
@@ -19,13 +16,14 @@ from storperf.utilities import data_treatment as DataTreatment
from storperf.utilities import dictionary
from storperf.utilities import math as math
from storperf.utilities import steady_state as SteadyState
+from time import sleep
+import time
class DataHandler(object):
def __init__(self):
self.logger = logging.getLogger(__name__)
- self.samples = 10
self.job_db = JobDB()
"""
@@ -51,7 +49,8 @@ class DataHandler(object):
series = self._lookup_prior_data(executor, metric, io_type)
series = self._convert_timestamps_to_samples(
executor, series)
- steady = self._evaluate_prior_data(series)
+ steady = self._evaluate_prior_data(
+ series, executor.steady_state_samples)
self.logger.debug("Steady state for %s %s: %s"
% (io_type, metric, steady))
@@ -94,7 +93,7 @@ class DataHandler(object):
# A bit of a hack here as Carbon might not be finished storing the
# data we just sent to it
now = int(time.time())
- backtime = 60 * (self.samples + 2)
+ backtime = 60 * (executor.steady_state_samples + 2)
data_series = graphite_db.fetch_series(workload,
metric,
io_type,
@@ -135,13 +134,13 @@ class DataHandler(object):
return normalized_series
- def _evaluate_prior_data(self, data_series):
+ def _evaluate_prior_data(self, data_series, samples):
self.logger.debug("Data series: %s" % data_series)
number_of_samples = len(data_series)
if number_of_samples == 0:
return False
- if (number_of_samples < self.samples):
+ if (number_of_samples < samples):
self.logger.debug("Only %s samples, ignoring" % number_of_samples)
return False
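A hedged sketch of the evaluation path after this change: the sample count now
travels with the executor rather than living on DataHandler, and the Graphite
lookback window is derived from it. The steady-state criteria themselves live
in storperf.utilities.steady_state and are only stubbed here; the 60-second
multiplier is assumed to correspond to one data point per minute.

def is_steady(series):
    # Placeholder: the real criteria are implemented in
    # storperf.utilities.steady_state and are not reproduced here.
    return True


def evaluate_prior_data(data_series, samples):
    """Mirror of _evaluate_prior_data's guard clauses."""
    number_of_samples = len(data_series)
    if number_of_samples == 0:
        return False
    if number_of_samples < samples:
        # "Only %s samples, ignoring" branch: not enough history yet.
        return False
    return is_steady(data_series)


def lookback_seconds(samples):
    # Mirrors backtime = 60 * (samples + 2): the sample window in minutes,
    # plus two extra minutes of slack so Carbon has time to finish storing
    # the most recently sent points.
    return 60 * (samples + 2)


assert lookback_seconds(10) == 720   # default of 10 samples -> 12 minutes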