Diffstat (limited to 'storperf/db')
-rw-r--r--  storperf/db/configuration_db.py |  2
-rw-r--r--  storperf/db/graphite_db.py      | 81
-rw-r--r--  storperf/db/job_db.py           | 47
3 files changed, 91 insertions, 39 deletions
diff --git a/storperf/db/configuration_db.py b/storperf/db/configuration_db.py
index b12394e..5b996c7 100644
--- a/storperf/db/configuration_db.py
+++ b/storperf/db/configuration_db.py
@@ -7,7 +7,7 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-from _sqlite3 import OperationalError
+from sqlite3 import OperationalError
from threading import Lock
import logging
import sqlite3
diff --git a/storperf/db/graphite_db.py b/storperf/db/graphite_db.py
new file mode 100644
index 0000000..c62340c
--- /dev/null
+++ b/storperf/db/graphite_db.py
@@ -0,0 +1,81 @@
+from storperf.db.job_db import JobDB
+import json
+import logging
+
+import requests
+
+
+class GraphiteDB(object):
+
+ def __init__(self):
+ """
+ """
+ self._job_db = JobDB()
+ self.logger = logging.getLogger(__name__)
+
+ def fetch_averages(self, workload):
+ workload_executions = self._job_db.fetch_workloads(workload)
+
+ # Create a map of job runs
+ workload_names = {}
+ for workload_execution in workload_executions:
+ name = '.'.join(workload_execution[0].split('.')[0:6])
+ if name in workload_names:
+ workload_record = workload_names[name]
+ start = workload_record[0]
+ end = workload_record[1]
+ else:
+ start = None
+ end = None
+
+ if start is None or workload_execution[1] < start:
+ start = workload_execution[1]
+
+ if end is None or workload_execution[2] > end:
+ end = workload_execution[2]
+
+ workload_names[name] = [start, end]
+
+ averages = {}
+
+ for io_type in ['read', 'write']:
+ for workload_name, times in workload_names.iteritems():
+ workload_pattern = self.make_fullname_pattern(workload_name)
+ request = ("http://127.0.0.1:8000/render/?target="
+ "averageSeries(%s.jobs.1.%s.lat.mean)"
+ "&format=json"
+ "&from=%s"
+ "&until=%s" %
+ (workload_pattern, io_type, times[0], times[1]))
+ self.logger.debug("Calling %s" % (request))
+
+ response = requests.get(request)
+ if (response.status_code == 200):
+ short_name = '.'.join(workload_name.split('.')[1:6])
+ averages[short_name + "." + io_type] = \
+ self._average_results(json.loads(response.content))
+
+ return averages
+
+ def _average_results(self, results):
+
+ for item in results:
+ datapoints = item['datapoints']
+
+ total = 0
+ count = 0
+
+ for datapoint in datapoints:
+ if datapoint[0] is not None:
+ total += datapoint[0]
+ count += 1
+
+ average = total / count
+
+ return average
+
+ def make_fullname_pattern(self, workload):
+ parts = workload.split('.')
+ wildcards_needed = 7 - len(parts)
+ fullname = workload + (".*" * wildcards_needed)
+ return fullname
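
The hunk above adds the new GraphiteDB helper. A minimal usage sketch (not part of this change) follows; it assumes the storperf package is importable, a Graphite instance is listening on 127.0.0.1:8000, and JobDB has already recorded executions for the workload prefix used here, which is illustrative only.

    from storperf.db.graphite_db import GraphiteDB

    # Hypothetical workload prefix; any prefix recorded by JobDB would do.
    graphite = GraphiteDB()
    averages = graphite.fetch_averages("rw")

    # fetch_averages returns a dict keyed by "<short workload name>.<read|write>",
    # each value being the mean latency averaged over the run's datapoints.
    for metric, value in averages.items():
        print("%s: %s" % (metric, value))
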
diff --git a/storperf/db/job_db.py b/storperf/db/job_db.py
index 0e94358..d42568a 100644
--- a/storperf/db/job_db.py
+++ b/storperf/db/job_db.py
@@ -7,7 +7,7 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-from _sqlite3 import OperationalError
+from sqlite3 import OperationalError
from threading import Lock
import calendar
import logging
@@ -15,8 +15,6 @@ import sqlite3
import time
import uuid
-import requests
-
db_mutex = Lock()
@@ -172,22 +170,14 @@ class JobDB(object):
db.commit()
db.close()
- def fetch_results(self, workload_prefix=""):
- if (workload_prefix is None):
- workload_prefix = ""
-
- workload_prefix = workload_prefix + "%"
-
- start_time = str(calendar.timegm(time.gmtime()))
- end_time = "0"
-
- self.logger.debug("Workload like: " + workload_prefix)
+ def fetch_workloads(self, workload):
+ workload_prefix = workload + "%"
+ workload_executions = []
with db_mutex:
-
db = sqlite3.connect(JobDB.db_name)
cursor = db.cursor()
- cursor.execute("""select start, end, workload
+ cursor.execute("""select workload, start, end
from jobs where workload like ?""",
(workload_prefix,))
@@ -195,27 +185,8 @@ class JobDB(object):
row = cursor.fetchone()
if (row is None):
break
-
- start_time = str(row[0])
- end_time = str(row[1])
- workload = str(row[2])
-
- # for most of these stats, we just want the final one
- # as that is cumulative average or whatever for the whole
- # run
-
- self.logger.info("workload=" + workload +
- "start=" + start_time + " end=" + end_time)
-
- request = ("http://127.0.0.1:8000/render/?target="
- "*.%s.%s.jobs.1.*.clat.mean"
- "&format=json&from=%s&until=%s"
- % (self.job_id, workload, start_time, end_time))
- response = requests.get(request)
-
- if (response.status_code == 200):
- data = response.json()
- print data
- else:
- pass
+ workload_execution = [row[0], row[1], row[2]]
+ workload_executions.append(workload_execution)
db.close()
+
+ return workload_executions
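
For context, a hedged sketch of how the reworked fetch_workloads might be consumed (not part of this change): each entry is a [workload, start, end] list as selected above, and the prefix passed in is illustrative.

    from storperf.db.job_db import JobDB

    # Assumes the job database already contains executions whose workload
    # names start with the (hypothetical) prefix "rw".
    job_db = JobDB()
    for workload, start, end in job_db.fetch_workloads("rw"):
        print("%s ran from %s to %s" % (workload, start, end))
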