author     mbeierl <mark.beierl@dell.com>  2017-07-11 15:12:35 -0400
committer  mbeierl <mark.beierl@dell.com>  2017-07-11 15:47:46 -0400
commit     7602a54309adbe5c5346ee6befecc2e596976504 (patch)
tree       60f15026780db30b0b8842ba1a1e2cc021e22625 /storperf/db
parent     fc09b37e95c19f820ec60db19d98c0dc3d670829 (diff)
Change all paths
Changes the paths of all source code so that it exists under the dockerfile
location for each container. This way we can use COPY instead of git clone,
as well as use the existing JJB.

Change-Id: I883b2957d89659c164fff0a1ebc4d677c534796d
JIRA: STORPERF-188
Signed-off-by: mbeierl <mark.beierl@dell.com>
Diffstat (limited to 'storperf/db')
-rw-r--r--  storperf/db/__init__.py          |   8
-rw-r--r--  storperf/db/configuration_db.py  | 120
-rw-r--r--  storperf/db/graphite_db.py       |  63
-rw-r--r--  storperf/db/job_db.py            | 259
-rw-r--r--  storperf/db/test_results_db.py   |  61
5 files changed, 0 insertions(+), 511 deletions(-)
diff --git a/storperf/db/__init__.py b/storperf/db/__init__.py
deleted file mode 100644
index 73334c7..0000000
--- a/storperf/db/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-##############################################################################
-# Copyright (c) 2015 EMC and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
diff --git a/storperf/db/configuration_db.py b/storperf/db/configuration_db.py
deleted file mode 100644
index 5b996c7..0000000
--- a/storperf/db/configuration_db.py
+++ /dev/null
@@ -1,120 +0,0 @@
-##############################################################################
-# Copyright (c) 2015 EMC and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-from sqlite3 import OperationalError
-from threading import Lock
-import logging
-import sqlite3
-
-db_mutex = Lock()
-
-
-class ConfigurationDB(object):
-
- db_name = "StorPerfConfig.db"
-
- def __init__(self):
- """
- Creates the StorPerfConfig.db and configuration tables on demand
- """
-
- self.logger = logging.getLogger(__name__)
- self.logger.debug("Connecting to " + ConfigurationDB.db_name)
- with db_mutex:
- db = sqlite3.connect(ConfigurationDB.db_name)
-
- cursor = db.cursor()
- try:
- cursor.execute('''CREATE TABLE configuration
- (configuration_name text,
- key text,
- value text)''')
- self.logger.debug("Created configuration table")
- except OperationalError:
- self.logger.debug("Configuration table exists")
-
- cursor.execute('SELECT * FROM configuration')
- db.commit()
- db.close()
-
- def delete_configuration_value(self, configuration_name, key):
- """Deletes the value associated with the given key
- """
-
- with db_mutex:
- db = sqlite3.connect(ConfigurationDB.db_name)
- cursor = db.cursor()
-
- cursor.execute("delete from configuration where "
- "configuration_name=? and key=?",
- (configuration_name, key))
-
- self.logger.debug("Deleted " + configuration_name + ":" + key)
-
- db.commit()
- db.close()
-
- def get_configuration_value(self, configuration_name, key):
- """Returns a string representation of the value stored
- with this key under the given configuration name.
- """
-
- with db_mutex:
- db = sqlite3.connect(ConfigurationDB.db_name)
- cursor = db.cursor()
-
- cursor.execute(
- """select value from configuration
- where configuration_name = ?
- and key = ?""",
- (configuration_name, key,))
-
- row = cursor.fetchone()
-
- return_value = None
-
- if (row is None):
- self.logger.debug(
- configuration_name + ":" + key + " does not exist")
- else:
- self.logger.debug(
- configuration_name + ":" + key + " is " + str(row[0]))
- return_value = str(row[0])
-
- db.close()
-
- return return_value
-
- def set_configuration_value(self, configuration_name, key, value):
- """Updates or creates the key under the given configuration
- name so that it holds the value specified.
- """
-
- if (value is None):
- return self.delete_configuration_value(configuration_name, key)
-
- with db_mutex:
- value = str(value)
-
- db = sqlite3.connect(ConfigurationDB.db_name)
- cursor = db.cursor()
-
- cursor.execute("delete from configuration where "
- "configuration_name=? and key=?",
- (configuration_name, key))
-
- cursor.execute(
- """insert into configuration(configuration_name, key, value)
- values (?,?,?)""", (configuration_name, key, value))
-
- self.logger.debug(
- configuration_name + ":" + key + " set to " + value)
-
- db.commit()
- db.close()
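
For reference, a minimal sketch of how the removed ConfigurationDB class was typically driven. The configuration name and key below are illustrative placeholders; values are stored and returned as text:

```python
from storperf.db.configuration_db import ConfigurationDB

config_db = ConfigurationDB()  # creates StorPerfConfig.db and its table on first use
config_db.set_configuration_value("workloads", "block_size", 4096)

value = config_db.get_configuration_value("workloads", "block_size")
print(value)  # prints 4096; values come back as strings

config_db.set_configuration_value("workloads", "block_size", None)  # a None value deletes the key
```
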
diff --git a/storperf/db/graphite_db.py b/storperf/db/graphite_db.py
deleted file mode 100644
index c8a2d35..0000000
--- a/storperf/db/graphite_db.py
+++ /dev/null
@@ -1,63 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 EMC and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import json
-import logging
-
-import requests
-
-from storperf.db.job_db import JobDB
-
-
-class GraphiteDB(object):
-
- def __init__(self):
- """
- """
- self._job_db = JobDB()
- self.logger = logging.getLogger(__name__)
-
- def fetch_series(self, workload, metric, io_type, time, duration):
-
- series = []
- end = time
- start = end - duration
-
- request = ("http://127.0.0.1:8000/render/?target="
- "averageSeries(%s.*.jobs.1.%s.%s)"
- "&format=json"
- "&from=%s"
- "&until=%s" %
- (workload, io_type, metric,
- start, end))
- self.logger.debug("Calling %s" % (request))
-
- response = requests.get(request)
- if (response.status_code == 200):
- series = self._series_results(json.loads(response.content))
-
- return series
-
- def _series_results(self, results):
-
- series = []
-
- for item in results:
- datapoints = item['datapoints']
- for datapoint in datapoints:
- if datapoint[0] is not None:
- series.append([datapoint[1], datapoint[0]])
-
- return series
-
- def make_fullname_pattern(self, workload):
- parts = workload.split('.')
- wildcards_needed = 7 - len(parts)
- fullname = workload + (".*" * wildcards_needed)
- return fullname
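
For reference, a minimal sketch of querying the removed GraphiteDB class. It assumes a Graphite endpoint on 127.0.0.1:8000, as hard-coded in fetch_series(); the workload prefix, metric, and io_type names are illustrative placeholders, not values confirmed by this commit:

```python
import time

from storperf.db.graphite_db import GraphiteDB

graphite = GraphiteDB()
now = int(time.time())

# Fetch the last 10 minutes of data; 'lat.mean' and 'read' are placeholder
# metric and io_type names.
series = graphite.fetch_series("rw.queue-depth.2.block-size.2048",
                               "lat.mean", "read", now, 600)
print(series)  # [[timestamp, value], ...]

# Pads a partial workload name with wildcards out to 7 dot-separated parts.
print(graphite.make_fullname_pattern("rw"))  # "rw.*.*.*.*.*.*"
```
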
diff --git a/storperf/db/job_db.py b/storperf/db/job_db.py
deleted file mode 100644
index 3308fa8..0000000
--- a/storperf/db/job_db.py
+++ /dev/null
@@ -1,259 +0,0 @@
-##############################################################################
-# Copyright (c) 2015 EMC and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import calendar
-import json
-import logging
-from sqlite3 import OperationalError
-import sqlite3
-from threading import Lock
-import time
-import uuid
-
-
-db_mutex = Lock()
-
-
-class JobDB(object):
-
- db_name = "StorPerfJob.db"
-
- def __init__(self):
- """
- Creates the StorPerfJob.db and jobs tables on demand
- """
-
- self.logger = logging.getLogger(__name__)
- self.logger.debug("Connecting to " + JobDB.db_name)
- self.job_id = None
-
- with db_mutex:
- db = sqlite3.connect(JobDB.db_name)
- cursor = db.cursor()
- try:
- cursor.execute('''CREATE TABLE jobs
- (job_id text,
- workload text,
- start text,
- end text)''')
- self.logger.debug("Created job table")
- except OperationalError:
- self.logger.debug("Job table exists")
-
- try:
- cursor.execute('''CREATE TABLE job_params
- (job_id text,
- param text,
- value text)''')
- self.logger.debug("Created job_params table")
- except OperationalError:
- self.logger.debug("Job params table exists")
-
- try:
- cursor.execute('''CREATE TABLE job_summary
- (job_id text,
- summary text)''')
- self.logger.debug("Created job summary table")
- except OperationalError:
- self.logger.debug("Job summary table exists")
-
- cursor.execute('SELECT * FROM jobs')
- cursor.execute('SELECT * FROM job_params')
- db.commit()
- db.close()
-
- def create_job_id(self):
- """
- Returns a job id that is guaranteed to be unique in this
- StorPerf instance.
- """
- with db_mutex:
- db = sqlite3.connect(JobDB.db_name)
- cursor = db.cursor()
-
- self.job_id = str(uuid.uuid4())
- row = cursor.execute(
- "select * from jobs where job_id = ?", (self.job_id,))
-
- while (row.fetchone() is not None):
- self.logger.info("Duplicate job id found, regenerating")
- self.job_id = str(uuid.uuid4())
- row = cursor.execute(
- "select * from jobs where job_id = ?", (self.job_id,))
-
- cursor.execute(
- "insert into jobs(job_id) values (?)", (self.job_id,))
- self.logger.debug("Reserved job id " + self.job_id)
- db.commit()
- db.close()
-
- def start_workload(self, workload):
- """
- Records the start time for the given workload
- """
-
- workload_name = workload.fullname
-
- if (self.job_id is None):
- self.create_job_id()
-
- with db_mutex:
-
- db = sqlite3.connect(JobDB.db_name)
- cursor = db.cursor()
-
- now = str(calendar.timegm(time.gmtime()))
-
- row = cursor.execute(
- """select * from jobs
- where job_id = ?
- and workload = ?""",
- (self.job_id, workload_name,))
-
- if (row.fetchone() is None):
- cursor.execute(
- """insert into jobs
- (job_id,
- workload,
- start)
- values (?, ?, ?)""",
- (self.job_id,
- workload_name,
- now,))
- else:
- self.logger.warn("Duplicate start time for workload %s"
- % workload_name)
- cursor.execute(
- """update jobs set
- job_id = ?,
- start = ?
- where workload = ?""",
- (self.job_id,
- now,
- workload_name,))
-
- db.commit()
- db.close()
-
- def end_workload(self, workload):
- """
- Records the end time for the given workload
- """
- if (self.job_id is None):
- self.create_job_id()
-
- workload_name = workload.fullname
-
- with db_mutex:
-
- db = sqlite3.connect(JobDB.db_name)
- cursor = db.cursor()
- now = str(calendar.timegm(time.gmtime()))
-
- row = cursor.execute(
- """select * from jobs
- where job_id = ?
- and workload = ?""",
- (self.job_id, workload_name,))
-
- if (row.fetchone() is None):
- self.logger.warn("No start time recorded for workload %s"
- % workload_name)
- cursor.execute(
- """insert into jobs
- (job_id,
- workload,
- start,
- end)
- values (?, ?, ?, ?)""",
- (self.job_id,
- workload_name,
- now,
- now))
- else:
- cursor.execute(
- """update jobs set
- job_id = ?,
- end = ?
- where workload = ?""",
- (self.job_id,
- now,
- workload_name,))
-
- db.commit()
- db.close()
-
- def fetch_workloads(self, workload):
- workload_prefix = workload + "%"
- workload_executions = []
-
- with db_mutex:
- db = sqlite3.connect(JobDB.db_name)
- cursor = db.cursor()
- cursor.execute("""select workload, start, end
- from jobs where workload like ?""",
- (workload_prefix,))
-
- while (True):
- row = cursor.fetchone()
- if (row is None):
- break
- workload_execution = [row[0], row[1], row[2]]
- workload_executions.append(workload_execution)
- db.close()
-
- return workload_executions
-
- def record_workload_params(self, params):
- """
- """
- if (self.job_id is None):
- self.create_job_id()
-
- with db_mutex:
-
- db = sqlite3.connect(JobDB.db_name)
- cursor = db.cursor()
- for param, value in params.iteritems():
- cursor.execute(
- """insert into job_params
- (job_id,
- param,
- value)
- values (?, ?, ?)""",
- (self.job_id,
- param,
- value,))
- db.commit()
- db.close()
-
- def fetch_workload_params(self, job_id):
- """
- """
- params = {}
- with db_mutex:
-
- db = sqlite3.connect(JobDB.db_name)
- cursor = db.cursor()
-
- cursor.execute(
- "select param, value from job_params where job_id = ?",
- (job_id,))
-
- while (True):
- row = cursor.fetchone()
- if (row is None):
- break
- try:
- data = json.loads(row[1])
- except:
- data = row[1]
- params[row[0]] = data
- db.close()
- return params
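
For reference, a sketch of the job lifecycle the removed JobDB records. The module is Python 2 code (note dict.iteritems() above); any object exposing a fullname attribute works for start_workload()/end_workload(), so the namedtuple stand-in here is hypothetical:

```python
from collections import namedtuple

from storperf.db.job_db import JobDB

Workload = namedtuple('Workload', 'fullname')       # hypothetical stand-in
workload = Workload(fullname='rw.queue-depth.2.block-size.2048')

job_db = JobDB()
job_db.create_job_id()                               # reserves a unique UUID in the jobs table
job_db.record_workload_params({'block_size': 2048})
job_db.start_workload(workload)                      # records the start timestamp
# ... run the workload ...
job_db.end_workload(workload)                        # records the end timestamp

print(job_db.fetch_workloads('rw'))                  # [[workload_name, start, end], ...]
print(job_db.fetch_workload_params(job_db.job_id))   # parameters stored for this job_id
```
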
diff --git a/storperf/db/test_results_db.py b/storperf/db/test_results_db.py
deleted file mode 100644
index a2f7038..0000000
--- a/storperf/db/test_results_db.py
+++ /dev/null
@@ -1,61 +0,0 @@
-##############################################################################
-# Copyright (c) 2016 EMC and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-import json
-import os
-import requests
-
-
-def get_installer_type(logger=None):
- """
- Get installer type (fuel, apex, joid, compass)
- """
- try:
- installer = os.environ['INSTALLER_TYPE']
- except KeyError:
- if logger:
- logger.error("Impossible to retrieve the installer type")
- installer = "Unknown_installer"
-
- return installer
-
-
-def push_results_to_db(db_url, project, case_name,
- test_start, test_stop, logger, pod_name,
- version, scenario, criteria, build_tag, details):
- """
- POST results to the Result target DB
- """
- url = db_url + "/results"
- installer = get_installer_type(logger)
-
- params = {"project_name": project, "case_name": case_name,
- "pod_name": pod_name, "installer": installer,
- "version": version, "scenario": scenario, "criteria": criteria,
- "build_tag": build_tag, "start_date": test_start,
- "stop_date": test_stop, "details": details}
-
- headers = {'Content-Type': 'application/json'}
- try:
- if logger:
- jsonified_params = json.dumps(params)
- logger.info("Pushing results to %s" % (url))
- logger.debug("Parameters: %s" % jsonified_params[:1024])
- r = requests.post(url, data=jsonified_params, headers=headers)
- if logger:
- logger.debug(r)
- logger.debug(r.status_code)
- logger.debug(r.content)
- return json.loads(r.content)
- except Exception, e:
- logger.error("Error [push_results_to_db('%s', '%s', '%s', " +
- "'%s', '%s', '%s', '%s', '%s', '%s')]:" %
- (db_url, project, case_name, pod_name, version,
- scenario, criteria, build_tag, details), e)
- return None
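
For reference, a sketch of calling the removed push_results_to_db() helper. The URL and every field value below are placeholders; get_installer_type() reads INSTALLER_TYPE from the environment:

```python
import logging
import time

from storperf.db.test_results_db import push_results_to_db

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("storperf")

start = time.strftime('%Y-%m-%d %H:%M:%S')
# ... run the test ...
stop = time.strftime('%Y-%m-%d %H:%M:%S')

response = push_results_to_db("http://testresults.example.com/api",
                              "storperf", "example_case",
                              start, stop, logger,
                              "example-pod", "master", "example-scenario",
                              "PASS", "example-build-tag",
                              {"report": {}})
print(response)  # parsed JSON response on success, None on error
```
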