-rw-r--r--  benchmarks/playbooks/info_script/info_collect.py | 45
-rw-r--r--  benchmarks/playbooks/result_transform/dpi/dpi_transform.py | 5
-rw-r--r--  benchmarks/playbooks/result_transform/dpi/final_report.py | 20
-rw-r--r--  benchmarks/playbooks/result_transform/final_report.py | 9
-rwxr-xr-x  benchmarks/playbooks/result_transform/fio/fio_result_transform.py | 51
-rw-r--r--  benchmarks/playbooks/result_transform/iperf/iperf_transform.py | 14
-rw-r--r--  benchmarks/playbooks/result_transform/ramspd/ramspd_transform.py | 25
-rw-r--r--  benchmarks/playbooks/result_transform/ssl/ssl_transform.py | 28
-rw-r--r--  benchmarks/playbooks/result_transform/ubench_transform.py | 18
-rw-r--r--  data/ref_results/compute_benchmarks_indices.py | 84
-rw-r--r--  data/ref_results/compute_suite.py | 4
-rw-r--r--  data/ref_results/index_calculation.py | 2
-rw-r--r--  data/ref_results/network_benchmarks_indices.py | 10
-rw-r--r--  data/ref_results/network_suite.py | 4
-rw-r--r--  data/ref_results/storage_benchmarks_indices.py | 13
-rw-r--r--  data/ref_results/storage_suite.py | 4
-rw-r--r--  docker/cleanup_qtip_image.sh | 24
-rw-r--r--  docker/prepare_qtip_image.sh | 49
-rwxr-xr-x  docker/run_qtip.sh | 66
-rw-r--r--  docs/apidocs/index.rst | 13
-rw-r--r--  docs/apidocs/qtip_restful_api.rst | 10
-rw-r--r--  docs/templates/sample_config.yaml | 2
-rw-r--r--  docs/userguide/introduction.rst | 68
-rw-r--r--  func/driver.py | 2
-rw-r--r--  func/fetchimg.py | 35
-rw-r--r--  func/spawn_vm.py | 32
-rw-r--r--  tests/driver_test.py | 6
-rw-r--r--  tests/fetchimg_test.py | 34
-rw-r--r--  tests/spawn_vm_test.py | 13
29 files changed, 326 insertions(+), 364 deletions(-)
diff --git a/benchmarks/playbooks/info_script/info_collect.py b/benchmarks/playbooks/info_script/info_collect.py
index 4daed318..3fc35d5a 100644
--- a/benchmarks/playbooks/info_script/info_collect.py
+++ b/benchmarks/playbooks/info_script/info_collect.py
@@ -13,7 +13,6 @@ est_ob2.write(in_string)
est_ob.close()
est_ob2.close()
-Info_dict = {}
inxi_host = os.popen("""cat $PWD/est_1 | grep -o -P '(?<=Host:).*(?=Kernel)' """).read().lstrip().rstrip()
inxi_mem = os.popen("""cat $PWD/est_1 | grep -o -P '(?<=Memory:).*(?=MB)' """).read().lstrip().rstrip() + "MB"
inxi_cpu = os.popen("""cat $PWD/est_1 | grep -o -P '(?<=CPU).*(?=speed)' | cut -f2 -d':'""").read().lstrip().rstrip()
@@ -22,19 +21,19 @@ inxi_kernel = os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=Kernel:).*(?=Console
inxi_HD = os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=HDD Total Size:).*(?=Info:)' """).read().rstrip().lstrip()
inxi_product = os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=product:).*(?=Mobo:)' """).read().rstrip().lstrip()
-Info_dict['1_Hostname'] = inxi_host
-Info_dict['2_Product'] = inxi_product
-Info_dict['3_OS Distribution'] = inxi_distro
-Info_dict['4_Kernel'] = inxi_kernel
-Info_dict['5_CPU'] = inxi_cpu
-Info_dict['6_CPU_Usage'] = str(round(cpu_usage, 3)) + '%'
-Info_dict['7_Memory Usage'] = inxi_mem
-Info_dict['8_Disk usage'] = inxi_HD
+info_dict = {'hostname': inxi_host,
+ 'product': inxi_product,
+ 'os': inxi_distro,
+ 'kernel': inxi_kernel,
+ 'cpu': inxi_cpu,
+ 'cpu_usage': '{0}%'.format(str(round(cpu_usage, 3))),
+ 'memory_usage': inxi_mem,
+ 'disk_usage': inxi_HD}
network_flag = str(sys.argv[1]).rstrip()
if (network_flag == 'n'):
- Info_dict['9_Network_Interfaces'] = {}
+ info_dict['network_interfaces'] = {}
tem_2 = """ cat $PWD/est_1 | grep -o -P '(?<=Network:).*(?=Info:)'"""
print os.system(tem_2 + ' > Hello')
i = int(os.popen(tem_2 + " | grep -o 'Card' | wc -l ").read())
@@ -46,24 +45,24 @@ if (network_flag == 'n'):
tem = """ cat $PWD/est_1 | grep -o -P '(?<=Network:).*(?=Info:)'"""
inxi_card_1 = ((os.popen(tem + " | grep -o -P '(?<=Card:).*(?=Drives:)'|sed 's/ *driver:.*//'").read().rstrip().lstrip()))
print inxi_card_1
- Info_dict['9_Network_Interfaces']['Interface_' + str(x)] = {}
- Info_dict['9_Network_Interfaces']['Interface_' + str(x)]['1_Network_Card'] = inxi_card_1
+ info_dict['network_interfaces']['interface_' + str(x)] = {}
+ info_dict['network_interfaces']['interface_' + str(x)]['network_card'] = inxi_card_1
inxi_card_2 = ((os.popen(tem + "| grep -o -P '(?<=Card:).*(?=Drives:)'|sed -e 's/^.*IF: //'").read())).rstrip().lstrip()
- Info_dict['9_Network_Interfaces']['Interface_' + str(x)]['2_Interface_info'] = inxi_card_2
+ info_dict['network_interfaces']['interface_' + str(x)]['interface_info'] = inxi_card_2
elif x < (i):
print "two"
inxi_card_1 = ((os.popen(tem + "| sed 's/ *driver:.*//'").read().rstrip().lstrip()))
- Info_dict['9_Network_Interfaces']['Interface_' + str(x)] = {}
- Info_dict['9_Network_Interfaces']['Interface_' + str(x)]['1_Network_Card'] = inxi_card_1
+ info_dict['network_interfaces']['interface_' + str(x)] = {}
+            info_dict['network_interfaces']['interface_' + str(x)]['network_card'] = inxi_card_1
inxi_card_2 = ((os.popen(tem + "|sed -e 's/^.*IF: //'").read())).rstrip().lstrip()
- Info_dict['9_Network_Interfaces']['Interface_' + str(x)]['2_Interface_info'] = inxi_card_2
+ info_dict['network_interfaces']['interface_' + str(x)]['interface_info'] = inxi_card_2
elif x == i:
print "Three"
- Info_dict['9_Network_Interfaces']['Interface_' + str(x)] = {}
+ info_dict['network_interfaces']['interface_' + str(x)] = {}
inxi_card_1 = ((os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=Card-""" + str(x) + """:).*(?=Drives:)'| sed 's/ *driver:.*//' """).read().rstrip().lstrip()))
- Info_dict['9_Network_Interfaces']['Interface_' + str(x)]['1_Network_Card'] = inxi_card_1
+            info_dict['network_interfaces']['interface_' + str(x)]['network_card'] = inxi_card_1
inxi_card_2 = ((os.popen(""" cat $PWD/est_1 | grep -o -P '(?<=Card-""" + str(x) + """:).*(?=Drives:)'| sed -e 's/^.*IF: //' """).read().rstrip().lstrip()))
- Info_dict['9_Network_Interfaces']['Interface_' + str(x)]['2_Interface_info'] = inxi_card_2
+ info_dict['network_interfaces']['interface_' + str(x)]['interface_info'] = inxi_card_2
else:
print "No network cards"
os.system("bwm-ng -o plain -c 1 | grep -v '=' | grep -v 'iface' | grep -v '-' > bwm_dump")
@@ -76,12 +75,12 @@ if (network_flag == 'n'):
interface[str(interface_name)]['Tx (KB/s)'] = os.popen(" cat bwm_dump | awk 'NR==" + str(x) + "' | awk '{print $4}' ").read().rstrip()
interface[str(interface_name)]['Total (KB/s)'] = os.popen(" cat bwm_dump | awk 'NR== " + str(x) + "' | awk '{print $6}' ").read().rstrip()
- Info_dict['10.Interface I/O'] = interface
+ info_dict['interface_io'] = interface
-print Info_dict
+print info_dict
with open('./sys_info_temp', 'w+')as out_info:
- pickle.dump(Info_dict, out_info)
+ pickle.dump(info_dict, out_info)
with open('temp', 'w+') as result_json:
- json.dump(Info_dict, result_json, indent=4, sort_keys=True)
+ json.dump(info_dict, result_json, indent=4, sort_keys=True)
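
A minimal usage sketch (not part of the patch), assuming info_collect.py has just written ./sys_info_temp (pickle) and temp (JSON) into the working directory, showing how the renamed keys can be read back::

    # Illustrative only: load the artifacts written above and access a few of
    # the new lower-case keys ('hostname', 'cpu_usage', ...).
    import json
    import pickle

    with open('./sys_info_temp', 'rb') as sys_info_f:   # pickled info_dict
        info = pickle.load(sys_info_f)

    with open('temp') as result_json:                    # same data as JSON
        info_json = json.load(result_json)

    print(info['hostname'], info['cpu_usage'])
    print(sorted(info_json.keys()))
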
diff --git a/benchmarks/playbooks/result_transform/dpi/dpi_transform.py b/benchmarks/playbooks/result_transform/dpi/dpi_transform.py
index 622030cd..ee29d8e2 100644
--- a/benchmarks/playbooks/result_transform/dpi/dpi_transform.py
+++ b/benchmarks/playbooks/result_transform/dpi/dpi_transform.py
@@ -41,8 +41,7 @@ os.popen(
home_dir = str(os.popen("echo $HOME").read().rstrip())
host = os.popen("echo $HOSTNAME")
-result = {}
-result['DPI_benchmark(M pps)'] = round(dpi_result_pps, 3)
-result['DPI_benchmark(Gb/s)'] = round(dpi_result_bps, 3)
+result = {'pps': round(dpi_result_pps, 3),
+ 'bps': round(dpi_result_bps, 3)}
with open('./result_temp', 'w+') as result_file:
pickle.dump(result, result_file)
diff --git a/benchmarks/playbooks/result_transform/dpi/final_report.py b/benchmarks/playbooks/result_transform/dpi/final_report.py
deleted file mode 100644
index be25daa8..00000000
--- a/benchmarks/playbooks/result_transform/dpi/final_report.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import pickle
-import json
-import datetime
-import sys
-
-with open('/root/qtip_result/sys_info_temp', 'r') as sys_info_f:
- sys_info_dict = pickle.load(sys_info_f)
-with open('/root/qtip_result/result_temp', 'r') as result_f:
- result_dict = pickle.load(result_f)
-
-host_name = sys_info_dict['1_Hostname']
-benchmark_name = str(sys.argv[1])
-report_time_stamp = str(datetime.datetime.utcnow().isoformat())
-final_dict = {}
-final_dict['1 Time of Report'] = report_time_stamp
-final_dict['2 System Information'] = sys_info_dict
-final_dict['3 ' + benchmark_name + ' result'] = result_dict
-
-with open('/root/qtip_result/' + host_name + '-' + report_time_stamp + '.json', 'w+') as result_json:
- json.dump(final_dict, result_json, indent=4, sort_keys=True)
diff --git a/benchmarks/playbooks/result_transform/final_report.py b/benchmarks/playbooks/result_transform/final_report.py
index 6a10c026..274742d4 100644
--- a/benchmarks/playbooks/result_transform/final_report.py
+++ b/benchmarks/playbooks/result_transform/final_report.py
@@ -15,11 +15,10 @@ host_name = (os.popen("hostname").read().rstrip())
benchmark_name = str(sys.argv[1])
testcase_name = str(sys.argv[2])
report_time_stamp = str(datetime.datetime.utcnow().isoformat())
-final_dict = {}
-final_dict['1 Testcase Name'] = testcase_name
-final_dict['2 Time of Report'] = report_time_stamp
-final_dict['3 System Information'] = sys_info_dict
-final_dict['4 ' + benchmark_name + ' result'] = result_dict
+final_dict = {"name": testcase_name,
+ "time": report_time_stamp,
+ "system_information": sys_info_dict,
+ "details": result_dict}
with open('./' + host_name + '-' + report_time_stamp + '.json', 'w+') as result_json:
json.dump(final_dict, result_json, indent=4, sort_keys=True)
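
For reference, a sketch of the report that final_report.py now emits; the values are placeholders, only the key names come from the code above and from the index scripts that consume the report::

    # Placeholder values; the key names follow final_report.py and the
    # *_benchmarks_indices.py lookups ('details' -> 'single' -> 'score', ...).
    sample_report = {
        "name": "dhrystone_bm",                  # testcase_name (sys.argv[2])
        "time": "2016-01-01T00:00:00.000000",    # UTC ISO-format timestamp
        "system_information": {"hostname": "node-1", "os": "...", "cpu": "..."},
        "details": {"single": {"n_para_test": "1", "score": "2500.1"},
                    "multi": {"n_para_test": "4", "score": "9000.5"}},
    }
    print(sample_report["details"]["single"]["score"])
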
diff --git a/benchmarks/playbooks/result_transform/fio/fio_result_transform.py b/benchmarks/playbooks/result_transform/fio/fio_result_transform.py
index 9929aa18..5ecac823 100755
--- a/benchmarks/playbooks/result_transform/fio/fio_result_transform.py
+++ b/benchmarks/playbooks/result_transform/fio/fio_result_transform.py
@@ -3,48 +3,27 @@ import pickle
import os
import datetime
-with open("fio_result.json") as fio_raw:
- fio_data = json.load(fio_raw)
-
-r_iops = []
-r_io_bytes = []
-r_io_runtime = []
-r_lat = []
-w_iops = []
-w_io_bytes = []
-w_io_runtime = []
-w_lat = []
-total_jobs = len(fio_data["jobs"])
+def get_fio_job_result(fio_job_data):
+ return {'read': {'io_bytes': fio_job_data["read"]["io_bytes"],
+ 'io_ps': fio_job_data["read"]["iops"],
+ 'io_runtime_millisec': fio_job_data["read"]["runtime"],
+                     'mean_io_latency_microsec': fio_job_data["read"]["lat"]["mean"]},
+ 'write': {'io_bytes': fio_job_data["write"]["io_bytes"],
+ 'io_ps': fio_job_data["write"]["iops"],
+ 'io_runtime_millisec': fio_job_data["write"]["runtime"],
+                         'mean_io_latency_microsec': fio_job_data["write"]["lat"]["mean"]}}
-for x in range(0, int(total_jobs)):
- r_iops.append(fio_data["jobs"][x]["read"]["iops"])
- r_io_bytes.append(fio_data["jobs"][x]["read"]["io_bytes"])
- r_io_runtime.append(fio_data["jobs"][x]["read"]["runtime"])
- r_lat.append(fio_data["jobs"][x]["read"]["lat"]["mean"])
- w_iops.append(fio_data["jobs"][x]["write"]["iops"])
- w_io_bytes.append(fio_data["jobs"][x]["write"]["io_bytes"])
- w_io_runtime.append(fio_data["jobs"][x]["write"]["runtime"])
- w_lat.append(fio_data["jobs"][x]["write"]["lat"]["mean"])
-FIO_result_dict = {}
-
-for x in range(0, total_jobs):
- FIO_result_dict['Job_' + str(x)] = {}
- FIO_result_dict['Job_' + str(x)]['read'] = {}
- FIO_result_dict['Job_' + str(x)]['read']['Total_IO_Bytes'] = r_io_bytes[x]
- FIO_result_dict['Job_' + str(x)]['read']['IO/sec'] = r_iops[x]
- FIO_result_dict['Job_' + str(x)]['read']['IO_runtime (millisec)'] = r_io_runtime[x]
- FIO_result_dict['Job_' + str(x)]['read']['mean_IO_latenchy (microsec)'] = r_lat[x]
+with open("fio_result.json") as fio_raw:
+ fio_data = json.load(fio_raw)
- FIO_result_dict['Job_' + str(x)]['write'] = {}
- FIO_result_dict['Job_' + str(x)]['write']['Total_IO_Bytes'] = w_io_bytes[x]
- FIO_result_dict['Job_' + str(x)]['write']['IO/sec'] = w_iops[x]
- FIO_result_dict['Job_' + str(x)]['write']['IO_runtime (millisec)'] = w_io_runtime[x]
- FIO_result_dict['Job_' + str(x)]['write']['mean_IO_latenchy (microsec)'] = w_lat[x]
+fio_result_dict = {}
+for x, result in enumerate(map(get_fio_job_result, fio_data["jobs"])):
+ fio_result_dict['job_{0}'.format(x)] = result
host_name = (os.popen("hostname").read().rstrip())
report_time = str(datetime.datetime.utcnow().isoformat())
os.system("mv fio_result.json " + str(host_name) + "-" + report_time + ".log")
with open('./result_temp', 'w + ')as out_fio_result:
- pickle.dump(FIO_result_dict, out_fio_result)
+ pickle.dump(fio_result_dict, out_fio_result)
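
A small sketch of the structure pickled to result_temp above, with made-up numbers, showing the key path that storage_benchmarks_indices.py reads ('job_0' -> 'read' -> 'io_ps')::

    # Made-up values; only the key names mirror get_fio_job_result() above.
    fio_result_dict = {
        'job_0': {'read': {'io_bytes': 1048576, 'io_ps': 2048,
                           'io_runtime_millisec': 60000,
                           'mean_io_latency_microsec': 150.0},
                  'write': {'io_bytes': 524288, 'io_ps': 1024,
                            'io_runtime_millisec': 60000,
                            'mean_io_latency_microsec': 300.0}}}
    print(fio_result_dict['job_0']['read']['io_ps'])  # 2048
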
diff --git a/benchmarks/playbooks/result_transform/iperf/iperf_transform.py b/benchmarks/playbooks/result_transform/iperf/iperf_transform.py
index 8df5a79a..b52e4634 100644
--- a/benchmarks/playbooks/result_transform/iperf/iperf_transform.py
+++ b/benchmarks/playbooks/result_transform/iperf/iperf_transform.py
@@ -11,16 +11,14 @@ total_byte_received = raw_iperf_data['end']['sum_received']['bytes']
cpu_host_total_percent = raw_iperf_data['end']['cpu_utilization_percent']['host_total']
cpu_remote_total_percent = raw_iperf_data['end']['cpu_utilization_percent']['remote_total']
-result = {}
time_stamp = str(datetime.datetime.utcnow().isoformat())
-result['1. Version'] = raw_iperf_data['start']['version']
-result['2. Bandwidth'] = {}
-result['2. Bandwidth']['1. throughput Sender (b/s)'] = bits_sent
-result['2. Bandwidth']['2. throughput Received (b/s)'] = bits_received
-result['3. CPU'] = {}
-result['3. CPU']['1. CPU host total (%)'] = cpu_host_total_percent
-result['3. CPU']['2. CPU remote total (%)'] = cpu_remote_total_percent
+result = {'version': raw_iperf_data['start']['version'],
+ 'bandwidth': {'sender_throughput': bits_sent,
+ 'received_throughput': bits_received},
+ 'cpu': {'cpu_host': cpu_host_total_percent,
+ 'cpu_remote': cpu_remote_total_percent}
+ }
with open('iperf_raw-' + time_stamp + '.log', 'w+') as ofile:
ofile.write(json.dumps(raw_iperf_data))
diff --git a/benchmarks/playbooks/result_transform/ramspd/ramspd_transform.py b/benchmarks/playbooks/result_transform/ramspd/ramspd_transform.py
index c3f03dd0..960f84fc 100644
--- a/benchmarks/playbooks/result_transform/ramspd/ramspd_transform.py
+++ b/benchmarks/playbooks/result_transform/ramspd/ramspd_transform.py
@@ -26,21 +26,16 @@ time_stamp = str(datetime.datetime.utcnow().isoformat())
os.system("mv Intmem " + hostname + "-" + time_stamp + ".log")
os.system("cp Floatmem >> " + hostname + "-" + time_stamp + ".log")
-result = {}
-
-result['1. INTmem bandwidth'] = {}
-result['1. INTmem bandwidth']['1. Copy (MB/s)'] = intmem_copy
-result['1. INTmem bandwidth']['2. Add (MB/s)'] = intmem_add
-result['1. INTmem bandwidth']['3. Scale (MB/s)'] = intmem_scale
-result['1. INTmem bandwidth']['4. Triad (MB/s)'] = intmem_triad
-result['1. INTmem bandwidth']['5. Average (MB/s)'] = intmem_average
-
-result['2. FLOATmem bandwidth'] = {}
-result['2. FLOATmem bandwidth']['1. Copy (MB/s)'] = floatmem_copy
-result['2. FLOATmem bandwidth']['2. Add (MB/s)'] = floatmem_add
-result['2. FLOATmem bandwidth']['3. Scale (MB/s)'] = floatmem_scale
-result['2. FLOATmem bandwidth']['4. Triad (MB/s)'] = floatmem_triad
-result['2. FLOATmem bandwidth']['5. Average (MB/s)'] = floatmem_average
+result = {"int_bandwidth": {"copy": intmem_copy,
+ "add": intmem_add,
+ "scale": intmem_scale,
+ "triad": intmem_triad,
+ "average": intmem_average},
+ "float_bandwidth": {"copy": floatmem_copy,
+ "add": floatmem_add,
+ "scale": floatmem_scale,
+ "triad": floatmem_triad,
+ "average": floatmem_average}}
with open('./result_temp', 'w+') as result_file:
pickle.dump(result, result_file)
diff --git a/benchmarks/playbooks/result_transform/ssl/ssl_transform.py b/benchmarks/playbooks/result_transform/ssl/ssl_transform.py
index 029135ac..de84d24b 100644
--- a/benchmarks/playbooks/result_transform/ssl/ssl_transform.py
+++ b/benchmarks/playbooks/result_transform/ssl/ssl_transform.py
@@ -37,22 +37,18 @@ time_stamp = str(datetime.datetime.utcnow().isoformat())
os.system("mv RSA_dump " + hostname + "-" + time_stamp + ".log")
os.system("cat AES-128-CBC_dump >> " + hostname + "-" + time_stamp + ".log")
-result = {}
-
-result['1. Version'] = [openssl_version]
-result['2. RSA signatures'] = {}
-result['2. RSA signatures']['1. 512 bits (sign/s)'] = rsa_512_sps
-result['2. RSA signatures']['2. 1024 bits (sign/s)'] = rsa_1024_sps
-result['2. RSA signatures']['3. 2048 bits (sign/s)'] = rsa_2048_sps
-result['2. RSA signatures']['4. 4096 bits (sign/s)'] = rsa_4096_sps
-
-result['3. AES-128-cbc throughput'] = {}
-result['3. AES-128-cbc throughput']['1. 16 Bytes block (B/sec)'] = aes_16B
-result['3. AES-128-cbc throughput']['2. 64 Bytes block (B/sec)'] = aes_64B
-result['3. AES-128-cbc throughput']['3. 256 Bytes block (B/sec)'] = aes_256B
-result['3. AES-128-cbc throughput']['4. 1024 Bytes block (B/sec)'] = aes_1024B
-result['3. AES-128-cbc throughput']['5. 8192 Bytes block (B/sec)'] = aes_8192B
-
+result = {"version": [openssl_version],
+ "rsa_sig": {"512_bits": rsa_512_sps,
+ "1024_bits": rsa_1024_sps,
+ "2048_bits": rsa_2048_sps,
+ "4096_bits": rsa_4096_sps,
+ "unit": "sig/sec"},
+ "aes_128_cbc": {"16B_block": aes_16B,
+ "64B_block": aes_64B,
+ "256B_block": aes_256B,
+ "1024B_block": aes_1024B,
+ "8192B_block": aes_8192B,
+ "unit": "B/sec"}}
with open('./result_temp', 'w+') as result_file:
pickle.dump(result, result_file)
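
The same pattern applies to the SSL results: a sketch with placeholder numbers of the dictionary pickled above, whose keys are what compute_benchmarks_indices.py addresses under 'rsa_sig' and 'aes_128_cbc'::

    # Placeholder values; key names match the result dict built above.
    ssl_result = {"version": ["placeholder OpenSSL version string"],
                  "rsa_sig": {"512_bits": 14000.0, "1024_bits": 4500.0,
                              "2048_bits": 700.0, "4096_bits": 100.0,
                              "unit": "sig/sec"},
                  "aes_128_cbc": {"16B_block": 600000.0, "64B_block": 640000.0,
                                  "256B_block": 660000.0, "1024B_block": 670000.0,
                                  "8192B_block": 680000.0, "unit": "B/sec"}}
    print(ssl_result["aes_128_cbc"]["256B_block"])
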
diff --git a/benchmarks/playbooks/result_transform/ubench_transform.py b/benchmarks/playbooks/result_transform/ubench_transform.py
index 3c8ba1d8..ab5fe171 100644
--- a/benchmarks/playbooks/result_transform/ubench_transform.py
+++ b/benchmarks/playbooks/result_transform/ubench_transform.py
@@ -13,20 +13,18 @@ cpu_2 = os.popen(
"cat $HOME/tempT/UnixBench/results/* | grep 'of tests' | awk '{print $6;}' | awk 'NR==2'").read().rstrip()
-Index_1 = os.popen(
+index_1 = os.popen(
"cat $HOME/tempT/UnixBench/results/* | grep 'Index Score (Partial Only) ' | awk '{print $7;}' | awk 'NR==1'").read().rstrip()
-Index_2 = os.popen(
+index_2 = os.popen(
"cat $HOME/tempT/UnixBench/results/* | grep 'Index Score (Partial Only) ' | awk '{print $7;}' | awk 'NR==2'").read().rstrip()
-result = {}
-result['1.Number of CPU(s) in system'] = total_cpu
-result['2.Single CPU test'] = {}
-result['2.Single CPU test']['1.Number of parallell test(s)'] = cpu_1
-result['2.Single CPU test']['2.Index score'] = Index_1
-result['3.Multi CPU test'] = {}
-result['3.Multi CPU test']['1.Number of parallell test(s)'] = cpu_2
-result['3.Multi CPU test']['2.Index score'] = Index_2
+result = {"n_cpu": total_cpu,
+ "single": {"n_para_test": cpu_1,
+ "score": index_1},
+ "multi": {"n_para_test": cpu_2,
+ "score": index_2}
+ }
with open('result_temp', 'w+') as result_file:
pickle.dump(result, result_file)
diff --git a/data/ref_results/compute_benchmarks_indices.py b/data/ref_results/compute_benchmarks_indices.py
index e46b8771..400d54ac 100644
--- a/data/ref_results/compute_benchmarks_indices.py
+++ b/data/ref_results/compute_benchmarks_indices.py
@@ -6,15 +6,15 @@ from result_accum import result_concat as concat
def dpi_index():
dpi_dict = concat('../../results/dpi/')
dpi_bm_ref = get_reference('compute', 'dpi_bm')
- dpi_bm_index = get_index(dpi_dict, 'dpi_bm', dpi_bm_ref, '4 DPI result', 'DPI_benchmark(Gb/s)')
+ dpi_bm_index = get_index(dpi_dict, 'dpi_bm', dpi_bm_ref, 'details', 'bps')
dpi_vm_ref = get_reference('compute', 'dpi_vm')
- dpi_vm_index = get_index(dpi_dict, 'dpi_vm', dpi_vm_ref, '4 DPI result', 'DPI_benchmark(Gb/s)')
+ dpi_vm_index = get_index(dpi_dict, 'dpi_vm', dpi_vm_ref, 'details', 'bps')
dpi_index = (dpi_bm_index + dpi_vm_index) / 2
dpi_dict_i = {}
- dpi_dict_i['1. Index'] = dpi_index
- dpi_dict_i['2. Results'] = dpi_dict
+ dpi_dict_i['index'] = dpi_index
+ dpi_dict_i['results'] = dpi_dict
return dpi_dict_i
@@ -22,25 +22,25 @@ def dhrystone_index():
dhrystone_dict = concat('../../results/dhrystone/')
dhrystone_single_bm_ref = get_reference('compute', 'dhrystone_bm', 'single_cpu')
- dhrystone_single_bm_index = get_index(dhrystone_dict, 'dhrystone_bm', dhrystone_single_bm_ref, '4 Dhrystone result', '2.Single CPU test', '2.Index score')
+ dhrystone_single_bm_index = get_index(dhrystone_dict, 'dhrystone_bm', dhrystone_single_bm_ref, 'details', 'single', 'score')
dhrystone_multi_bm_ref = get_reference('compute', 'dhrystone_bm', 'multi_cpu')
- dhrystone_multi_bm_index = get_index(dhrystone_dict, 'dhrystone_bm', dhrystone_multi_bm_ref, '4 Dhrystone result', '3.Multi CPU test', '2.Index score')
+ dhrystone_multi_bm_index = get_index(dhrystone_dict, 'dhrystone_bm', dhrystone_multi_bm_ref, 'details', 'multi', 'score')
dhrystone_bm_index = (dhrystone_single_bm_index + dhrystone_multi_bm_index) / 2
dhrystone_single_vm_ref = get_reference('compute', 'dhrystone_vm', 'single_cpu')
- dhrystone_single_vm_index = get_index(dhrystone_dict, 'dhrystone_vm', dhrystone_single_vm_ref, '4 Dhrystone result', '2.Single CPU test', '2.Index score')
+ dhrystone_single_vm_index = get_index(dhrystone_dict, 'dhrystone_vm', dhrystone_single_vm_ref, 'details', 'single', 'score')
dhrystone_multi_vm_ref = get_reference('compute', 'dhrystone_vm', 'multi_cpu')
- dhrystone_multi_vm_index = get_index(dhrystone_dict, 'dhrystone_vm', dhrystone_multi_vm_ref, '4 Dhrystone result', '3.Multi CPU test', '2.Index score')
+ dhrystone_multi_vm_index = get_index(dhrystone_dict, 'dhrystone_vm', dhrystone_multi_vm_ref, 'details', 'multi', 'score')
dhrystone_vm_index = (dhrystone_single_vm_index + dhrystone_multi_vm_index) / 2
dhrystone_index = (dhrystone_bm_index + dhrystone_vm_index) / 2
dhrystone_dict_i = {}
- dhrystone_dict_i['1. Index'] = dhrystone_index
- dhrystone_dict_i['2. Results'] = dhrystone_dict
+ dhrystone_dict_i['index'] = dhrystone_index
+ dhrystone_dict_i['results'] = dhrystone_dict
return dhrystone_dict_i
@@ -48,25 +48,25 @@ def whetstone_index():
whetstone_dict = concat('../../results/whetstone/')
whetstone_single_bm_ref = get_reference('compute', 'whetstone_bm', 'single_cpu')
- whetstone_single_bm_index = get_index(whetstone_dict, 'whetstone_bm', whetstone_single_bm_ref, '4 Whetstone result', '2.Single CPU test', '2.Index score')
+ whetstone_single_bm_index = get_index(whetstone_dict, 'whetstone_bm', whetstone_single_bm_ref, 'details', 'single', 'score')
whetstone_multi_bm_ref = get_reference('compute', 'whetstone_bm', 'multi_cpu')
- whetstone_multi_bm_index = get_index(whetstone_dict, 'whetstone_bm', whetstone_multi_bm_ref, '4 Whetstone result', '3.Multi CPU test', '2.Index score')
+ whetstone_multi_bm_index = get_index(whetstone_dict, 'whetstone_bm', whetstone_multi_bm_ref, 'details', 'multi', 'score')
whetstone_bm_index = (whetstone_single_bm_index + whetstone_multi_bm_index) / 2
whetstone_single_vm_ref = get_reference('compute', 'whetstone_vm', 'single_cpu')
- whetstone_single_vm_index = get_index(whetstone_dict, 'whetstone_vm', whetstone_single_vm_ref, '4 Whetstone result', '2.Single CPU test', '2.Index score')
+ whetstone_single_vm_index = get_index(whetstone_dict, 'whetstone_vm', whetstone_single_vm_ref, 'details', 'single', 'score')
whetstone_multi_vm_ref = get_reference('compute', 'whetstone_vm', 'multi_cpu')
- whetstone_multi_vm_index = get_index(whetstone_dict, 'whetstone_vm', whetstone_multi_vm_ref, '4 Whetstone result', '3.Multi CPU test', '2.Index score')
+ whetstone_multi_vm_index = get_index(whetstone_dict, 'whetstone_vm', whetstone_multi_vm_ref, 'details', 'multi', 'score')
whetstone_vm_index = (whetstone_single_vm_index + whetstone_multi_vm_index) / 2
whetstone_index = (whetstone_bm_index + whetstone_vm_index) / 2
whetstone_dict_i = {}
- whetstone_dict_i['1. Index'] = whetstone_index
- whetstone_dict_i['2. Results'] = whetstone_dict
+ whetstone_dict_i['index'] = whetstone_index
+ whetstone_dict_i['results'] = whetstone_dict
return whetstone_dict_i
@@ -74,26 +74,26 @@ def ramspeed_index():
ramspeed_dict = concat('../../results/ramspeed/')
ramspeed_int_bm_ref = get_reference('compute', 'ramspeed_bm', 'INTmem', 'Average (MB/s)')
- ramspeed_int_bm_index = get_index(ramspeed_dict, 'ramspeed_bm', ramspeed_int_bm_ref, '4 RamSpeed result', '1. INTmem bandwidth', '5. Average (MB/s)')
+ ramspeed_int_bm_index = get_index(ramspeed_dict, 'ramspeed_bm', ramspeed_int_bm_ref, 'details', 'int_bandwidth', 'average')
ramspeed_float_bm_ref = get_reference('compute', 'ramspeed_bm', 'FLOATmem', 'Average (MB/s)')
- ramspeed_float_bm_index = get_index(ramspeed_dict, 'ramspeed_bm', ramspeed_float_bm_ref, '4 RamSpeed result', '2. FLOATmem bandwidth', '5. Average (MB/s)')
+ ramspeed_float_bm_index = get_index(ramspeed_dict, 'ramspeed_bm', ramspeed_float_bm_ref, 'details', 'float_bandwidth', 'average')
ramspeed_bm_index = (ramspeed_int_bm_index + ramspeed_float_bm_index) / 2
ramspeed_int_vm_ref = get_reference('compute', 'ramspeed_vm', 'INTmem', 'Average (MB/s)')
- ramspeed_int_vm_index = get_index(ramspeed_dict, 'ramspeed_vm', ramspeed_int_vm_ref, '4 RamSpeed result', '1. INTmem bandwidth', '5. Average (MB/s)')
+ ramspeed_int_vm_index = get_index(ramspeed_dict, 'ramspeed_vm', ramspeed_int_vm_ref, 'details', 'int_bandwidth', 'average')
ramspeed_float_vm_ref = get_reference('compute', 'ramspeed_vm', 'FLOATmem', 'Average (MB/s)')
- ramspeed_float_vm_index = get_index(ramspeed_dict, 'ramspeed_vm', ramspeed_float_vm_ref, '4 RamSpeed result', '2. FLOATmem bandwidth', '5. Average (MB/s)')
+ ramspeed_float_vm_index = get_index(ramspeed_dict, 'ramspeed_vm', ramspeed_float_vm_ref, 'details', 'float_bandwidth', 'average')
ramspeed_vm_index = (ramspeed_int_vm_index + ramspeed_float_vm_index) / 2
ramspeed_index = (ramspeed_vm_index + ramspeed_bm_index) / 2
ramspeed_dict_i = {}
- ramspeed_dict_i['1. Index'] = ramspeed_index
- ramspeed_dict_i['2. Results'] = ramspeed_dict
+ ramspeed_dict_i['index'] = ramspeed_index
+ ramspeed_dict_i['results'] = ramspeed_dict
return ramspeed_dict_i
@@ -112,17 +112,17 @@ def ssl_index():
ssl_AES1024B_bm_ref = get_reference('compute', 'ssl_bm', 'AES', '1024B')
ssl_AES8192B_bm_ref = get_reference('compute', 'ssl_bm', 'AES', '8192B')
- ssl_RSA512b_bm_index = get_index(ssl_dict, "ssl_bm", ssl_RSA512b_bm_ref, '4 SSL result', '2. RSA signatures', '1. 512 bits (sign/s)')
- ssl_RSA1024b_bm_index = get_index(ssl_dict, "ssl_bm", ssl_RSA1024b_bm_ref, '4 SSL result', '2. RSA signatures', '2. 1024 bits (sign/s)')
- ssl_RSA2048b_bm_index = get_index(ssl_dict, "ssl_bm", ssl_RSA2048b_bm_ref, '4 SSL result', '2. RSA signatures', '3. 2048 bits (sign/s)')
- ssl_RSA4096b_bm_index = get_index(ssl_dict, "ssl_bm", ssl_RSA4096b_bm_ref, '4 SSL result', '2. RSA signatures', '4. 4096 bits (sign/s)')
+ ssl_RSA512b_bm_index = get_index(ssl_dict, "ssl_bm", ssl_RSA512b_bm_ref, 'details', 'rsa_sig', '512_bits')
+ ssl_RSA1024b_bm_index = get_index(ssl_dict, "ssl_bm", ssl_RSA1024b_bm_ref, 'details', 'rsa_sig', '1024_bits')
+ ssl_RSA2048b_bm_index = get_index(ssl_dict, "ssl_bm", ssl_RSA2048b_bm_ref, 'details', 'rsa_sig', '2048_bits')
+ ssl_RSA4096b_bm_index = get_index(ssl_dict, "ssl_bm", ssl_RSA4096b_bm_ref, 'details', 'rsa_sig', '4096_bits')
ssl_RSA_bm_index = (ssl_RSA512b_bm_index + ssl_RSA1024b_bm_index + ssl_RSA2048b_bm_index + ssl_RSA4096b_bm_index) / 4
- ssl_AES16B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES16B_bm_ref, '4 SSL result', '3. AES-128-cbc throughput', '1. 16 Bytes block (B/sec)')
- ssl_AES64B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES64B_bm_ref, '4 SSL result', '3. AES-128-cbc throughput', '2. 64 Bytes block (B/sec)')
- ssl_AES256B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES256B_bm_ref, '4 SSL result', '3. AES-128-cbc throughput', '3. 256 Bytes block (B/sec)')
- ssl_AES1024B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES1024B_bm_ref, '4 SSL result', '3. AES-128-cbc throughput', '4. 1024 Bytes block (B/sec)')
- ssl_AES8192B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES8192B_bm_ref, '4 SSL result', '3. AES-128-cbc throughput', '5. 8192 Bytes block (B/sec)')
+    ssl_AES16B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES16B_bm_ref, 'details', 'aes_128_cbc', '16B_block')
+    ssl_AES64B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES64B_bm_ref, 'details', 'aes_128_cbc', '64B_block')
+    ssl_AES256B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES256B_bm_ref, 'details', 'aes_128_cbc', '256B_block')
+    ssl_AES1024B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES1024B_bm_ref, 'details', 'aes_128_cbc', '1024B_block')
+    ssl_AES8192B_bm_index = get_index(ssl_dict, "ssl_bm", ssl_AES8192B_bm_ref, 'details', 'aes_128_cbc', '8192B_block')
ssl_AES_bm_index = (ssl_AES16B_bm_index + ssl_AES64B_bm_index + ssl_AES256B_bm_index + ssl_AES1024B_bm_index + ssl_AES8192B_bm_index) / 5
ssl_bm_index = (ssl_RSA_bm_index + ssl_AES_bm_index) / 2
@@ -138,17 +138,17 @@ def ssl_index():
ssl_AES1024B_vm_ref = get_reference('compute', 'ssl_vm', 'AES', '1024B')
ssl_AES8192B_vm_ref = get_reference('compute', 'ssl_vm', 'AES', '8192B')
- ssl_RSA512b_vm_index = get_index(ssl_dict, "ssl_vm", ssl_RSA512b_vm_ref, '4 SSL result', '2. RSA signatures', '1. 512 bits (sign/s)')
- ssl_RSA1024b_vm_index = get_index(ssl_dict, "ssl_vm", ssl_RSA1024b_vm_ref, '4 SSL result', '2. RSA signatures', '2. 1024 bits (sign/s)')
- ssl_RSA2048b_vm_index = get_index(ssl_dict, "ssl_vm", ssl_RSA2048b_vm_ref, '4 SSL result', '2. RSA signatures', '3. 2048 bits (sign/s)')
- ssl_RSA4096b_vm_index = get_index(ssl_dict, "ssl_vm", ssl_RSA4096b_vm_ref, '4 SSL result', '2. RSA signatures', '4. 4096 bits (sign/s)')
+ ssl_RSA512b_vm_index = get_index(ssl_dict, "ssl_vm", ssl_RSA512b_vm_ref, 'details', 'rsa_sig', '512_bits')
+ ssl_RSA1024b_vm_index = get_index(ssl_dict, "ssl_vm", ssl_RSA1024b_vm_ref, 'details', 'rsa_sig', '1024_bits')
+ ssl_RSA2048b_vm_index = get_index(ssl_dict, "ssl_vm", ssl_RSA2048b_vm_ref, 'details', 'rsa_sig', '2048_bits')
+ ssl_RSA4096b_vm_index = get_index(ssl_dict, "ssl_vm", ssl_RSA4096b_vm_ref, 'details', 'rsa_sig', '4096_bits')
ssl_RSA_vm_index = (ssl_RSA512b_vm_index + ssl_RSA1024b_vm_index + ssl_RSA2048b_vm_index + ssl_RSA4096b_vm_index) / 4
- ssl_AES16B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES16B_vm_ref, '4 SSL result', '3. AES-128-cbc throughput', '1. 16 Bytes block (B/sec)')
- ssl_AES64B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES64B_vm_ref, '4 SSL result', '3. AES-128-cbc throughput', '2. 64 Bytes block (B/sec)')
- ssl_AES256B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES256B_vm_ref, '4 SSL result', '3. AES-128-cbc throughput', '3. 256 Bytes block (B/sec)')
- ssl_AES1024B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES1024B_vm_ref, '4 SSL result', '3. AES-128-cbc throughput', '4. 1024 Bytes block (B/sec)')
- ssl_AES8192B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES8192B_vm_ref, '4 SSL result', '3. AES-128-cbc throughput', '5. 8192 Bytes block (B/sec)')
+    ssl_AES16B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES16B_vm_ref, 'details', 'aes_128_cbc', '16B_block')
+    ssl_AES64B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES64B_vm_ref, 'details', 'aes_128_cbc', '64B_block')
+    ssl_AES256B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES256B_vm_ref, 'details', 'aes_128_cbc', '256B_block')
+    ssl_AES1024B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES1024B_vm_ref, 'details', 'aes_128_cbc', '1024B_block')
+    ssl_AES8192B_vm_index = get_index(ssl_dict, "ssl_vm", ssl_AES8192B_vm_ref, 'details', 'aes_128_cbc', '8192B_block')
ssl_AES_vm_index = (ssl_AES16B_vm_index + ssl_AES64B_vm_index + ssl_AES256B_vm_index + ssl_AES1024B_vm_index + ssl_AES8192B_vm_index) / 5
ssl_vm_index = (ssl_RSA_vm_index + ssl_AES_vm_index) / 2
@@ -156,6 +156,6 @@ def ssl_index():
ssl_index = (ssl_bm_index + ssl_vm_index) / 2
ssl_dict_i = {}
- ssl_dict_i['1. Index'] = ssl_index
- ssl_dict_i['2. Results'] = ssl_dict
+ ssl_dict_i['index'] = ssl_index
+ ssl_dict_i['results'] = ssl_dict
return ssl_dict_i
diff --git a/data/ref_results/compute_suite.py b/data/ref_results/compute_suite.py
index bcaf83c8..7154183c 100644
--- a/data/ref_results/compute_suite.py
+++ b/data/ref_results/compute_suite.py
@@ -34,7 +34,7 @@ l = len(compute_bench_list)
temp = 0
for benchmark in compute_bench_list:
try:
- temp = temp + float(compute_dict[benchmark]['1. Index'])
+ temp = temp + float(compute_dict[benchmark]['index'])
except KeyError:
l = l - 1
pass
@@ -45,6 +45,6 @@ else:
compute_suite_index = temp / l
compute_dict_f = {}
compute_dict_f['index'] = compute_suite_index
- compute_dict_f['suite results'] = compute_dict
+ compute_dict_f['suite_results'] = compute_dict
with open('../../results/compute_result.json', 'w+') as result_json:
json.dump(compute_dict_f, result_json, indent=4, sort_keys=True)
diff --git a/data/ref_results/index_calculation.py b/data/ref_results/index_calculation.py
index e3c75350..7dee7497 100644
--- a/data/ref_results/index_calculation.py
+++ b/data/ref_results/index_calculation.py
@@ -28,7 +28,7 @@ def generic_index(dict_gen, testcase, reference_num, *args):
result = 0
for k, v in dict_gen.iteritems():
dict_temp = dict_gen[k]
- if dict_gen[k]['1 Testcase Name'] == str(testcase):
+ if dict_gen[k]['name'] == str(testcase):
count = count + 1
for arg in args:
if arg == args[c - 1]:
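
The index scripts pass a variable-length key path into get_index/generic_index. A minimal sketch of that lookup idea (not the actual generic_index implementation), assuming a report dict shaped like the ones produced by final_report.py::

    # Sketch only: walk a nested result dict with a chain of keys, as the
    # calls above do with ('details', 'single', 'score').
    from functools import reduce

    def lookup(record, *keys):
        return reduce(lambda d, k: d[k], keys, record)

    report = {'name': 'dhrystone_bm',
              'details': {'single': {'score': '2500.1'}}}
    print(lookup(report, 'details', 'single', 'score'))  # '2500.1'
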
diff --git a/data/ref_results/network_benchmarks_indices.py b/data/ref_results/network_benchmarks_indices.py
index 96ccd4fc..f4c581db 100644
--- a/data/ref_results/network_benchmarks_indices.py
+++ b/data/ref_results/network_benchmarks_indices.py
@@ -6,15 +6,15 @@ from result_accum import result_concat as concat
def iperf_index():
iperf_dict = concat('../../results/iperf/')
iperf_bm_ref = get_reference('network', 'iperf_bm', 'throughput received(b/s)')
- iperf_bm_index = get_index(iperf_dict, 'iperf_bm', iperf_bm_ref, '4 IPERF result', '2. Bandwidth', '2. throughput Received (b/s)')
+ iperf_bm_index = get_index(iperf_dict, 'iperf_bm', iperf_bm_ref, 'details', 'bandwidth', 'received_throughput')
iperf_vm_ref = get_reference('network', 'iperf_vm', 'throughput received(b/s)')
- iperf_vm_index = get_index(iperf_dict, 'iperf_vm', iperf_vm_ref, '4 IPERF result', '2. Bandwidth', '2. throughput Received (b/s)')
+ iperf_vm_index = get_index(iperf_dict, 'iperf_vm', iperf_vm_ref, 'details', 'bandwidth', 'received_throughput')
iperf_vm_2_ref = get_reference('network', 'iperf_vm_2', 'throughput received(b/s)')
- iperf_vm_2_index = get_index(iperf_dict, 'iperf_vm_2', iperf_vm_2_ref, '4 IPERF result', '2. Bandwidth', '2. throughput Received (b/s)')
+ iperf_vm_2_index = get_index(iperf_dict, 'iperf_vm_2', iperf_vm_2_ref, 'details', 'bandwidth', 'received_throughput')
iperf_index = float(iperf_bm_index + iperf_vm_index + iperf_vm_2_index) / 3
print iperf_index
iperf_dict_i = {}
- iperf_dict_i['1. Index'] = iperf_index
- iperf_dict_i['2. Results'] = iperf_dict
+ iperf_dict_i['index'] = iperf_index
+ iperf_dict_i['results'] = iperf_dict
return iperf_dict_i
diff --git a/data/ref_results/network_suite.py b/data/ref_results/network_suite.py
index 37dcb093..ae4ed107 100644
--- a/data/ref_results/network_suite.py
+++ b/data/ref_results/network_suite.py
@@ -14,7 +14,7 @@ l = len(network_bench_list)
for benchmark in network_bench_list:
try:
- temp = temp + float(network_dict[benchmark]['1. Index'])
+ temp = temp + float(network_dict[benchmark]['index'])
except:
l = l - 1
pass
@@ -25,6 +25,6 @@ else:
network_suite_index = temp / len(network_bench_list)
network_dict_f = {}
network_dict_f['index'] = network_suite_index
- network_dict_f['suite results'] = network_dict
+ network_dict_f['suite_results'] = network_dict
with open('../../results/network_result.json', 'w+') as result_json:
json.dump(network_dict_f, result_json, indent=4, sort_keys=True)
diff --git a/data/ref_results/storage_benchmarks_indices.py b/data/ref_results/storage_benchmarks_indices.py
index f51b3d6e..6c5dd435 100644
--- a/data/ref_results/storage_benchmarks_indices.py
+++ b/data/ref_results/storage_benchmarks_indices.py
@@ -6,18 +6,17 @@ from result_accum import result_concat as concat
def fio_index():
fio_dict = concat('../../results/fio/')
fio_r_bm_ref = get_reference('storage', 'fio_bm', 'read', 'IOPS')
- fio_r_bm_index = get_index(fio_dict, 'fio_bm', fio_r_bm_ref, '4 FIO result', 'Job_0', 'read', 'IO/sec')
-
+ fio_r_bm_index = get_index(fio_dict, 'fio_bm', fio_r_bm_ref, 'details', 'job_0', 'read', 'io_ps')
fio_w_bm_ref = get_reference('storage', 'fio_bm', 'write', 'IOPS')
- fio_w_bm_index = get_index(fio_dict, 'fio_bm', fio_w_bm_ref, '4 FIO result', 'Job_0', 'write', 'IO/sec')
+ fio_w_bm_index = get_index(fio_dict, 'fio_bm', fio_w_bm_ref, 'details', 'job_0', 'write', 'io_ps')
fio_bm_index = (fio_r_bm_index + fio_w_bm_index) / 2
fio_r_vm_ref = get_reference('storage', 'fio_vm', 'read', 'IOPS')
- fio_r_vm_index = get_index(fio_dict, 'fio_vm', fio_r_vm_ref, '4 FIO result', 'Job_0', 'read', 'IO/sec')
+ fio_r_vm_index = get_index(fio_dict, 'fio_vm', fio_r_vm_ref, 'details', 'job_0', 'read', 'io_ps')
fio_w_vm_ref = get_reference('storage', 'fio_vm', 'write', 'IOPS')
- fio_w_vm_index = get_index(fio_dict, 'fio_vm', fio_w_vm_ref, '4 FIO result', 'Job_0', 'write', 'IO/sec')
+ fio_w_vm_index = get_index(fio_dict, 'fio_vm', fio_w_vm_ref, 'details', 'job_0', 'write', 'io_ps')
fio_vm_index = (fio_r_vm_index + fio_w_vm_index) / 2
@@ -25,6 +24,6 @@ def fio_index():
print fio_index
fio_dict_i = {}
- fio_dict_i['1. Index'] = fio_index
- fio_dict_i['2. Results'] = fio_dict
+ fio_dict_i['index'] = fio_index
+ fio_dict_i['results'] = fio_dict
return fio_dict_i
diff --git a/data/ref_results/storage_suite.py b/data/ref_results/storage_suite.py
index 52d6c8c6..38d97622 100644
--- a/data/ref_results/storage_suite.py
+++ b/data/ref_results/storage_suite.py
@@ -13,7 +13,7 @@ l = len(storage_bench_list)
temp = 0
for benchmark in storage_bench_list:
try:
- temp = temp + float(storage_dict[benchmark]['1. Index'])
+ temp = temp + float(storage_dict[benchmark]['index'])
except KeyError:
l -= 1
if l == 0:
@@ -22,6 +22,6 @@ else:
storage_suite_index = temp / l
storage_dict_f = {}
storage_dict_f['index'] = storage_suite_index
- storage_dict_f['storage suite'] = storage_dict
+ storage_dict_f['storage_suite'] = storage_dict
with open('../../results/storage_result.json', 'w+') as result_json:
json.dump(storage_dict_f, result_json, indent=4, sort_keys=True)
diff --git a/docker/cleanup_qtip_image.sh b/docker/cleanup_qtip_image.sh
new file mode 100644
index 00000000..9c2b59db
--- /dev/null
+++ b/docker/cleanup_qtip_image.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+if [[ ! -f ${QTIP_DIR}/openrc ]];then
+ source ${REPOS_DIR}/releng/utils/fetch_os_creds.sh \
+ -d ${QTIP_DIR}/openrc \
+ -i ${INSTALLER_TYPE} \
+ -a ${INSTALLER_IP}
+fi
+
+source ${QTIP_DIR}/openrc
+
+cleanup_image()
+{
+ echo
+ if ! glance image-list; then
+ return
+ fi
+
+ echo "Deleting image QTIP_CentOS..."
+ glance image-delete $(glance image-list | grep -e QTIP_CentOS | awk '{print $2}')
+
+}
+
+cleanup_image
diff --git a/docker/prepare_qtip_image.sh b/docker/prepare_qtip_image.sh
new file mode 100644
index 00000000..4095c806
--- /dev/null
+++ b/docker/prepare_qtip_image.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+IMGNAME='QTIP_CentOS.qcow2'
+IMGPATH='/home/opnfv/imgstore'
+IMGURL='http://build.opnfv.org/artifacts.opnfv.org/qtip/QTIP_CentOS.qcow2'
+
+load_image()
+{
+ if [[ -n $( glance image-list | grep -e QTIP_CentOS) ]]; then
+ return
+ fi
+
+ test -d $IMGPATH || mkdir -p $IMGPATH
+ if [[ ! -f "$IMGPATH/$IMGNAME" ]];then
+ echo
+ echo "========== Downloading QTIP_CentOS image =========="
+ cd $IMGPATH
+ wget -c --progress=dot:giga $IMGURL
+ fi
+
+ echo
+ echo "========== Loading QTIP_CentOS image =========="
+ output=$(glance image-create \
+ --name QTIP_CentOS \
+ --visibility public \
+ --disk-format qcow2 \
+ --container-format bare \
+ --file $IMGPATH/$IMGNAME )
+ echo "$output"
+
+ IMAGE_ID=$(echo "$output" | grep " id " | awk '{print $(NF-1)}')
+
+ if [ -z "$IMAGE_ID" ]; then
+ echo 'Failed uploading QTIP_CentOS image to cloud'.
+ exit 1
+ fi
+
+ echo "QTIP_CentOS image id: $IMAGE_ID"
+}
+
+rm -rf ${QTIP_DIR}/openrc
+
+${REPOS_DIR}/releng/utils/fetch_os_creds.sh \
+-d ${QTIP_DIR}/openrc \
+-i ${INSTALLER_TYPE} \
+-a ${INSTALLER_IP}
+
+source ${QTIP_DIR}/openrc
+
+load_image
diff --git a/docker/run_qtip.sh b/docker/run_qtip.sh
index 37eb0ea7..79529e54 100755
--- a/docker/run_qtip.sh
+++ b/docker/run_qtip.sh
@@ -1,40 +1,28 @@
#! /bin/bash
-
-
-
-cp ${REPOS_DIR}/releng/utils/fetch_os_creds.sh ${QTIP_DIR}/data/
-cd ${QTIP_DIR} && source get_env_info.sh \
--n ${INSTALLER_TYPE} \
--i ${INSTALLER_IP}
-
-source ${QTIP_DIR}/opnfv-creds.sh
-
-if [ "$TEST_CASE" == "compute" ]; then
- cd ${QTIP_DIR} && python qtip.py -l default -f compute
- cd ${QTIP_DIR}/data/ref_results/ && python compute_suite.py
-fi
-
-if [ "$TEST_CASE" == "storage" ]; then
- cd ${QTIP_DIR} && python qtip.py -l default -f storage
- cd ${QTIP_DIR}/data/ref_results/ && python storage_suite.py
-fi
-
-if [ "$TEST_CASE" == "network" ]; then
- cd ${QTIP_DIR} && python qtip.py -l default -f network
- cd ${QTIP_DIR}/data/ref_results/ && python network_suite.py
-fi
-
-
-if [ "$TEST_CASE" == "all" ]; then
- cd ${QTIP_DIR} && python qtip.py -l default -f compute
- cd ${QTIP_DIR} && python qtip.py -l default -f storage
- cd ${QTIP_DIR} && python qtip.py -l default -f network
-
- cd ${QTIP_DIR}/data/ref_results/ && python compute_suite.py
- cd ${QTIP_DIR}/data/ref_results/ && python storage_suite.py
- cd ${QTIP_DIR}/data/ref_results/ && python network_suite.py
-fi
-
-
-
-
+run_test_suite()
+{
+ if [ "$TEST_CASE" == "compute" ]; then
+ cd ${QTIP_DIR} && python qtip.py -l default -f compute
+ cd ${QTIP_DIR}/data/ref_results/ && python compute_suite.py
+ elif [ "$TEST_CASE" == "storage" ]; then
+ cd ${QTIP_DIR} && python qtip.py -l default -f storage
+ cd ${QTIP_DIR}/data/ref_results/ && python storage_suite.py
+ elif [ "$TEST_CASE" == "network" ]; then
+ cd ${QTIP_DIR} && python qtip.py -l default -f network
+ cd ${QTIP_DIR}/data/ref_results/ && python network_suite.py
+ elif [ "$TEST_CASE" == "all" ]; then
+ cd ${QTIP_DIR} && python qtip.py -l default -f compute
+ cd ${QTIP_DIR} && python qtip.py -l default -f storage
+ cd ${QTIP_DIR} && python qtip.py -l default -f network
+
+ cd ${QTIP_DIR}/data/ref_results/ && python compute_suite.py
+ cd ${QTIP_DIR}/data/ref_results/ && python storage_suite.py
+ cd ${QTIP_DIR}/data/ref_results/ && python network_suite.py
+ fi
+}
+
+source ${QTIP_DIR}/docker/prepare_qtip_image.sh
+
+run_test_suite
+
+source ${QTIP_DIR}/docker/cleanup_qtip_image.sh
diff --git a/docs/apidocs/index.rst b/docs/apidocs/index.rst
new file mode 100644
index 00000000..916fab08
--- /dev/null
+++ b/docs/apidocs/index.rst
@@ -0,0 +1,13 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+.. (c) 2015 Dell Inc.
+.. (c) 2016 ZTE Corp.
+
+*****************
+QTIP Config Guide
+*****************
+
+.. toctree::
+ :maxdepth: 2
+
+ ./qtip_restful_api.rst
diff --git a/docs/apidocs/qtip_restful_api.rst b/docs/apidocs/qtip_restful_api.rst
new file mode 100644
index 00000000..ca77224c
--- /dev/null
+++ b/docs/apidocs/qtip_restful_api.rst
@@ -0,0 +1,10 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+.. (c) 2015 Dell Inc.
+.. (c) 2016 ZTE Corp.
+
+
+QTIP RESTful API
+================
+
+You can browse the full QTIP RESTful API specification at http://qtip_server_ip:5000/api/spec.html.
diff --git a/docs/templates/sample_config.yaml b/docs/templates/sample_config.yaml
index 8dcaa11c..13f6d3fe 100644
--- a/docs/templates/sample_config.yaml
+++ b/docs/templates/sample_config.yaml
@@ -14,7 +14,7 @@ Scenario:
#Context would define the environment on which to run:
#Host Machine keys would contain Host_Machines/ Baremetal machines to run the benchmarks on
-#e.g in Host Machine , machine_1 and machine_2 are the bare metal machines. For each baremetal machine its IP(which should be reachable from the location on which you run QTIP), passwords and its role(host or server)
+#e.g. in Host Machines, machine_1 and machine_2 are the bare metal machines. For each baremetal machine, give its IP (which should be reachable from the location on which you run QTIP), its password and its role (host or server). If your installer is 'fuel' or 'compass' and you leave the baremetal machine IP empty, QTIP will get the compute node IP from the installer automatically.
Context:
Host_Machines:
machine_1:
diff --git a/docs/userguide/introduction.rst b/docs/userguide/introduction.rst
index d3bba51d..823a2722 100644
--- a/docs/userguide/introduction.rst
+++ b/docs/userguide/introduction.rst
@@ -27,7 +27,8 @@ This folder is used to store all the config files which are used to setup the
which run QTIP. Inside each pod there are folders which contain the config
files segmented based on test cases. Namely, these include, `Compute`,
`Network` and `Storage`. The default folder is there for the end user who
- is interested in testing their infrastructure but arent part of a opnfv pod.
+ is interested in testing their infrastructure, which is installed by fuel
+ or compass, but isn't part of an OPNFV pod, and for OPNFV CI.
The structure of the directory for the user appears as follows
::
@@ -64,14 +65,25 @@ These files list the benchmarks are to be run by the QTIP framework. Sample
compute test file is shown below
::
- dhrystone_vm.yaml
- dhrystone_bm.yaml
- whetstone_vm.yaml
- ssl_bm.yaml
+    {
+        "bm": [
+            "dhrystone_bm.yaml",
+            "whetstone_bm.yaml",
+            "ramspeed_bm.yaml",
+            "dpi_bm.yaml",
+            "ssl_bm.yaml"
+        ],
+        "vm": [
+            "dhrystone_vm.yaml",
+            "whetstone_vm.yaml",
+            "ramspeed_vm.yaml",
+            "dpi_vm.yaml",
+            "ssl_vm.yaml"
+        ]
+    }
The compute file will now run all the benchmarks listed above one after
-another on the environment. `NOTE: Please ensure there are no blank lines
-in this file as that has been known to throw an exception`.
+another on the environment.
Preparing a config file for test:
---------------------------------
@@ -317,21 +329,57 @@ This will generate the `opnfv-creds.sh` file needed to use the python clients fo
source opnfv-creds.sh
-Running QTIP on the using `default` as the pod name and for the `compute` suite
+Running QTIP using `default` as the pod name and the `compute` suite via the CLI
::
python qtip.py -l default -f compute
-Running QTIP on the using `default` as the pod name and for the `network` suite
+Running QTIP using 'default' as the pod name and the 'compute' suite, 'bm' type, via the RESTful API
+::
+
+ curl --trace-ascii debug.txt -X POST -d '{ "installer_ip": "10.20.6.2","installer_type":"fuel", "suite_name":"compute", "type": "BM"}' -H "Content-Type: application/json" http://qtip_server_ip:5000/api/v1.0/jobs
+
+
+Running QTIP using 'default' as the pod name and the 'compute' suite, 'vm' type, via the RESTful API
+::
+
+ curl --trace-ascii debug.txt -X POST -d '{ "installer_ip": "10.20.6.2","installer_type":"fuel", "suite_name":"compute", "type": "VM"}' -H "Content-Type: application/json" http://qtip_server_ip:5000/api/v1.0/jobs
+
+
+Running QTIP using `default` as the pod name and the `network` suite via the CLI
::
python qtip.py -l default -f network
-Running QTIP on the using `default` as the pod name and for the `storage` suite
+Running QTIP using 'default' as the pod name and the 'network' suite, 'bm' type, via the RESTful API
+::
+
+ curl --trace-ascii debug.txt -X POST -d '{ "installer_ip": "10.20.6.2","installer_type":"fuel", "suite_name":"network", "type": "BM"}' -H "Content-Type: application/json" http://qtip_server_ip:5000/api/v1.0/jobs
+
+Running QTIP using `default` as the pod name and the `storage` suite via the CLI
::
    python qtip.py -l default -f storage
+Running QTIP using 'default' as the pod name and the 'storage' suite, 'bm' type, via the RESTful API
+::
+
+ curl --trace-ascii debug.txt -X POST -d '{ "installer_ip": "10.20.6.2","installer_type":"fuel", "suite_name":"storage", "type": "BM"}' -H "Content-Type: application/json" http://qtip_server_ip:5000/api/v1.0/jobs
+
+Get the status of a running QTIP job via the RESTful API
+::
+
+ curl --trace-ascii debug.txt -X GET http://qtip_server_ip:5000/api/v1.0/jobs/job-id
+ For example:
+ curl --trace-ascii debug.txt -X GET http://172.37.0.3:5000/api/v1.0/jobs/5b71f035-3fd6-425c-9cc7-86acd3a04214
+
+Stop a running QTIP job via the RESTful API. The job will finish the current benchmark test and then stop.
+::
+
+    curl --trace-ascii debug.txt -X DELETE http://qtip_server_ip:5000/api/v1.0/jobs/job-id
+    For example:
+    curl --trace-ascii debug.txt -X DELETE http://172.37.0.3:5000/api/v1.0/jobs/5b71f035-3fd6-425c-9cc7-86acd3a04214
+
Results:
--------
QTIP generates results in the `results/` directory are listed down under the particularly benchmark name. So all the results for dhrystone would be listed and time stamped.
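
For completeness, a hedged Python alternative to the curl calls documented above; it assumes only the endpoints shown there (POST/GET/DELETE on /api/v1.0/jobs) and that the requests package is installed::

    # Mirrors the curl examples in the userguide; the job id is a placeholder
    # copied from the documentation samples.
    import requests

    base = 'http://qtip_server_ip:5000/api/v1.0/jobs'
    payload = {"installer_ip": "10.20.6.2", "installer_type": "fuel",
               "suite_name": "compute", "type": "BM"}

    resp = requests.post(base, json=payload)              # start a compute/BM job
    print(resp.status_code, resp.text)

    job_id = '5b71f035-3fd6-425c-9cc7-86acd3a04214'       # placeholder id
    print(requests.get('{0}/{1}'.format(base, job_id)).text)   # job status
    requests.delete('{0}/{1}'.format(base, job_id))            # stop the job
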
diff --git a/func/driver.py b/func/driver.py
index 88d673fa..bcda0ce1 100644
--- a/func/driver.py
+++ b/func/driver.py
@@ -49,7 +49,7 @@ class Driver:
def get_special_var_json(self, role, roles, benchmark_detail, pip_dict):
special_json = {}
index = roles.index(role) + 1
- private_ip = pip_dict[0][1][0] if pip_dict[0][1][0] else 'NONE'
+ private_ip = pip_dict[0][1] if pip_dict[0][1][0] else 'NONE'
map(lambda x: special_json.update({'ip' + str(index): x}), role[1])\
if benchmark_detail and (role[0] == '1-server') else None
map(lambda x: special_json.update({'privateip' + str(index): private_ip}), role[1])\
diff --git a/func/fetchimg.py b/func/fetchimg.py
deleted file mode 100644
index 1ed3def6..00000000
--- a/func/fetchimg.py
+++ /dev/null
@@ -1,35 +0,0 @@
-##############################################################################
-# Copyright (c) 2015 Dell Inc and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-import os
-import time
-
-IMGSTORE = "/home/opnfv/imgstore"
-
-
-class FetchImg:
-
- def __init__(self):
- print 'Fetching Image!'
-
- @staticmethod
- def download():
- time.sleep(2)
- os.system('mkdir -p Temp_Img')
- filepath = './Temp_Img/QTIP_CentOS.qcow2'
- imgstorepath = IMGSTORE + "/QTIP_CentOS.qcow2"
- if os.path.isfile(imgstorepath):
- os.system("ln -s %s %s" % (imgstorepath, filepath))
- print "QTIP_CentOS.qcow2 exists locally. Skipping the download and using the file from IMG store"
- else:
- print 'Fetching QTIP_CentOS.qcow2'
- os.system('wget http://artifacts.opnfv.org/qtip/QTIP_CentOS.qcow2 -P Temp_Img')
-
- while not os.path.isfile(filepath):
- time.sleep(10)
- print 'Download Completed!'
diff --git a/func/spawn_vm.py b/func/spawn_vm.py
index 5710308b..7ac4340e 100644
--- a/func/spawn_vm.py
+++ b/func/spawn_vm.py
@@ -11,11 +11,9 @@ import os
import sys
from collections import defaultdict
from func.env_setup import Env_setup
-from func.fetchimg import FetchImg
import yaml
import heatclient.client
import keystoneclient
-import glanceclient
from novaclient import client
import time
from func.create_zones import create_zones
@@ -191,41 +189,12 @@ class SpawnVM(Env_setup):
'1', endpoint=heat_endpoint, token=keystone.auth_token)
return self._heat_client
- def _get_glance_client(self):
- if self._glance_client is None:
- keystone = self._get_keystone_client()
- glance_endpoint = keystone.service_catalog.url_for(
- service_type='image')
- self._glance_client = glanceclient.Client(
- '2', glance_endpoint, token=keystone.auth_token)
- return self._glance_client
-
def create_stack(self, vm_role_ip_dict, heat_template):
global sshkey
stackname = 'QTIP'
heat = self._get_heat_client()
- glance = self._get_glance_client()
-
- available_images = []
- for image_list in glance.images.list():
- available_images.append(image_list.name)
-
- if 'QTIP_CentOS' in available_images:
- print 'Image Present'
-
- elif 'QTIP_CentOS' not in available_images:
- fetchImage = FetchImg()
- fetchImage.download()
- print 'Uploading Image to Glance. Please wait'
- qtip_image = glance.images.create(
- name='QTIP_CentOS',
- visibility='public',
- disk_format='qcow2',
- container_format='bare')
- glance.images.upload(
- qtip_image.id, open('./Temp_Img/QTIP_CentOS.qcow2'))
for checks in range(3):
print "Try to delete heats %s" % checks
for prev_stacks in heat.stacks.list():
@@ -233,7 +202,6 @@ class SpawnVM(Env_setup):
print 'QTIP Stacks exists.\nDeleting Existing Stack'
heat.stacks.delete('QTIP')
time.sleep(10)
-
print '\nStack Creating Started\n'
try:
diff --git a/tests/driver_test.py b/tests/driver_test.py
index 5ea5dac4..bc705dad 100644
--- a/tests/driver_test.py
+++ b/tests/driver_test.py
@@ -35,13 +35,13 @@ class TestClass:
[('1-server', ['10.20.0.13']), ('2-host', ['10.20.0.15'])],
"iperf_vm.yaml",
[('duration', 20), ('protocol', 'tcp'), ('bandwidthGbps', 0)],
- [("10.20.0.13", [None]), ("10.20.0.15", [None])],
+ [('1-server', '10.10.17.4'), ('2-host', '10.10.17.5')],
{}],
[{'Dest_dir': 'results',
'ip1': '10.20.0.13',
'ip2': '',
'installer': 'joid',
- 'privateip1': 'NONE',
+ 'privateip1': '10.10.17.4',
'workingdir': '/home',
'fname': 'iperf_vm.yaml',
'username': 'ubuntu',
@@ -53,7 +53,7 @@ class TestClass:
'ip1': '10.20.0.13',
'ip2': '',
'installer': 'joid',
- 'privateip1': 'NONE',
+ 'privateip1': '10.10.17.4',
'workingdir': '/home',
'fname': 'iperf_vm.yaml',
'username': 'ubuntu',
diff --git a/tests/fetchimg_test.py b/tests/fetchimg_test.py
deleted file mode 100644
index 5d482567..00000000
--- a/tests/fetchimg_test.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import mock
-from func.fetchimg import FetchImg
-
-
-class TestClass:
- @mock.patch('func.fetchimg.os')
- @mock.patch('func.fetchimg.os.path')
- def test_fetch_img_success(self, mock_path, mock_os):
- mock_os.system.return_value = True
- mock_path.isfile.return_value = True
- img = FetchImg()
- img.download()
-
- @mock.patch('func.fetchimg.time')
- @mock.patch('func.fetchimg.os.system')
- @mock.patch('func.fetchimg.os.path')
- def test_fetch_img_fail(self, mock_path, mock_system, mock_time):
- img = FetchImg()
- mock_system.return_value = True
- mock_path.isfile.side_effect = [False, False, True]
- img.download()
- assert mock_time.sleep.call_count == 2
-
- @mock.patch('func.fetchimg.time')
- @mock.patch('func.fetchimg.os.system')
- @mock.patch('func.fetchimg.os.path')
- def test_fetch_temp_success(self, mock_path, mock_system, mock_time):
- img = FetchImg()
- mock_system.return_value = True
- mock_path.isfile.side_effect = [True]
- img.download()
- filepath = './Temp_Img/QTIP_CentOS.qcow2'
- imgstorepath = "/home/opnfv/imgstore/QTIP_CentOS.qcow2"
- mock_system.assert_called_with("ln -s %s %s" % (imgstorepath, filepath))
diff --git a/tests/spawn_vm_test.py b/tests/spawn_vm_test.py
index eb843ad9..b22745d7 100644
--- a/tests/spawn_vm_test.py
+++ b/tests/spawn_vm_test.py
@@ -10,15 +10,6 @@ class KeystoneMock(MagicMock):
v2_0 = Mock()
-class ImageMock(MagicMock):
- name = 'QTIP_CentOS'
-
-
-class ImagesMock(MagicMock):
- def list(self):
- return [ImageMock()]
-
-
class StackMock(MagicMock):
status = 'COMPLETE'
outputs = [{'output_key': 'availability_instance_1',
@@ -55,13 +46,11 @@ class TestClass:
@mock.patch('func.spawn_vm.FetchImg')
@mock.patch('func.spawn_vm.create_zones')
@mock.patch('func.spawn_vm.client', autospec=True)
- @mock.patch('func.spawn_vm.glanceclient', autospec=True)
@mock.patch('func.spawn_vm.keystoneclient.v2_0', autospec=True)
@mock.patch('func.spawn_vm.heatclient.client', autospec=True)
- def test_create_zones_success(self, mock_heat, mock_keystone, mock_glance,
+ def test_create_zones_success(self, mock_heat, mock_keystone,
mock_nova_client, mock_zone, mock_fetch,
mock_setup, test_input, expected):
- mock_glance.Client.return_value = Mock(images=ImagesMock())
mock_nova_client.Client.return_value = Mock()
mock_heat.Client.return_value = Mock(stacks=HeatMock())
k = mock.patch.dict(os.environ, {'INSTALLER_TYPE': 'fuel'})