-rw-r--r--  jjb/armband/armband-ci-jobs.yml                                          |  10
-rw-r--r--  jjb/bottlenecks/bottlenecks-ci-jobs.yml                                  |  14
-rw-r--r--  jjb/compass4nfv/compass-ci-jobs.yml                                      |  38
-rwxr-xr-x  jjb/fuel/fuel-basic-exp.sh                                               |  18
-rwxr-xr-x  jjb/fuel/fuel-build-exp.sh                                               |  10
-rwxr-xr-x  jjb/fuel/fuel-deploy-exp.sh                                              |  10
-rwxr-xr-x  jjb/fuel/fuel-smoke-test-exp.sh                                          |  10
-rw-r--r--  jjb/fuel/fuel-verify-jobs-experimental.yml                               | 264
-rw-r--r--  jjb/functest/functest-ci-jobs.yml                                        |   4
-rw-r--r--  jjb/opnfv/slave-params.yml                                               |  14
-rw-r--r--  jjb/yardstick/yardstick-ci-jobs.yml                                      |  13
-rw-r--r--  utils/test/dashboard/dashboard/common/elastic_access.py                 |   6
-rw-r--r--  utils/test/dashboard/dashboard/conf/config.py                           |  12
-rw-r--r--  utils/test/dashboard/dashboard/elastic2kibana/dashboard_assembler.py    |  59
-rw-r--r--  utils/test/dashboard/dashboard/elastic2kibana/main.py                   | 195
-rw-r--r--  utils/test/dashboard/dashboard/elastic2kibana/utility.py                |  15
-rw-r--r--  utils/test/dashboard/dashboard/elastic2kibana/visualization_assembler.py | 107
-rw-r--r--  utils/test/dashboard/dashboard/mongo2elastic/main.py                    | 161
18 files changed, 614 insertions(+), 346 deletions(-)
diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml
index 55ab7fc34..024681841 100644
--- a/jjb/armband/armband-ci-jobs.yml
+++ b/jjb/armband/armband-ci-jobs.yml
@@ -251,23 +251,23 @@
- trigger:
name: 'fuel-os-odl_l2-nofeature-ha-armband-baremetal-colorado-trigger'
triggers:
- - timed: '0 8 * * 1,4,6'
+ - timed: '0 8 * * 1,3,5,7'
- trigger:
name: 'fuel-os-nosdn-nofeature-ha-armband-baremetal-colorado-trigger'
triggers:
- - timed: '0 16 * * 2,5'
+ - timed: '0 16 * * 2,7'
- trigger:
name: 'fuel-os-odl_l2-bgpvpn-ha-armband-baremetal-colorado-trigger'
triggers:
- - timed: '0 8 * * 1,3,6'
+ - timed: '0 8 * * 2,4,6'
- trigger:
name: 'fuel-os-odl_l3-nofeature-ha-armband-baremetal-colorado-trigger'
triggers:
- - timed: '0 16 * * 2,4,7'
+ - timed: '0 16 * * 1,4,6'
- trigger:
name: 'fuel-os-odl_l2-nofeature-noha-armband-baremetal-colorado-trigger'
triggers:
- - timed: '0 8 * * 3,5,7'
+ - timed: '0 16 * * 3,5'
#---------------------------------------------------------------
# Enea Armband CI Virtual Triggers running against master branch
#---------------------------------------------------------------
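As a reading aid for the retimed triggers above: Jenkins cron fields are minute, hour, day of month, month, and day of week (0 or 7 meaning Sunday), so '0 8 * * 1,3,5,7' fires at 08:00 server time on Monday, Wednesday, Friday, and Sunday, while '0 16 * * 3,5' fires at 16:00 on Wednesday and Friday.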
diff --git a/jjb/bottlenecks/bottlenecks-ci-jobs.yml b/jjb/bottlenecks/bottlenecks-ci-jobs.yml
index 4bc56ab1b..7f2e6bf8a 100644
--- a/jjb/bottlenecks/bottlenecks-ci-jobs.yml
+++ b/jjb/bottlenecks/bottlenecks-ci-jobs.yml
@@ -68,12 +68,6 @@
# installer: joid
# auto-trigger-name: 'daily-trigger-disabled'
# <<: *master
- - huawei-pod2:
- slave-label: '{pod}'
- installer: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *master
-
#--------------------------------------------
suite:
- 'rubbos'
@@ -225,14 +219,6 @@
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'bottlenecks-params-huawei-pod2'
- parameters:
- - string:
- name: BOTTLENECKS_DB_TARGET
- default: '104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
name: 'bottlenecks-params-orange-pod2'
parameters:
- string:
diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml
index 3ad26b7ee..da882cdfe 100644
--- a/jjb/compass4nfv/compass-ci-jobs.yml
+++ b/jjb/compass4nfv/compass-ci-jobs.yml
@@ -39,10 +39,6 @@
#--------------------------------
# master
#--------------------------------
- - huawei-pod2:
- slave-label: '{pod}'
- os-version: 'trusty'
- <<: *colorado
- huawei-pod5:
slave-label: '{pod}'
os-version: 'centos7'
@@ -103,6 +99,7 @@
use-build-blocker: true
blocking-jobs:
- 'compass-os-.*?-{pod}-daily-.*?'
+ - 'compass-os-.*?-baremetal-daily-.*?'
- 'compass-verify-[^-]*'
block-level: 'NODE'
@@ -295,39 +292,6 @@
- timed: ''
- trigger:
- name: 'compass-os-nosdn-nofeature-ha-huawei-pod2-colorado-trigger'
- triggers:
- - timed: '0 19 * * *'
-- trigger:
- name: 'compass-os-odl_l2-nofeature-ha-huawei-pod2-colorado-trigger'
- triggers:
- - timed: '0 23 * * *'
-- trigger:
- name: 'compass-os-odl_l3-nofeature-ha-huawei-pod2-colorado-trigger'
- triggers:
- - timed: '0 15 * * *'
-- trigger:
- name: 'compass-os-onos-nofeature-ha-huawei-pod2-colorado-trigger'
- triggers:
- - timed: '0 11 * * *'
-- trigger:
- name: 'compass-os-ocl-nofeature-ha-huawei-pod2-colorado-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-onos-sfc-ha-huawei-pod2-colorado-trigger'
- triggers:
- - timed: '0 7 * * *'
-- trigger:
- name: 'compass-os-odl_l2-moon-ha-huawei-pod2-colorado-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'compass-os-nosdn-kvm-ha-huawei-pod2-colorado-trigger'
- triggers:
- - timed: ''
-
-- trigger:
name: 'compass-os-nosdn-nofeature-ha-baremetal-master-trigger'
triggers:
- timed: '0 2 * * *'
diff --git a/jjb/fuel/fuel-basic-exp.sh b/jjb/fuel/fuel-basic-exp.sh
new file mode 100755
index 000000000..a70a0c765
--- /dev/null
+++ b/jjb/fuel/fuel-basic-exp.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+set -o nounset
+
+echo "-----------------------------------------------------------------------"
+echo $GERRIT_CHANGE_COMMIT_MESSAGE
+echo "-----------------------------------------------------------------------"
+
+# proposal for specifying the scenario name in commit message
+# currently only one scenario name is supported, but this can be
+# expanded if needed to support multiple scenarios, for example as
+# a comma-separated list
+SCENARIO_NAME_PATTERN="(?<=@scenario:).*?(?=@)"
+SCENARIO_NAME=$(echo "$GERRIT_CHANGE_COMMIT_MESSAGE" | grep -oP "$SCENARIO_NAME_PATTERN")
+if [[ $? -ne 0 ]]; then
+ echo "The patch verification will be done only with build!"
+else
+ echo "Will run full verification; build, deploy, and smoke test using scenario $SCENARIO_NAME"
+fi
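To illustrate the scenario tag handled above: a commit message ending with a footer line such as the following (hypothetical scenario name)

    @scenario:os-nosdn-nofeature-ha@

would make grep -oP extract os-nosdn-nofeature-ha via the lookbehind/lookahead pattern, and the script would then announce the full build, deploy, and smoke-test verification instead of the build-only path.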
diff --git a/jjb/fuel/fuel-build-exp.sh b/jjb/fuel/fuel-build-exp.sh
new file mode 100755
index 000000000..f7f613dc0
--- /dev/null
+++ b/jjb/fuel/fuel-build-exp.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
+ JOB_TYPE=${BASH_REMATCH[0]}
+else
+ echo "Unable to determine job type!"
+ exit 1
+fi
+
+echo "Not activated!"
diff --git a/jjb/fuel/fuel-deploy-exp.sh b/jjb/fuel/fuel-deploy-exp.sh
new file mode 100755
index 000000000..f7f613dc0
--- /dev/null
+++ b/jjb/fuel/fuel-deploy-exp.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
+ JOB_TYPE=${BASH_REMATCH[0]}
+else
+ echo "Unable to determine job type!"
+ exit 1
+fi
+
+echo "Not activated!"
diff --git a/jjb/fuel/fuel-smoke-test-exp.sh b/jjb/fuel/fuel-smoke-test-exp.sh
new file mode 100755
index 000000000..f7f613dc0
--- /dev/null
+++ b/jjb/fuel/fuel-smoke-test-exp.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+
+if [[ "$JOB_NAME" =~ (verify|merge|daily|weekly) ]]; then
+ JOB_TYPE=${BASH_REMATCH[0]}
+else
+ echo "Unable to determine job type!"
+ exit 1
+fi
+
+echo "Not activated!"
diff --git a/jjb/fuel/fuel-verify-jobs-experimental.yml b/jjb/fuel/fuel-verify-jobs-experimental.yml
new file mode 100644
index 000000000..ae83b08cf
--- /dev/null
+++ b/jjb/fuel/fuel-verify-jobs-experimental.yml
@@ -0,0 +1,264 @@
+- project:
+ # TODO: rename the project name
+ # TODO: get rid of appended -exp from the remainder of the file
+ name: 'fuel-verify-jobs-experimental'
+
+ project: 'fuel'
+
+ installer: 'fuel'
+#------------------------------------
+# branch definitions
+#------------------------------------
+ # TODO: enable master once things settle
+ stream-exp:
+ - experimental:
+ branch: 'stable/{stream-exp}'
+ gs-pathname: '/{stream-exp}'
+ disabled: false
+#------------------------------------
+# patch verification phases
+#------------------------------------
+ phase:
+ - 'basic':
+ # this phase does basic commit message check, unit test and so on
+ slave-label: 'opnfv-build'
+ - 'build':
+ # this phase builds artifacts if valid for given installer
+ slave-label: 'opnfv-build-ubuntu'
+ - 'deploy-virtual':
+ # this phase does virtual deployment using the artifacts produced in previous phase
+ slave-label: 'fuel-virtual'
+ - 'smoke-test':
+ # this phase runs functest smoke test
+ slave-label: 'fuel-virtual'
+#------------------------------------
+# jobs
+#------------------------------------
+ jobs:
+ - 'fuel-verify-{stream-exp}'
+ - 'fuel-verify-{phase}-{stream-exp}'
+#------------------------------------
+# job templates
+#------------------------------------
+- job-template:
+ name: 'fuel-verify-{stream-exp}'
+
+ project-type: multijob
+
+ disabled: '{obj:disabled}'
+
+ # TODO: this is valid for experimental only
+ # enable concurrency for master once things settle
+ concurrent: false
+
+ properties:
+ - throttle:
+ enabled: true
+ max-total: 4
+ option: 'project'
+
+ scm:
+ - gerrit-trigger-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: '$GERRIT_REFSPEC'
+ choosing-strategy: 'gerrit'
+
+ wrappers:
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+ - timeout:
+ timeout: 360
+ fail: true
+
+ triggers:
+ - gerrit:
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: ANT
+ pattern: 'ci/**'
+ - compare-type: ANT
+ pattern: 'build/**'
+ - compare-type: ANT
+ pattern: 'deploy/**'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**'
+ readable-message: true
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - gerrit-parameter:
+ branch: '{branch}'
+ - 'opnfv-build-defaults'
+ - 'fuel-verify-defaults-exp':
+ gs-pathname: '{gs-pathname}'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - multijob:
+ name: basic
+ condition: SUCCESSFUL
+ projects:
+ - name: 'fuel-verify-basic-{stream-exp}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: build
+ condition: SUCCESSFUL
+ projects:
+ - name: 'fuel-verify-build-{stream-exp}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: deploy-virtual
+ condition: SUCCESSFUL
+ projects:
+ - name: 'fuel-verify-deploy-virtual-{stream-exp}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: smoke-test
+ condition: SUCCESSFUL
+ projects:
+ - name: 'fuel-verify-smoke-test-{stream-exp}'
+ current-parameters: false
+ predefined-parameters: |
+ GERRIT_BRANCH=$GERRIT_BRANCH
+ GERRIT_REFSPEC=$GERRIT_REFSPEC
+ GERRIT_CHANGE_NUMBER=$GERRIT_CHANGE_NUMBER
+ GERRIT_CHANGE_COMMIT_MESSAGE=$GERRIT_CHANGE_COMMIT_MESSAGE
+ node-parameters: false
+ kill-phase-on: FAILURE
+ abort-all-job: true
+
+- job-template:
+ name: 'fuel-verify-{phase}-{stream-exp}'
+
+ disabled: '{obj:disabled}'
+
+ concurrent: true
+
+ properties:
+ - throttle:
+ enabled: true
+ max-total: 6
+ option: 'project'
+ - build-blocker:
+ use-build-blocker: true
+ blocking-jobs:
+ - 'fuel-verify-deploy-.*'
+ - 'fuel-verify-test-.*'
+ block-level: 'NODE'
+
+ scm:
+ - gerrit-trigger-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: '$GERRIT_REFSPEC'
+ choosing-strategy: 'gerrit'
+
+ wrappers:
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+ - timeout:
+ timeout: 360
+ fail: true
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - gerrit-parameter:
+ branch: '{branch}'
+ - '{slave-label}-defaults'
+ - '{installer}-defaults'
+ - 'fuel-verify-defaults-exp':
+ gs-pathname: '{gs-pathname}'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - '{project}-verify-{phase}-macro-exp'
+#------------------------------------
+# builder macros
+#------------------------------------
+- builder:
+ name: 'fuel-verify-basic-macro-exp'
+ builders:
+ - shell:
+ !include-raw: ./fuel-basic-exp.sh
+
+- builder:
+ name: 'fuel-verify-build-macro-exp'
+ builders:
+ - shell:
+ !include-raw: ./fuel-build-exp.sh
+ - shell:
+ !include-raw: ./fuel-workspace-cleanup.sh
+
+- builder:
+ name: 'fuel-verify-deploy-virtual-macro-exp'
+ builders:
+ - shell:
+ !include-raw: ./fuel-deploy-exp.sh
+
+- builder:
+ name: 'fuel-verify-smoke-test-macro-exp'
+ builders:
+ - shell:
+ !include-raw: ./fuel-smoke-test-exp.sh
+#------------------------------------
+# parameter macros
+#------------------------------------
+- parameter:
+ name: 'fuel-verify-defaults-exp'
+ parameters:
+ - string:
+ name: BUILD_DIRECTORY
+ default: $WORKSPACE/build_output
+ description: "Directory where the build artifact will be located upon the completion of the build."
+ - string:
+ name: CACHE_DIRECTORY
+ default: $HOME/opnfv/cache/$INSTALLER_TYPE
+ description: "Directory where the cache to be used during the build is located."
+ - string:
+ name: GS_URL
+ default: artifacts.opnfv.org/$PROJECT{gs-pathname}
+ description: "URL to Google Storage."
diff --git a/jjb/functest/functest-ci-jobs.yml b/jjb/functest/functest-ci-jobs.yml
index 4747835b1..348779308 100644
--- a/jjb/functest/functest-ci-jobs.yml
+++ b/jjb/functest/functest-ci-jobs.yml
@@ -128,10 +128,6 @@
slave-label: '{pod}'
installer: joid
<<: *master
- - huawei-pod2:
- slave-label: '{pod}'
- installer: compass
- <<: *colorado
- huawei-pod5:
slave-label: '{pod}'
installer: compass
diff --git a/jjb/opnfv/slave-params.yml b/jjb/opnfv/slave-params.yml
index 6cbaba4a5..4ffaff4ae 100644
--- a/jjb/opnfv/slave-params.yml
+++ b/jjb/opnfv/slave-params.yml
@@ -378,20 +378,6 @@
default: https://gerrit.opnfv.org/gerrit/$PROJECT
description: 'Git URL to use on this Jenkins Slave'
- parameter:
- name: 'huawei-pod2-defaults'
- parameters:
- - node:
- name: SLAVE_NAME
- description: 'Slave name on Jenkins'
- allowed-slaves:
- - huawei-pod2
- default-slaves:
- - huawei-pod2
- - string:
- name: GIT_BASE
- default: https://gerrit.opnfv.org/gerrit/$PROJECT
- description: 'Git URL to use on this Jenkins Slave'
-- parameter:
name: 'huawei-pod3-defaults'
parameters:
- node:
diff --git a/jjb/yardstick/yardstick-ci-jobs.yml b/jjb/yardstick/yardstick-ci-jobs.yml
index 1cb1c9779..962ea4743 100644
--- a/jjb/yardstick/yardstick-ci-jobs.yml
+++ b/jjb/yardstick/yardstick-ci-jobs.yml
@@ -177,11 +177,6 @@
installer: joid
auto-trigger-name: 'daily-trigger-disabled'
<<: *master
- - huawei-pod2:
- slave-label: '{pod}'
- installer: compass
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *colorado
- huawei-pod3:
slave-label: '{pod}'
installer: compass
@@ -372,14 +367,6 @@
description: 'Arguments to use in order to choose the backend DB'
- parameter:
- name: 'yardstick-params-huawei-pod2'
- parameters:
- - string:
- name: YARDSTICK_DB_BACKEND
- default: '-i 104.197.68.199:8086'
- description: 'Arguments to use in order to choose the backend DB'
-
-- parameter:
name: 'yardstick-params-huawei-pod5'
parameters:
- string:
diff --git a/utils/test/dashboard/dashboard/common/elastic_access.py b/utils/test/dashboard/dashboard/common/elastic_access.py
index 8c6494d39..aaf776f7a 100644
--- a/utils/test/dashboard/dashboard/common/elastic_access.py
+++ b/utils/test/dashboard/dashboard/common/elastic_access.py
@@ -1,4 +1,5 @@
import json
+import urlparse
import urllib3
@@ -43,3 +44,8 @@ def get_docs(url, creds=None, body=None, field='_source'):
for hit in res_data['hits']['hits']:
docs.append(hit[field])
return docs
+
+
+def publish_kibana(url, creds, type, id, body):
+ url = urlparse.urljoin(url, '/.kibana/{}/{}'.format(type, id))
+ publish_docs(url, creds, body)
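A minimal sketch of calling the new publish_kibana helper, assuming a local Elasticsearch with no credentials and an illustrative dashboard document (neither is part of this change):

    from common import elastic_access

    es_url = 'http://localhost:9200'   # assumed local instance
    creds = None                       # no basic-auth credentials
    dashboard_doc = {'title': 'functest vPing duration'}  # illustrative body

    # stores the document under <es_url>/.kibana/dashboard/functest-vPing-duration
    elastic_access.publish_kibana(es_url, creds, 'dashboard',
                                  'functest-vPing-duration', dashboard_doc)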
diff --git a/utils/test/dashboard/dashboard/conf/config.py b/utils/test/dashboard/dashboard/conf/config.py
index b868999a2..143b1939a 100644
--- a/utils/test/dashboard/dashboard/conf/config.py
+++ b/utils/test/dashboard/dashboard/conf/config.py
@@ -23,8 +23,8 @@ class APIConfig:
def __init__(self):
self._default_config_location = "../etc/config.ini"
- self.elastic_url = 'http://localhost:9200'
- self.elastic_creds = None
+ self.es_url = 'http://localhost:9200'
+ self.es_creds = None
self.kibana_url = None
self.is_js = True
self.js_path = None
@@ -64,8 +64,8 @@ class APIConfig:
raise ParseError("%s not found" % config_location)
# Linking attributes to keys from file with their sections
- obj.elastic_url = obj._get_str_parameter("elastic", "url")
- obj.elastic_creds = obj._get_str_parameter("elastic", "creds")
+ obj.es_url = obj._get_str_parameter("elastic", "url")
+ obj.es_creds = obj._get_str_parameter("elastic", "creds")
obj.kibana_url = obj._get_str_parameter("kibana", "url")
obj.is_js = obj._get_bool_parameter("kibana", "js")
obj.js_path = obj._get_str_parameter("kibana", "js_path")
@@ -77,8 +77,8 @@ class APIConfig:
"elastic_creds = %s \n" \
"kibana_url = %s \n" \
"is_js = %s \n" \
- "js_path = %s \n" % (self.elastic_url,
- self.elastic_creds,
+ "js_path = %s \n" % (self.es_url,
+ self.es_creds,
self.kibana_url,
self.is_js,
self.js_path)
diff --git a/utils/test/dashboard/dashboard/elastic2kibana/dashboard_assembler.py b/utils/test/dashboard/dashboard/elastic2kibana/dashboard_assembler.py
new file mode 100644
index 000000000..c1e9dfb22
--- /dev/null
+++ b/utils/test/dashboard/dashboard/elastic2kibana/dashboard_assembler.py
@@ -0,0 +1,59 @@
+import json
+
+import utility
+from common import elastic_access
+
+
+class DashboardAssembler(object):
+ def __init__(self,
+ project,
+ case,
+ family,
+ installer,
+ pod,
+ visAssemblers,
+ es_url,
+ es_creds):
+ super(DashboardAssembler, self).__init__()
+ self.project = project
+ self.case = case
+ self.test_family = family
+ self.installer = installer
+ self.pod = pod
+ self.visAssemblers = visAssemblers
+ self.es_url = es_url
+ self.es_creds = es_creds
+ self._assemble()
+ self._publish()
+
+ def _assemble(self):
+ db = {
+ "query": {
+ "project_name": self.project,
+ "case_name": self.case,
+ "installer": self.installer,
+ "metric": self.visAssemblers[0].vis_state_title,
+ "pod": self.pod
+ },
+ "test_family": self.test_family,
+ "ids": [visualization.id for visualization in self.visAssemblers]
+ }
+ template = utility.env.get_template('dashboard.json')
+ self.dashboard = json.loads(template.render(db=db))
+ utility.dumps(self.dashboard,
+ ['description',
+ 'uiStateJSON',
+ 'panelsJSON',
+ 'optionsJSON'])
+ utility.dumps_2depth(self.dashboard,
+ 'kibanaSavedObjectMeta',
+ 'searchSourceJSON')
+ self.id = self.dashboard['title'].replace(' ', '-').replace('/', '-')
+ return self
+
+ def _publish(self):
+ elastic_access.publish_kibana(self.es_url,
+ self.es_creds,
+ 'dashboard',
+ self.id,
+ self.dashboard)
diff --git a/utils/test/dashboard/dashboard/elastic2kibana/main.py b/utils/test/dashboard/dashboard/elastic2kibana/main.py
index ae5cbe8fa..8be0a01dd 100644
--- a/utils/test/dashboard/dashboard/elastic2kibana/main.py
+++ b/utils/test/dashboard/dashboard/elastic2kibana/main.py
@@ -3,12 +3,13 @@ import json
import urlparse
import argparse
-from jinja2 import PackageLoader, Environment
from common import elastic_access
from common import logger_utils
+from conf import config
from conf import testcases
-from conf.config import APIConfig
+from dashboard_assembler import DashboardAssembler
+from visualization_assembler import VisualizationsAssembler
logger = logger_utils.DashboardLogger('elastic2kibana').get
@@ -18,154 +19,10 @@ parser.add_argument("-c", "--config-file",
help="Config file location")
args = parser.parse_args()
-CONF = APIConfig().parse(args.config_file)
-base_elastic_url = CONF.elastic_url
-generate_inputs = CONF.is_js
-input_file_path = CONF.js_path
-kibana_url = CONF.kibana_url
-es_creds = CONF.elastic_creds
+CONF = config.APIConfig().parse(args.config_file)
_installers = {'fuel', 'apex', 'compass', 'joid'}
-env = Environment(loader=PackageLoader('elastic2kibana', 'templates'))
-env.filters['jsonify'] = json.dumps
-
-
-def dumps(self, items):
- for key in items:
- self.visualization[key] = json.dumps(self.visualization[key])
-
-
-def dumps_2depth(self, key1, key2):
- self.visualization[key1][key2] = json.dumps(self.visualization[key1][key2])
-
-
-class Dashboard(dict):
- def __init__(self, project_name, case_name, family, installer, pod, scenarios, visualization):
- super(Dashboard, self).__init__()
- self.project_name = project_name
- self.case_name = case_name
- self.family = family
- self.installer = installer
- self.pod = pod
- self.scenarios = scenarios
- self.visualization = visualization
- self._visualization_title = None
- self._kibana_visualizations = []
- self._kibana_dashboard = None
- self._create_visualizations()
- self._create()
-
- def _create_visualizations(self):
- for scenario in self.scenarios:
- self._kibana_visualizations.append(Visualization(self.project_name,
- self.case_name,
- self.installer,
- self.pod,
- scenario,
- self.visualization))
-
- self._visualization_title = self._kibana_visualizations[0].vis_state_title
-
- def _publish_visualizations(self):
- for visualization in self._kibana_visualizations:
- url = urlparse.urljoin(base_elastic_url, '/.kibana/visualization/{}'.format(visualization.id))
- logger.debug("publishing visualization '{}'".format(url))
- # logger.error("_publish_visualization: %s" % visualization)
- elastic_access.publish_docs(url, es_creds, visualization)
-
- def _create(self):
- db = {
- "query": {
- "project_name": self.project_name,
- "case_name": self.case_name,
- "installer": self.installer,
- "metric": self._visualization_title,
- "pod": self.pod
- },
- "test_family": self.family,
- "ids": [visualization.id for visualization in self._kibana_visualizations]
- }
- template = env.get_template('dashboard.json')
- self.dashboard = json.loads(template.render(db=db))
- dumps(self.dashboard, ['description', 'uiStateJSON', 'panelsJSON','optionsJSON'])
- dumps_2depth(self.dashboard, 'kibanaSavedObjectMeta', 'searchSourceJSON')
- self.id = self.dashboard['title'].replace(' ', '-').replace('/', '-')
-
-
- def _publish(self):
- url = urlparse.urljoin(base_elastic_url, '/.kibana/dashboard/{}'.format(self.id))
- logger.debug("publishing dashboard '{}'".format(url))
- #logger.error("dashboard: %s" % json.dumps(self.dashboard))
- elastic_access.publish_docs(url, es_creds, self.dashboard)
-
- def publish(self):
- self._publish_visualizations()
- self._publish()
-
-
-class VisStateBuilder(object):
- def __init__(self, visualization):
- super(VisStateBuilder, self).__init__()
- self.visualization = visualization
-
- def build(self):
- name = self.visualization.get('name')
- fields = self.visualization.get('fields')
-
- aggs = []
- index = 1
- for field in fields:
- aggs.append({
- "id": index,
- "field": field.get("field")
- })
- index += 1
-
- template = env.get_template('{}.json'.format(name))
- vis = template.render(aggs=aggs)
- return json.loads(vis)
-
-
-class Visualization(object):
- def __init__(self, project_name, case_name, installer, pod, scenario, visualization):
- """
- We need two things
- 1. filter created from
- project_name
- case_name
- installer
- pod
- scenario
- 2. visualization state
- field for y axis (metric) with type (avg, sum, etc.)
- field for x axis (segment) with type (date_histogram)
-
- :return:
- """
- super(Visualization, self).__init__()
- visState = VisStateBuilder(visualization).build()
- self.vis_state_title = visState['title']
-
- vis = {
- "visState": json.dumps(visState),
- "filters": {
- "project_name": project_name,
- "case_name": case_name,
- "installer": installer,
- "metric": self.vis_state_title,
- "pod_name": pod,
- "scenario": scenario
- }
- }
-
- template = env.get_template('visualization.json')
-
- self.visualization = json.loads(template.render(vis=vis))
- dumps(self.visualization, ['visState', 'description', 'uiStateJSON'])
- dumps_2depth(self.visualization, 'kibanaSavedObjectMeta', 'searchSourceJSON')
- self.id = self.visualization['title'].replace(' ', '-').replace('/', '-')
-
def _get_pods_and_scenarios(project_name, case_name, installer):
query_json = json.JSONEncoder().encode({
@@ -183,8 +40,8 @@ def _get_pods_and_scenarios(project_name, case_name, installer):
}
})
- elastic_data = elastic_access.get_docs(urlparse.urljoin(base_elastic_url, '/test_results/mongo2elastic'),
- es_creds,
+ elastic_data = elastic_access.get_docs(urlparse.urljoin(CONF.es_url, '/test_results/mongo2elastic'),
+ CONF.es_creds,
query_json)
pods_and_scenarios = {}
@@ -213,24 +70,35 @@ def construct_dashboards():
:return: list of KibanaDashboards
"""
- kibana_dashboards = []
+ dashboards = []
for project, case_dicts in testcases.testcases_yaml.items():
for case in case_dicts:
case_name = case.get('name')
- visualizations = case.get('visualizations')
+ vis_ps = case.get('visualizations')
family = case.get('test_family')
for installer in _installers:
pods_and_scenarios = _get_pods_and_scenarios(project, case_name, installer)
- for visualization in visualizations:
+ for vis_p in vis_ps:
for pod, scenarios in pods_and_scenarios.iteritems():
- kibana_dashboards.append(Dashboard(project,
- case_name,
- family,
- installer,
- pod,
- scenarios,
- visualization))
- return kibana_dashboards
+ vissAssember = VisualizationsAssembler(project,
+ case_name,
+ installer,
+ pod,
+ scenarios,
+ vis_p,
+ CONF.es_url,
+ CONF.es_creds)
+ dashboardAssembler = DashboardAssembler(project,
+ case_name,
+ family,
+ installer,
+ pod,
+ vissAssember.visAssemblers,
+ CONF.es_url,
+ CONF.es_creds)
+ dashboards.append(dashboardAssembler)
+
+ return dashboards
def generate_js_inputs(js_file_path, kibana_url, dashboards):
@@ -264,8 +132,5 @@ def generate_js_inputs(js_file_path, kibana_url, dashboards):
def main():
dashboards = construct_dashboards()
- for kibana_dashboard in dashboards:
- kibana_dashboard.publish()
-
- if generate_inputs:
- generate_js_inputs(input_file_path, kibana_url, dashboards)
+ if CONF.is_js:
+ generate_js_inputs(CONF.js_path, CONF.kibana_url, dashboards)
diff --git a/utils/test/dashboard/dashboard/elastic2kibana/utility.py b/utils/test/dashboard/dashboard/elastic2kibana/utility.py
new file mode 100644
index 000000000..dccd28aed
--- /dev/null
+++ b/utils/test/dashboard/dashboard/elastic2kibana/utility.py
@@ -0,0 +1,15 @@
+import json
+
+from jinja2 import Environment, PackageLoader
+
+env = Environment(loader=PackageLoader('elastic2kibana', 'templates'))
+env.filters['jsonify'] = json.dumps
+
+
+def dumps(a_dict, items):
+ for key in items:
+ a_dict[key] = json.dumps(a_dict[key])
+
+
+def dumps_2depth(a_dict, key1, key2):
+ a_dict[key1][key2] = json.dumps(a_dict[key1][key2])
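These helpers serialise selected values in place, since Kibana stores fields such as visState, panelsJSON, and searchSourceJSON as JSON strings rather than nested objects. A small sketch of the effect, assuming the elastic2kibana package (including its templates directory) is importable so the module-level Environment can load:

    import json
    import utility

    doc = {'visState': {'title': 'duration'},
           'kibanaSavedObjectMeta': {'searchSourceJSON': {'filter': []}}}

    utility.dumps(doc, ['visState'])
    utility.dumps_2depth(doc, 'kibanaSavedObjectMeta', 'searchSourceJSON')

    # both values are now JSON strings rather than nested dicts
    assert doc['visState'] == json.dumps({'title': 'duration'})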
diff --git a/utils/test/dashboard/dashboard/elastic2kibana/visualization_assembler.py b/utils/test/dashboard/dashboard/elastic2kibana/visualization_assembler.py
new file mode 100644
index 000000000..e3b6b0d38
--- /dev/null
+++ b/utils/test/dashboard/dashboard/elastic2kibana/visualization_assembler.py
@@ -0,0 +1,107 @@
+import json
+
+import utility
+from common import elastic_access
+
+
+class VisStateBuilder(object):
+ def __init__(self, vis_p):
+ super(VisStateBuilder, self).__init__()
+ self.vis_p = vis_p
+
+ def build(self):
+ name = self.vis_p.get('name')
+ fields = self.vis_p.get('fields')
+
+ aggs = []
+ index = 1
+ for field in fields:
+ aggs.append({
+ "id": index,
+ "field": field.get("field")
+ })
+ index += 1
+
+ template = utility.env.get_template('{}.json'.format(name))
+ vis = template.render(aggs=aggs)
+ return json.loads(vis)
+
+
+class VisualizationAssembler(object):
+ def __init__(self,
+ project,
+ case,
+ installer,
+ pod,
+ scenario,
+ vis_p,
+ es_url,
+ es_creds):
+ super(VisualizationAssembler, self).__init__()
+ self.project = project
+ self.case = case
+ self.installer = installer
+ self.pod = pod
+ self.scenario = scenario
+ self.vis_p = vis_p
+ self.es_url = es_url
+ self.es_creds = es_creds
+ self._assemble()
+ self._publish()
+
+ def _assemble(self):
+ visState = VisStateBuilder(self.vis_p).build()
+ self.vis_state_title = visState['title']
+
+ vis = {
+ "visState": json.dumps(visState),
+ "filters": {
+ "project_name": self.project,
+ "case_name": self.case,
+ "installer": self.installer,
+ "metric": self.vis_state_title,
+ "pod_name": self.pod,
+ "scenario": self.scenario
+ }
+ }
+
+ template = utility.env.get_template('visualization.json')
+
+ self.visualization = json.loads(template.render(vis=vis))
+ utility.dumps(self.visualization,
+ ['visState', 'description', 'uiStateJSON'])
+ utility.dumps_2depth(self.visualization,
+ 'kibanaSavedObjectMeta',
+ 'searchSourceJSON')
+ title = self.visualization['title']
+ self.id = title.replace(' ', '-').replace('/', '-')
+
+ def _publish(self):
+ elastic_access.publish_kibana(self.es_url,
+ self.es_creds,
+ 'visualization',
+ self.id,
+ self.visualization)
+
+
+class VisualizationsAssembler(object):
+ def __init__(self,
+ project,
+ case,
+ installer,
+ pod,
+ scenarios,
+ vis_p,
+ es_url,
+ es_creds):
+ super(VisualizationsAssembler, self).__init__()
+ self.visAssemblers = []
+ for scenario in scenarios:
+ self.visAssemblers.append(VisualizationAssembler(project,
+ case,
+ installer,
+ pod,
+ scenario,
+ vis_p,
+ es_url,
+ es_creds))
diff --git a/utils/test/dashboard/dashboard/mongo2elastic/main.py b/utils/test/dashboard/dashboard/mongo2elastic/main.py
index 76efb14f0..a526d5319 100644
--- a/utils/test/dashboard/dashboard/mongo2elastic/main.py
+++ b/utils/test/dashboard/dashboard/mongo2elastic/main.py
@@ -36,7 +36,68 @@ CONF = APIConfig().parse(args.config_file)
tmp_docs_file = './mongo-{}.json'.format(uuid.uuid4())
-class DocumentPublisher:
+class DocumentVerification(object):
+ def __init__(self, doc):
+ super(DocumentVerification, self).__init__()
+ self.doc = doc
+ self.doc_id = doc['_id'] if '_id' in doc else None
+ self.skip = False
+
+ def mandatory_fields_exist(self):
+ mandatory_fields = ['installer',
+ 'pod_name',
+ 'version',
+ 'case_name',
+ 'project_name',
+ 'details',
+ 'start_date',
+ 'scenario']
+ for key, value in self.doc.items():
+ if key in mandatory_fields:
+ if value is None:
+ logger.info("Skip testcase '%s' because field '%s' missing" %
+ (self.doc_id, key))
+ self.skip = True
+ else:
+ mandatory_fields.remove(key)
+ else:
+ del self.doc[key]
+
+ if len(mandatory_fields) > 0:
+ logger.info("Skip testcase '%s' because field(s) '%s' missing" %
+ (self.doc_id, mandatory_fields))
+ self.skip = True
+
+ return self
+
+ def modify_start_date(self):
+ field = 'start_date'
+ if field in self.doc:
+ self.doc[field] = self._fix_date(self.doc[field])
+
+ return self
+
+ def modify_scenario(self):
+ scenario = 'scenario'
+ version = 'version'
+
+ if (scenario not in self.doc) or \
+ (scenario in self.doc and self.doc[scenario] is None):
+ self.doc[scenario] = self.doc[version]
+
+ return self
+
+ def is_skip(self):
+ return self.skip
+
+ def _fix_date(self, date_string):
+ if isinstance(date_string, dict):
+ return date_string['$date']
+ else:
+ return date_string[:-3].replace(' ', 'T') + 'Z'
+
+
+class DocumentPublisher(object):
def __init__(self, doc, fmt, exist_docs, creds, elastic_url):
self.doc = doc
@@ -76,92 +137,14 @@ class DocumentPublisher:
return date_string[:-3].replace(' ', 'T') + 'Z'
def _verify_document(self):
- """
- Mandatory fields:
- installer
- pod_name
- version
- case_name
- date
- project
- details
-
- these fields must be present and must NOT be None
-
- Optional fields:
- description
-
- these fields will be preserved if the are NOT None
- """
- mandatory_fields = ['installer',
- 'pod_name',
- 'version',
- 'case_name',
- 'project_name',
- 'details']
- mandatory_fields_to_modify = {'start_date': self._fix_date}
- fields_to_swap_or_add = {'scenario': 'version'}
- if '_id' in self.doc:
- mongo_id = self.doc['_id']
- else:
- mongo_id = None
- optional_fields = ['description']
- for key, value in self.doc.items():
- if key in mandatory_fields:
- if value is None:
- # empty mandatory field, invalid input
- logger.info("Skipping testcase with mongo _id '{}' because the testcase was missing value"
- " for mandatory field '{}'".format(mongo_id, key))
- return False
- else:
- mandatory_fields.remove(key)
- elif key in mandatory_fields_to_modify:
- if value is None:
- # empty mandatory field, invalid input
- logger.info("Skipping testcase with mongo _id '{}' because the testcase was missing value"
- " for mandatory field '{}'".format(mongo_id, key))
- return False
- else:
- self.doc[key] = mandatory_fields_to_modify[key](value)
- del mandatory_fields_to_modify[key]
- elif key in fields_to_swap_or_add:
- if value is None:
- swapped_key = fields_to_swap_or_add[key]
- swapped_value = self.doc[swapped_key]
- logger.info("Swapping field '{}' with value None for '{}' with value '{}'.".format(key, swapped_key,
- swapped_value))
- self.doc[key] = swapped_value
- del fields_to_swap_or_add[key]
- else:
- del fields_to_swap_or_add[key]
- elif key in optional_fields:
- if value is None:
- # empty optional field, remove
- del self.doc[key]
- optional_fields.remove(key)
- else:
- # unknown field
- del self.doc[key]
-
- if len(mandatory_fields) > 0:
- # some mandatory fields are missing
- logger.info("Skipping testcase with mongo _id '{}' because the testcase was missing"
- " mandatory field(s) '{}'".format(mongo_id, mandatory_fields))
- return False
- elif len(mandatory_fields_to_modify) > 0:
- # some mandatory fields are missing
- logger.info("Skipping testcase with mongo _id '{}' because the testcase was missing"
- " mandatory field(s) '{}'".format(mongo_id, mandatory_fields_to_modify.keys()))
- return False
- else:
- if len(fields_to_swap_or_add) > 0:
- for key, swap_key in fields_to_swap_or_add.iteritems():
- self.doc[key] = self.doc[swap_key]
-
- return True
+ return not (DocumentVerification(self.doc)
+ .modify_start_date()
+ .modify_scenario()
+ .mandatory_fields_exist()
+ .is_skip())
-class DocumentsPublisher:
+class DocumentsPublisher(object):
def __init__(self, project, case, fmt, days, elastic_url, creds):
self.project = project
@@ -232,6 +215,7 @@ class DocumentsPublisher:
return self
def publish(self):
+ fdocs = None
try:
with open(tmp_docs_file) as fdocs:
for doc_line in fdocs:
@@ -241,7 +225,8 @@ class DocumentsPublisher:
self.creds,
self.elastic_url).format().publish()
finally:
- fdocs.close()
+ if fdocs:
+ fdocs.close()
self._remove()
def _remove(self):
@@ -250,9 +235,9 @@ class DocumentsPublisher:
def main():
- base_elastic_url = urlparse.urljoin(CONF.elastic_url, '/test_results/mongo2elastic')
+ base_elastic_url = urlparse.urljoin(CONF.es_url, '/test_results/mongo2elastic')
days = args.latest_days
- es_creds = CONF.elastic_creds
+ es_creds = CONF.es_creds
for project, case_dicts in testcases.testcases_yaml.items():
for case_dict in case_dicts:
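To round off the mongo2elastic refactor shown above: the new _verify_document reduces to a fluent chain over DocumentVerification, which can be exercised on its own. A minimal Python 2 sketch with an illustrative document (field values are made up; it assumes the DocumentVerification class from the diff above is in scope):

    doc = {'_id': 'abc123', 'installer': 'fuel', 'pod_name': 'huawei-pod5',
           'version': 'master', 'case_name': 'vping_ssh',
           'project_name': 'functest', 'details': {},
           'scenario': None, 'start_date': '2016-08-01 10:00:00.000'}

    ok = not (DocumentVerification(doc)
              .modify_start_date()
              .modify_scenario()
              .mandatory_fields_exist()
              .is_skip())
    # ok is True here: scenario falls back to the version field, start_date is
    # normalised to '2016-08-01T10:00:00Z', and the unknown '_id' key is dropped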