-rwxr-xr-x  jjb/daisy4nfv/daisy4nfv-basic.sh                             6
-rwxr-xr-x  jjb/daisy4nfv/daisy4nfv-build.sh                             6
-rwxr-xr-x  jjb/daisy4nfv/daisy4nfv-smoke-test.sh                        6
-rw-r--r--  jjb/daisy4nfv/daisy4nfv-verify-jobs.yml                    228
-rwxr-xr-x  jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh                    6
-rw-r--r--  jjb/fuel/fuel-daily-jobs.yml (renamed from jjb/fuel/fuel-ci-jobs.yml)    0
-rw-r--r--  jjb/joid/joid-daily-jobs.yml (renamed from jjb/joid/joid-ci-jobs.yml)    0
-rw-r--r--  jjb/multisite/multisite.yml                                 11
-rw-r--r--  utils/push-test-logs.sh                                      3
-rw-r--r--  utils/test/dashboard/dashboard/common/elastic_access.py     54
-rw-r--r--  utils/test/dashboard/dashboard/elastic2kibana/main.py        9
-rw-r--r--  utils/test/dashboard/dashboard/mongo2elastic/main.py         2
-rw-r--r--  utils/test/dashboard/kibana_cleanup.py                       4
13 files changed, 293 insertions, 42 deletions
diff --git a/jjb/daisy4nfv/daisy4nfv-basic.sh b/jjb/daisy4nfv/daisy4nfv-basic.sh
new file mode 100755
index 000000000..87f5482e0
--- /dev/null
+++ b/jjb/daisy4nfv/daisy4nfv-basic.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+echo "--------------------------------------------------------"
+echo "This is diasy4nfv basic job!"
+echo "--------------------------------------------------------"
+
diff --git a/jjb/daisy4nfv/daisy4nfv-build.sh b/jjb/daisy4nfv/daisy4nfv-build.sh
new file mode 100755
index 000000000..9eae8481b
--- /dev/null
+++ b/jjb/daisy4nfv/daisy4nfv-build.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+echo "--------------------------------------------------------"
+echo "This is diasy4nfv build job!"
+echo "--------------------------------------------------------"
+
diff --git a/jjb/daisy4nfv/daisy4nfv-smoke-test.sh b/jjb/daisy4nfv/daisy4nfv-smoke-test.sh
new file mode 100755
index 000000000..bd6eb7ee0
--- /dev/null
+++ b/jjb/daisy4nfv/daisy4nfv-smoke-test.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+echo "--------------------------------------------------------"
+echo "This is diasy4nfv smoke test job!"
+echo "--------------------------------------------------------"
+
diff --git a/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
new file mode 100644
index 000000000..6444cf8ec
--- /dev/null
+++ b/jjb/daisy4nfv/daisy4nfv-verify-jobs.yml
@@ -0,0 +1,228 @@
+- project:
+    name: 'daisy4nfv-verify-jobs'
+
+    project: 'daisy4nfv'
+
+    installer: 'daisy4nfv'
+#####################################
+# branch definitions
+#####################################
+    stream:
+        - master:
+            branch: '{stream}'
+            gs-pathname: ''
+            disabled: false
+#####################################
+# patch verification phases
+#####################################
+    phase:
+        - 'basic':
+            slave-label: 'opnfv-build'
+        - 'build':
+            slave-label: 'opnfv-build-ubuntu'
+        - 'deploy-virtual':
+            slave-label: 'opnfv-build'
+        - 'smoke-test':
+            slave-label: 'opnfv-build'
+#####################################
+# jobs
+#####################################
+    jobs:
+        - 'daisy4nfv-verify-{stream}'
+        - 'daisy4nfv-verify-{phase}-{stream}'
+#####################################
+# job templates
+#####################################
+- job-template:
+    name: 'daisy4nfv-verify-{stream}'
+
+    project-type: multijob
+
+    disabled: false
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-total: 4
+            option: 'project'
+
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: '$GERRIT_REFSPEC'
+            choosing-strategy: 'gerrit'
+
+    wrappers:
+        - ssh-agent-credentials:
+            users:
+                - '{ssh-credentials}'
+        - timeout:
+            timeout: 360
+            fail: true
+
+    triggers:
+        - gerrit:
+            trigger-on:
+                - patchset-created-event:
+                    exclude-drafts: 'false'
+                    exclude-trivial-rebase: 'false'
+                    exclude-no-code-change: 'false'
+                - draft-published-event
+                - comment-added-contains-event:
+                    comment-contains-value: 'recheck'
+                - comment-added-contains-event:
+                    comment-contains-value: 'reverify'
+            projects:
+                - project-compare-type: 'ANT'
+                  project-pattern: '{project}'
+                  branches:
+                      - branch-compare-type: 'ANT'
+                        branch-pattern: '**/{branch}'
+                  forbidden-file-paths:
+                      - compare-type: ANT
+                        pattern: 'docs/**|.gitignore'
+            readable-message: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+        - 'opnfv-build-defaults'
+        - 'daisy4nfv-verify-defaults':
+            gs-pathname: '{gs-pathname}'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - multijob:
+            name: basic
+            condition: SUCCESSFUL
+            projects:
+                - name: 'daisy4nfv-verify-basic-{stream}'
+                  current-parameters: false
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: build
+            condition: SUCCESSFUL
+            projects:
+                - name: 'daisy4nfv-verify-build-{stream}'
+                  current-parameters: false
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: deploy-virtual
+            condition: SUCCESSFUL
+            projects:
+                - name: 'daisy4nfv-verify-deploy-virtual-{stream}'
+                  current-parameters: false
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+        - multijob:
+            name: smoke-test
+            condition: SUCCESSFUL
+            projects:
+                - name: 'daisy4nfv-verify-smoke-test-{stream}'
+                  current-parameters: false
+                  node-parameters: false
+                  kill-phase-on: FAILURE
+                  abort-all-job: true
+
+- job-template:
+    name: 'daisy4nfv-verify-{phase}-{stream}'
+
+    disabled: '{obj:disabled}'
+
+    concurrent: true
+
+    properties:
+        - throttle:
+            enabled: true
+            max-total: 6
+            option: 'project'
+        - build-blocker:
+            use-build-blocker: true
+            blocking-jobs:
+                - 'daisy4nfv-verify-deploy-.*'
+                - 'daisy4nfv-verify-test-.*'
+            block-level: 'NODE'
+
+    scm:
+        - gerrit-trigger-scm:
+            credentials-id: '{ssh-credentials}'
+            refspec: '$GERRIT_REFSPEC'
+            choosing-strategy: 'gerrit'
+
+    wrappers:
+        - ssh-agent-credentials:
+            users:
+                - '{ssh-credentials}'
+        - timeout:
+            timeout: 360
+            fail: true
+
+    parameters:
+        - project-parameter:
+            project: '{project}'
+        - gerrit-parameter:
+            branch: '{branch}'
+        - '{slave-label}-defaults'
+        - 'daisy4nfv-verify-defaults':
+            gs-pathname: '{gs-pathname}'
+
+    builders:
+        - description-setter:
+            description: "Built on $NODE_NAME"
+        - '{project}-verify-{phase}-macro'
+#####################################
+# builder macros
+#####################################
+- builder:
+    name: 'daisy4nfv-verify-basic-macro'
+    builders:
+        - shell:
+            !include-raw: ./daisy4nfv-basic.sh
+
+- builder:
+    name: 'daisy4nfv-verify-build-macro'
+    builders:
+        - shell:
+            !include-raw: ./daisy4nfv-build.sh
+
+- builder:
+    name: 'daisy4nfv-verify-deploy-virtual-macro'
+    builders:
+        - shell:
+            !include-raw: ./daisy4nfv-virtual-deploy.sh
+
+- builder:
+    name: 'daisy4nfv-verify-smoke-test-macro'
+    builders:
+        - shell: |
+            #!/bin/bash
+
+            echo "Not activated!"
+#####################################
+# parameter macros
+#####################################
+- parameter:
+    name: 'daisy4nfv-verify-defaults'
+    parameters:
+        - string:
+            name: BUILD_DIRECTORY
+            default: $WORKSPACE/build_output
+            description: "Directory where the build artifact will be located upon the completion of the build."
+        - string:
+            name: CACHE_DIRECTORY
+            default: $HOME/opnfv/cache/$INSTALLER_TYPE
+            description: "Directory where the cache to be used during the build is located."
+        - string:
+            name: GS_URL
+            default: artifacts.opnfv.org/$PROJECT{gs-pathname}
+            description: "URL to Google Storage."
diff --git a/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh b/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh
new file mode 100755
index 000000000..8936be6c4
--- /dev/null
+++ b/jjb/daisy4nfv/daisy4nfv-virtual-deploy.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+echo "--------------------------------------------------------"
+echo "This is diasy4nfv virtual deploy job!"
+echo "--------------------------------------------------------"
+
diff --git a/jjb/fuel/fuel-ci-jobs.yml b/jjb/fuel/fuel-daily-jobs.yml
index 1c7946a87..1c7946a87 100644
--- a/jjb/fuel/fuel-ci-jobs.yml
+++ b/jjb/fuel/fuel-daily-jobs.yml
diff --git a/jjb/joid/joid-ci-jobs.yml b/jjb/joid/joid-daily-jobs.yml
index 6d0370983..6d0370983 100644
--- a/jjb/joid/joid-ci-jobs.yml
+++ b/jjb/joid/joid-daily-jobs.yml
diff --git a/jjb/multisite/multisite.yml b/jjb/multisite/multisite.yml
index 21b973093..24c03fd4a 100644
--- a/jjb/multisite/multisite.yml
+++ b/jjb/multisite/multisite.yml
@@ -113,17 +113,6 @@
            - project: 'functest-fuel-virtual-suite-{stream}'
              current-parameters: true
              predefined-parameters:
-                FUNCTEST_SUITE_NAME=healthcheck
-              same-node: true
-              block: true
-              block-thresholds:
-                build-step-failure-threshold: 'never'
-                failure-threshold: 'never'
-                unstable-threshold: 'FAILURE'
-        - trigger-builds:
-            - project: 'functest-fuel-virtual-suite-{stream}'
-              current-parameters: true
-              predefined-parameters:
                FUNCTEST_SUITE_NAME=multisite
              same-node: true
              block: true
diff --git a/utils/push-test-logs.sh b/utils/push-test-logs.sh
index f24d884f5..87cee78bf 100644
--- a/utils/push-test-logs.sh
+++ b/utils/push-test-logs.sh
@@ -23,7 +23,8 @@ dir_result="${HOME}/opnfv/$project/results/${branch}"
node_list=(\
'lf-pod1' 'lf-pod2' 'intel-pod2' 'intel-pod3' \
'intel-pod5' 'intel-pod6' 'intel-pod7' 'intel-pod8' \
-'ericsson-pod2' \
+'ericsson-pod2' 'ericsson-pod3' 'ericsson-pod4' \
+'ericsson-virtual2' 'ericsson-virtual3' 'ericsson-virtual4' 'ericsson-virtual5' \
'arm-pod1' 'arm-pod3' \
'huawei-pod1' 'huawei-pod2' 'huawei-virtual1' 'huawei-virtual2' 'huawei-virtual3' 'huawei-virtual4')
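The hunk above only widens the script's list of recognized test nodes. A small membership check, sketched in Python for illustration (reading NODE_NAME from the environment is an assumption; the script's actual gating logic is not part of this hunk):

import os

# Illustrative only: the same allow-list as the shell array above.
node_list = [
    'lf-pod1', 'lf-pod2', 'intel-pod2', 'intel-pod3',
    'intel-pod5', 'intel-pod6', 'intel-pod7', 'intel-pod8',
    'ericsson-pod2', 'ericsson-pod3', 'ericsson-pod4',
    'ericsson-virtual2', 'ericsson-virtual3', 'ericsson-virtual4',
    'ericsson-virtual5', 'arm-pod1', 'arm-pod3',
    'huawei-pod1', 'huawei-pod2', 'huawei-virtual1', 'huawei-virtual2',
    'huawei-virtual3', 'huawei-virtual4',
]

node_name = os.environ.get('NODE_NAME', 'unknown')  # assumed variable name
if node_name in node_list:
    print('{} is a recognized results node'.format(node_name))
else:
    print('{} is not in the node list'.format(node_name))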
diff --git a/utils/test/dashboard/dashboard/common/elastic_access.py b/utils/test/dashboard/dashboard/common/elastic_access.py
index e90a17fa3..b454e9a12 100644
--- a/utils/test/dashboard/dashboard/common/elastic_access.py
+++ b/utils/test/dashboard/dashboard/common/elastic_access.py
@@ -5,41 +5,49 @@ import urllib3
http = urllib3.PoolManager()


-def delete_request(url, creds, body=None):
+def _request(method, url, creds=None, body=None):
     headers = urllib3.make_headers(basic_auth=creds)
-    http.request('DELETE', url, headers=headers, body=body)
+    return http.request(method, url, headers=headers, body=body)


-def publish_json(json_ojb, creds, to):
-    json_dump = json.dumps(json_ojb)
+def _post(url, creds=None, body=None):
+    return _request('POST', url, creds=creds, body=body)
+
+
+def _get(url, creds=None, body=None):
+    return json.loads(_request('GET', url, creds=creds, body=body).data)
+
+
+def delete_docs(url, creds=None, body=None):
+    return _request('DELETE', url, creds=creds, body=body)
+
+
+def publish_docs(docs, creds, to):
+    json_docs = json.dumps(docs)
     if to == 'stdout':
-        print json_dump
+        print json_docs
         return 200, None
     else:
-        headers = urllib3.make_headers(basic_auth=creds)
-        result = http.request('POST', to, headers=headers, body=json_dump)
+        result = _post(to, creds=creds, body=json_docs)
         return result.status, result.data


-def _get_nr_of_hits(elastic_json):
-    return elastic_json['hits']['total']
+def _get_docs_nr(url, creds=None, body=None):
+    res_data = _get('{}/_search?size=0'.format(url), creds=creds, body=body)
+    print type(res_data), res_data
+    return res_data['hits']['total']


-def get_elastic_docs(elastic_url, creds, body=None, field = '_source'):
-
-    # 1. get the number of results
-    headers = urllib3.make_headers(basic_auth=creds)
-    elastic_json = json.loads(http.request('GET', elastic_url + '/_search?size=0', headers=headers, body=body).data)
-    print elastic_json
-    nr_of_hits = _get_nr_of_hits(elastic_json)
+def get_docs(url, creds=None, body=None, field='_source'):

-    # 2. get all results
-    elastic_json = json.loads(http.request('GET', elastic_url + '/_search?size={}'.format(nr_of_hits), headers=headers, body=body).data)
+    docs_nr = _get_docs_nr(url, creds=creds, body=body)
+    res_data = _get('{}/_search?size={}'.format(url, docs_nr),
+                    creds=creds, body=body)

-    elastic_docs = []
-    for hit in elastic_json['hits']['hits']:
-        elastic_docs.append(hit[field])
-    return elastic_docs
+    docs = []
+    for hit in res_data['hits']['hits']:
+        docs.append(hit[field])
+    return docs


def get_elastic_docs_by_days(elastic_url, creds, days):
@@ -61,4 +69,4 @@ def get_elastic_docs_by_days(elastic_url, creds, days):
        }}'''.format(days)
    else:
        raise Exception('Update days must be non-negative')
-    return get_elastic_docs(elastic_url, creds, body)
+    return get_docs(elastic_url, creds, body)
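All HTTP access in elastic_access.py now funnels through the private _request() helper, with publish_docs(), get_docs() and delete_docs() as the public surface. A minimal usage sketch (the URL, credentials and document values below are placeholders, not values from this patch):

# Hypothetical caller of the refactored helpers, imported the same way the
# dashboard modules import them.
from common import elastic_access

es_url = 'http://localhost:9200/test_results/mongo2elastic'  # placeholder
es_creds = 'user:password'                                   # placeholder

# publish_docs() returns (status, data); the special target 'stdout' just
# prints the JSON-encoded document and reports 200.
status, data = elastic_access.publish_docs({'case_name': 'example'}, es_creds, 'stdout')

# get_docs() asks _search?size=0 for the hit count first, then fetches that
# many hits and returns the requested field of each one.
docs = elastic_access.get_docs(es_url, creds=es_creds)

# delete_docs() issues a DELETE against the given document URL.
elastic_access.delete_docs('{}/{}'.format(es_url, 'some-doc-id'), creds=es_creds)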
diff --git a/utils/test/dashboard/dashboard/elastic2kibana/main.py b/utils/test/dashboard/dashboard/elastic2kibana/main.py
index 37ce03e52..38a49b63f 100644
--- a/utils/test/dashboard/dashboard/elastic2kibana/main.py
+++ b/utils/test/dashboard/dashboard/elastic2kibana/main.py
@@ -5,7 +5,8 @@ import urlparse
import argparse
from jinja2 import PackageLoader, Environment
-from common import logger_utils, elastic_access
+from common import elastic_access
+from common import logger_utils
from conf import testcases
from conf.config import APIConfig
@@ -59,7 +60,7 @@ class KibanaDashboard(dict):
            url = urlparse.urljoin(base_elastic_url, '/.kibana/visualization/{}'.format(visualization.id))
            logger.debug("publishing visualization '{}'".format(url))
            # logger.error("_publish_visualization: %s" % visualization)
-            elastic_access.publish_json(visualization, es_creds, url)
+            elastic_access.publish_docs(visualization, es_creds, url)

    def _construct_panels(self):
        size_x = 6
@@ -137,7 +138,7 @@ class KibanaDashboard(dict):
    def _publish(self):
        url = urlparse.urljoin(base_elastic_url, '/.kibana/dashboard/{}'.format(self.id))
        logger.debug("publishing dashboard '{}'".format(url))
-        elastic_access.publish_json(self, es_creds, url)
+        elastic_access.publish_docs(self, es_creds, url)

    def publish(self):
        self._publish_visualizations()
@@ -251,7 +252,7 @@ def _get_pods_and_scenarios(project_name, case_name, installer):
        }
    })

-    elastic_data = elastic_access.get_elastic_docs(urlparse.urljoin(base_elastic_url, '/test_results/mongo2elastic'),
+    elastic_data = elastic_access.get_docs(urlparse.urljoin(base_elastic_url, '/test_results/mongo2elastic'),
                                           es_creds, query_json)

    pods_and_scenarios = {}
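elastic2kibana now publishes Kibana visualization and dashboard documents through publish_docs() against the .kibana index. A short sketch of the same URL-plus-document pattern (the base URL, credentials and document id are placeholders):

# Illustrative only: mirrors _publish_visualizations()/_publish() above.
import urlparse  # Python 2, as in the module itself

from common import elastic_access

base_elastic_url = 'http://localhost:9200'  # placeholder
es_creds = None                             # placeholder

visualization = {'title': 'example'}        # hypothetical Kibana document
url = urlparse.urljoin(base_elastic_url,
                       '/.kibana/visualization/{}'.format('example-id'))
elastic_access.publish_docs(visualization, es_creds, url)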
diff --git a/utils/test/dashboard/dashboard/mongo2elastic/main.py b/utils/test/dashboard/dashboard/mongo2elastic/main.py
index 25b5320d7..82b01e4b3 100644
--- a/utils/test/dashboard/dashboard/mongo2elastic/main.py
+++ b/utils/test/dashboard/dashboard/mongo2elastic/main.py
@@ -64,7 +64,7 @@ class DocumentPublisher:
        self._publish()

    def _publish(self):
-        status, data = elastic_access.publish_json(self.doc, self.creds, self.to)
+        status, data = elastic_access.publish_docs(self.doc, self.creds, self.to)
        if status > 300:
            logger.error('Publish record[{}] failed, due to [{}]'
                         .format(self.doc, json.loads(data)['error']['reason']))
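publish_docs() keeps the (status, data) return contract that DocumentPublisher relies on, so the error path above is unchanged. The same check outside the class, as a sketch (record, credentials and target are placeholders):

import json

from common import elastic_access

doc = {'case_name': 'example'}  # placeholder record
creds = None                    # placeholder
to = 'stdout'                   # or an Elasticsearch document URL

# Mirrors DocumentPublisher._publish(): any status above 300 is a failure.
status, data = elastic_access.publish_docs(doc, creds, to)
if status > 300:
    print('publish failed: {}'.format(json.loads(data)['error']['reason']))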
diff --git a/utils/test/dashboard/kibana_cleanup.py b/utils/test/dashboard/kibana_cleanup.py
index 9ce4994f5..ee0190049 100644
--- a/utils/test/dashboard/kibana_cleanup.py
+++ b/utils/test/dashboard/kibana_cleanup.py
@@ -14,10 +14,10 @@ logger.addHandler(file_handler)
def delete_all(url, es_creds):
-    ids = elastic_access.get_elastic_docs(url, es_creds, body=None, field='_id')
+    ids = elastic_access.get_docs(url, es_creds, body=None, field='_id')
    for id in ids:
        del_url = '/'.join([url, id])
-        elastic_access.delete_request(del_url, es_creds)
+        elastic_access.delete_docs(del_url, es_creds)


if __name__ == '__main__':
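delete_all() now lists document ids via get_docs(field='_id') and removes each one with delete_docs(). A hypothetical driver, assuming kibana_cleanup is importable and the Kibana objects live in a local Elasticsearch instance (both are assumptions, not part of the patch):

# Hypothetical invocation of delete_all(); the real script builds its URLs
# and credentials from command-line arguments.
from kibana_cleanup import delete_all

es_creds = None  # placeholder
for obj_type in ('visualization', 'dashboard', 'search'):
    delete_all('http://localhost:9200/.kibana/{}'.format(obj_type), es_creds)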