Diffstat:
-rw-r--r--  jjb/apex/apex.yml                                                 |  17
-rw-r--r--  jjb/armband/armband-ci-jobs.yml                                   |   2
-rwxr-xr-x  jjb/armband/armband-deploy.sh                                     |  12
-rwxr-xr-x  jjb/armband/armband-download-artifact.sh                          |  42
-rw-r--r--  jjb/fuel/fuel-ci-jobs.yml                                         |  38
-rwxr-xr-x  jjb/fuel/fuel-deploy.sh                                           |   4
-rw-r--r--  jjb/functest/functest-ci-jobs.yml                                 |  13
-rw-r--r--  jjb/joid/joid-deploy.sh                                           |   9
-rw-r--r--  jjb/netready/netready.yml                                         |  55
-rw-r--r--  jjb/opnfv/opnfv-docker.sh                                         |  14
-rw-r--r--  jjb/opnfv/opnfv-docker.yml                                        |   1
-rw-r--r--  utils/test/result_collection_api/README.md                        |  16
-rw-r--r--  utils/test/result_collection_api/common/constants.py              |   1
-rw-r--r--  utils/test/result_collection_api/resources/handlers.py            |   3
-rw-r--r--  utils/test/result_collection_api/resources/models.py              |  58
-rw-r--r--  utils/test/result_collection_api/resources/pod_models.py          | 108
-rwxr-xr-x  utils/test/result_collection_api/run_test.sh                      |  10
-rw-r--r--  utils/test/result_collection_api/tests/__init__.py                |   1
-rw-r--r--  utils/test/result_collection_api/tests/unit/__init__.py           |   1
-rw-r--r--  utils/test/result_collection_api/tests/unit/fake_pymongo.py       | 132
-rw-r--r--  utils/test/result_collection_api/tests/unit/test_base.py          |  54
-rw-r--r--  utils/test/result_collection_api/tests/unit/test_fake_pymongo.py  |  52
-rw-r--r--  utils/test/result_collection_api/tests/unit/test_pod.py           |  86
-rw-r--r--  utils/test/result_collection_api/tests/unit/test_version.py       |  14
24 files changed, 670 insertions(+), 73 deletions(-)
diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml
index 282ae7b54..ce673b288 100644
--- a/jjb/apex/apex.yml
+++ b/jjb/apex/apex.yml
@@ -212,12 +212,15 @@
properties:
- build-blocker:
use-build-blocker: true
+ block-level: 'NODE'
blocking-jobs:
- 'apex-daily.*{stream1}'
- 'apex-deploy.*{stream1}'
- 'apex-build.*{stream1}'
- 'apex-runner.*{stream1}'
- 'apex-verify-{stream1}'
+ - throttle:
+ max-per-node: 1
builders:
- 'apex-build'
@@ -230,11 +233,12 @@
block: true
same-node: true
# - trigger-builds:
-# - project: 'functest-apex-{slave}-suite-{stream1}'
+# - project: 'functest-apex-{verify-slave}-suite-{stream1}'
# predefined-parameters: |
# DEPLOY_SCENARIO=os-nosdn-nofeature-ha
# FUNCTEST_SUITE_NAME=vping_userdata
# block: true
+# same-node: true
- trigger-builds:
- project: 'apex-deploy-virtual-os-odl_l2-nofeature-ha-{stream1}'
predefined-parameters: |
@@ -313,7 +317,7 @@
# Required Variables:
# stream: branch with - in place of / (eg. stable)
# branch: branch (eg. stable)
- node: '{slave}'
+ node: '{daily-slave}'
disabled: false
@@ -338,8 +342,11 @@
properties:
- build-blocker:
use-build-blocker: true
+ block-level: 'NODE'
blocking-jobs:
- 'apex-deploy.*{stream}'
+ - throttle:
+ max-per-node: 1
builders:
- 'apex-build'
@@ -349,6 +356,7 @@
BUILD_DIRECTORY=apex-build-{stream}/build
OPNFV_CLEAN=yes
git-revision: false
+ same-node: true
block: true
- 'apex-upload-artifact'
@@ -387,8 +395,11 @@
properties:
- build-blocker:
use-build-blocker: true
+ block-level: 'NODE'
blocking-jobs:
- 'apex-deploy.*{stream}'
+ - throttle:
+ max-per-node: 1
builders:
- 'apex-deploy-virtual'
@@ -604,6 +615,7 @@
properties:
- build-blocker:
use-build-blocker: true
+ block-level: 'NODE'
blocking-jobs:
- 'apex-verify.*{stream1}'
- 'apex-deploy.*{stream1}'
@@ -618,6 +630,7 @@
- project: 'apex-build-{stream1}'
git-revision: true
current-parameters: true
+ same-node: true
block: true
- trigger-builds:
- project: 'apex-deploy-virtual-nosdn-nofeature-ha-{stream1}'
diff --git a/jjb/armband/armband-ci-jobs.yml b/jjb/armband/armband-ci-jobs.yml
index 5ae8a04dc..bbf7c40a4 100644
--- a/jjb/armband/armband-ci-jobs.yml
+++ b/jjb/armband/armband-ci-jobs.yml
@@ -190,4 +190,4 @@
- trigger:
name: 'armband-os-odl_l2-nofeature-ha-arm-pod1-brahmaputra-trigger'
triggers:
- - timed: '0 4 * * *'
+ - timed: '0 20 * * *'
diff --git a/jjb/armband/armband-deploy.sh b/jjb/armband/armband-deploy.sh
index 97430c114..8b0af31e3 100755
--- a/jjb/armband/armband-deploy.sh
+++ b/jjb/armband/armband-deploy.sh
@@ -39,10 +39,10 @@ POD_NAME=${NODE_NAME/*-}
if [[ ! $LAB_NAME =~ (arm|enea) ]]; then
echo "Unsupported/unidentified lab $LAB_NAME. Cannot continue!"
exit 1
-else
- echo "Using configuration for $LAB_NAME"
fi
+echo "Using configuration for $LAB_NAME"
+
# create TMPDIR if it doesn't exist
mkdir -p $TMPDIR
@@ -52,8 +52,11 @@ if [[ $LAB_CONFIG_URL =~ ^git:// ]]; then
LAB_CONFIG_URL=file://${WORKSPACE}/lab-config
fi
+# releng wants us to use nothing else but opnfv.iso for now. We comply.
+ISO_FILE=$WORKSPACE/opnfv.iso
+
# construct the command
-DEPLOY_COMMAND="$WORKSPACE/ci/deploy.sh -b ${LAB_CONFIG_URL} -l $LAB_NAME -p $POD_NAME -s $DEPLOY_SCENARIO -i file://$WORKSPACE/opnfv.iso -H -B $BRIDGE -S $TMPDIR"
+DEPLOY_COMMAND="$WORKSPACE/ci/deploy.sh -l $LAB_NAME -p $POD_NAME -b ${LAB_CONFIG_URL} -s $DEPLOY_SCENARIO -i file://${ISO_FILE} -H -B $BRIDGE -S $TMPDIR"
# log info to console
echo "Deployment parameters"
@@ -72,9 +75,6 @@ echo "Issuing command"
echo "$DEPLOY_COMMAND"
echo
-# FIXME
-export TARGET_LAB=${LAB_NAME}
-export TARGET_POD=${POD_NAME}
$DEPLOY_COMMAND
echo
diff --git a/jjb/armband/armband-download-artifact.sh b/jjb/armband/armband-download-artifact.sh
index 15ad67d6e..18b55d7a7 100755
--- a/jjb/armband/armband-download-artifact.sh
+++ b/jjb/armband/armband-download-artifact.sh
@@ -13,35 +13,33 @@ set -o pipefail
if [[ "$JOB_NAME" =~ "merge" ]]; then
echo "Downloading http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties"
# get the properties file for the Armband Fuel ISO built for a merged change
- curl -s -o $WORKSPACE/latest.properties http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties
+ curl -f -s -o $WORKSPACE/latest.properties http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties
else
# get the latest.properties file in order to get info regarding latest artifact
echo "Downloading http://$GS_URL/latest.properties"
- curl -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
+ curl -f -s -o $WORKSPACE/latest.properties http://$GS_URL/latest.properties
fi
-# check if we got the file
-# FIXME: the file is created even if it didn't exist on the host
-# We should check that the contents are sane
-[[ -f latest.properties ]] || exit 1
-
-# source the file so we get artifact metadata
+# source the file so we get artifact metadata, it will exit if it doesn't exist
source latest.properties
# echo the info about artifact that is used during the deployment
OPNFV_ARTIFACT=${OPNFV_ARTIFACT_URL/*\/}
echo "Using $OPNFV_ARTIFACT for deployment"
+# Releng doesn't want us to use anything but opnfv.iso for now. We comply.
+ISO_FILE=${WORKSPACE}/opnfv.iso
+
# using ISOs for verify & merge jobs from local storage will be enabled later
if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
# check if we already have the ISO to avoid redownload
ISOSTORE="/iso_mount/opnfv_ci/${GIT_BRANCH##*/}"
if [[ -f "$ISOSTORE/$OPNFV_ARTIFACT" ]]; then
echo "ISO exists locally. Skipping the download and using the file from ISO store"
- ln -s $ISOSTORE/$OPNFV_ARTIFACT $WORKSPACE/opnfv.iso
+ ln -s $ISOSTORE/$OPNFV_ARTIFACT ${ISO_FILE}
echo "--------------------------------------------------------"
echo
- ls -al $WORKSPACE/opnfv.iso
+ ls -al ${ISO_FILE}
echo
echo "--------------------------------------------------------"
echo "Done!"
@@ -49,18 +47,22 @@ if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
fi
fi
+# Use gsutils if available
+if $(which gsutil &>/dev/null); then
+ DOWNLOAD_URL="gs://$OPNFV_ARTIFACT_URL"
+ CMD="gsutil cp ${DOWNLOAD_URL} ${ISO_FILE}"
+else
+ # download image
+ # -f returns error if the file was not found or on server error
+ DOWNLOAD_URL="http://$OPNFV_ARTIFACT_URL"
+ CMD="curl -f -s -o ${ISO_FILE} ${DOWNLOAD_URL}"
+fi
+
# log info to console
-echo "Downloading the $INSTALLER_TYPE artifact using URL http://$OPNFV_ARTIFACT_URL"
+echo "Downloading the $INSTALLER_TYPE artifact using URL $DOWNLOAD_URL"
echo "This could take some time..."
echo "--------------------------------------------------------"
-echo
-
-# download the file
-curl -s -o $WORKSPACE/opnfv.iso http://$OPNFV_ARTIFACT_URL
-
-# The file is always created, check that it is in fact an ISO image
-[[ $(file $WORKSPACE/opnfv.iso) =~ ISO ]]
-
-echo
+echo "$CMD"
+$CMD
echo "--------------------------------------------------------"
echo "Done!"
diff --git a/jjb/fuel/fuel-ci-jobs.yml b/jjb/fuel/fuel-ci-jobs.yml
index 77b711e74..acfcedabf 100644
--- a/jjb/fuel/fuel-ci-jobs.yml
+++ b/jjb/fuel/fuel-ci-jobs.yml
@@ -43,6 +43,8 @@
<<: *master
- virtual:
<<: *master
+ - zte-pod1:
+ <<: *master
#--------------------------------
# scenarios
#--------------------------------
@@ -311,6 +313,42 @@
name: 'fuel-os-nosdn-kvm-noha-ericsson-pod2-brahmaputra-trigger'
triggers:
- timed: ''
+
+#-----------------------------------------------
+# ZTE POD1 Triggers running against master branch
+#-----------------------------------------------
+- trigger:
+ name: 'fuel-os-nosdn-nofeature-ha-zte-pod1-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-nofeature-ha-zte-pod1-master-trigger'
+ triggers:
+ - timed: '0 12 * * *'
+- trigger:
+ name: 'fuel-os-odl_l3-nofeature-ha-zte-pod1-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-onos-nofeature-ha-zte-pod1-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-bgpvpn-ha-zte-pod1-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-odl_l2-sfc-ha-zte-pod1-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm-ha-zte-pod1-master-trigger'
+ triggers:
+ - timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-ovs-ha-zte-pod1-master-trigger'
+ triggers:
+ - timed: ''
#-----------------------------------------------
# Triggers for other PODs
#-----------------------------------------------
diff --git a/jjb/fuel/fuel-deploy.sh b/jjb/fuel/fuel-deploy.sh
index bab6151d8..ceccc54b6 100755
--- a/jjb/fuel/fuel-deploy.sh
+++ b/jjb/fuel/fuel-deploy.sh
@@ -43,8 +43,8 @@ if [[ "$NODE_NAME" =~ "virtual" ]]; then
POD_NAME="virtual_kvm"
fi
-# we currently support ericsson, intel, and lf labs
-if [[ ! "$LAB_NAME" =~ (ericsson|intel|lf) ]]; then
+# we currently support ericsson, intel, lf and zte labs
+if [[ ! "$LAB_NAME" =~ (ericsson|intel|lf|zte) ]]; then
echo "Unsupported/unidentified lab $LAB_NAME. Cannot continue!"
exit 1
else
diff --git a/jjb/functest/functest-ci-jobs.yml b/jjb/functest/functest-ci-jobs.yml
index 6cfcfdca4..cd3f11e8e 100644
--- a/jjb/functest/functest-ci-jobs.yml
+++ b/jjb/functest/functest-ci-jobs.yml
@@ -218,12 +218,20 @@
builders:
- 'functest-cleanup'
- 'set-functest-env'
- - 'functest-all'
+ - 'functest-daily'
- 'functest-store-results'
- builder:
name: functest-suite-builder
builders:
+ - 'functest-cleanup'
+ - 'set-functest-env'
+ - 'functest-suite'
+
+
+- builder:
+ name: functest-suite
+ builders:
- shell: |
#!/bin/bash
set -e
@@ -237,8 +245,9 @@
container_id=$(docker ps -a | grep opnfv/functest | awk '{print $1}' | head -1)
docker exec $container_id $cmd
+
- builder:
- name: functest-all
+ name: functest-daily
builders:
- shell: |
#!/bin/bash
diff --git a/jjb/joid/joid-deploy.sh b/jjb/joid/joid-deploy.sh
index 907db4f57..bcc4b6412 100644
--- a/jjb/joid/joid-deploy.sh
+++ b/jjb/joid/joid-deploy.sh
@@ -181,10 +181,17 @@ cat << EOF > $JOID_ADMIN_OPENRC
export OS_USERNAME=admin
export OS_PASSWORD=$OS_ADMIN_PASSWORD
export OS_TENANT_NAME=admin
-export OS_AUTH_URL=http://$KEYSTONE:5000/v2.0
+export OS_AUTH_URL=http://$KEYSTONE:35537/v2.0
export OS_REGION_NAME=Canonical
+export OS_ENDPOINT_TYPE='adminURL'
+export CINDER_ENDPOINT_TYPE='adminURL'
+export GLANCE_ENDPOINT_TYPE='adminURL'
+export KEYSTONE_ENDPOINT_TYPE='adminURL'
+export NEUTRON_ENDPOINT_TYPE='adminURL'
+export NOVA_ENDPOINT_TYPE='adminURL'
export SDN_CONTROLLER=$SDN_CONTROLLER_IP
export SDN_PASSWORD=$SDN_PASSWORD
+export OS_INTERFACE=admin
EOF
##
diff --git a/jjb/netready/netready.yml b/jjb/netready/netready.yml
new file mode 100644
index 000000000..bc8f66691
--- /dev/null
+++ b/jjb/netready/netready.yml
@@ -0,0 +1,55 @@
+- project:
+ name: netready
+
+ project: '{name}'
+
+ jobs:
+ - 'netready-verify-{stream}'
+
+ stream:
+ - master:
+ branch: '{stream}'
+ gs-pathname: ''
+ - brahmaputra:
+ branch: 'stable/{stream}'
+ gs-pathname: '/{stream}'
+
+- job-template:
+ name: 'netready-verify-{stream}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - gerrit-parameter:
+ branch: '{branch}'
+ scm:
+ - gerrit-trigger-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: '$GERRIT_REFSPEC'
+ choosing-strategy: 'gerrit'
+
+ triggers:
+ - gerrit:
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{project}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ forbidden-file-paths:
+ - compare-type: ANT
+ pattern: 'docs/**'
+
+ builders:
+ - shell: |
+ echo "Nothing to verify!"
diff --git a/jjb/opnfv/opnfv-docker.sh b/jjb/opnfv/opnfv-docker.sh
index 702c0ce52..1093d5fec 100644
--- a/jjb/opnfv/opnfv-docker.sh
+++ b/jjb/opnfv/opnfv-docker.sh
@@ -67,16 +67,18 @@ fi
# cd to directory where Dockerfile is located
-if [[ "$DOCKER_REPO_NAME" == "opnfv/functest" ]]; then
+if [[ "$DOCKER_REPO_NAME" == "opnfv/bottlenecks" ]]; then
+ cd $WORKSPACE/ci/docker
+elif [[ "$DOCKER_REPO_NAME" == "opnfv/cperf" ]]; then
cd $WORKSPACE/docker
-elif [[ "$DOCKER_REPO_NAME" == "opnfv/yardstick" ]]; then
- cd $WORKSPACE/ci/docker/yardstick-ci
-elif [[ "$DOCKER_REPO_NAME" == "opnfv/storperf" ]]; then
+elif [[ "$DOCKER_REPO_NAME" == "opnfv/functest" ]]; then
cd $WORKSPACE/docker
elif [[ "$DOCKER_REPO_NAME" == "opnfv/qtip" ]]; then
cd $WORKSPACE/docker
-elif [[ "$DOCKER_REPO_NAME" == "opnfv/bottlenecks" ]]; then
- cd $WORKSPACE/ci/docker
+elif [[ "$DOCKER_REPO_NAME" == "opnfv/storeperf" ]]; then
+ cd $WORKSPACE/docker
+elif [[ "$DOCKER_REPO_NAME" == "opnfv/yardstick" ]]; then
+ cd $WORKSPACE/ci/docker/yardstick-ci
else
echo "ERROR: DOCKER_REPO_NAME parameter not valid: $DOCKER_REPO_NAME"
exit 1
diff --git a/jjb/opnfv/opnfv-docker.yml b/jjb/opnfv/opnfv-docker.yml
index 936f22040..6b4924295 100644
--- a/jjb/opnfv/opnfv-docker.yml
+++ b/jjb/opnfv/opnfv-docker.yml
@@ -8,6 +8,7 @@
project:
- 'bottlenecks'
+ - 'cperf'
- 'functest'
- 'storperf'
- 'qtip'
diff --git a/utils/test/result_collection_api/README.md b/utils/test/result_collection_api/README.md
new file mode 100644
index 000000000..d73274c69
--- /dev/null
+++ b/utils/test/result_collection_api/README.md
@@ -0,0 +1,16 @@
+# result_collection_api
+
+## prepare:
+Install:
+
+```
+pip install testtools
+pip install discover
+```
+
+## How to:
+run_test.sh:
+
+```
+bash ./run_test.sh
+```
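For reference, the same unit-test run that the README and run_test.sh (added later in this change) perform with testtools discovery can also be driven from the standard library's loader. This is a hypothetical alternative, not part of the patch, and assumes it is launched from the result_collection_api directory so that `resources` and the test modules are importable:

```
# Hypothetical alternative to run_test.sh (not part of this change):
# discover and run the unit tests with the stdlib loader.
import unittest

suite = unittest.defaultTestLoader.discover('./tests/unit', pattern='test_*.py')
unittest.TextTestRunner(verbosity=2).run(suite)
```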
diff --git a/utils/test/result_collection_api/common/constants.py b/utils/test/result_collection_api/common/constants.py
index 2c825c109..4d39a142d 100644
--- a/utils/test/result_collection_api/common/constants.py
+++ b/utils/test/result_collection_api/common/constants.py
@@ -12,3 +12,4 @@ DEFAULT_REPRESENTATION = "application/json"
HTTP_BAD_REQUEST = 400
HTTP_FORBIDDEN = 403
HTTP_NOT_FOUND = 404
+HTTP_OK = 200
diff --git a/utils/test/result_collection_api/resources/handlers.py b/utils/test/result_collection_api/resources/handlers.py
index c1e8eb182..fff166237 100644
--- a/utils/test/result_collection_api/resources/handlers.py
+++ b/utils/test/result_collection_api/resources/handlers.py
@@ -13,7 +13,8 @@ from tornado.web import RequestHandler, asynchronous, HTTPError
from tornado import gen
from datetime import datetime, timedelta
-from models import Pod, TestProject, TestCase, TestResult
+from models import TestProject, TestCase, TestResult
+from resources.pod_models import Pod
from common.constants import DEFAULT_REPRESENTATION, HTTP_BAD_REQUEST, \
HTTP_NOT_FOUND, HTTP_FORBIDDEN
from common.config import prepare_put_request
diff --git a/utils/test/result_collection_api/resources/models.py b/utils/test/result_collection_api/resources/models.py
index 06e95f94f..adf6842c3 100644
--- a/utils/test/result_collection_api/resources/models.py
+++ b/utils/test/result_collection_api/resources/models.py
@@ -5,47 +5,41 @@
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
+# feng.xiaowei@zte.com.cn mv Pod to pod_models.py 6-18-2016
+# feng.xiaowei@zte.com.cn add MetaCreateResponse/MetaGetResponse 6-18-2016
##############################################################################
-class Pod:
- """ describes a POD platform """
- def __init__(self):
- self._id = ""
- self.name = ""
- self.creation_date = ""
- self.mode = ""
- self.details = ""
+class MetaCreateResponse(object):
+ def __init__(self, success=True, uri=''):
+ self.success = success
+ self.uri = uri
@staticmethod
- def pod_from_dict(pod_dict):
- if pod_dict is None:
+ def from_dict(meta_dict):
+ if meta_dict is None:
return None
- p = Pod()
- p._id = pod_dict.get('_id')
- p.creation_date = str(pod_dict.get('creation_date'))
- p.name = pod_dict.get('name')
- p.mode = pod_dict.get('mode')
- p.details = pod_dict.get('details')
- return p
+ meta = MetaCreateResponse()
+ meta.success = meta_dict.get('success')
+ meta.uri = meta_dict.get('uri')
+ return meta
- def format(self):
- return {
- "name": self.name,
- "mode": self.mode,
- "details": self.details,
- "creation_date": str(self.creation_date),
- }
- def format_http(self):
- return {
- "_id": str(self._id),
- "name": self.name,
- "mode": self.mode,
- "details": self.details,
- "creation_date": str(self.creation_date),
- }
+class MetaGetResponse(object):
+ def __init__(self, success=True, total=0):
+ self.success = success
+ self.total = total
+
+ @staticmethod
+ def from_dict(meta_dict):
+ if meta_dict is None:
+ return None
+
+ meta = MetaGetResponse()
+ meta.success = meta_dict.get('success')
+ meta.total = meta_dict.get('total')
+ return meta
class TestProject:
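A minimal sketch (not part of the patch) of how the new Meta* response objects behave: they are plain value holders rebuilt from the JSON dicts the API returns. The import path assumes the snippet runs from the result_collection_api directory:

```
# Not part of the patch: MetaCreateResponse/MetaGetResponse round-trip.
from resources.models import MetaCreateResponse, MetaGetResponse

created = MetaCreateResponse.from_dict({'success': True, 'uri': '/pods/zte-1'})
listed = MetaGetResponse.from_dict({'success': True, 'total': 2})
assert created.success and created.uri == '/pods/zte-1'
assert listed.total == 2
```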
diff --git a/utils/test/result_collection_api/resources/pod_models.py b/utils/test/result_collection_api/resources/pod_models.py
new file mode 100644
index 000000000..5c4ef7221
--- /dev/null
+++ b/utils/test/result_collection_api/resources/pod_models.py
@@ -0,0 +1,108 @@
+from models import MetaCreateResponse, MetaGetResponse
+
+
+class PodCreateRequest(object):
+ def __init__(self, name='', mode='', details=''):
+ self.name = name
+ self.mode = mode
+ self.details = details
+
+ def format(self):
+ return {
+ "name": self.name,
+ "mode": self.mode,
+ "details": self.details,
+ }
+
+ @staticmethod
+ def from_dict(req_dict):
+ if req_dict is None:
+ return None
+
+ req = PodCreateRequest()
+ req.name = req_dict.get('name')
+ req.mode = req_dict.get('mode')
+ req.details = req_dict.get('details')
+ return req
+
+
+class Pod(PodCreateRequest):
+ """ describes a POD platform """
+ def __init__(self, name='', mode='', details='', _id='', create_date=''):
+ super(Pod, self).__init__(name, mode, details)
+ self._id = _id
+ self.creation_date = create_date
+
+ @staticmethod
+ def pod_from_dict(pod_dict):
+ if pod_dict is None:
+ return None
+
+ p = Pod()
+ p._id = pod_dict.get('_id')
+ p.creation_date = str(pod_dict.get('creation_date'))
+ p.name = pod_dict.get('name')
+ p.mode = pod_dict.get('mode')
+ p.details = pod_dict.get('details')
+ return p
+
+ def format(self):
+ f = super(Pod, self).format()
+ f['creation_date'] = str(self.creation_date)
+ return f
+
+ def format_http(self):
+ f = self.format()
+ f['_id'] = str(self._id)
+ return f
+
+
+class PodCreateResponse(object):
+ def __init__(self, pod=None, meta=None):
+ self.pod = pod
+ self.meta = meta
+
+ @staticmethod
+ def from_dict(res_dict):
+ if res_dict is None:
+ return None
+
+ res = PodCreateResponse()
+ res.pod = Pod.pod_from_dict(res_dict.get('pod'))
+ res.meta = MetaCreateResponse.from_dict(res_dict.get('meta'))
+ return res
+
+
+class PodGetResponse(PodCreateRequest):
+ def __init__(self, name='', mode='', details='', create_date=''):
+ self.creation_date = create_date
+ super(PodGetResponse, self).__init__(name, mode, details)
+
+ @staticmethod
+ def from_dict(req_dict):
+ if req_dict is None:
+ return None
+
+ res = PodGetResponse()
+ res.creation_date = str(req_dict.get('creation_date'))
+ res.name = req_dict.get('name')
+ res.mode = req_dict.get('mode')
+ res.details = req_dict.get('details')
+ return res
+
+
+class PodsGetResponse(object):
+ def __init__(self, pods=[], meta=None):
+ self.pods = pods
+ self.meta = meta
+
+ @staticmethod
+ def from_dict(res_dict):
+ if res_dict is None:
+ return None
+
+ res = PodsGetResponse()
+ for pod in res_dict.get('pods'):
+ res.pods.append(PodGetResponse.from_dict(pod))
+ res.meta = MetaGetResponse.from_dict(res_dict.get('meta'))
+ return res
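A minimal sketch (not part of the patch) of how the new pod models round-trip between objects and the JSON-style dicts the handlers exchange; the `_id` and date values below are made-up placeholders:

```
# Not part of the patch: request/response round-trip with the new pod models.
from resources.pod_models import PodCreateRequest, Pod, PodsGetResponse

req = PodCreateRequest(name='zte-1', mode='alive', details='zte pod 1')
payload = req.format()            # {'name': ..., 'mode': ..., 'details': ...}

# Pod adds _id/creation_date on top of the create-request fields.
pod = Pod.pod_from_dict(dict(payload, _id='fake-id', creation_date='2016-06-18'))
assert pod.format_http()['_id'] == 'fake-id'

# A GET /pods style body is rebuilt into PodsGetResponse + MetaGetResponse.
pods = PodsGetResponse.from_dict(
    {'pods': [payload], 'meta': {'success': True, 'total': 1}})
assert pods.meta.total == 1 and pods.pods[0].name == 'zte-1'
```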
diff --git a/utils/test/result_collection_api/run_test.sh b/utils/test/result_collection_api/run_test.sh
new file mode 100755
index 000000000..6006fcf8b
--- /dev/null
+++ b/utils/test/result_collection_api/run_test.sh
@@ -0,0 +1,10 @@
+#! /bin/bash
+
+# Before run this script, make sure that testtools and discover
+# had been installed in your env
+# or else using pip to install them as follows:
+# pip install testtools, discover
+
+find . -type f -name "*.pyc" -delete
+testrargs="discover ./tests/unit"
+python -m testtools.run $testrargs
\ No newline at end of file
diff --git a/utils/test/result_collection_api/tests/__init__.py b/utils/test/result_collection_api/tests/__init__.py
new file mode 100644
index 000000000..3ed9fd0f3
--- /dev/null
+++ b/utils/test/result_collection_api/tests/__init__.py
@@ -0,0 +1 @@
+__author__ = 'root'
diff --git a/utils/test/result_collection_api/tests/unit/__init__.py b/utils/test/result_collection_api/tests/unit/__init__.py
new file mode 100644
index 000000000..3ed9fd0f3
--- /dev/null
+++ b/utils/test/result_collection_api/tests/unit/__init__.py
@@ -0,0 +1 @@
+__author__ = 'root'
diff --git a/utils/test/result_collection_api/tests/unit/fake_pymongo.py b/utils/test/result_collection_api/tests/unit/fake_pymongo.py
new file mode 100644
index 000000000..e5ded376e
--- /dev/null
+++ b/utils/test/result_collection_api/tests/unit/fake_pymongo.py
@@ -0,0 +1,132 @@
+from bson.objectid import ObjectId
+from concurrent.futures import ThreadPoolExecutor
+
+__author__ = 'serena'
+
+
+class MemCursor(object):
+ def __init__(self, collection):
+ self.collection = collection
+ self.count = len(self.collection)
+
+ def _is_next_exist(self):
+ return self.count != 0
+
+ @property
+ def fetch_next(self):
+ with ThreadPoolExecutor(max_workers=2) as executor:
+ result = executor.submit(self._is_next_exist)
+ return result
+
+ def next_object(self):
+ self.count -= 1
+ return self.collection.pop()
+
+
+class MemDb(object):
+
+ def __init__(self):
+ self.contents = []
+ pass
+
+ def _find_one(self, spec_or_id=None, *args):
+ if spec_or_id is not None and not isinstance(spec_or_id, dict):
+ spec_or_id = {"_id": spec_or_id}
+ cursor = self._find(spec_or_id, *args)
+ for result in cursor:
+ return result
+ return None
+
+ def find_one(self, spec_or_id=None, *args):
+ with ThreadPoolExecutor(max_workers=2) as executor:
+ result = executor.submit(self._find_one, spec_or_id, *args)
+ return result
+
+ def _insert(self, doc_or_docs):
+
+ docs = doc_or_docs
+ return_one = False
+ if isinstance(docs, dict):
+ return_one = True
+ docs = [docs]
+
+ ids = []
+ for doc in docs:
+ if '_id' not in doc:
+ doc['_id'] = ObjectId()
+ if not self._find_one(doc['_id']):
+ ids.append(doc['_id'])
+ self.contents.append(doc_or_docs)
+
+ if len(ids) == 0:
+ return None
+ if return_one:
+ return ids[0]
+ else:
+ return ids
+
+ def insert(self, doc_or_docs):
+ with ThreadPoolExecutor(max_workers=2) as executor:
+ result = executor.submit(self._insert, doc_or_docs)
+ return result
+
+ @staticmethod
+ def _in(content, *args):
+ for arg in args:
+ for k, v in arg.iteritems():
+ if content.get(k, None) != v:
+ return False
+
+ return True
+
+ def _find(self, *args):
+ res = []
+ for content in self.contents:
+ if self._in(content, *args):
+ res.append(content)
+
+ return res
+
+ def find(self, *args):
+ return MemCursor(self._find(*args))
+
+ def _update(self, spec, document):
+ updated = False
+ for index in range(len(self.contents)):
+ content = self.contents[index]
+ if self._in(content, spec):
+ for k, v in document.iteritems():
+ updated = True
+ content[k] = v
+ self.contents[index] = content
+ return updated
+
+ def update(self, spec, document):
+ with ThreadPoolExecutor(max_workers=2) as executor:
+ result = executor.submit(self._update, spec, document)
+ return result
+
+ def _remove(self, spec_or_id=None):
+ if spec_or_id is None:
+ self.contents = []
+ if not isinstance(spec_or_id, dict):
+ spec_or_id = {'_id': spec_or_id}
+ for index in range(len(self.contents)):
+ content = self.contents[index]
+ if self._in(content, spec_or_id):
+ del self.contents[index]
+ return True
+ return False
+
+ def remove(self, spec_or_id=None):
+ with ThreadPoolExecutor(max_workers=2) as executor:
+ result = executor.submit(self._remove, spec_or_id)
+ return result
+
+ def clear(self):
+ self._remove()
+
+pod = MemDb()
+test_projects = MemDb()
+test_cases = MemDb()
+test_results = MemDb()
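A minimal sketch (not part of the patch) of how these fake collections are driven: every call hands the work to a ThreadPoolExecutor and returns a concurrent.futures.Future, so a tornado coroutine can yield it (as test_fake_pymongo.py below does), while plain code can block on .result():

```
# Not part of the patch: synchronous use of the motor-like fake collection.
import fake_pymongo   # importable when run from tests/unit

fake_pymongo.pod.insert({'_id': '1', 'name': 'zte-1'}).result()
assert fake_pymongo.pod.find_one({'name': 'zte-1'}).result()['_id'] == '1'

cursor = fake_pymongo.pod.find()        # MemCursor over the matching docs
while cursor.fetch_next.result():       # a coroutine would `yield` here
    print(cursor.next_object()['name'])
```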
diff --git a/utils/test/result_collection_api/tests/unit/test_base.py b/utils/test/result_collection_api/tests/unit/test_base.py
new file mode 100644
index 000000000..98190fb94
--- /dev/null
+++ b/utils/test/result_collection_api/tests/unit/test_base.py
@@ -0,0 +1,54 @@
+import json
+from tornado.web import Application
+from tornado.testing import AsyncHTTPTestCase
+
+from resources.handlers import VersionHandler, PodHandler, \
+ TestProjectHandler, TestCasesHandler, TestResultsHandler, DashboardHandler
+import fake_pymongo
+
+
+class TestBase(AsyncHTTPTestCase):
+ headers = {'Content-Type': 'application/json; charset=UTF-8'}
+
+ def setUp(self):
+ self.addCleanup(self._clear)
+ super(TestBase, self).setUp()
+
+ def get_app(self):
+ return Application(
+ [
+ (r"/version", VersionHandler),
+ (r"/pods", PodHandler),
+ (r"/pods/([^/]+)", PodHandler),
+ (r"/test_projects", TestProjectHandler),
+ (r"/test_projects/([^/]+)", TestProjectHandler),
+ (r"/test_projects/([^/]+)/cases", TestCasesHandler),
+ (r"/test_projects/([^/]+)/cases/([^/]+)", TestCasesHandler),
+ (r"/results", TestResultsHandler),
+ (r"/results([^/]*)", TestResultsHandler),
+ (r"/results/([^/]*)", TestResultsHandler),
+ (r"/dashboard", DashboardHandler),
+ (r"/dashboard([^/]*)", DashboardHandler),
+ (r"/dashboard/([^/]*)", DashboardHandler),
+ ],
+ db=fake_pymongo,
+ debug=True,
+ )
+
+ def create(self, uri, body=None):
+ return self.fetch(uri,
+ method='POST',
+ body=json.dumps(body),
+ headers=self.headers)
+
+ def get(self, uri):
+ return self.fetch(uri,
+ method='GET',
+ headers=self.headers)
+
+ @staticmethod
+ def _clear():
+ fake_pymongo.pod.clear()
+ fake_pymongo.test_projects.clear()
+ fake_pymongo.test_cases.clear()
+ fake_pymongo.test_results.clear()
diff --git a/utils/test/result_collection_api/tests/unit/test_fake_pymongo.py b/utils/test/result_collection_api/tests/unit/test_fake_pymongo.py
new file mode 100644
index 000000000..5ddbf28d9
--- /dev/null
+++ b/utils/test/result_collection_api/tests/unit/test_fake_pymongo.py
@@ -0,0 +1,52 @@
+import unittest
+from tornado.web import Application
+from tornado import gen
+from tornado.testing import AsyncHTTPTestCase, gen_test
+
+import fake_pymongo
+
+
+class MyTest(AsyncHTTPTestCase):
+ def setUp(self):
+ super(MyTest, self).setUp()
+ self.db = fake_pymongo
+ self.io_loop.run_sync(self.fixture_setup)
+
+ def get_app(self):
+ return Application()
+
+ @gen.coroutine
+ def fixture_setup(self):
+ self.test1 = {'_id': '1', 'name': 'test1'}
+ self.test2 = {'name': 'test2'}
+ yield self.db.pod.insert({'_id': '1', 'name': 'test1'})
+ yield self.db.pod.insert({'name': 'test2'})
+
+ @gen_test
+ def test_find_one(self):
+ user = yield self.db.pod.find_one({'name': 'test1'})
+ self.assertEqual(user, self.test1)
+
+ @gen_test
+ def test_find(self):
+ cursor = self.db.pod.find()
+ names = []
+ while (yield cursor.fetch_next):
+ ob = cursor.next_object()
+ names.append(ob.get('name'))
+ self.assertItemsEqual(names, ['test1', 'test2'])
+
+ @gen_test
+ def test_update(self):
+ yield self.db.pod.update({'_id': '1'}, {'name': 'new_test1'})
+ user = yield self.db.pod.find_one({'_id': '1'})
+ self.assertEqual(user.get('name', None), 'new_test1')
+
+ @gen_test
+ def test_remove(self):
+ yield self.db.pod.remove({'_id': '1'})
+ user = yield self.db.pod.find_one({'_id': '1'})
+ self.assertIsNone(user)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/utils/test/result_collection_api/tests/unit/test_pod.py b/utils/test/result_collection_api/tests/unit/test_pod.py
new file mode 100644
index 000000000..5a3d485ab
--- /dev/null
+++ b/utils/test/result_collection_api/tests/unit/test_pod.py
@@ -0,0 +1,86 @@
+import unittest
+import json
+
+from test_base import TestBase
+from resources.pod_models import PodCreateRequest, \
+ PodCreateResponse, PodsGetResponse
+from common.constants import HTTP_OK, HTTP_BAD_REQUEST, HTTP_FORBIDDEN
+
+
+class TestPodCreate(TestBase):
+ req = PodCreateRequest(name='zte-1', mode='alive', details='zte pod 1')
+
+ def test_withoutBody(self):
+ res = self.create('/pods', body=None)
+ self.assertEqual(res.code, HTTP_BAD_REQUEST)
+
+ def test_success(self):
+ res = self.create('/pods', body=self.req.format())
+ self.assertEqual(res.code, HTTP_OK)
+ res_body = PodCreateResponse.from_dict(json.loads(res.body))
+ self._assertMeta(res_body.meta, True)
+ self._assertBody(res_body.pod)
+
+ def test_alreadyExist(self):
+ self.create('/pods', body=self.req.format())
+ res = self.create('/pods', body=self.req.format())
+ self.assertEqual(res.code, HTTP_FORBIDDEN)
+ self.assertIn('already exists', res.body)
+
+ def _assertMeta(self, meta, success):
+ self.assertEqual(meta.success, success)
+ if success:
+ self.assertEqual(meta.uri, '/pods/{}'.format(self.req.name))
+
+ def _assertBody(self, res):
+ self.assertEqual(res.name, self.req.name)
+ self.assertEqual(res.mode, self.req.mode)
+ self.assertEqual(res.details, self.req.details)
+ self.assertIsNotNone(res.creation_date)
+ self.assertIsNotNone(res._id)
+
+
+class TestPodGet(TestBase):
+ def test_notExist(self):
+ res = self.get('/pods/notExist')
+ body = PodsGetResponse.from_dict(json.loads(res.body))
+ self._assertMeta(body.meta, 0)
+
+ def test_getOne(self):
+ self.create('/pods', body=TestPodCreate.req.format())
+ res = self.get('/pods/{}'.format(TestPodCreate.req.name))
+ body = PodsGetResponse.from_dict(json.loads(res.body))
+ self._assertMeta(body.meta, 1)
+ self._assertBody(TestPodCreate.req, body.pods[0])
+
+ def test_list(self):
+ req = PodCreateRequest(name='zte-2', mode='alive', details='zte pod 2')
+ self.create('/pods', body=TestPodCreate.req.format())
+ self.create('/pods', body=req.format())
+ res = self.get('/pods')
+ body = PodsGetResponse.from_dict(json.loads(res.body))
+ self._assertMeta(body.meta, 2)
+ for pod in body.pods:
+ if req.name == pod.name:
+ self._assertBody(req, pod)
+ else:
+ self._assertBody(TestPodCreate.req, pod)
+
+ def _assertMeta(self, meta, total):
+ def check_success():
+ if total is 0:
+ return False
+ else:
+ return True
+ self.assertEqual(meta.total, total)
+ self.assertEqual(meta.success, check_success())
+
+ def _assertBody(self, req, res):
+ self.assertEqual(res.name, req.name)
+ self.assertEqual(res.mode, req.mode)
+ self.assertEqual(res.details, req.details)
+ self.assertIsNotNone(res.creation_date)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/utils/test/result_collection_api/tests/unit/test_version.py b/utils/test/result_collection_api/tests/unit/test_version.py
new file mode 100644
index 000000000..918f2f052
--- /dev/null
+++ b/utils/test/result_collection_api/tests/unit/test_version.py
@@ -0,0 +1,14 @@
+import unittest
+
+from test_base import TestBase
+
+__author__ = 'serena'
+
+
+class TestVersion(TestBase):
+ def test_get_version(self):
+ response = self.fetch('/version')
+ self.assertEqual(response.code, 200)
+
+if __name__ == '__main__':
+ unittest.main()