-rw-r--r--  jjb/apex/apex.yml                                                      |   2
-rw-r--r--  jjb/apex/apex.yml.j2                                                   |   2
-rw-r--r--  jjb/compass4nfv/compass-ci-jobs.yml                                    |  37
-rw-r--r--  jjb/compass4nfv/compass-deploy.sh                                      |  27
-rw-r--r--  jjb/doctor/doctor.yml                                                  |   4
-rw-r--r--  jjb/joid/joid-daily-jobs.yml                                           | 165
-rw-r--r--  jjb/releng/opnfv-docker-arm.yml                                        |   4
-rw-r--r--  jjb/releng/opnfv-docker.sh                                             |   2
-rw-r--r--  jjb/releng/opnfv-docker.yml                                            |  53
-rwxr-xr-x  jjb/yardstick/yardstick-daily.sh                                       |   7
-rw-r--r--  utils/test/testapi/opnfv_testapi/cmd/server.py                         |  25
-rw-r--r--  utils/test/testapi/opnfv_testapi/common/check.py                       |  11
-rw-r--r--  utils/test/testapi/opnfv_testapi/common/config.py                      |  36
-rw-r--r--  utils/test/testapi/opnfv_testapi/db/__init__.py                        |   0
-rw-r--r--  utils/test/testapi/opnfv_testapi/db/api.py                             |  38
-rw-r--r--  utils/test/testapi/opnfv_testapi/resources/handlers.py                 |  36
-rw-r--r--  utils/test/testapi/opnfv_testapi/resources/result_handlers.py          |  35
-rw-r--r--  utils/test/testapi/opnfv_testapi/router/url_mappings.py                |   4
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/common/test_config.py     |  27
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/conftest.py                |   8
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py            |   3
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/resources/test_base.py    |  25
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/resources/test_result.py  |  16
-rw-r--r--  utils/test/testapi/opnfv_testapi/tests/unit/resources/test_token.py   |   2
-rw-r--r--  utils/test/testapi/opnfv_testapi/ui/auth/sign.py                       |  11
-rw-r--r--  utils/test/testapi/opnfv_testapi/ui/auth/user.py                       |   3
-rw-r--r--  utils/test/testapi/opnfv_testapi/ui/root.py                            |   4
-rwxr-xr-x  utils/test/testapi/run_test.sh                                         |  40
28 files changed, 272 insertions, 355 deletions
diff --git a/jjb/apex/apex.yml b/jjb/apex/apex.yml
index 0123d755a..fd4dc9ff5 100644
--- a/jjb/apex/apex.yml
+++ b/jjb/apex/apex.yml
@@ -740,7 +740,7 @@
predefined-parameters:
DEPLOY_SCENARIO=$DEPLOY_SCENARIO
kill-phase-on: NEVER
- enable-condition: "DEPLOY_SCENARIO =~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
+ enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
abort-all-job: false
git-revision: false
- conditional-step:
diff --git a/jjb/apex/apex.yml.j2 b/jjb/apex/apex.yml.j2
index 7466a8301..d7b67c344 100644
--- a/jjb/apex/apex.yml.j2
+++ b/jjb/apex/apex.yml.j2
@@ -684,7 +684,7 @@
predefined-parameters:
DEPLOY_SCENARIO=$DEPLOY_SCENARIO
kill-phase-on: NEVER
- enable-condition: "DEPLOY_SCENARIO =~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
+ enable-condition: "def m = '$DEPLOY_SCENARIO' ==~ /os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha/"
abort-all-job: false
git-revision: false
- conditional-step:
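
Both hunks above apply the same fix: the bare DEPLOY_SCENARIO token was evaluated as a Groovy identifier rather than the expanded job parameter, and Groovy's =~ operator matches substrings. The replacement injects the parameter as a quoted string and switches to ==~, which succeeds only on a whole-string match. A rough Python analogue of the intended semantics (the real expression is evaluated as Groovy by the Jenkins conditional-step plugin):

    import re

    PATTERN = r"os-(nosdn-nofeature|nosdn-kvm|odl_l3-fdio)-ha"

    def enable_condition(deploy_scenario):
        # Groovy's ==~ demands a whole-string match, like re.fullmatch;
        # the previous =~ behaved more like re.search.
        return re.fullmatch(PATTERN, deploy_scenario) is not None

    assert enable_condition("os-nosdn-kvm-ha")
    assert not enable_condition("os-odl_l2-nofeature-ha")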
diff --git a/jjb/compass4nfv/compass-ci-jobs.yml b/jjb/compass4nfv/compass-ci-jobs.yml
index 467e168bc..22320baee 100644
--- a/jjb/compass4nfv/compass-ci-jobs.yml
+++ b/jjb/compass4nfv/compass-ci-jobs.yml
@@ -10,12 +10,14 @@
stream: master
branch: '{stream}'
gs-pathname: ''
+ ppa-pathname: '/{stream}'
disabled: false
openstack-version: ocata
danube: &danube
stream: danube
branch: 'stable/{stream}'
gs-pathname: '/{stream}'
+ ppa-pathname: '/{stream}'
disabled: false
openstack-version: newton
#--------------------------------
@@ -118,6 +120,7 @@
- compass-ci-parameter:
installer: '{installer}'
gs-pathname: '{gs-pathname}'
+ ppa-pathname: '{ppa-pathname}'
- string:
name: DEPLOY_SCENARIO
default: '{scenario}'
@@ -281,6 +284,7 @@
- compass-ci-parameter:
installer: '{installer}'
gs-pathname: '{gs-pathname}'
+ ppa-pathname: '{ppa-pathname}'
- '{slave-label}-defaults'
- '{installer}-defaults'
@@ -290,10 +294,25 @@
builders:
- description-setter:
description: "POD: $NODE_NAME"
- - shell:
- !include-raw-escape: ./compass-download-artifact.sh
- - shell:
- !include-raw-escape: ./compass-deploy.sh
+ - conditional-step:
+ condition-kind: regex-match
+ regex: master
+ label: '{stream}'
+ steps:
+ - shell:
+ !include-raw-escape: ./compass-build.sh
+ - shell:
+ !include-raw-escape: ./compass-deploy.sh
+ - conditional-step:
+ condition-kind: regex-match
+ regex: danube
+ label: '{stream}'
+ steps:
+ - shell:
+ !include-raw-escape: ./compass-download-artifact.sh
+ - shell:
+ !include-raw-escape: ./compass-deploy.sh
+
########################
# parameter macros
@@ -309,6 +328,16 @@
name: GS_URL
default: '$GS_BASE{gs-pathname}'
description: "URL to Google Storage."
+ - string:
+ name: CACHE_DIRECTORY
+ default: "$HOME/opnfv/cache/$PROJECT{gs-pathname}"
+ description: "Directory where the cache to be used during the build is located."
+ - string:
+ name: PPA_REPO
+ default: "http://artifacts.opnfv.org/compass4nfv/package{ppa-pathname}"
+ - string:
+ name: PPA_CACHE
+ default: "$WORKSPACE/work/repo/"
########################
# trigger macros
diff --git a/jjb/compass4nfv/compass-deploy.sh b/jjb/compass4nfv/compass-deploy.sh
index 2668ccdf8..88327abcf 100644
--- a/jjb/compass4nfv/compass-deploy.sh
+++ b/jjb/compass4nfv/compass-deploy.sh
@@ -6,24 +6,23 @@ echo "Starting the deployment on baremetal environment using $INSTALLER_TYPE. Th
echo "--------------------------------------------------------"
echo
-# source the properties file so we get OPNFV vars
-source $BUILD_DIRECTORY/latest.properties
-
-# echo the info about artifact that is used during the deployment
-echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
-
-if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
- # for none-merge deployments
- # checkout the commit that was used for building the downloaded artifact
- # to make sure the ISO and deployment mechanism uses same versions
- echo "Checking out $OPNFV_GIT_SHA1"
- git checkout $OPNFV_GIT_SHA1 --quiet
-fi
-
echo 1 > /proc/sys/vm/drop_caches
export CONFDIR=$WORKSPACE/deploy/conf
if [[ "$BRANCH" = 'stable/danube' ]]; then
+ # source the properties file so we get OPNFV vars
+ source $BUILD_DIRECTORY/latest.properties
+ # echo the info about artifact that is used during the deployment
+ echo "Using ${OPNFV_ARTIFACT_URL/*\/} for deployment"
+
+ if [[ ! "$JOB_NAME" =~ (verify|merge) ]]; then
+ # for none-merge deployments
+ # checkout the commit that was used for building the downloaded artifact
+ # to make sure the ISO and deployment mechanism uses same versions
+ echo "Checking out $OPNFV_GIT_SHA1"
+ git checkout $OPNFV_GIT_SHA1 --quiet
+ fi
+
export ISO_URL=file://$BUILD_DIRECTORY/compass.iso
else
export ISO_URL=file://$BUILD_DIRECTORY/compass.tar.gz
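
The relocated block gates the artifact handling on the branch: only stable/danube still sources latest.properties and checks out the artifact's commit before deploying from the prebuilt ISO, while master deploys from the locally built tarball. A hypothetical Python condensation of the URL selection:

    def iso_url(branch, build_directory):
        # stable/danube consumes the downloaded ISO; other branches
        # (master) use the tarball produced by compass-build.sh.
        artifact = 'compass.iso' if branch == 'stable/danube' else 'compass.tar.gz'
        return 'file://%s/%s' % (build_directory, artifact)

    assert iso_url('stable/danube', '/b').endswith('compass.iso')
    assert iso_url('master', '/b').endswith('compass.tar.gz')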
diff --git a/jjb/doctor/doctor.yml b/jjb/doctor/doctor.yml
index 5bb8f7499..c6b2cb6d0 100644
--- a/jjb/doctor/doctor.yml
+++ b/jjb/doctor/doctor.yml
@@ -196,7 +196,7 @@
wget https://git.opnfv.org/functest/plain/functest/ci/download_images.sh -O functest/ci/download_images.sh
- 'functest-suite-builder'
- shell: |
- functest_log="$HOME/opnfv/functest/results/{stream}/{project}.log"
+ functest_log="$HOME/opnfv/functest/results/{stream}/$FUNCTEST_SUITE_NAME.log"
# NOTE: checking the test result, as the previous job could return
# 0 regardless the result of doctor test scenario.
grep -e ' OK$' $functest_log || exit 1
@@ -205,7 +205,7 @@
- archive:
artifacts: 'tests/*.log'
- archive:
- artifacts: 'functest_results/{project}.log'
+ artifacts: 'functest_results/$FUNCTEST_SUITE_NAME.log'
#####################################
diff --git a/jjb/joid/joid-daily-jobs.yml b/jjb/joid/joid-daily-jobs.yml
index 13ea9b308..1ff260ac8 100644
--- a/jjb/joid/joid-daily-jobs.yml
+++ b/jjb/joid/joid-daily-jobs.yml
@@ -17,11 +17,6 @@
branch: '{stream}'
disabled: false
gs-pathname: ''
- danube: &danube
- stream: danube
- branch: 'stable/{stream}'
- disabled: false
- gs-pathname: '/{stream}'
#--------------------------------
# POD, INSTALLER, AND BRANCH MAPPING
#--------------------------------
@@ -34,12 +29,6 @@
- virtual:
slave-label: joid-virtual
<<: *master
- - baremetal:
- slave-label: joid-baremetal
- <<: *danube
- - virtual:
- slave-label: joid-virtual
- <<: *danube
#--------------------------------
# None-CI PODs
#--------------------------------
@@ -62,7 +51,7 @@
- 'os-nosdn-lxd-noha':
auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
- 'os-odl_l2-nofeature-ha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+ auto-trigger-name: 'daily-trigger-disabled'
- 'os-onos-nofeature-ha':
auto-trigger-name: 'daily-trigger-disabled'
- 'os-odl_l2-nofeature-noha':
@@ -76,9 +65,11 @@
- 'os-ocl-nofeature-noha':
auto-trigger-name: 'daily-trigger-disabled'
- 'k8-nosdn-nofeature-noha':
- auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+ auto-trigger-name: 'daily-trigger-disabled'
- 'k8-nosdn-lb-noha':
auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
+ - 'k8-ovn-lb-noha':
+ auto-trigger-name: 'joid-{scenario}-{pod}-{stream}-trigger'
jobs:
- 'joid-{scenario}-{pod}-daily-{stream}'
@@ -256,23 +247,6 @@
name: 'joid-os-nosdn-nofeature-ha-cengn-pod1-master-trigger'
triggers:
- timed: ''
-# os-nosdn-nofeature-ha trigger - branch: danube
-- trigger:
- name: 'joid-os-nosdn-nofeature-ha-baremetal-danube-trigger'
- triggers:
- - timed: '0 2 * * *'
-- trigger:
- name: 'joid-os-nosdn-nofeature-ha-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-nosdn-nofeature-ha-orange-pod1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-nosdn-nofeature-ha-cengn-pod1-danube-trigger'
- triggers:
- - timed: ''
# os-odl_l2-nofeature-ha trigger - branch: master
- trigger:
name: 'joid-os-odl_l2-nofeature-ha-baremetal-master-trigger'
@@ -290,23 +264,6 @@
name: 'joid-os-odl_l2-nofeature-ha-cengn-pod1-master-trigger'
triggers:
- timed: ''
-# os-odl_l2-nofeature-ha trigger - branch: danube
-- trigger:
- name: 'joid-os-odl_l2-nofeature-ha-baremetal-danube-trigger'
- triggers:
- - timed: '0 7 * * *'
-- trigger:
- name: 'joid-os-odl_l2-nofeature-ha-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-odl_l2-nofeature-ha-orange-pod1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-odl_l2-nofeature-ha-cengn-pod1-danube-trigger'
- triggers:
- - timed: ''
# os-onos-nofeature-ha trigger - branch: master
- trigger:
name: 'joid-os-onos-nofeature-ha-baremetal-master-trigger'
@@ -324,23 +281,6 @@
name: 'joid-os-onos-nofeature-ha-cengn-pod1-master-trigger'
triggers:
- timed: ''
-# os-onos-nofeature-ha trigger - branch: danube
-- trigger:
- name: 'joid-os-onos-nofeature-ha-baremetal-danube-trigger'
- triggers:
- - timed: '0 12 * * *'
-- trigger:
- name: 'joid-os-onos-nofeature-ha-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-onos-nofeature-ha-orange-pod1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-onos-nofeature-ha-cengn-pod1-danube-trigger'
- triggers:
- - timed: ''
# os-onos-sfc-ha trigger - branch: master
- trigger:
name: 'joid-os-onos-sfc-ha-baremetal-master-trigger'
@@ -358,23 +298,6 @@
name: 'joid-os-onos-sfc-ha-cengn-pod1-master-trigger'
triggers:
- timed: ''
-# os-onos-sfc-ha trigger - branch: danube
-- trigger:
- name: 'joid-os-onos-sfc-ha-baremetal-danube-trigger'
- triggers:
- - timed: '0 17 * * *'
-- trigger:
- name: 'joid-os-onos-sfc-ha-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-onos-sfc-ha-orange-pod1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-onos-sfc-ha-cengn-pod1-danube-trigger'
- triggers:
- - timed: ''
# os-nosdn-lxd-noha trigger - branch: master
- trigger:
name: 'joid-os-nosdn-lxd-noha-baremetal-master-trigger'
@@ -392,23 +315,6 @@
name: 'joid-os-nosdn-lxd-noha-cengn-pod1-master-trigger'
triggers:
- timed: ''
-# os-nosdn-lxd-noha trigger - branch: danube
-- trigger:
- name: 'joid-os-nosdn-lxd-noha-baremetal-danube-trigger'
- triggers:
- - timed: '0 22 * * *'
-- trigger:
- name: 'joid-os-nosdn-lxd-noha-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-nosdn-lxd-noha-orange-pod1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-nosdn-lxd-noha-cengn-pod1-danube-trigger'
- triggers:
- - timed: ''
# os-nosdn-lxd-ha trigger - branch: master
- trigger:
name: 'joid-os-nosdn-lxd-ha-baremetal-master-trigger'
@@ -426,23 +332,6 @@
name: 'joid-os-nosdn-lxd-ha-cengn-pod1-master-trigger'
triggers:
- timed: ''
-# os-nosdn-lxd-ha trigger - branch: danube
-- trigger:
- name: 'joid-os-nosdn-lxd-ha-baremetal-danube-trigger'
- triggers:
- - timed: '0 10 * * *'
-- trigger:
- name: 'joid-os-nosdn-lxd-ha-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-nosdn-lxd-ha-orange-pod1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-nosdn-lxd-ha-cengn-pod1-danube-trigger'
- triggers:
- - timed: ''
# os-nosdn-nofeature-noha trigger - branch: master
- trigger:
name: 'joid-os-nosdn-nofeature-noha-baremetal-master-trigger'
@@ -460,23 +349,6 @@
name: 'joid-os-nosdn-nofeature-noha-cengn-pod1-master-trigger'
triggers:
- timed: ''
-# os-nosdn-nofeature-noha trigger - branch: danube
-- trigger:
- name: 'joid-os-nosdn-nofeature-noha-baremetal-danube-trigger'
- triggers:
- - timed: '0 4 * * *'
-- trigger:
- name: 'joid-os-nosdn-nofeature-noha-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-nosdn-nofeature-noha-orange-pod1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-os-nosdn-nofeature-noha-cengn-pod1-danube-trigger'
- triggers:
- - timed: ''
# k8-nosdn-nofeature-noha trigger - branch: master
- trigger:
name: 'joid-k8-nosdn-nofeature-noha-baremetal-master-trigger'
@@ -494,23 +366,6 @@
name: 'joid-k8-nosdn-nofeature-noha-cengn-pod1-master-trigger'
triggers:
- timed: ''
-# k8-nosdn-nofeature-noha trigger - branch: danube
-- trigger:
- name: 'joid-k8-nosdn-nofeature-noha-baremetal-danube-trigger'
- triggers:
- - timed: '0 15 * * *'
-- trigger:
- name: 'joid-k8-nosdn-nofeature-noha-virtual-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-k8-nosdn-nofeature-noha-orange-pod1-danube-trigger'
- triggers:
- - timed: ''
-- trigger:
- name: 'joid-k8-nosdn-nofeature-noha-cengn-pod1-danube-trigger'
- triggers:
- - timed: ''
# k8-nosdn-lb-noha trigger - branch: master
- trigger:
name: 'joid-k8-nosdn-lb-noha-baremetal-master-trigger'
@@ -528,20 +383,20 @@
name: 'joid-k8-nosdn-lb-noha-cengn-pod1-master-trigger'
triggers:
- timed: ''
-# k8-nosdn-lb-noha trigger - branch: danube
+# k8-ovn-lb-noha trigger - branch: master
- trigger:
- name: 'joid-k8-nosdn-lb-noha-baremetal-danube-trigger'
+ name: 'joid-k8-ovn-lb-noha-baremetal-master-trigger'
triggers:
- - timed: '0 20 * * *'
+ - timed: '5 17 * * *'
- trigger:
- name: 'joid-k8-nosdn-lb-noha-virtual-danube-trigger'
+ name: 'joid-k8-ovn-lb-noha-virtual-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'joid-k8-nosdn-lb-noha-orange-pod1-danube-trigger'
+ name: 'joid-k8-ovn-lb-noha-orange-pod1-master-trigger'
triggers:
- timed: ''
- trigger:
- name: 'joid-k8-nosdn-lb-noha-cengn-pod1-danube-trigger'
+ name: 'joid-k8-ovn-lb-noha-cengn-pod1-master-trigger'
triggers:
- timed: ''
diff --git a/jjb/releng/opnfv-docker-arm.yml b/jjb/releng/opnfv-docker-arm.yml
index 417fc702c..7502b17a1 100644
--- a/jjb/releng/opnfv-docker-arm.yml
+++ b/jjb/releng/opnfv-docker-arm.yml
@@ -65,6 +65,10 @@
default: ""
description: "Release version, e.g. 1.0, 2.0, 3.0"
- string:
+ name: DOCKER_DIR
+ default: "docker"
+ description: "Directory containing files needed by the Dockerfile"
+ - string:
name: DOCKERFILE
default: "Dockerfile.aarch64"
description: "Dockerfile to use for creating the image."
diff --git a/jjb/releng/opnfv-docker.sh b/jjb/releng/opnfv-docker.sh
index ebd0c9f3d..0de3df28e 100644
--- a/jjb/releng/opnfv-docker.sh
+++ b/jjb/releng/opnfv-docker.sh
@@ -54,7 +54,7 @@ if [[ -n "$(docker images | grep $DOCKER_REPO_NAME)" ]]; then
done
fi
-cd $WORKSPACE/docker
+cd $WORKSPACE/$DOCKER_DIR
HOST_ARCH=$(uname -m)
if [ ! -f "${DOCKERFILE}" ]; then
# If this is expected to be a Dockerfile for other arch than x86
diff --git a/jjb/releng/opnfv-docker.yml b/jjb/releng/opnfv-docker.yml
index 095ba4129..b898fa3f1 100644
--- a/jjb/releng/opnfv-docker.yml
+++ b/jjb/releng/opnfv-docker.yml
@@ -23,62 +23,101 @@
other-receivers: &other-receivers
receivers: ''
- project:
+ dockerfile: "Dockerfile"
+ dockerdir: "docker"
+
+ # This is the dockerhub repo the image will be pushed to as
+ # 'opnfv/{dockerrepo}. See: DOCKER_REPO_NAME parameter.
+ # 'project' is the OPNFV repo we expect to contain the Dockerfile
+ dockerrepo:
# projects with jobs for master
- 'releng-anteater':
+ project: 'releng-anteater'
<<: *master
<<: *other-receivers
- 'bottlenecks':
+ project: 'bottlenecks'
<<: *master
<<: *other-receivers
- 'cperf':
+ project: 'cperf'
<<: *master
<<: *other-receivers
- 'dovetail':
+ project: 'dovetail'
<<: *master
<<: *other-receivers
- 'functest':
+ project: 'functest'
<<: *master
<<: *functest-receivers
- 'qtip':
+ project: 'qtip'
<<: *master
<<: *other-receivers
- 'storperf':
+ project: 'storperf'
+ <<: *master
+ <<: *other-receivers
+ - 'storperf-master':
+ project: 'storperf'
+ dockerdir: 'docker/storperf-master'
+ <<: *master
+ <<: *other-receivers
+ - 'storperf-httpfrontend':
+ project: 'storperf'
+ dockerdir: 'docker/storperf-httpfrontend'
+ <<: *master
+ <<: *other-receivers
+ - 'storperf-reporting':
+ project: 'storperf'
+ dockerdir: 'docker/storperf-reporting'
<<: *master
<<: *other-receivers
- 'yardstick':
+ project: 'yardstick'
<<: *master
<<: *other-receivers
# projects with jobs for stable
- 'bottlenecks':
+ project: 'bottlenecks'
<<: *danube
<<: *other-receivers
- 'functest':
+ project: 'functest'
<<: *danube
<<: *functest-receivers
- 'qtip':
+ project: 'qtip'
<<: *danube
<<: *other-receivers
- 'storperf':
+ project: 'storperf'
<<: *danube
<<: *other-receivers
- 'yardstick':
+ project: 'yardstick'
<<: *danube
<<: *other-receivers
jobs:
- - '{project}-docker-build-push-{stream}'
+ - "{dockerrepo}-docker-build-push-{stream}"
- project:
name: opnfv-monitor-docker # projects which only monitor dedicated file or path
+ dockerfile: "Dockerfile"
+ dockerdir: "docker"
+
project:
# projects with jobs for master
- 'daisy':
+ dockerrepo: 'daisy'
<<: *master
- 'escalator':
+ dockerrepo: 'escalator'
<<: *master
jobs:
@@ -88,7 +127,7 @@
# job templates
########################
- job-template:
- name: '{project}-docker-build-push-{stream}'
+ name: '{dockerrepo}-docker-build-push-{stream}'
disabled: '{obj:disabled}'
@@ -103,9 +142,13 @@
description: "To enable/disable pushing the image to Dockerhub."
- string:
name: DOCKER_REPO_NAME
- default: "opnfv/{project}"
+ default: "opnfv/{dockerrepo}"
description: "Dockerhub repo to be pushed to."
- string:
+ name: DOCKER_DIR
+ default: "{dockerdir}"
+ description: "Directory containing files needed by the Dockerfile"
+ - string:
name: COMMIT_ID
default: ""
description: "commit id to make a snapshot docker image"
@@ -115,7 +158,7 @@
description: "Release version, e.g. 1.0, 2.0, 3.0"
- string:
name: DOCKERFILE
- default: "Dockerfile"
+ default: "{dockerfile}"
description: "Dockerfile to use for creating the image."
scm:
diff --git a/jjb/yardstick/yardstick-daily.sh b/jjb/yardstick/yardstick-daily.sh
index cf37ac262..56d087473 100755
--- a/jjb/yardstick/yardstick-daily.sh
+++ b/jjb/yardstick/yardstick-daily.sh
@@ -3,6 +3,7 @@ set -e
[[ $CI_DEBUG == true ]] && redirect="/dev/stdout" || redirect="/dev/null"
rc_file_vol=""
+cacert_file_vol=""
sshkey=""
if [[ ${INSTALLER_TYPE} == 'apex' ]]; then
@@ -23,6 +24,10 @@ if [[ ${INSTALLER_TYPE} == 'joid' ]]; then
rc_file_vol="-v $LAB_CONFIG/admin-openrc:/etc/yardstick/openstack.creds"
# If dev lab, credentials may not be the default ones, just provide a path to put them into docker
# replace the default one by the customized one provided by jenkins config
+elif [[ ${INSTALLER_TYPE} == 'compass' && ${BRANCH} == 'master' ]]; then
+ cacert_file_vol="-v ${HOME}/os_cacert:/etc/yardstick/os_cacert"
+ echo "export OS_CACERT=/etc/yardstick/os_cacert" >> ${HOME}/opnfv-openrc.sh
+ rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/etc/yardstick/openstack.creds"
else
rc_file_vol="-v ${HOME}/opnfv-openrc.sh:/etc/yardstick/openstack.creds"
fi
@@ -50,7 +55,7 @@ sudo rm -rf ${dir_result}/*
map_log_dir="-v ${dir_result}:/tmp/yardstick"
# Run docker
-cmd="sudo docker run ${opts} ${envs} ${rc_file_vol} ${map_log_dir} ${sshkey} opnfv/yardstick:${DOCKER_TAG} \
+cmd="sudo docker run ${opts} ${envs} ${rc_file_vol} ${cacert_file_vol} ${map_log_dir} ${sshkey} opnfv/yardstick:${DOCKER_TAG} \
exec_tests.sh ${YARDSTICK_DB_BACKEND} ${YARDSTICK_SCENARIO_SUITE_NAME}"
echo "Yardstick: Running docker cmd: ${cmd}"
${cmd}
diff --git a/utils/test/testapi/opnfv_testapi/cmd/server.py b/utils/test/testapi/opnfv_testapi/cmd/server.py
index 545d5e367..a5ac5eb6b 100644
--- a/utils/test/testapi/opnfv_testapi/cmd/server.py
+++ b/utils/test/testapi/opnfv_testapi/cmd/server.py
@@ -29,40 +29,18 @@ TODOs :
"""
-import argparse
-import sys
-
-import motor
import tornado.ioloop
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
from opnfv_testapi.router import url_mappings
from opnfv_testapi.tornado_swagger import swagger
-CONF = None
-
-
-def parse_config(argv=[]):
- global CONF
- parser = argparse.ArgumentParser()
- parser.add_argument("-c", "--config-file", dest='config_file',
- help="Config file location")
- args = parser.parse_args(argv)
- if args.config_file:
- config.Config.CONFIG = args.config_file
- CONF = config.Config()
-
-
-def get_db():
- return motor.MotorClient(CONF.mongo_url)[CONF.mongo_dbname]
-
def make_app():
swagger.docs(base_url=CONF.swagger_base_url,
static_path=CONF.static_path)
return swagger.Application(
url_mappings.mappings,
- db=get_db(),
debug=CONF.api_debug,
auth=CONF.api_authenticate,
cookie_secret='opnfv-testapi',
@@ -70,7 +48,6 @@ def make_app():
def main():
- parse_config(sys.argv[1:])
application = make_app()
application.listen(CONF.api_port)
tornado.ioloop.IOLoop.current().start()
diff --git a/utils/test/testapi/opnfv_testapi/common/check.py b/utils/test/testapi/opnfv_testapi/common/check.py
index 67e8fbd40..24ba876a9 100644
--- a/utils/test/testapi/opnfv_testapi/common/check.py
+++ b/utils/test/testapi/opnfv_testapi/common/check.py
@@ -13,6 +13,7 @@ from tornado import web
from opnfv_testapi.common import message
from opnfv_testapi.common import raises
+from opnfv_testapi.db import api as dbapi
def authenticate(method):
@@ -26,7 +27,7 @@ def authenticate(method):
except KeyError:
raises.Unauthorized(message.unauthorized())
query = {'access_token': token}
- check = yield self._eval_db_find_one(query, 'tokens')
+ check = yield dbapi.db_find_one('tokens', query)
if not check:
raises.Forbidden(message.invalid_token())
ret = yield gen.coroutine(method)(self, *args, **kwargs)
@@ -38,7 +39,7 @@ def not_exist(xstep):
@functools.wraps(xstep)
def wrap(self, *args, **kwargs):
query = kwargs.get('query')
- data = yield self._eval_db_find_one(query)
+ data = yield dbapi.db_find_one(self.table, query)
if not data:
raises.NotFound(message.not_found(self.table, query))
ret = yield gen.coroutine(xstep)(self, data, *args, **kwargs)
@@ -78,7 +79,7 @@ def carriers_exist(xstep):
carriers = kwargs.pop('carriers', {})
if carriers:
for table, query in carriers:
- exist = yield self._eval_db_find_one(query(), table)
+ exist = yield dbapi.db_find_one(table, query())
if not exist:
raises.Forbidden(message.not_found(table, query()))
ret = yield gen.coroutine(xstep)(self, *args, **kwargs)
@@ -91,7 +92,7 @@ def new_not_exists(xstep):
def wrap(self, *args, **kwargs):
query = kwargs.get('query')
if query:
- to_data = yield self._eval_db_find_one(query())
+ to_data = yield dbapi.db_find_one(self.table, query())
if to_data:
raises.Forbidden(message.exist(self.table, query()))
ret = yield gen.coroutine(xstep)(self, *args, **kwargs)
@@ -105,7 +106,7 @@ def updated_one_not_exist(xstep):
db_keys = kwargs.pop('db_keys', [])
query = self._update_query(db_keys, data)
if query:
- to_data = yield self._eval_db_find_one(query)
+ to_data = yield dbapi.db_find_one(self.table, query)
if to_data:
raises.Forbidden(message.exist(self.table, query))
ret = yield gen.coroutine(xstep)(self, data, *args, **kwargs)
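
Every decorator in check.py now reaches the database through the shared opnfv_testapi.db.api module instead of the handler-bound _eval_db_find_one, so the checks depend on nothing from the handler beyond self.table. A condensed sketch of the resulting pattern, assembled from the fragments above (the closing gen.Return is assumed):

    import functools

    from tornado import gen

    from opnfv_testapi.common import message
    from opnfv_testapi.common import raises
    from opnfv_testapi.db import api as dbapi

    def not_exist(xstep):
        @functools.wraps(xstep)
        def wrap(self, *args, **kwargs):
            query = kwargs.get('query')
            # the shared db module replaces self._eval_db_find_one(query)
            data = yield dbapi.db_find_one(self.table, query)
            if not data:
                raises.NotFound(message.not_found(self.table, query))
            ret = yield gen.coroutine(xstep)(self, data, *args, **kwargs)
            raise gen.Return(ret)
        return wrap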
diff --git a/utils/test/testapi/opnfv_testapi/common/config.py b/utils/test/testapi/opnfv_testapi/common/config.py
index f73c0abf2..4cd53c619 100644
--- a/utils/test/testapi/opnfv_testapi/common/config.py
+++ b/utils/test/testapi/opnfv_testapi/common/config.py
@@ -8,14 +8,16 @@
# feng.xiaowei@zte.com.cn remove prepare_put_request 5-30-2016
##############################################################################
import ConfigParser
+import argparse
import os
+import sys
class Config(object):
- CONFIG = None
def __init__(self):
- self.file = self.CONFIG if self.CONFIG else self._default_config()
+ self.config_file = None
+ self._set_config_file()
self._parse()
self._parse_per_page()
self.static_path = os.path.join(
@@ -24,11 +26,11 @@ class Config(object):
'static')
def _parse(self):
- if not os.path.exists(self.file):
- raise Exception("%s not found" % self.file)
+ if not os.path.exists(self.config_file):
+ raise Exception("%s not found" % self.config_file)
config = ConfigParser.RawConfigParser()
- config.read(self.file)
+ config.read(self.config_file)
self._parse_section(config)
def _parse_section(self, config):
@@ -53,8 +55,24 @@ class Config(object):
value = False
return value
- @staticmethod
- def _default_config():
+ def _set_config_file(self):
+ if not self._set_sys_config_file():
+ self._set_default_config_file()
+
+ def _set_sys_config_file(self):
+ parser = argparse.ArgumentParser()
+ parser.add_argument("-c", "--config-file", dest='config_file',
+ help="Config file location", metavar="FILE")
+ args, _ = parser.parse_known_args(sys.argv)
+ try:
+ self.config_file = args.config_file
+ finally:
+ return self.config_file is not None
+
+ def _set_default_config_file(self):
is_venv = os.getenv('VIRTUAL_ENV')
- return os.path.join('/' if not is_venv else is_venv,
- 'etc/opnfv_testapi/config.ini')
+ self.config_file = os.path.join('/' if not is_venv else is_venv,
+ 'etc/opnfv_testapi/config.ini')
+
+
+CONF = Config()
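
Config now locates its own file and is instantiated once at import time as the module-level CONF singleton. Using parse_known_args rather than parse_args is what makes the import-time parse safe: unrecognized options (including the argv[0] element, since the code passes sys.argv rather than sys.argv[1:]) come back as leftovers instead of raising SystemExit. A quick illustration:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("-c", "--config-file", dest="config_file")
    # simulate a foreign command line: only -c is recognized
    args, extras = parser.parse_known_args(
        ["prog", "--port", "8000", "-c", "/etc/opnfv_testapi/config.ini"])

    assert args.config_file == "/etc/opnfv_testapi/config.ini"
    assert "--port" in extras and "prog" in extras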
diff --git a/utils/test/testapi/opnfv_testapi/db/__init__.py b/utils/test/testapi/opnfv_testapi/db/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/utils/test/testapi/opnfv_testapi/db/__init__.py
diff --git a/utils/test/testapi/opnfv_testapi/db/api.py b/utils/test/testapi/opnfv_testapi/db/api.py
new file mode 100644
index 000000000..c057480d4
--- /dev/null
+++ b/utils/test/testapi/opnfv_testapi/db/api.py
@@ -0,0 +1,38 @@
+import motor
+
+from opnfv_testapi.common.config import CONF
+
+DB = motor.MotorClient(CONF.mongo_url)[CONF.mongo_dbname]
+
+
+def db_update(collection, query, update_req):
+ return _eval_db(collection, 'update', query, update_req, check_keys=False)
+
+
+def db_delete(collection, query):
+ return _eval_db(collection, 'remove', query)
+
+
+def db_aggregate(collection, pipelines):
+ return _eval_db(collection, 'aggregate', pipelines, allowDiskUse=True)
+
+
+def db_list(collection, query):
+ return _eval_db(collection, 'find', query)
+
+
+def db_save(collection, data):
+ return _eval_db(collection, 'insert', data, check_keys=False)
+
+
+def db_find_one(collection, query):
+ return _eval_db(collection, 'find_one', query)
+
+
+def _eval_db(collection, method, *args, **kwargs):
+ exec_collection = DB.__getattr__(collection)
+ return exec_collection.__getattribute__(method)(*args, **kwargs)
+
+
+def _eval_db_find_one(query, table=None):
+ return _eval_db(table, 'find_one', query)
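
The new db.api module binds the motor client once at import, reusing the now-importable CONF for the connection settings, and exposes one helper per verb. Each helper returns a motor future, so callers yield it from inside a tornado coroutine; a hypothetical usage sketch:

    from tornado import gen

    from opnfv_testapi.db import api as dbapi

    @gen.coroutine
    def deactivate_pod(name):
        # both calls return motor futures resolved by the IOLoop
        pod = yield dbapi.db_find_one('pods', {'name': name})
        if pod:
            yield dbapi.db_update('pods', {'name': name},
                                  {'$set': {'active': False}})
        raise gen.Return(pod is not None)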
diff --git a/utils/test/testapi/opnfv_testapi/resources/handlers.py b/utils/test/testapi/opnfv_testapi/resources/handlers.py
index f23cc57ee..8a3a2db38 100644
--- a/utils/test/testapi/opnfv_testapi/resources/handlers.py
+++ b/utils/test/testapi/opnfv_testapi/resources/handlers.py
@@ -20,8 +20,8 @@
# feng.xiaowei@zte.com.cn remove DashboardHandler 5-30-2016
##############################################################################
-from datetime import datetime
import json
+from datetime import datetime
from tornado import gen
from tornado import web
@@ -29,6 +29,7 @@ from tornado import web
from opnfv_testapi.common import check
from opnfv_testapi.common import message
from opnfv_testapi.common import raises
+from opnfv_testapi.db import api as dbapi
from opnfv_testapi.resources import models
from opnfv_testapi.tornado_swagger import swagger
@@ -38,7 +39,6 @@ DEFAULT_REPRESENTATION = "application/json"
class GenericApiHandler(web.RequestHandler):
def __init__(self, application, request, **kwargs):
super(GenericApiHandler, self).__init__(application, request, **kwargs)
- self.db = self.settings["db"]
self.json_args = None
self.table = None
self.table_cls = None
@@ -90,8 +90,7 @@ class GenericApiHandler(web.RequestHandler):
if self.table != 'results':
data.creation_date = datetime.now()
- _id = yield self._eval_db(self.table, 'insert', data.format(),
- check_keys=False)
+ _id = yield dbapi.db_save(self.table, data.format())
if 'name' in self.json_args:
resource = data.name
else:
@@ -110,17 +109,14 @@ class GenericApiHandler(web.RequestHandler):
total_pages = 0
if page > 0:
- cursor = self._eval_db(self.table, 'find', query)
+ cursor = dbapi.db_list(self.table, query)
records_count = yield cursor.count()
total_pages = self._calc_total_pages(records_count,
last,
page,
per_page)
pipelines = self._set_pipelines(query, sort, last, page, per_page)
- cursor = self._eval_db(self.table,
- 'aggregate',
- pipelines,
- allowDiskUse=True)
+ cursor = dbapi.db_aggregate(self.table, pipelines)
data = list()
while (yield cursor.fetch_next):
data.append(self.format_data(cursor.next_object()))
@@ -176,7 +172,7 @@ class GenericApiHandler(web.RequestHandler):
@check.authenticate
@check.not_exist
def _delete(self, data, query=None):
- yield self._eval_db(self.table, 'remove', query)
+ yield dbapi.db_delete(self.table, query)
self.finish_request()
@check.authenticate
@@ -186,8 +182,7 @@ class GenericApiHandler(web.RequestHandler):
def _update(self, data, query=None, **kwargs):
data = self.table_cls.from_dict(data)
update_req = self._update_requests(data)
- yield self._eval_db(self.table, 'update', query, update_req,
- check_keys=False)
+ yield dbapi.db_update(self.table, query, update_req)
update_req['_id'] = str(data._id)
self.finish_request(update_req)
@@ -230,23 +225,6 @@ class GenericApiHandler(web.RequestHandler):
query[key] = new
return query if not equal else dict()
- def _eval_db(self, table, method, *args, **kwargs):
- exec_collection = self.db.__getattr__(table)
- return exec_collection.__getattribute__(method)(*args, **kwargs)
-
- def _eval_db_find_one(self, query, table=None):
- if table is None:
- table = self.table
- return self._eval_db(table, 'find_one', query)
-
- def db_save(self, collection, data):
- self._eval_db(collection, 'insert', data, check_keys=False)
-
- def db_find_one(self, query, collection=None):
- if not collection:
- collection = self.table
- return self._eval_db(collection, 'find_one', query)
-
class VersionHandler(GenericApiHandler):
@swagger.operation(nickname='listAllVersions')
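
With the handler-local _eval_db*/db_save/db_find_one helpers removed, GenericApiHandler delegates all persistence to db.api and keeps only the pagination arithmetic. A sketch of the total-pages step, assuming a plain ceiling division (the real _calc_total_pages also validates the last/page combination):

    def calc_total_pages(records_count, per_page):
        # ceiling division: 35 records at 20 per page -> 2 pages
        return -(-records_count // per_page)

    assert calc_total_pages(35, 20) == 2
    assert calc_total_pages(40, 20) == 2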
diff --git a/utils/test/testapi/opnfv_testapi/resources/result_handlers.py b/utils/test/testapi/opnfv_testapi/resources/result_handlers.py
index 5eb1b925c..2bf1792f2 100644
--- a/utils/test/testapi/opnfv_testapi/resources/result_handlers.py
+++ b/utils/test/testapi/opnfv_testapi/resources/result_handlers.py
@@ -13,7 +13,7 @@ import json
from bson import objectid
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
from opnfv_testapi.common import message
from opnfv_testapi.common import raises
from opnfv_testapi.resources import handlers
@@ -21,8 +21,6 @@ from opnfv_testapi.resources import result_models
from opnfv_testapi.tornado_swagger import swagger
from opnfv_testapi.ui.auth import constants as auth_const
-CONF = config.Config()
-
class GenericResultHandler(handlers.GenericApiHandler):
def __init__(self, application, request, **kwargs):
@@ -68,7 +66,7 @@ class GenericResultHandler(handlers.GenericApiHandler):
del query['public']
if role != "reviewer":
query['user'] = openid
- elif k != 'last' and k != 'page':
+ elif k not in ['last', 'page', 'descend']:
query[k] = v
if date_range:
query['start_date'] = date_range
@@ -169,18 +167,27 @@ class ResultsCLHandler(GenericResultHandler):
@type signed: L{string}
@in signed: query
@required signed: False
+ @param descend: true, newest2oldest; false, oldest2newest
+ @type descend: L{string}
+ @in descend: query
+ @required descend: False
"""
- limitations = {'sort': {'_id': -1}}
- last = self.get_query_argument('last', 0)
- if last is not None:
- last = self.get_int('last', last)
- limitations.update({'last': last})
+ def descend_limit():
+ descend = self.get_query_argument('descend', 'true')
+ return -1 if descend.lower() == 'true' else 1
+
+ def last_limit():
+ return self.get_int('last', self.get_query_argument('last', 0))
+
+ def page_limit():
+ return self.get_int('page', self.get_query_argument('page', 0))
- page = self.get_query_argument('page', None)
- if page is not None:
- page = self.get_int('page', page)
- limitations.update({'page': page,
- 'per_page': CONF.api_results_per_page})
+ limitations = {
+ 'sort': {'_id': descend_limit()},
+ 'last': last_limit(),
+ 'page': page_limit(),
+ 'per_page': CONF.api_results_per_page
+ }
self._list(query=self.set_query(), **limitations)
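
The new descend flag feeds the Mongo sort direction on _id: -1 (the previously hard-coded value) returns newest first, 1 oldest first. Sorting on _id doubles as a creation-time sort because a BSON ObjectId embeds its creation timestamp in its first four bytes:

    import datetime

    from bson import objectid

    # the handler's mapping from the query flag to a sort direction
    def direction(descend='true'):
        return -1 if descend.lower() == 'true' else 1

    oid = objectid.ObjectId()
    # generation_time recovers the timestamp embedded in the ObjectId
    assert isinstance(oid.generation_time, datetime.datetime)
    assert direction('false') == 1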
diff --git a/utils/test/testapi/opnfv_testapi/router/url_mappings.py b/utils/test/testapi/opnfv_testapi/router/url_mappings.py
index 37e719b65..562fa5efe 100644
--- a/utils/test/testapi/opnfv_testapi/router/url_mappings.py
+++ b/utils/test/testapi/opnfv_testapi/router/url_mappings.py
@@ -8,7 +8,7 @@
##############################################################################
import tornado.web
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
from opnfv_testapi.resources import handlers
from opnfv_testapi.resources import pod_handlers
from opnfv_testapi.resources import project_handlers
@@ -58,7 +58,7 @@ mappings = [
# static path
(r'/(.*\.(css|png|gif|js|html|json|map|woff2|woff|ttf))',
tornado.web.StaticFileHandler,
- {'path': config.Config().static_path}),
+ {'path': CONF.static_path}),
(r'/', root.RootHandler),
(r'/api/v1/auth/signin', sign.SigninHandler),
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/common/test_config.py b/utils/test/testapi/opnfv_testapi/tests/unit/common/test_config.py
index 446b9442a..cc8743ca8 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/common/test_config.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/common/test_config.py
@@ -1,16 +1,15 @@
-import os
+import argparse
-from opnfv_testapi.common import config
-
-def test_config_success():
- config_file = os.path.join(os.path.dirname(__file__),
- '../../../../etc/config.ini')
- config.Config.CONFIG = config_file
- conf = config.Config()
- assert conf.mongo_url == 'mongodb://127.0.0.1:27017/'
- assert conf.mongo_dbname == 'test_results_collection'
- assert conf.api_port == 8000
- assert conf.api_debug is True
- assert conf.api_authenticate is False
- assert conf.swagger_base_url == 'http://localhost:8000'
+def test_config_normal(mocker, config_normal):
+ mocker.patch(
+ 'argparse.ArgumentParser.parse_known_args',
+ return_value=(argparse.Namespace(config_file=config_normal), None))
+ from opnfv_testapi.common import config
+ CONF = config.Config()
+ assert CONF.mongo_url == 'mongodb://127.0.0.1:27017/'
+ assert CONF.mongo_dbname == 'test_results_collection'
+ assert CONF.api_port == 8000
+ assert CONF.api_debug is True
+ assert CONF.api_authenticate is False
+ assert CONF.swagger_base_url == 'http://localhost:8000'
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/conftest.py b/utils/test/testapi/opnfv_testapi/tests/unit/conftest.py
new file mode 100644
index 000000000..feff1daaa
--- /dev/null
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/conftest.py
@@ -0,0 +1,8 @@
+from os import path
+
+import pytest
+
+
+@pytest.fixture
+def config_normal():
+ return path.join(path.dirname(__file__), 'common/normal.ini')
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py b/utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py
index d95ff372e..0ca83df62 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/fake_pymongo.py
@@ -6,9 +6,10 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
+from operator import itemgetter
+
from bson.objectid import ObjectId
from concurrent.futures import ThreadPoolExecutor
-from operator import itemgetter
def thread_execute(method, *args, **kwargs):
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_base.py b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_base.py
index 6e4d454de..dcec4e958 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_base.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_base.py
@@ -12,13 +12,9 @@ from os import path
import mock
from tornado import testing
-from opnfv_testapi.common import config
from opnfv_testapi.resources import models
from opnfv_testapi.tests.unit import fake_pymongo
-config.Config.CONFIG = path.join(path.dirname(__file__),
- '../../../../etc/config.ini')
-
class TestBase(testing.AsyncHTTPTestCase):
headers = {'Content-Type': 'application/json; charset=UTF-8'}
@@ -37,20 +33,21 @@ class TestBase(testing.AsyncHTTPTestCase):
def tearDown(self):
self.db_patcher.stop()
+ self.config_patcher.stop()
def _patch_server(self):
- from opnfv_testapi.cmd import server
- server.parse_config([
- '--config-file',
- path.join(path.dirname(__file__), path.pardir, 'common/normal.ini')
- ])
- self.db_patcher = mock.patch('opnfv_testapi.cmd.server.get_db',
- self._fake_pymongo)
+ import argparse
+ config = path.join(path.dirname(__file__), '../common/normal.ini')
+ self.config_patcher = mock.patch(
+ 'argparse.ArgumentParser.parse_known_args',
+ return_value=(argparse.Namespace(config_file=config), None))
+ self.db_patcher = mock.patch('opnfv_testapi.db.api.DB',
+ fake_pymongo)
+ self.config_patcher.start()
self.db_patcher.start()
- @staticmethod
- def _fake_pymongo():
- return fake_pymongo
+ def set_config_file(self):
+ self.config_file = 'normal.ini'
def get_app(self):
from opnfv_testapi.cmd import server
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_result.py b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_result.py
index f199bc7d5..1e83ed308 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_result.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_result.py
@@ -61,9 +61,9 @@ class TestResultBase(base.TestBase):
self.scenario = 'odl-l2'
self.criteria = 'passed'
self.trust_indicator = result_models.TI(0.7)
- self.start_date = "2016-05-23 07:16:09.477097"
- self.stop_date = "2016-05-23 07:16:19.477097"
- self.update_date = "2016-05-24 07:16:19.477097"
+ self.start_date = str(datetime.now())
+ self.stop_date = str(datetime.now() + timedelta(minutes=1))
+ self.update_date = str(datetime.now() + timedelta(days=1))
self.update_step = -0.05
super(TestResultBase, self).setUp()
self.details = Details(timestart='0', duration='9s', status='OK')
@@ -275,7 +275,7 @@ class TestResultGet(TestResultBase):
@executor.query(httplib.OK, '_query_period_one', 1)
def test_queryPeriodSuccess(self):
- return self._set_query('period=11')
+ return self._set_query('period=5')
@executor.query(httplib.BAD_REQUEST, message.must_int('last'))
def test_queryLastNotInt(self):
@@ -306,7 +306,7 @@ class TestResultGet(TestResultBase):
'scenario',
'trust_indicator',
'criteria',
- 'period=11')
+ 'period=5')
@executor.query(httplib.OK, '_query_success', 0)
def test_notFound(self):
@@ -324,10 +324,10 @@ class TestResultGet(TestResultBase):
@executor.query(httplib.OK, '_query_success', 1)
def test_filterErrorStartdate(self):
self._create_error_start_date(None)
- # self._create_error_start_date('None')
+ self._create_error_start_date('None')
self._create_error_start_date('null')
self._create_error_start_date('')
- return self._set_query('period=11')
+ return self._set_query('period=5')
def _query_success(self, body, number):
self.assertEqual(number, len(body.results))
@@ -338,7 +338,7 @@ class TestResultGet(TestResultBase):
def _query_period_one(self, body, number):
self.assertEqual(number, len(body.results))
- self.assert_res(body.results[0], self.req_10d_before)
+ self.assert_res(body.results[0], self.req_d)
def _create_error_start_date(self, start_date):
req = copy.deepcopy(self.req_d)
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_token.py b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_token.py
index b4ba88742..940e256c6 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_token.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/resources/test_token.py
@@ -10,7 +10,6 @@ from tornado import web
from opnfv_testapi.common import message
from opnfv_testapi.resources import project_models
-from opnfv_testapi.router import url_mappings
from opnfv_testapi.tests.unit import executor
from opnfv_testapi.tests.unit import fake_pymongo
from opnfv_testapi.tests.unit.resources import test_base as base
@@ -18,6 +17,7 @@ from opnfv_testapi.tests.unit.resources import test_base as base
class TestToken(base.TestBase):
def get_app(self):
+ from opnfv_testapi.router import url_mappings
return web.Application(
url_mappings.mappings,
db=fake_pymongo,
diff --git a/utils/test/testapi/opnfv_testapi/ui/auth/sign.py b/utils/test/testapi/opnfv_testapi/ui/auth/sign.py
index 5b3622579..462395225 100644
--- a/utils/test/testapi/opnfv_testapi/ui/auth/sign.py
+++ b/utils/test/testapi/opnfv_testapi/ui/auth/sign.py
@@ -1,14 +1,12 @@
from six.moves.urllib import parse
from tornado import gen
from tornado import web
-import logging
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
+from opnfv_testapi.db import api as dbapi
from opnfv_testapi.ui.auth import base
from opnfv_testapi.ui.auth import constants as const
-CONF = config.Config()
-
class SigninHandler(base.BaseHandler):
def get(self):
@@ -48,10 +46,9 @@ class SigninReturnHandler(base.BaseHandler):
'fullname': self.get_query_argument(const.OPENID_NS_SREG_FULLNAME),
const.ROLE: role
}
- user = yield self.db_find_one({'openid': openid})
+ user = yield dbapi.db_find_one(self.table, {'openid': openid})
if not user:
- self.db_save(self.table, new_user_info)
- logging.info('save to db:%s', new_user_info)
+ dbapi.db_save(self.table, new_user_info)
else:
role = user.get(const.ROLE)
diff --git a/utils/test/testapi/opnfv_testapi/ui/auth/user.py b/utils/test/testapi/opnfv_testapi/ui/auth/user.py
index 2fca2a8cb..955cdeead 100644
--- a/utils/test/testapi/opnfv_testapi/ui/auth/user.py
+++ b/utils/test/testapi/opnfv_testapi/ui/auth/user.py
@@ -2,6 +2,7 @@ from tornado import gen
from tornado import web
from opnfv_testapi.common import raises
+from opnfv_testapi.db import api as dbapi
from opnfv_testapi.ui.auth import base
@@ -12,7 +13,7 @@ class ProfileHandler(base.BaseHandler):
openid = self.get_secure_cookie('openid')
if openid:
try:
- user = yield self.db_find_one({'openid': openid})
+ user = yield dbapi.db_find_one(self.table, {'openid': openid})
self.finish_request({
"openid": user.get('openid'),
"email": user.get('email'),
diff --git a/utils/test/testapi/opnfv_testapi/ui/root.py b/utils/test/testapi/opnfv_testapi/ui/root.py
index bba7a8632..5b2c922d7 100644
--- a/utils/test/testapi/opnfv_testapi/ui/root.py
+++ b/utils/test/testapi/opnfv_testapi/ui/root.py
@@ -1,10 +1,10 @@
from opnfv_testapi.resources.handlers import GenericApiHandler
-from opnfv_testapi.common import config
+from opnfv_testapi.common.config import CONF
class RootHandler(GenericApiHandler):
def get_template_path(self):
- return config.Config().static_path
+ return CONF.static_path
def get(self):
self.render('testapi-ui/index.html')
diff --git a/utils/test/testapi/run_test.sh b/utils/test/testapi/run_test.sh
deleted file mode 100755
index 1e05dd6ba..000000000
--- a/utils/test/testapi/run_test.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-
-set -o errexit
-
-# Get script directory
-SCRIPTDIR=`dirname $0`
-
-echo "Running unit tests..."
-
-# Creating virtual environment
-if [ ! -z $VIRTUAL_ENV ]; then
- venv=$VIRTUAL_ENV
-else
- venv=$SCRIPTDIR/.venv
- virtualenv $venv
-fi
-source $venv/bin/activate
-
-# Install requirements
-pip install -r $SCRIPTDIR/requirements.txt
-pip install -r $SCRIPTDIR/test-requirements.txt
-
-find . -type f -name "*.pyc" -delete
-
-nosetests --with-xunit \
- --with-coverage \
- --cover-erase \
- --cover-package=$SCRIPTDIR/opnfv_testapi/cmd \
- --cover-package=$SCRIPTDIR/opnfv_testapi/common \
- --cover-package=$SCRIPTDIR/opnfv_testapi/resources \
- --cover-package=$SCRIPTDIR/opnfv_testapi/router \
- --cover-xml \
- --cover-html \
- $SCRIPTDIR/opnfv_testapi/tests
-
-exit_code=$?
-
-deactivate
-
-exit $exit_code