-rwxr-xr-x | jjb/apex/apex-upload-artifact.sh | 47
-rw-r--r-- | jjb/compass4nfv/compass-project-jobs.yml | 2
-rwxr-xr-x | jjb/moon/moon-verify.sh | 3
-rw-r--r-- | jjb/moon/moon.yml | 54
-rw-r--r-- | jjb/opnfv/installer-params.yml | 16
-rw-r--r-- | jjb/opnfv/test-sign.yml | 42
-rw-r--r-- | jjb/sandbox/sandbox-verify-jobs.yml | 179
-rw-r--r-- | utils/gpg_import_key.sh | 42
-rw-r--r-- | utils/push-test-logs.sh | 2
-rwxr-xr-x | utils/test-sign-artifact.sh | 26
-rw-r--r-- | utils/test/result_collection_api/etc/config.ini | 2
-rw-r--r-- | utils/test/result_collection_api/opnfv_testapi/resources/handlers.py | 5
-rw-r--r-- | utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py | 22
-rw-r--r-- | utils/test/result_collection_api/opnfv_testapi/resources/result_models.py | 101
-rw-r--r-- | utils/test/result_collection_api/opnfv_testapi/tests/unit/fake_pymongo.py | 5
-rw-r--r-- | utils/test/result_collection_api/opnfv_testapi/tests/unit/test_fake_pymongo.py | 2
-rw-r--r-- | utils/test/result_collection_api/opnfv_testapi/tests/unit/test_result.py | 51
17 files changed, 561 insertions, 40 deletions
diff --git a/jjb/apex/apex-upload-artifact.sh b/jjb/apex/apex-upload-artifact.sh index 0598f5615..d45c7c01b 100755 --- a/jjb/apex/apex-upload-artifact.sh +++ b/jjb/apex/apex-upload-artifact.sh @@ -11,6 +11,38 @@ echo # source the opnfv.properties to get ARTIFACT_VERSION source $WORKSPACE/opnfv.properties +# clone releng repository +echo "Cloning releng repository..." +[ -d releng ] && rm -rf releng +git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null +#this is where we import the siging key +if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then + source $WORKSPACE/releng/utils/gpg_import_key.sh +fi + +signrpm () { +for artifact in $RPM_LIST $SRPM_LIST; do + echo "Signing artifact: ${artifact}" + gpg2 -vvv --batch --yes --no-tty \ + --default-key opnfv-helpdesk@rt.linuxfoundation.org \ + --passphrase besteffort \ + --detach-sig $artifact + gsutil cp "$artifact".sig gs://$GS_URL/$(basename "$artifact".sig) + echo "Upload complete for ${artifact} signature" +done +} + +signiso () { +time gpg2 -vvv --batch --yes --no-tty \ + --default-key opnfv-helpdesk@rt.linuxfoundation.org \ + --passphrase notreallysecure \ + --detach-sig $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso + +gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso.sig gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso.sig +echo "ISO signature Upload Complete!" +} + +uploadiso () { # upload artifact and additional files to google storage gsutil cp $BUILD_DIRECTORY/release/OPNFV-CentOS-7-x86_64-$OPNFV_ARTIFACT_VERSION.iso gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.iso > gsutil.iso.log echo "ISO Upload Complete!" @@ -26,7 +58,10 @@ VERSION_EXTENSION=$(echo $(basename $OPNFV_SRPM_URL) | sed 's/opnfv-apex-//') for pkg in common undercloud opendaylight-sfc onos; do SRPM_LIST+=" ${SRPM_INSTALL_PATH}/opnfv-apex-${pkg}-${VERSION_EXTENSION}" done +} +uploadrpm () { +#This is where we upload the rpms for artifact in $RPM_LIST $SRPM_LIST; do echo "Uploading artifact: ${artifact}" gsutil cp $artifact gs://$GS_URL/$(basename $artifact) > gsutil.iso.log @@ -34,6 +69,18 @@ for artifact in $RPM_LIST $SRPM_LIST; do done gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/opnfv-$OPNFV_ARTIFACT_VERSION.properties > gsutil.properties.log gsutil cp $WORKSPACE/opnfv.properties gs://$GS_URL/latest.properties > gsutil.latest.log +} + +if gpg2 --list-keys | grep "opnfv-helpdesk@rt.linuxfoundation.org"; then + echo "Signing Key avaliable" + signiso + uploadiso + signrpm + uploadrpm +else + uploadiso + uploadrpm +fi echo echo "--------------------------------------------------------" diff --git a/jjb/compass4nfv/compass-project-jobs.yml b/jjb/compass4nfv/compass-project-jobs.yml index 8cd67ba0b..da286878e 100644 --- a/jjb/compass4nfv/compass-project-jobs.yml +++ b/jjb/compass4nfv/compass-project-jobs.yml @@ -100,7 +100,7 @@ - project: 'functest-{slave-label}-suite-{stream}' current-parameters: true predefined-parameters: - FUNCTEST_SUITE_NAME=vping_userdata + FUNCTEST_SUITE_NAME=healthcheck same-node: true block: true block-thresholds: diff --git a/jjb/moon/moon-verify.sh b/jjb/moon/moon-verify.sh new file mode 100755 index 000000000..23bf47c06 --- /dev/null +++ b/jjb/moon/moon-verify.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +echo "Hello World" diff --git a/jjb/moon/moon.yml b/jjb/moon/moon.yml new file mode 100644 index 000000000..0044eb9ec --- /dev/null +++ b/jjb/moon/moon.yml @@ -0,0 +1,54 @@ +- project: + name: moon + + project: '{name}' + + jobs: + - 'moon-verify-{stream}' + 
+ stream: + - master: + branch: '{stream}' + gs-pathname: '' + +- job-template: + name: 'moon-verify-{stream}' + + parameters: + - project-parameter: + project: '{project}' + - gerrit-parameter: + branch: '{branch}' + - 'opnfv-build-ubuntu-defaults' + + scm: + - gerrit-trigger-scm: + credentials-id: '{ssh-credentials}' + refspec: '$GERRIT_REFSPEC' + choosing-strategy: 'gerrit' + + triggers: + - gerrit: + trigger-on: + - patchset-created-event: + exclude-drafts: 'false' + exclude-trivial-rebase: 'false' + exclude-no-code-change: 'false' + - draft-published-event + - comment-added-contains-event: + comment-contains-value: 'recheck' + - comment-added-contains-event: + comment-contains-value: 'reverify' + projects: + - project-compare-type: 'ANT' + project-pattern: '{project}' + branches: + - branch-compare-type: 'ANT' + branch-pattern: '**/{branch}' + forbidden-file-paths: + - compare-type: ANT + pattern: 'docs/**|.gitignore' + + builders: + - shell: + !include-raw: ./moon-verify.sh diff --git a/jjb/opnfv/installer-params.yml b/jjb/opnfv/installer-params.yml index f95d79f93..60fee9263 100644 --- a/jjb/opnfv/installer-params.yml +++ b/jjb/opnfv/installer-params.yml @@ -93,3 +93,19 @@ name: CPU_ARCHITECTURE default: 'amd64' description: "CPU Architecture to use for Ubuntu distro " + +- parameter: + name: 'sandbox-defaults' + parameters: + - string: + name: INSTALLER_IP + default: '10.20.0.2' + description: 'IP of the installer' + - string: + name: INSTALLER_TYPE + default: sandbox + description: 'Installer used for deploying OPNFV on this POD' + - string: + name: EXTERNAL_NETWORK + default: 'admin_floating_net' + description: 'external network for test' diff --git a/jjb/opnfv/test-sign.yml b/jjb/opnfv/test-sign.yml new file mode 100644 index 000000000..b27d75777 --- /dev/null +++ b/jjb/opnfv/test-sign.yml @@ -0,0 +1,42 @@ +- project: + name: test-sign + + project: 'releng' + + jobs: + - 'test-sign-daily-{stream}' + + stream: + - master: + branch: '{stream}' + gs-pathname: '' + + +- job-template: + name: 'test-sign-daily-{stream}' + + # Job template for daily builders + # + # Required Variables: + # stream: branch with - in place of / (eg. stable) + # branch: branch (eg. 
stable) + node: master + + disabled: false + + parameters: + - project-parameter: + project: '{project}' + + scm: + - git-scm: + credentials-id: '{ssh-credentials}' + refspec: '' + branch: '{branch}' + + triggers: + - timed: 'H H * * *' + + builders: + - shell: | + $WORKSPACE/utils/test-sign-artifact.sh diff --git a/jjb/sandbox/sandbox-verify-jobs.yml b/jjb/sandbox/sandbox-verify-jobs.yml new file mode 100644 index 000000000..eda212b10 --- /dev/null +++ b/jjb/sandbox/sandbox-verify-jobs.yml @@ -0,0 +1,179 @@ +- project: + name: 'sandbox-verify-jobs' + + project: 'sandbox' + + installer: 'sandbox' + + stream: + - master: + branch: '{stream}' + gs-pathname: '' + +# what are the verification activities we do for this installer + activity: + - 'basic' +# - 'build' +# - 'deploy' +# - 'test' + + jobs: + - 'sandbox-verify-{stream}' + - 'sandbox-verify-{activity}-{stream}' + +- job-template: + name: 'sandbox-verify-{stream}' + + parameters: + - project-parameter: + project: '{project}' + - gerrit-parameter: + branch: '{branch}' + - 'opnfv-build-ubuntu-defaults' + + triggers: + - gerrit: + trigger-on: + - patchset-created-event: + exclude-drafts: 'false' + exclude-trivial-rebase: 'false' + exclude-no-code-change: 'false' + - draft-published-event + - comment-added-contains-event: + comment-contains-value: 'recheck' + - comment-added-contains-event: + comment-contains-value: 'reverify' + projects: + - project-compare-type: 'ANT' + project-pattern: '{project}' + branches: + - branch-compare-type: 'ANT' + branch-pattern: '**/{branch}' + forbidden-file-paths: + - compare-type: ANT + pattern: 'docs/**|.gitignore' + readable-message: true + + builders: + - description-setter: + description: "POD: $NODE_NAME" + - '{project}-verify-builder' + - trigger-builds: + - project: 'sandbox-verify-basic-{stream}' + current-parameters: true + same-node: true + git-revision: true + block: true + - trigger-builds: + - project: 'sandbox-verify-build-{stream}' + current-parameters: false + block: true + git-revision: true + same-node: true + - trigger-builds: + - project: 'sandbox-verify-deploy-{stream}' + current-parameters: false + block: true + git-revision: true + same-node: false + - trigger-builds: + - project: 'sandbox-verify-test-{stream}' + current-parameters: false + block: true + git-revision: true + same-node: false + +- job-template: + name: 'sandbox-verify-{activity}-{stream}' + + wrappers: + - timeout: + timeout: 180 + fail: true + + parameters: + - project-parameter: + project: '{project}' + - '{installer}-defaults' + - '{project}-verify-{activity}-parameter' + + scm: + - git-scm: + credentials-id: '{ssh-credentials}' + refspec: '' + branch: '{branch}' + + builders: + - description-setter: + description: "POD: $NODE_NAME" + - '{project}-verify-{activity}-builder' + +##################################### +# parameter builders +##################################### +- parameter: + name: 'sandbox-verify-basic-parameter' + parameters: + - 'opnfv-build-ubuntu-defaults' + +- parameter: + name: 'sandbox-verify-build-parameter' + parameters: + - 'opnfv-build-ubuntu-defaults' + +- parameter: + name: 'sandbox-verify-deploy-parameter' + parameters: + - 'opnfv-build-centos-defaults' + +- parameter: + name: 'sandbox-verify-test-parameter' + parameters: + - 'opnfv-build-centos-defaults' +##################################### +# builder builders +##################################### +- builder: + name: 'sandbox-verify-builder' + builders: + - shell: | + #!/bin/bash + + # this is the builder for the parent/upstream job 
which we do nothing + echo "Hello World" + +- builder: + name: 'sandbox-verify-basic-builder' + builders: + - shell: | + #!/bin/bash + + # this is where we check the commit message, unit test, etc. + echo "Hello World" + +- builder: + name: 'sandbox-verify-build-builder' + builders: + - shell: | + #!/bin/bash + + # this is where we do the build + echo "Hello World" + +- builder: + name: 'sandbox-verify-deploy-builder' + builders: + - shell: | + #!/bin/bash + + # this is where we start the virtual deployment + echo "Hello World" + +- builder: + name: 'sandbox-verify-test-builder' + builders: + - shell: | + #!/bin/bash + + # this is where we do functest smoketest + echo "Hello World" diff --git a/utils/gpg_import_key.sh b/utils/gpg_import_key.sh new file mode 100644 index 000000000..3afeda839 --- /dev/null +++ b/utils/gpg_import_key.sh @@ -0,0 +1,42 @@ +#!/bin/bash -e +# SPDX-license-identifier: Apache-2.0 +############################################################################## +# Copyright (c) 2016 NEC and others. +# All rights reserved. This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 +############################################################################## + +function isinstalled { +if rpm -q "$@" >/dev/null 2>&1; then + true + else + echo installing "$1" + sudo yum install "$1" + false +fi +} + +if ! isinstalled gnupg2; then + echo "error with install" + exit 1 +fi + +if ! which gsutil; + then echo "error gsutil not installed"; + exit 1 +fi + +if gpg2 --list-keys | grep "opnfv-helpdesk@rt.linuxfoundation.org"; then + echo "Key Already available" +else + if [ -z "$NODE_NAME" ]; + then echo "Cannot find node name" + exit 1 + else echo "Importing key for '$NODE_NAME'"; + gsutil cp gs://opnfv-signing-keys/"$NODE_NAME"-subkey . + gpg2 --import "$NODE_NAME"-subkey + rm -f "$NODE_NAME"-subkey + fi +fi diff --git a/utils/push-test-logs.sh b/utils/push-test-logs.sh index 7486adb40..964b41908 100644 --- a/utils/push-test-logs.sh +++ b/utils/push-test-logs.sh @@ -17,7 +17,7 @@ res_build_date=${1:-$(date -u +"%Y-%m-%d_%H-%M-%S")} project=$PROJECT branch=${GIT_BRANCH##*/} testbed=$NODE_NAME -dir_result="${HOME}/opnfv/$project/results" +dir_result="${HOME}/opnfv/$project/results/${branch}" # src: https://wiki.opnfv.org/display/INF/Hardware+Infrastructure # + intel-pod3 (vsperf) node_list=(\ diff --git a/utils/test-sign-artifact.sh b/utils/test-sign-artifact.sh new file mode 100755 index 000000000..f09b7f4e2 --- /dev/null +++ b/utils/test-sign-artifact.sh @@ -0,0 +1,26 @@ +#!/bin/bash + +export PATH=$PATH:/usr/local/bin/ + +# clone releng repository +echo "Cloning releng repository..." 
+[ -d releng ] && rm -rf releng +git clone https://gerrit.opnfv.org/gerrit/releng $WORKSPACE/releng/ &> /dev/null +#this is where we import the siging key +if [ -f $WORKSPACE/releng/utils/gpg_import_key.sh ]; then + source $WORKSPACE/releng/utils/gpg_import_key.sh +fi + +artifact="foo" +echo foo > foo + +testsign () { + echo "Signing artifact: ${artifact}" + gpg2 -vvv --batch \ + --default-key opnfv-helpdesk@rt.linuxfoundation.org \ + --passphrase besteffort \ + --detach-sig $artifact +} + +testsign + diff --git a/utils/test/result_collection_api/etc/config.ini b/utils/test/result_collection_api/etc/config.ini index 16346bf36..0edb73a3f 100644 --- a/utils/test/result_collection_api/etc/config.ini +++ b/utils/test/result_collection_api/etc/config.ini @@ -13,4 +13,4 @@ port = 8000 debug = True [swagger] -base_url = http://testresults.opnfv.org/test
\ No newline at end of file +base_url = http://localhost:8000 diff --git a/utils/test/result_collection_api/opnfv_testapi/resources/handlers.py b/utils/test/result_collection_api/opnfv_testapi/resources/handlers.py index 873701103..f98c35e8f 100644 --- a/utils/test/result_collection_api/opnfv_testapi/resources/handlers.py +++ b/utils/test/result_collection_api/opnfv_testapi/resources/handlers.py @@ -198,9 +198,8 @@ class GenericApiHandler(RequestHandler): comparing values """ if not (new_value is None): - if len(new_value) > 0: - if new_value != old_value: - edit_request[key] = new_value + if new_value != old_value: + edit_request[key] = new_value return edit_request diff --git a/utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py b/utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py index 5198ba355..400b84ac1 100644 --- a/utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py +++ b/utils/test/result_collection_api/opnfv_testapi/resources/result_handlers.py @@ -45,7 +45,7 @@ class GenericResultHandler(GenericApiHandler): obj = {"$gte": str(period)} query['start_date'] = obj elif k == 'trust_indicator': - query[k] = float(v) + query[k + '.current'] = float(v) elif k != 'last': query[k] = v return query @@ -116,8 +116,8 @@ class ResultsCLHandler(GenericResultHandler): @type last: L{string} @in last: query @required last: False - @param trust_indicator: must be int/long/float - @type trust_indicator: L{string} + @param trust_indicator: must be float + @type trust_indicator: L{float} @in trust_indicator: query @required trust_indicator: False """ @@ -180,3 +180,19 @@ class ResultsGURHandler(GenericResultHandler): query = dict() query["_id"] = ObjectId(result_id) self._get_one(query) + + @swagger.operation(nickname="update") + def put(self, result_id): + """ + @description: update a single result by _id + @param body: fields to be updated + @type body: L{ResultUpdateRequest} + @in body: body + @rtype: L{Result} + @return 200: update success + @raise 404: result not exist + @raise 403: nothing to update + """ + query = {'_id': ObjectId(result_id)} + db_keys = [] + self._update(query, db_keys) diff --git a/utils/test/result_collection_api/opnfv_testapi/resources/result_models.py b/utils/test/result_collection_api/opnfv_testapi/resources/result_models.py index fdd80593a..dd1e3dc53 100644 --- a/utils/test/result_collection_api/opnfv_testapi/resources/result_models.py +++ b/utils/test/result_collection_api/opnfv_testapi/resources/result_models.py @@ -10,7 +10,69 @@ from opnfv_testapi.tornado_swagger import swagger @swagger.model() +class TIHistory(object): + """ + @ptype step: L{float} + """ + def __init__(self, date=None, step=0): + self.date = date + self.step = step + + def format(self): + return { + "date": self.date, + "step": self.step + } + + @staticmethod + def from_dict(a_dict): + if a_dict is None: + return None + + return TIHistory(a_dict.get('date'), a_dict.get('step')) + + +@swagger.model() +class TI(object): + """ + @property histories: trust_indicator update histories + @ptype histories: C{list} of L{TIHistory} + @ptype current: L{float} + """ + def __init__(self, current=0): + self.current = current + self.histories = list() + + def format(self): + hs = [] + for h in self.histories: + hs.append(h.format()) + + return { + "current": self.current, + "histories": hs + } + + @staticmethod + def from_dict(a_dict): + if a_dict is None: + return None + t = TI() + t.current = a_dict.get('current') + if 'histories' in 
a_dict.keys(): + for history in a_dict.get('histories', None): + t.histories.append(TIHistory.from_dict(history)) + else: + t.histories = [] + return t + + +@swagger.model() class ResultCreateRequest(object): + """ + @property trust_indicator: + @ptype trust_indicator: L{TI} + """ def __init__(self, pod_name=None, project_name=None, @@ -50,15 +112,30 @@ class ResultCreateRequest(object): "build_tag": self.build_tag, "scenario": self.scenario, "criteria": self.criteria, - "trust_indicator": self.trust_indicator + "trust_indicator": self.trust_indicator.format() + } + + +@swagger.model() +class ResultUpdateRequest(object): + """ + @property trust_indicator: + @ptype trust_indicator: L{TI} + """ + def __init__(self, trust_indicator=None): + self.trust_indicator = trust_indicator + + def format(self): + return { + "trust_indicator": self.trust_indicator.format(), } @swagger.model() class TestResult(object): """ - @property trust_indicator: must be int/long/float - @ptype trust_indicator: L{float} + @property trust_indicator: used for long duration test case + @ptype trust_indicator: L{TI} """ def __init__(self, _id=None, case_name=None, project_name=None, pod_name=None, installer=None, version=None, @@ -98,19 +175,7 @@ class TestResult(object): t.build_tag = a_dict.get('build_tag') t.scenario = a_dict.get('scenario') t.criteria = a_dict.get('criteria') - # 0 < trust indicator < 1 - # if bad value => set this indicator to 0 - t.trust_indicator = a_dict.get('trust_indicator') - if t.trust_indicator is not None: - if isinstance(t.trust_indicator, (int, long, float)): - if t.trust_indicator < 0: - t.trust_indicator = 0 - elif t.trust_indicator > 1: - t.trust_indicator = 1 - else: - t.trust_indicator = 0 - else: - t.trust_indicator = 0 + t.trust_indicator = TI.from_dict(a_dict.get('trust_indicator')) return t def format(self): @@ -126,7 +191,7 @@ class TestResult(object): "build_tag": self.build_tag, "scenario": self.scenario, "criteria": self.criteria, - "trust_indicator": self.trust_indicator + "trust_indicator": self.trust_indicator.format() } def format_http(self): @@ -143,7 +208,7 @@ class TestResult(object): "build_tag": self.build_tag, "scenario": self.scenario, "criteria": self.criteria, - "trust_indicator": self.trust_indicator + "trust_indicator": self.trust_indicator.format() } diff --git a/utils/test/result_collection_api/opnfv_testapi/tests/unit/fake_pymongo.py b/utils/test/result_collection_api/opnfv_testapi/tests/unit/fake_pymongo.py index 6ab98c720..450969248 100644 --- a/utils/test/result_collection_api/opnfv_testapi/tests/unit/fake_pymongo.py +++ b/utils/test/result_collection_api/opnfv_testapi/tests/unit/fake_pymongo.py @@ -116,8 +116,8 @@ class MemDb(object): if k == 'start_date': if not MemDb._compare_date(v, content.get(k)): return False - elif k == 'trust_indicator': - if float(content.get(k)) != float(v): + elif k == 'trust_indicator.current': + if content.get('trust_indicator').get('current') != v: return False elif content.get(k, None) != v: return False @@ -173,7 +173,6 @@ class MemDb(object): def _check_keys(self, doc): for key in doc.keys(): - print('key', key, 'value', doc.get(key)) if '.' 
in key: raise NameError('key {} must not contain .'.format(key)) if key.startswith('$'): diff --git a/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_fake_pymongo.py b/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_fake_pymongo.py index 27382f089..9a1253e94 100644 --- a/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_fake_pymongo.py +++ b/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_fake_pymongo.py @@ -8,9 +8,9 @@ ############################################################################## import unittest -from tornado.web import Application from tornado import gen from tornado.testing import AsyncHTTPTestCase, gen_test +from tornado.web import Application import fake_pymongo diff --git a/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_result.py b/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_result.py index bba3b228f..98ef7c08c 100644 --- a/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_result.py +++ b/utils/test/result_collection_api/opnfv_testapi/tests/unit/test_result.py @@ -6,15 +6,16 @@ # which accompanies this distribution, and is available at # http://www.apache.org/licenses/LICENSE-2.0 ############################################################################## -import unittest import copy +import unittest +from datetime import datetime, timedelta from opnfv_testapi.common.constants import HTTP_OK, HTTP_BAD_REQUEST, \ HTTP_NOT_FOUND from opnfv_testapi.resources.pod_models import PodCreateRequest from opnfv_testapi.resources.project_models import ProjectCreateRequest from opnfv_testapi.resources.result_models import ResultCreateRequest, \ - TestResult, TestResults + TestResult, TestResults, ResultUpdateRequest, TI, TIHistory from opnfv_testapi.resources.testcase_models import TestcaseCreateRequest from test_base import TestBase @@ -55,9 +56,11 @@ class TestResultBase(TestBase): self.build_tag = 'v3.0' self.scenario = 'odl-l2' self.criteria = 'passed' - self.trust_indicator = 0.7 + self.trust_indicator = TI(0.7) self.start_date = "2016-05-23 07:16:09.477097" self.stop_date = "2016-05-23 07:16:19.477097" + self.update_date = "2016-05-24 07:16:19.477097" + self.update_step = -0.05 super(TestResultBase, self).setUp() self.details = Details(timestart='0', duration='9s', status='OK') self.req_d = ResultCreateRequest(pod_name=self.pod, @@ -74,6 +77,7 @@ class TestResultBase(TestBase): trust_indicator=self.trust_indicator) self.get_res = TestResult self.list_res = TestResults + self.update_res = TestResult self.basePath = '/api/v1/results' self.req_pod = PodCreateRequest(self.pod, 'metal', 'zte pod 1') self.req_project = ProjectCreateRequest(self.project, 'vping test') @@ -103,10 +107,19 @@ class TestResultBase(TestBase): self.assertEqual(result.build_tag, req.build_tag) self.assertEqual(result.scenario, req.scenario) self.assertEqual(result.criteria, req.criteria) - self.assertEqual(result.trust_indicator, req.trust_indicator) self.assertEqual(result.start_date, req.start_date) self.assertEqual(result.stop_date, req.stop_date) self.assertIsNotNone(result._id) + ti = result.trust_indicator + self.assertEqual(ti.current, req.trust_indicator.current) + if ti.histories: + history = ti.histories[0] + self.assertEqual(history.date, self.update_date) + self.assertEqual(history.step, self.update_step) + + def _create_d(self): + _, res = self.create_d() + return res.href.split('/')[-1] class TestResultCreate(TestResultBase): @@ -172,8 +185,7 @@ class TestResultCreate(TestResultBase): 
class TestResultGet(TestResultBase): def test_getOne(self): - _, res = self.create_d() - _id = res.href.split('/')[-1] + _id = self._create_d() code, body = self.get(_id) self.assert_res(code, body) @@ -266,8 +278,6 @@ class TestResultGet(TestResultBase): self.assert_res(code, result, req) def _create_changed_date(self, **kwargs): - import copy - from datetime import datetime, timedelta req = copy.deepcopy(self.req_d) req.start_date = datetime.now() + timedelta(**kwargs) req.stop_date = str(req.start_date + timedelta(minutes=10)) @@ -276,13 +286,36 @@ class TestResultGet(TestResultBase): return req def _set_query(self, *args): + def get_value(arg): + return eval('self.' + arg) \ + if arg != 'trust_indicator' else self.trust_indicator.current uri = '' for arg in args: if '=' in arg: uri += arg + '&' else: - uri += '{}={}&'.format(arg, eval('self.' + arg)) + uri += '{}={}&'.format(arg, get_value(arg)) return uri[0: -1] + +class TestResultUpdate(TestResultBase): + def test_success(self): + _id = self._create_d() + + new_ti = copy.deepcopy(self.trust_indicator) + new_ti.current += self.update_step + new_ti.histories.append(TIHistory(self.update_date, self.update_step)) + new_data = copy.deepcopy(self.req_d) + new_data.trust_indicator = new_ti + update = ResultUpdateRequest(trust_indicator=new_ti) + code, body = self.update(update, _id) + self.assertEqual(_id, body._id) + self.assert_res(code, body, new_data) + + code, new_body = self.get(_id) + self.assertEqual(_id, new_body._id) + self.assert_res(code, new_body, new_data) + + if __name__ == '__main__': unittest.main() |
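
The signing logic added to jjb/apex/apex-upload-artifact.sh boils down to creating a detached signature with the Linux Foundation helpdesk key and publishing the .sig next to the artifact. A minimal sketch of that flow, assuming gpg2 and gsutil are installed, the key has already been imported (see utils/gpg_import_key.sh), and the artifact path and GS_URL values below are placeholders; passphrase handling is omitted:

#!/bin/bash
# Sketch of the sign-and-upload step from apex-upload-artifact.sh.
ARTIFACT="opnfv-apex-example.rpm"        # placeholder artifact path
GS_URL="${GS_URL:-example-bucket/apex}"  # placeholder bucket/path

# write a detached signature to ${ARTIFACT}.sig
gpg2 --batch --yes --no-tty \
     --default-key opnfv-helpdesk@rt.linuxfoundation.org \
     --detach-sig "$ARTIFACT"

# publish the signature next to the artifact in Google Storage
gsutil cp "${ARTIFACT}.sig" "gs://${GS_URL}/$(basename "$ARTIFACT").sig"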
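
On the consumer side, the uploaded opnfv-<version>.iso.sig lets anyone check the ISO before using it. A sketch under the assumption that the public half of the helpdesk key is already in the local keyring and that the bucket and version values are placeholders:

#!/bin/bash
# Fetch an ISO and its detached signature, then verify the pair.
GS_URL="${GS_URL:-example-bucket}"                            # placeholder bucket
VERSION="${OPNFV_ARTIFACT_VERSION:?set a version first}"      # e.g. from opnfv.properties

gsutil cp "gs://${GS_URL}/opnfv-${VERSION}.iso" .
gsutil cp "gs://${GS_URL}/opnfv-${VERSION}.iso.sig" .
gpg2 --verify "opnfv-${VERSION}.iso.sig" "opnfv-${VERSION}.iso"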
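
On the testapi side, result_models.py turns trust_indicator from a bare float into a TI object holding a current value plus a list of TIHistory entries, so clients posting results need the nested shape. An abridged, hypothetical POST against a local instance (the swagger base_url in config.ini now points at http://localhost:8000); field names follow ResultCreateRequest, and the values are placeholders or taken from the unit tests:

#!/bin/bash
# Hypothetical result creation with the new nested trust_indicator payload.
API_URL="${API_URL:-http://localhost:8000}"

curl -s -X POST "${API_URL}/api/v1/results" \
     -H 'Content-Type: application/json' \
     -d '{
           "pod_name": "example-pod",
           "project_name": "example-project",
           "case_name": "example-case",
           "installer": "example-installer",
           "version": "master",
           "start_date": "2016-05-23 07:16:09.477097",
           "stop_date": "2016-05-23 07:16:19.477097",
           "build_tag": "v3.0",
           "scenario": "odl-l2",
           "criteria": "passed",
           "details": {"timestart": "0", "duration": "9s", "status": "OK"},
           "trust_indicator": {"current": 0.7, "histories": []}
         }'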
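
result_handlers.py also gains a PUT on a single result, taking a ResultUpdateRequest whose only field is the trust_indicator object. A sketch of bumping the indicator the way the TestResultUpdate unit test does (the host and result id are placeholders):

#!/bin/bash
# Hypothetical trust-indicator update through the new PUT handler.
API_URL="${API_URL:-http://localhost:8000}"
RESULT_ID="${RESULT_ID:?set RESULT_ID to an existing result _id}"

curl -s -X PUT "${API_URL}/api/v1/results/${RESULT_ID}" \
     -H 'Content-Type: application/json' \
     -d '{
           "trust_indicator": {
             "current": 0.65,
             "histories": [
               {"date": "2016-05-24 07:16:19.477097", "step": -0.05}
             ]
           }
         }'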
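
Finally, the query handling changes so that a flat trust_indicator query parameter is matched against the nested Mongo field trust_indicator.current, both in result_handlers.py and in the fake_pymongo test double. A filter query might then look like this, again against a placeholder local instance:

#!/bin/bash
# Hypothetical query filtering on the nested trust_indicator.current field.
API_URL="${API_URL:-http://localhost:8000}"

curl -s "${API_URL}/api/v1/results?trust_indicator=0.7&last=1"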