40 files changed, 580 insertions(+), 102 deletions(-)
diff --git a/.gitignore b/.gitignore
index 918e32154..0aa7b8c09 100644
--- a/.gitignore
+++ b/.gitignore
@@ -33,3 +33,5 @@
 cover/
 coverage.xml
 nosetests.xml
 testapi_venv/
+.cache
+.tox
diff --git a/jjb/copper/copper.yml b/jjb/copper/copper.yml
index b65466e01..e380fd555 100644
--- a/jjb/copper/copper.yml
+++ b/jjb/copper/copper.yml
@@ -64,4 +64,4 @@
 set -o nounset
 set -o pipefail
 
- shellcheck -f tty tests/*.sh
+ # shellcheck -f tty tests/*.sh
diff --git a/jjb/daisy4nfv/daisy4nfv-download-artifact.sh b/jjb/daisy4nfv/daisy4nfv-download-artifact.sh
index 90b5fa62f..b9af2e8ad 100755
--- a/jjb/daisy4nfv/daisy4nfv-download-artifact.sh
+++ b/jjb/daisy4nfv/daisy4nfv-download-artifact.sh
@@ -12,7 +12,7 @@
 set -o errexit
 set -o pipefail
 
 # use proxy url to replace the nomral URL, for googleusercontent.com will be blocked randomly
-[[ "$NODE_NAME" =~ (zte) ]] && GS_URL=$GS_BASE_PROXY
+[[ "$NODE_NAME" =~ (zte) ]] && GS_URL=${GS_BASE_PROXY%%/*}/$GS_URL
 
 if [[ "$JOB_NAME" =~ "merge" ]]; then
     echo "Downloading http://$GS_URL/opnfv-gerrit-$GERRIT_CHANGE_NUMBER.properties"
@@ -43,7 +43,7 @@
 echo "--------------------------------------------------------"
 echo
 
 # download the file
-curl -s -o $WORKSPACE/opnfv.bin http://$OPNFV_ARTIFACT_URL > gsutil.bin.log 2>&1
+curl -L -s -o $WORKSPACE/opnfv.bin http://$OPNFV_ARTIFACT_URL > gsutil.bin.log 2>&1
 
 # list the file
 ls -al $WORKSPACE/opnfv.bin
diff --git a/jjb/dovetail/dovetail-ci-jobs.yml b/jjb/dovetail/dovetail-ci-jobs.yml
index e2a334d40..22bc28109 100644
--- a/jjb/dovetail/dovetail-ci-jobs.yml
+++ b/jjb/dovetail/dovetail-ci-jobs.yml
@@ -20,8 +20,8 @@
 dovetail-branch: '{stream}'
 gs-pathname: ''
 docker-tag: 'latest'
- colorado: &colorado
- stream: colorado
+ danube: &danube
+ stream: danube
 branch: 'stable/{stream}'
 dovetail-branch: master
 gs-pathname: '/{stream}'
@@ -54,12 +54,12 @@
 slave-label: fuel-baremetal
 SUT: fuel
 auto-trigger-name: 'daily-trigger-disabled'
- <<: *colorado
+ <<: *danube
 - virtual:
 slave-label: fuel-virtual
 SUT: fuel
 auto-trigger-name: 'daily-trigger-disabled'
- <<: *colorado
+ <<: *danube
 #compass CI PODs
 - baremetal:
 slave-label: compass-baremetal
 SUT: compass
 auto-trigger-name: 'daily-trigger-disabled'
 <<: *master
 - virtual:
 slave-label: compass-virtual
 SUT: compass
 auto-trigger-name: 'daily-trigger-disabled'
 <<: *master
@@ -75,12 +75,12 @@
 slave-label: compass-baremetal
 SUT: compass
 auto-trigger-name: 'daily-trigger-disabled'
- <<: *colorado
+ <<: *danube
 - virtual:
 slave-label: compass-virtual
 SUT: compass
 auto-trigger-name: 'daily-trigger-disabled'
- <<: *colorado
+ <<: *danube
 #apex CI PODs
 - apex-verify-master:
 slave-label: '{pod}'
 SUT: apex
 auto-trigger-name: 'daily-trigger-disabled'
 <<: *master
@@ -92,16 +92,16 @@
 SUT: apex
 auto-trigger-name: 'daily-trigger-disabled'
 <<: *master
- - apex-verify-colorado:
- slave-label: '{pod}'
- SUT: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *colorado
- - apex-daily-colorado:
- slave-label: '{pod}'
- SUT: apex
- auto-trigger-name: 'daily-trigger-disabled'
- <<: *colorado
+# - apex-verify-colorado:
+# slave-label: '{pod}'
+# SUT: apex
+# auto-trigger-name: 'daily-trigger-disabled'
+# <<: *danube
+# - apex-daily-colorado:
+# slave-label: '{pod}'
+# SUT: apex
+# auto-trigger-name: 'daily-trigger-disabled'
+# <<: *danube
 #armband CI PODs
 - armband-baremetal:
 slave-label: armband-baremetal
 SUT: fuel
 auto-trigger-name: 'daily-trigger-disabled'
 <<: *master
 - armband-virtual:
 slave-label: armband-virtual
 SUT: fuel
 auto-trigger-name: 'daily-trigger-disabled'
 <<: *master
@@ -117,12 +117,12 @@
 slave-label: armband-baremetal
 SUT: fuel
 auto-trigger-name: 'daily-trigger-disabled'
- <<: *colorado
+ <<: *danube
 - armband-virtual:
 slave-label: armband-virtual
 SUT: fuel
 auto-trigger-name: 'daily-trigger-disabled'
- <<: *colorado
+ <<: *danube
 #--------------------------------
 # None-CI PODs
 #--------------------------------
diff --git a/jjb/functest/functest-ci-jobs.yml b/jjb/functest/functest-ci-jobs.yml
index e85144c92..9bf6a1a6e 100644
--- a/jjb/functest/functest-ci-jobs.yml
+++ b/jjb/functest/functest-ci-jobs.yml
@@ -284,6 +284,7 @@
 - 'tempest_smoke_serial'
 - 'rally_sanity'
 - 'odl'
+ - 'odl_netvirt'
 - 'onos'
 - 'promise'
 - 'doctor'
diff --git a/jjb/global/releng-macros.yml b/jjb/global/releng-macros.yml
index c245ee813..34f53fb5f 100644
--- a/jjb/global/releng-macros.yml
+++ b/jjb/global/releng-macros.yml
@@ -466,8 +466,8 @@
 - cobertura:
 report-file: "coverage.xml"
 only-stable: "true"
- health-auto-update: "true"
- stability-auto-update: "true"
+ health-auto-update: "false"
+ stability-auto-update: "false"
 zoom-coverage-chart: "true"
 targets:
 - files:
diff --git a/jjb/infra/bifrost-cleanup-job.yml b/jjb/infra/bifrost-cleanup-job.yml
index 6b761eea3..f1b38ca4b 100644
--- a/jjb/infra/bifrost-cleanup-job.yml
+++ b/jjb/infra/bifrost-cleanup-job.yml
@@ -67,7 +67,7 @@
 fi
 # No force (-f). We always verify upstream jobs so if there are no logs
 # something else went wrong and we need to break immediately and investigate
- gsutil rm -r $BIFROST_GS_URL
+ gsutil -m rm -r $BIFROST_GS_URL
 
 triggers:
 - '{project}-gerrit-trigger-cleanup':
@@ -86,13 +86,8 @@
 server-name: 'review.openstack.org'
 escape-quotes: true
 trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - patchset-uploaded-event: 'false'
- # We only run this when the change is merged since
- # we don't need the logs anymore
+ # We only run this when the change is merged or
+ # abandoned since we don't need the logs anymore
 - change-merged-event: 'true'
 - change-abandoned-event: 'true'
 - change-restored-event: 'false'
@@ -119,13 +114,8 @@
 - gerrit:
 server-name: 'gerrit.opnfv.org'
 trigger-on:
- - patchset-created-event:
- exclude-drafts: 'false'
- exclude-trivial-rebase: 'false'
- exclude-no-code-change: 'false'
- - patchset-uploaded-event: 'false'
- # We only run this when the change is merged since
- # we don't need the logs anymore
+ # We only run this when the change is merged or
+ # abandoned since we don't need the logs anymore
 - change-merged-event: 'true'
 - change-abandoned-event: 'true'
 - change-restored-event: 'false'
diff --git a/jjb/models/models.yml b/jjb/models/models.yml
index f419c8821..89d22bcbd 100644
--- a/jjb/models/models.yml
+++ b/jjb/models/models.yml
@@ -64,4 +64,4 @@
 set -o nounset
 set -o pipefail
 
- shellcheck -f tty tests/*.sh
+ # shellcheck -f tty tests/*.sh
diff --git a/jjb/multisite/multisite-daily-jobs.yml b/jjb/multisite/multisite-daily-jobs.yml
index 6b022fd75..23c95f627 100644
--- a/jjb/multisite/multisite-daily-jobs.yml
+++ b/jjb/multisite/multisite-daily-jobs.yml
@@ -138,8 +138,8 @@
 - name: 'functest-fuel-virtual-suite-{stream}'
 current-parameters: false
 predefined-parameters: |
- DEPLOY_SCENARIO='os-nosdn-multisite-noha'
- FUNCTEST_SUITE_NAME='multisite'
+ DEPLOY_SCENARIO=os-nosdn-multisite-noha
+ FUNCTEST_SUITE_NAME=multisite
 OS_REGION=RegionOne
 REGIONONE_IP=100.64.209.10
 REGIONTWO_IP=100.64.209.11
diff --git a/jjb/opnfvdocs/docs-rtd.yaml b/jjb/opnfvdocs/docs-rtd.yaml
index 01b28204e..2d5ec4fa4 100644
--- a/jjb/opnfvdocs/docs-rtd.yaml
+++ b/jjb/opnfvdocs/docs-rtd.yaml
@@ -78,7 +78,7 @@
 - shell: |
 sudo pip install virtualenv
 virtualenv $WORKSPACE/venv
- source $WORKSPACE/venv/bin/activate
+ . $WORKSPACE/venv/bin/activate
 pip install --upgrade pip
 pip freeze
 pip install tox
diff --git a/jjb/ves/ves.yml b/jjb/ves/ves.yml
index 5f0da3320..3d3ba2ca8 100644
--- a/jjb/ves/ves.yml
+++ b/jjb/ves/ves.yml
@@ -64,5 +64,5 @@
 set -o nounset
 set -o pipefail
 
- shellcheck -f tty tests/*.sh
- shellcheck -f tty utils/*.sh
+ # shellcheck -f tty tests/*.sh
+ # shellcheck -f tty utils/*.sh
diff --git a/jjb/yardstick/yardstick-ci-jobs.yml b/jjb/yardstick/yardstick-ci-jobs.yml
index 604eaed25..1f2f3122c 100644
--- a/jjb/yardstick/yardstick-ci-jobs.yml
+++ b/jjb/yardstick/yardstick-ci-jobs.yml
@@ -272,7 +272,7 @@
 publishers:
 - email:
- recipients: jean.gaoliang@huawei.com matthew.lijun@huawei.com
+ recipients: jean.gaoliang@huawei.com limingjiang@huawei.com
 
 ########################
 # builder macros
diff --git a/utils/fetch_os_creds.sh b/utils/fetch_os_creds.sh
index f00e022f9..c99afaca8 100755
--- a/utils/fetch_os_creds.sh
+++ b/utils/fetch_os_creds.sh
@@ -201,6 +201,17 @@
 elif [ "$installer_type" == "foreman" ]; then
     'source keystonerc_admin;keystone endpoint-list'" \
     | grep $admin_ip | sed 's/ /\n/g' | grep ^http | head -1) &> /dev/null
 
+elif [ "$installer_type" == "daisy" ]; then
+    verify_connectivity $installer_ip
+    cluster=$(sshpass -p r00tme ssh 2>/dev/null $ssh_options root@${installer_ip} \
+            "source ~/daisyrc_admin; daisy cluster-list"|grep active|head -1|awk -F "|" '{print $3}') &> /dev/null
+    if [ -z $cluster ]; then
+        echo "No active cluster detected in daisy"
+        exit 1
+    fi
+
+    sshpass -p r00tme scp 2>/dev/null $ssh_options root@${installer_ip}:/etc/kolla/admin-openrc.sh $dest_path &> /dev/null
+
 else
     error "Installer $installer is not supported by this script"
     fi
diff --git a/utils/test/reporting/docker/reporting.sh b/utils/test/reporting/docker/reporting.sh
index 1bef1b811..78bcc4e82 100755
--- a/utils/test/reporting/docker/reporting.sh
+++ b/utils/test/reporting/docker/reporting.sh
@@ -4,7 +4,7 @@
 export PYTHONPATH="${PYTHONPATH}:."
 export CONFIG_REPORTING_YAML=./reporting.yaml
 
 declare -a versions=(colorado master)
-declare -a projects=(functest yardstick)
+declare -a projects=(functest storperf yardstick)
 
 project=$1
 reporting_type=$2
@@ -30,6 +30,7 @@
 cp -Rf js display
 # $1 | $2
 # functest | status, vims, tempest
 # yardstick |
+# storperf |
 
 if [ -z "$1" ]; then
     echo "********************************"
@@ -52,6 +53,13 @@
 if [ -z "$1" ]; then
     echo "********************************"
     python ./yardstick/reporting-status.py
     echo "Yardstick reporting status...OK"
+
+    echo "********************************"
+    echo " Storperf reporting "
+    echo "********************************"
+    python ./storperf/reporting-status.py
+    echo "Storperf reporting status...OK"
+
 else
     if [ -z "$2" ]; then
         reporting_type="status"
diff --git a/utils/test/reporting/functest/reporting-status.py b/utils/test/reporting/functest/reporting-status.py
index df5632335..95f9e66e8 100755
--- a/utils/test/reporting/functest/reporting-status.py
+++ b/utils/test/reporting/functest/reporting-status.py
@@ -28,9 +28,9 @@
 testValid = []
 otherTestCases = []
 reportingDate = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
 
-# init just tempest to get the list of scenarios
-# as all the scenarios run Tempest
-tempest = tc.TestCase("tempest_smoke_serial", "functest", -1)
+# init just connection_check to get the list of scenarios
+# as all the scenarios run connection_check
+healthcheck = tc.TestCase("connection_check", "functest", -1)
 
 # Retrieve the Functest configuration to detect which tests are relevant
 # according to the installer, scenario
@@ -92,7 +92,9 @@
 for version in versions:
     # For all the installers
     for installer in installers:
         # get scenarios
-        scenario_results = rp_utils.getScenarios(tempest, installer, version)
+        scenario_results = rp_utils.getScenarios(healthcheck,
+                                                 installer,
+                                                 version)
         scenario_stats = rp_utils.getScenarioStats(scenario_results)
         items = {}
         scenario_result_criteria = {}
diff --git a/utils/test/reporting/functest/testCase.py b/utils/test/reporting/functest/testCase.py
index e40aa7f00..f77136e11 100644
--- a/utils/test/reporting/functest/testCase.py
+++ b/utils/test/reporting/functest/testCase.py
@@ -27,6 +27,7 @@
 class TestCase(object):
             'ocl': 'OCL',
             'tempest_smoke_serial': 'Tempest (smoke)',
             'tempest_full_parallel': 'Tempest (full)',
+            'tempest_defcore': 'Tempest (Defcore)',
             'rally_sanity': 'Rally (smoke)',
             'bgpvpn': 'bgpvpn',
             'rally_full': 'Rally (full)',
@@ -45,8 +46,8 @@
             'api_check': 'Health (api)',
             'snaps_smoke': 'SNAPS',
             'snaps_health_check': 'Health (dhcp)',
-            'gluon_vping': 'Netready',
-            'barometercollectd': 'Barometer'}
+            'netready': 'Netready',
+            'barometer': 'Barometer'}
         try:
             self.displayName = display_name_matrix[self.name]
         except:
@@ -125,6 +126,7 @@
             'ocl': 'ocl',
             'tempest_smoke_serial': 'tempest_smoke_serial',
             'tempest_full_parallel': 'tempest_full_parallel',
+            'tempest_defcore': 'tempest_defcore',
             'rally_sanity': 'rally_sanity',
             'bgpvpn': 'bgpvpn',
             'rally_full': 'rally_full',
@@ -143,8 +145,8 @@
             'api_check': 'api_check',
             'snaps_smoke': 'snaps_smoke',
             'snaps_health_check': 'snaps_health_check',
-            'gluon_vping': 'gluon_vping',
-            'barometercollectd': 'barometercollectd'}
+            'netready': 'gluon_vping',
+            'barometer': 'barometercollectd'}
         try:
             return test_match_matrix[self.name]
         except:
diff --git a/utils/test/reporting/html/danube.html b/utils/test/reporting/html/danube.html
index 58d6bc0fe..d21875b53 100644
--- a/utils/test/reporting/html/danube.html
+++ b/utils/test/reporting/html/danube.html
@@ -76,6 +76,17 @@
</div>
</a>
</article>
+ <article class="style4">
+ <span class="image">
+ <img src="img/storperf.jpg" alt="" />
+ </span>
+ <a href="master/storperf/status-apex.html">
+ <h2>Storperf</h2>
+ <div class="content">
+ <p>Storage testing</p>
+ </div>
+ </a>
+ </article>
</section>
</div>
</div>
diff --git a/utils/test/reporting/img/storperf.jpg b/utils/test/reporting/img/storperf.jpg
new file mode 100644
index 000000000..37492e69e
--- /dev/null
+++ b/utils/test/reporting/img/storperf.jpg
Binary files differ
diff --git a/utils/test/reporting/reporting.yaml b/utils/test/reporting/reporting.yaml
index 2fb6b7831..81e976a28 100644
--- a/utils/test/reporting/reporting.yaml
+++ b/utils/test/reporting/reporting.yaml
@@ -55,6 +55,13 @@
 yardstick:
 test_conf: https://git.opnfv.org/cgit/yardstick/plain/tests/ci/report_config.yaml
 log_level: ERROR
 
+storperf:
+    test_list:
+        - snia_steady_state
+    log_level: ERROR
+
 qtip:
 
 bottleneck:
+
+vsperf:
diff --git a/utils/test/reporting/storperf/reporting-status.py b/utils/test/reporting/storperf/reporting-status.py
new file mode 100644
index 000000000..674fdd880
--- /dev/null
+++ b/utils/test/reporting/storperf/reporting-status.py
@@ -0,0 +1,128 @@
+#!/usr/bin/python
+#
+# This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+import datetime
+import jinja2
+import os
+
+# manage conf
+import utils.reporting_utils as rp_utils
+
+import utils.scenarioResult as sr
+
+installers = rp_utils.get_config('general.installers')
+versions = rp_utils.get_config('general.versions')
+PERIOD = rp_utils.get_config('general.period')
+
+# Logger
+logger = rp_utils.getLogger("Storperf-Status")
+reportingDate = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
+
+logger.info("*******************************************")
+logger.info("* Generating reporting scenario status *")
+logger.info("* Data retention = %s days *" % PERIOD)
+logger.info("* *")
+logger.info("*******************************************")
+
+# retrieve the list of storperf tests
+storperf_tests = rp_utils.get_config('storperf.test_list')
+logger.info("Storperf tests: %s" % storperf_tests)
+
+# For all the versions
+for version in versions:
+    # For all the installers
+    for installer in installers:
+        # get scenarios results data
+        # for the moment we consider only 1 case snia_steady_state
+        scenario_results = rp_utils.getScenarios("snia_steady_state",
+                                                 installer,
+                                                 version)
+        # logger.info("scenario_results: %s" % scenario_results)
+
+        scenario_stats = rp_utils.getScenarioStats(scenario_results)
+        logger.info("scenario_stats: %s" % scenario_stats)
+        items = {}
+        scenario_result_criteria = {}
+
+        # From each scenarios get results list
+        for s, s_result in scenario_results.items():
+            logger.info("---------------------------------")
+            logger.info("installer %s, version %s, scenario %s", installer,
+                        version, s)
+            ten_criteria = len(s_result)
+
+            ten_score = 0
+            for v in s_result:
+                if "PASS" in v['criteria']:
+                    ten_score += 1
+
+            logger.info("ten_score: %s / %s" % (ten_score, ten_criteria))
+
+            LASTEST_TESTS = rp_utils.get_config(
+                'general.nb_iteration_tests_success_criteria')
+            four_result = s_result[:LASTEST_TESTS]
+            four_criteria = len(four_result)
+            four_score = 0
+            for v in four_result:
+                if "PASS" in v['criteria']:
+                    four_score += 1
+            logger.info("four_score: %s / %s " % (four_score, four_criteria))
+
+            try:
+                s_status = (four_score * 100) / four_criteria
+            except:
+                s_status = 0
+            logger.info("Score percent = %s" % str(s_status))
+            s_four_score = str(four_score) + '/' + str(four_criteria)
+            s_ten_score = str(ten_score) + '/' + str(ten_criteria)
+            s_score_percent = str(s_status)
+
+            if '100' == s_status:
+                logger.info(">>>>> scenario OK, save the information")
+            else:
+                logger.info(">>>> scenario not OK, last 4 iterations = %s, \
+                            last 10 days = %s" % (s_four_score, s_ten_score))
+
+            # Save daily results in a file
+            path_validation_file = ("./display/" + version +
+                                    "/storperf/scenario_history.txt")
+
+            if not os.path.exists(path_validation_file):
+                with open(path_validation_file, 'w') as f:
+                    info = 'date,scenario,installer,details,score\n'
+                    f.write(info)
+
+            with open(path_validation_file, "a") as f:
+                info = (reportingDate + "," + s + "," + installer +
+                        "," + s_ten_score + "," +
+                        str(s_score_percent) + "\n")
+                f.write(info)
+
+            scenario_result_criteria[s] = sr.ScenarioResult(s_status,
+                                                            s_four_score,
+                                                            s_ten_score,
+                                                            s_score_percent)
+
+            logger.info("--------------------------")
+
+        templateLoader = jinja2.FileSystemLoader(".")
+        templateEnv = jinja2.Environment(loader=templateLoader,
+                                         autoescape=True)
+
+        TEMPLATE_FILE = "./storperf/template/index-status-tmpl.html"
+        template = templateEnv.get_template(TEMPLATE_FILE)
+
+        outputText = template.render(scenario_results=scenario_result_criteria,
+                                     installer=installer,
+                                     period=PERIOD,
+                                     version=version,
+                                     date=reportingDate)
+
+        with open("./display/" + version +
+                  "/storperf/status-" + installer + ".html", "wb") as fh:
+            fh.write(outputText)
diff --git a/utils/test/reporting/storperf/template/index-status-tmpl.html b/utils/test/reporting/storperf/template/index-status-tmpl.html
new file mode 100644
index 000000000..e3a18b1ce
--- /dev/null
+++ b/utils/test/reporting/storperf/template/index-status-tmpl.html
@@ -0,0 +1,111 @@
+<html>
+  <head>
+    <meta charset="utf-8">
+    <!-- Bootstrap core CSS -->
+    <link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css" rel="stylesheet">
+    <link href="../../css/default.css" rel="stylesheet">
+    <script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js"></script>
+    <script type="text/javascript" src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"></script>
+    <script type="text/javascript" src="http://d3js.org/d3.v2.min.js"></script>
+    <script type="text/javascript" src="../../js/gauge.js"></script>
+    <script type="text/javascript" src="../../js/trend.js"></script>
+    <script>
+      function onDocumentReady() {
+        // Gauge management
+        {% for scenario in scenario_results.keys() -%}
+        var gaugeScenario{{loop.index}} = gauge('#gaugeScenario{{loop.index}}');
+        {%- endfor %}
+        // assign success rate to the gauge
+        function updateReadings() {
+          {% for scenario in scenario_results.keys() -%}
+          gaugeScenario{{loop.index}}.update({{scenario_results[scenario].getScorePercent()}});
+          {%- endfor %}
+        }
+        updateReadings();
+      }
+
+      // trend line management
+      d3.csv("./scenario_history.csv", function(data) {
+        // ***************************************
+        // Create the trend line
+        {% for scenario in scenario_results.keys() -%}
+        // for scenario {{scenario}}
+        // Filter results
+        var trend{{loop.index}} = data.filter(function(row) {
+          return row["scenario"]=="{{scenario}}" && row["installer"]=="{{installer}}";
+        })
+        // Parse the date
+        trend{{loop.index}}.forEach(function(d) {
+          d.date = parseDate(d.date);
+          d.score = +d.score
+        });
+        // Draw the trend line
+        var mytrend = trend("#trend_svg{{loop.index}}",trend{{loop.index}})
+        // ****************************************
+        {%- endfor %}
+      });
+      if ( !window.isLoaded ) {
+        window.addEventListener("load", function() {
+          onDocumentReady();
+        }, false);
+      } else {
+        onDocumentReady();
+      }
+    </script>
+    <script type="text/javascript">
+      $(document).ready(function (){
+        $(".btn-more").click(function() {
+          $(this).hide();
+          $(this).parent().find(".panel-default").show();
+        });
+      })
+    </script>
+  </head>
+  <body>
+    <div class="container">
+      <div class="masthead">
+        <h3 class="text-muted">Storperf status page ({{version}}, {{date}})</h3>
+        <nav>
+          <ul class="nav nav-justified">
+            <li class="active"><a href="http://testresults.opnfv.org/reporting/index.html">Home</a></li>
+            <li><a href="status-apex.html">Apex</a></li>
+            <li><a href="status-compass.html">Compass</a></li>
+            <li><a href="status-daisy.html">Daisy</a></li>
+            <li><a href="status-fuel.html">Fuel</a></li>
+            <li><a href="status-joid.html">Joid</a></li>
+          </ul>
+        </nav>
+      </div>
+<div class="row">
+  <div class="col-md-1"></div>
+  <div class="col-md-10">
+    <div class="page-header">
+      <h2>{{installer}}</h2>
+    </div>
+
+    <div class="scenario-overview">
+      <div class="panel-heading"><h4><b>List of last scenarios ({{version}}) run over the last {{period}} days </b></h4></div>
+      <table class="table">
+        <tr>
+          <th width="40%">Scenario</th>
+          <th width="20%">Status</th>
+          <th width="20%">Trend</th>
+          <th width="10%">Last 4 Iterations</th>
+          <th width="10%">Last 10 Days</th>
+        </tr>
+        {% for scenario,result in scenario_results.iteritems() -%}
+        <tr class="tr-ok">
+          <td>{{scenario}}</td>
+          <td><div id="gaugeScenario{{loop.index}}"></div></td>
+          <td><div id="trend_svg{{loop.index}}"></div></td>
+          <td>{{scenario_results[scenario].getFourDaysScore()}}</td>
+          <td>{{scenario_results[scenario].getTenDaysScore()}}</td>
+        </tr>
+        {%- endfor %}
+      </table>
+    </div>
+
+
+  </div>
+  <div class="col-md-1"></div>
+</div>
diff --git a/utils/test/reporting/utils/reporting_utils.py b/utils/test/reporting/utils/reporting_utils.py
index 1879fb628..47d67f362 100644
--- a/utils/test/reporting/utils/reporting_utils.py
+++ b/utils/test/reporting/utils/reporting_utils.py
@@ -101,7 +101,15 @@
 def getApiResults(case, installer, scenario, version):
 
 
 def getScenarios(case, installer, version):
 
-    case = case.getName()
+    try:
+        case = case.getName()
+    except:
+        # if case is not an object test case, try the string
+        if type(case) == str:
+            case = case
+        else:
+            raise ValueError("Case cannot be evaluated")
+
     period = get_config('general.period')
     url_base = get_config('testapi.url')
diff --git a/utils/test/reporting/yardstick/scenarioResult.py b/utils/test/reporting/utils/scenarioResult.py
index 1f7eb2b24..1f7eb2b24 100644
--- a/utils/test/reporting/yardstick/scenarioResult.py
+++ b/utils/test/reporting/utils/scenarioResult.py
diff --git a/utils/test/reporting/yardstick/reporting-status.py b/utils/test/reporting/yardstick/reporting-status.py
index a0f0b0184..12f42ca31 100644
--- a/utils/test/reporting/yardstick/reporting-status.py
+++ b/utils/test/reporting/yardstick/reporting-status.py
@@ -10,7 +10,7 @@
 import datetime
 import jinja2
 import os
 
-import scenarioResult as sr
+import utils.scenarioResult as sr
 from scenarios import config as cf
 
 # manage conf
diff --git a/utils/test/testapi/.coveragerc b/utils/test/testapi/.coveragerc
new file mode 100644
index 000000000..23fb97fba
--- /dev/null
+++ b/utils/test/testapi/.coveragerc
@@ -0,0 +1,27 @@
+# .coveragerc to control coverage.py
+
+[run]
+branch = True
+source =
+    opnfv_testapi
+
+[report]
+# Regexes for lines to exclude from consideration
+exclude_lines =
+    # Have to re-enable the standard pragma
+    pragma: no cover
+
+    # Don't complain about missing debug-only code:
+    def __repr__
+    if self\.debug
+
+    # Don't complain if tests don't hit defensive assertion code:
+    raise AssertionError
+    raise NotImplementedError
+
+    # Don't complain if non-runnable code isn't run:
+    if 0:
+    if __name__ == .__main__.:
+
+ignore_errors = True
+
diff --git a/utils/test/testapi/docker/Dockerfile b/utils/test/testapi/docker/Dockerfile
index 86513e05b..e031e194c 100644
--- a/utils/test/testapi/docker/Dockerfile
+++ b/utils/test/testapi/docker/Dockerfile
@@ -8,13 +8,12 @@
 # $ docker build -t opnfv/testapi:tag .
 #
 # Execution:
-# $ docker run -dti -p 8000:8000 \
-#    -e "swagger_url=http://10.63.243.17:8000" \
+# $ docker run -dti -p 8001:8000 \
+#    -e "swagger_url=http://10.63.243.17:8001" \
 #    -e "mongodb_url=mongodb://10.63.243.17:27017/" \
-#    -e "api_port=8000"
 #    opnfv/testapi:tag
 #
-# NOTE: providing swagger_url, api_port, mongodb_url is optional.
+# NOTE: providing swagger_url, mongodb_url is optional.
 #       If not provided, it will use the default one
 #       configured in config.ini
 #
diff --git a/utils/test/testapi/docker/prepare-env.sh b/utils/test/testapi/docker/prepare-env.sh
index 99433cc8c..9f07efbd1 100755
--- a/utils/test/testapi/docker/prepare-env.sh
+++ b/utils/test/testapi/docker/prepare-env.sh
@@ -9,8 +9,3 @@
 fi
 if [ "$swagger_url" != "" ]; then
     sudo crudini --set --existing $FILE swagger base_url $swagger_url
 fi
-
-if [ "$api_port" != "" ];then
-    sudo crudini --set --existing $FILE api port $api_port
-fi
-
diff --git a/utils/test/testapi/opnfv_testapi/cmd/server.py b/utils/test/testapi/opnfv_testapi/cmd/server.py
index 013ee6642..fa2b72250 100644
--- a/utils/test/testapi/opnfv_testapi/cmd/server.py
+++ b/utils/test/testapi/opnfv_testapi/cmd/server.py
@@ -30,6 +30,7 @@
 TODOs :
 """
 
 import argparse
+import sys
 
 import motor
 import tornado.ioloop
@@ -38,30 +39,34 @@
 from opnfv_testapi.common import config
 from opnfv_testapi.router import url_mappings
 from opnfv_testapi.tornado_swagger import swagger
 
-# optionally get config file from command line
-parser = argparse.ArgumentParser()
-parser.add_argument("-c", "--config-file", dest='config_file',
-                    help="Config file location")
-args = parser.parse_args()
-CONF = config.APIConfig().parse(args.config_file)
+CONF = None
 
-# connecting to MongoDB server, and choosing database
-client = motor.MotorClient(CONF.mongo_url)
-db = client[CONF.mongo_dbname]
-swagger.docs(base_url=CONF.swagger_base_url)
+def parse_config(argv=[]):
+    global CONF
+    parser = argparse.ArgumentParser()
+    parser.add_argument("-c", "--config-file", dest='config_file',
+                        help="Config file location")
+    args = parser.parse_args(argv)
+    CONF = config.APIConfig().parse(args.config_file)
+
+
+def get_db():
+    return motor.MotorClient(CONF.mongo_url)[CONF.mongo_dbname]
 
 
 def make_app():
+    swagger.docs(base_url=CONF.swagger_base_url)
     return swagger.Application(
         url_mappings.mappings,
-        db=db,
+        db=get_db(),
         debug=CONF.api_debug_on,
         auth=CONF.api_authenticate_on
     )
 
 
 def main():
+    parse_config(sys.argv[1:])
     application = make_app()
     application.listen(CONF.api_port)
     tornado.ioloop.IOLoop.current().start()
diff --git a/utils/test/testapi/opnfv_testapi/common/config.py b/utils/test/testapi/opnfv_testapi/common/config.py
index 84a127391..105d4fabf 100644
--- a/utils/test/testapi/opnfv_testapi/common/config.py
+++ b/utils/test/testapi/opnfv_testapi/common/config.py
@@ -8,6 +8,7 @@
 #  feng.xiaowei@zte.com.cn remove prepare_put_request 5-30-2016
 ##############################################################################
 import ConfigParser
+import os
 
 
 class ParseError(Exception):
@@ -22,7 +23,7 @@
         return 'error parsing config file : %s' % self.msg
 
 
-class APIConfig:
+class APIConfig(object):
     """ The purpose of this class is to load values correctly from the config
     file. Each key is declared as an attribute in __init__() and linked in
     parse()
@@ -42,13 +43,13 @@
         try:
             return self._parser.get(section, param)
         except ConfigParser.NoOptionError:
-            raise ParseError("[%s.%s] parameter not found" % (section, param))
+            raise ParseError("No parameter: [%s.%s]" % (section, param))
 
     def _get_int_parameter(self, section, param):
         try:
             return int(self._get_parameter(section, param))
         except ValueError:
-            raise ParseError("[%s.%s] not an int" % (section, param))
+            raise ParseError("Not int: [%s.%s]" % (section, param))
 
     def _get_bool_parameter(self, section, param):
         result = self._get_parameter(section, param)
@@ -58,7 +59,7 @@
             return False
 
         raise ParseError(
-            "[%s.%s : %s] not a boolean" % (section, param, result))
+            "Not boolean: [%s.%s : %s]" % (section, param, result))
 
     @staticmethod
     def parse(config_location=None):
@@ -67,10 +68,11 @@
         if config_location is None:
             config_location = obj._default_config_location
 
+        if not os.path.exists(config_location):
+            raise ParseError("%s not found" % config_location)
+
         obj._parser = ConfigParser.SafeConfigParser()
         obj._parser.read(config_location)
-        if not obj._parser:
-            raise ParseError("%s not found" % config_location)
 
         # Linking attributes to keys from file with their sections
         obj.mongo_url = obj._get_parameter("mongo", "url")
@@ -84,15 +86,3 @@
         obj.swagger_base_url = obj._get_parameter("swagger", "base_url")
 
         return obj
-
-    def __str__(self):
-        return "mongo_url = %s \n" \
-               "mongo_dbname = %s \n" \
-               "api_port = %s \n" \
-               "api_debug_on = %s \n" \
-               "swagger_base_url = %s \n" % (self.mongo_url,
-                                             self.mongo_dbname,
-                                             self.api_port,
-                                             self.api_debug_on,
-                                             self.api_authenticate_on,
-                                             self.swagger_base_url)
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/common/__init__.py b/utils/test/testapi/opnfv_testapi/tests/unit/common/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/common/__init__.py
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/common/noparam.ini b/utils/test/testapi/opnfv_testapi/tests/unit/common/noparam.ini
new file mode 100644
index 000000000..fda2a09e9
--- /dev/null
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/common/noparam.ini
@@ -0,0 +1,16 @@
+# to add a new parameter in the config file,
+# the CONF object in config.ini must be updated
+[mongo]
+# URL of the mongo DB
+# Mongo auth url => mongodb://user1:pwd1@host1/?authSource=db1
+url = mongodb://127.0.0.1:27017/
+
+[api]
+# Listening port
+port = 8000
+# With debug_on set to true, error traces will be shown in HTTP responses
+debug = True
+authenticate = False
+
+[swagger]
+base_url = http://localhost:8000
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/common/normal.ini b/utils/test/testapi/opnfv_testapi/tests/unit/common/normal.ini
new file mode 100644
index 000000000..77cc6c6ee
--- /dev/null
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/common/normal.ini
@@ -0,0 +1,17 @@
+# to add a new parameter in the config file,
+# the CONF object in config.ini must be updated
+[mongo]
+# URL of the mongo DB
+# Mongo auth url => mongodb://user1:pwd1@host1/?authSource=db1
+url = mongodb://127.0.0.1:27017/
+dbname = test_results_collection
+
+[api]
+# Listening port
+port = 8000
+# With debug_on set to true, error traces will be shown in HTTP responses
+debug = True
+authenticate = False
+
+[swagger]
+base_url = http://localhost:8000
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/common/nosection.ini b/utils/test/testapi/opnfv_testapi/tests/unit/common/nosection.ini
new file mode 100644
index 000000000..9988fc0a4
--- /dev/null
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/common/nosection.ini
@@ -0,0 +1,11 @@
+# to add a new parameter in the config file,
+# the CONF object in config.ini must be updated
+[api]
+# Listening port
+port = 8000
+# With debug_on set to true, error traces will be shown in HTTP responses
+debug = True
+authenticate = False
+
+[swagger]
+base_url = http://localhost:8000
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/common/notboolean.ini b/utils/test/testapi/opnfv_testapi/tests/unit/common/notboolean.ini
new file mode 100644
index 000000000..b3f327670
--- /dev/null
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/common/notboolean.ini
@@ -0,0 +1,17 @@
+# to add a new parameter in the config file,
+# the CONF object in config.ini must be updated
+[mongo]
+# URL of the mongo DB
+# Mongo auth url => mongodb://user1:pwd1@host1/?authSource=db1
+url = mongodb://127.0.0.1:27017/
+dbname = test_results_collection
+
+[api]
+# Listening port
+port = 8000
+# With debug_on set to true, error traces will be shown in HTTP responses
+debug = True
+authenticate = notboolean
+
+[swagger]
+base_url = http://localhost:8000
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/common/notint.ini b/utils/test/testapi/opnfv_testapi/tests/unit/common/notint.ini
new file mode 100644
index 000000000..d1b752a34
--- /dev/null
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/common/notint.ini
@@ -0,0 +1,17 @@
+# to add a new parameter in the config file,
+# the CONF object in config.ini must be updated
+[mongo]
+# URL of the mongo DB
+# Mongo auth url => mongodb://user1:pwd1@host1/?authSource=db1
+url = mongodb://127.0.0.1:27017/
+dbname = test_results_collection
+
+[api]
+# Listening port
+port = notint
+# With debug_on set to true, error traces will be shown in HTTP responses
+debug = True
+authenticate = False
+
+[swagger]
+base_url = http://localhost:8000
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/common/test_config.py b/utils/test/testapi/opnfv_testapi/tests/unit/common/test_config.py
new file mode 100644
index 000000000..aaff6bb91
--- /dev/null
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/common/test_config.py
@@ -0,0 +1,36 @@
+import ConfigParser
+import os
+
+import pytest
+
+from opnfv_testapi.common import config
+
+
+@pytest.fixture()
+def config_dir():
+    return os.path.dirname(__file__)
+
+
+@pytest.mark.parametrize('exception, config_file, excepted', [
+    (config.ParseError, None, '/etc/opnfv_testapi/config.ini not found'),
+    (ConfigParser.NoSectionError, 'nosection.ini', 'No section:'),
+    (config.ParseError, 'noparam.ini', 'No parameter:'),
+    (config.ParseError, 'notint.ini', 'Not int:'),
+    (config.ParseError, 'notboolean.ini', 'Not boolean:')])
+def pytest_config_exceptions(config_dir, exception, config_file, excepted):
+    file = '{}/{}'.format(config_dir, config_file) if config_file else None
+    with pytest.raises(exception) as error:
+        config.APIConfig().parse(file)
+    assert excepted in str(error.value)
+
+
+def test_config_success():
+    config_dir = os.path.join(os.path.dirname(__file__),
+                              '../../../../etc/config.ini')
+    conf = config.APIConfig().parse(config_dir)
+    assert conf.mongo_url == 'mongodb://127.0.0.1:27017/'
+    assert conf.mongo_dbname == 'test_results_collection'
+    assert conf.api_port == 8000
+    assert conf.api_debug_on is True
+    assert conf.api_authenticate_on is False
+    assert conf.swagger_base_url == 'http://localhost:8000'
diff --git a/utils/test/testapi/opnfv_testapi/tests/unit/test_base.py b/utils/test/testapi/opnfv_testapi/tests/unit/test_base.py
index b2be8d593..b955f4a5a 100644
--- a/utils/test/testapi/opnfv_testapi/tests/unit/test_base.py
+++ b/utils/test/testapi/opnfv_testapi/tests/unit/test_base.py
@@ -7,19 +7,21 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
 import json
+from os import path
 
+import mock
 from tornado import testing
-from tornado import web
 
 import fake_pymongo
+from opnfv_testapi.cmd import server
 from opnfv_testapi.resources import models
-from opnfv_testapi.router import url_mappings
 
 
 class TestBase(testing.AsyncHTTPTestCase):
     headers = {'Content-Type': 'application/json; charset=UTF-8'}
 
     def setUp(self):
+        self._patch_server()
         self.basePath = ''
         self.create_res = models.CreateResponse
         self.get_res = None
@@ -30,13 +32,24 @@
         self.addCleanup(self._clear)
         super(TestBase, self).setUp()
 
+    def tearDown(self):
+        self.db_patcher.stop()
+
+    def _patch_server(self):
+        server.parse_config([
+            '--config-file',
+            path.join(path.dirname(__file__), 'common/normal.ini')
+        ])
+        self.db_patcher = mock.patch('opnfv_testapi.cmd.server.get_db',
+                                     self._fake_pymongo)
+        self.db_patcher.start()
+
+    @staticmethod
+    def _fake_pymongo():
+        return fake_pymongo
+
     def get_app(self):
-        return web.Application(
-            url_mappings.mappings,
-            db=fake_pymongo,
-            debug=True,
-            auth=False
-        )
+        return server.make_app()
 
     def create_d(self, *args):
         return self.create(self.req_d, *args)
diff --git a/utils/test/testapi/run_test.sh b/utils/test/testapi/run_test.sh
index 51db09f65..4efc7af3b 100755
--- a/utils/test/testapi/run_test.sh
+++ b/utils/test/testapi/run_test.sh
@@ -15,6 +15,8 @@
 source $SCRIPTDIR/testapi_venv/bin/activate
 pip install -r $SCRIPTDIR/requirements.txt
 pip install coverage
 pip install nose>=1.3.1
+pip install pytest
+pip install mock
 
 find . -type f -name "*.pyc" -delete
diff --git a/utils/test/testapi/test-requirements.txt b/utils/test/testapi/test-requirements.txt
new file mode 100644
index 000000000..4633ad637
--- /dev/null
+++ b/utils/test/testapi/test-requirements.txt
@@ -0,0 +1,11 @@
+# The order of packages is significant, because pip processes them in the order
+# of appearance. Changing the order has an impact on the overall integration
+# process, which may cause wedges in the gate later.
+
+tox
+mock
+pytest
+pytest-cov
+coverage
+pykwalify
+pip_check_reqs
diff --git a/utils/test/testapi/tox.ini b/utils/test/testapi/tox.ini
new file mode 100644
index 000000000..81c9dfab1
--- /dev/null
+++ b/utils/test/testapi/tox.ini
@@ -0,0 +1,41 @@
+# Tox (http://tox.testrun.org/) is a tool for running tests
+# in multiple virtualenvs. This configuration file will run the
+# test suite on all supported python versions. To use it, "pip install tox"
+# and then run "tox" from this directory.
+
+[tox]
+envlist = py27,pep8
+skipsdist = True
+sitepackages = True
+
+[testenv]
+usedevelop = True
+install_command = pip install -U {opts} {packages}
+deps =
+    -rrequirements.txt
+    -rtest-requirements.txt
+commands=
+    py.test \
+    --basetemp={envtmpdir} \
+    --cov \
+    {posargs}
+setenv=
+    HOME = {envtmpdir}
+    PYTHONPATH = {toxinidir}
+
+[testenv:pep8]
+deps = flake8
+commands = flake8 {toxinidir}
+
+[flake8]
+# H803 skipped on purpose per list discussion.
+# E123, E125 skipped as they are invalid PEP-8.
+
+show-source = True
+ignore = E123,E125,H803,E501
+builtins = _
+exclude = build,dist,doc,legacy,.eggs,.git,.tox,.venv,testapi_venv,venv
+
+[pytest]
+testpaths = opnfv_testapi/tests
+python_functions = test_*