-rwxr-xr-x  jjb/armband/armband-deploy.sh              |   1
-rw-r--r--  jjb/compass4nfv/compass-dovetail-jobs.yml  |   2
-rw-r--r--  jjb/doctor/doctor.yml                      |   8
-rw-r--r--  jjb/fuel/fuel-daily-jobs.yml               |  42
-rw-r--r--  jjb/global/releng-macros.yml               |  10
-rw-r--r--  jjb/opera/opera-daily-jobs.yml             |  34
-rw-r--r--  jjb/releng/testapi-automate.yml            | 129
-rw-r--r--  jjb/releng/testapi-docker-deploy.sh        |  81
-rw-r--r--  jjb/yardstick/yardstick-project-jobs.yml   |  17
-rwxr-xr-x  prototypes/bifrost/scripts/destroy-env.sh  |   9
-rw-r--r--  utils/test/testapi/htmlize/htmlize.py      |   4
-rw-r--r--  utils/test/vnfcatalogue/helpers/README.md  |  22
-rw-r--r--  utils/test/vnfcatalogue/helpers/migrate.js |  78
-rw-r--r--  utils/test/vnfcatalogue/helpers/schema.js  |  51
14 files changed, 421 insertions(+), 67 deletions(-)
diff --git a/jjb/armband/armband-deploy.sh b/jjb/armband/armband-deploy.sh
index 6ddd2e961..adabfcaeb 100755
--- a/jjb/armband/armband-deploy.sh
+++ b/jjb/armband/armband-deploy.sh
@@ -8,7 +8,6 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-set -o errexit
set -o nounset
set -o pipefail
diff --git a/jjb/compass4nfv/compass-dovetail-jobs.yml b/jjb/compass4nfv/compass-dovetail-jobs.yml
index 3337cd0ca..0fd2f9e66 100644
--- a/jjb/compass4nfv/compass-dovetail-jobs.yml
+++ b/jjb/compass4nfv/compass-dovetail-jobs.yml
@@ -28,7 +28,7 @@
scenario:
- 'os-nosdn-nofeature-ha':
disabled: false
- auto-trigger-name: 'weekly-trigger-disabled'
+ auto-trigger-name: 'dovetail-weekly-trigger'
jobs:
- 'compass-{scenario}-{pod}-weekly-{stream}'
diff --git a/jjb/doctor/doctor.yml b/jjb/doctor/doctor.yml
index 11b4ffbc2..2333fca14 100644
--- a/jjb/doctor/doctor.yml
+++ b/jjb/doctor/doctor.yml
@@ -22,10 +22,9 @@
- fuel:
slave-label: 'ool-virtual2'
pod: 'ool-virtual2'
- # TODO(r-mibu): enable this once joid is ready
- #- joid:
- # slave-label: 'ool-virtual3'
- # pod: 'ool-virtual3'
+ - joid:
+ slave-label: 'ool-virtual3'
+ pod: 'ool-virtual3'
inspector:
- 'sample'
@@ -145,6 +144,7 @@
branch: '{branch}'
builders:
+ - 'clean-workspace-log'
- 'functest-suite-builder'
- shell: |
functest_log="$HOME/opnfv/functest/results/{stream}/{project}.log"
diff --git a/jjb/fuel/fuel-daily-jobs.yml b/jjb/fuel/fuel-daily-jobs.yml
index a9af1bcba..02267bdf9 100644
--- a/jjb/fuel/fuel-daily-jobs.yml
+++ b/jjb/fuel/fuel-daily-jobs.yml
@@ -83,6 +83,8 @@
auto-trigger-name: 'daily-trigger-disabled'
- 'os-nosdn-kvm_ovs_dpdk-ha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
+ - 'os-nosdn-kvm_ovs_dpdk_bar-ha':
+ auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
# NOHA scenarios
- 'os-nosdn-nofeature-noha':
auto-trigger-name: 'fuel-{scenario}-{pod}-daily-{stream}-trigger'
@@ -311,6 +313,10 @@
name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-baremetal-daily-master-trigger'
triggers:
- timed: '30 12 * * *'
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-baremetal-daily-master-trigger'
+ triggers:
+ - timed: '30 8 * * *'
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-master-trigger'
@@ -396,6 +402,10 @@
name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-baremetal-daily-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-baremetal-daily-danube-trigger'
+ triggers:
+ - timed: ''
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-baremetal-daily-danube-trigger'
@@ -480,6 +490,10 @@
name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-virtual-daily-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-virtual-daily-master-trigger'
+ triggers:
+ - timed: ''
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-master-trigger'
@@ -564,6 +578,10 @@
name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-virtual-daily-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-virtual-daily-danube-trigger'
+ triggers:
+ - timed: ''
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-virtual-daily-danube-trigger'
@@ -648,6 +666,10 @@
name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod1-daily-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod1-daily-master-trigger'
+ triggers:
+ - timed: ''
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-zte-pod1-daily-master-trigger'
@@ -733,6 +755,10 @@
name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod2-daily-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod2-daily-master-trigger'
+ triggers:
+ - timed: ''
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-zte-pod2-daily-master-trigger'
@@ -817,6 +843,10 @@
name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod3-daily-master-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod3-daily-master-trigger'
+ triggers:
+ - timed: ''
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-zte-pod3-daily-master-trigger'
@@ -901,6 +931,10 @@
name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod1-daily-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod1-daily-danube-trigger'
+ triggers:
+ - timed: ''
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-zte-pod1-daily-danube-trigger'
@@ -986,6 +1020,10 @@
name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod2-daily-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod2-daily-danube-trigger'
+ triggers:
+ - timed: ''
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-zte-pod2-daily-danube-trigger'
@@ -1070,6 +1108,10 @@
name: 'fuel-os-nosdn-kvm_ovs_dpdk-ha-zte-pod3-daily-danube-trigger'
triggers:
- timed: ''
+- trigger:
+ name: 'fuel-os-nosdn-kvm_ovs_dpdk_bar-ha-zte-pod3-daily-danube-trigger'
+ triggers:
+ - timed: ''
# NOHA Scenarios
- trigger:
name: 'fuel-os-nosdn-nofeature-noha-zte-pod3-daily-danube-trigger'
diff --git a/jjb/global/releng-macros.yml b/jjb/global/releng-macros.yml
index e6440966a..06152fe6b 100644
--- a/jjb/global/releng-macros.yml
+++ b/jjb/global/releng-macros.yml
@@ -68,9 +68,9 @@
- timed: ''
- trigger:
- name: 'weekly-trigger-disabled'
+ name: 'dovetail-weekly-trigger'
triggers:
- - timed: ''
+ - timed: 'H H * * 0'
# NOTE: unused macro, but we may use this for some jobs.
- trigger:
@@ -423,6 +423,12 @@
sed -r -i '4,$s/^/ /g' lint.log
fi
+- builder:
+ name: clean-workspace-log
+ builders:
+ - shell: |
find $WORKSPACE -type f -name '*.log' -print | xargs rm -f
+
- publisher:
name: archive-artifacts
publishers:
diff --git a/jjb/opera/opera-daily-jobs.yml b/jjb/opera/opera-daily-jobs.yml
index 47aa2a43d..f1ea1aa74 100644
--- a/jjb/opera/opera-daily-jobs.yml
+++ b/jjb/opera/opera-daily-jobs.yml
@@ -63,9 +63,6 @@
project: '{project}'
branch: '{branch}'
- 'huawei-virtual7-defaults'
- - 'compass-defaults'
- - 'opera-compass-parameter':
- gs-pathname: '{gs-pathname}'
builders:
- description-setter:
@@ -84,7 +81,10 @@
condition: SUCCESSFUL
projects:
- name: 'compass-deploy-virtual-daily-{stream}'
- current-parameters: true
+ current-parameters: false
+ predefined-parameters: |
+ DEPLOY_SCENARIO=os-nosdn-openo-noha
+ COMPASS_OS_VERSION=xenial
node-parameters: true
kill-phase-on: FAILURE
abort-all-job: true
@@ -93,7 +93,7 @@
# condition: SUCCESSFUL
# projects:
# - name: 'functest-compass-baremetal-suite-{stream}'
-# current-parameters: true
+# current-parameters: false
# predefined-parameters:
# FUNCTEST_SUITE_NAME=opera
# node-parameters: true
@@ -145,27 +145,3 @@
#!/bin/bash
echo "Hello world!"
-########################
-# parameter macros
-########################
-- parameter:
- name: opera-compass-parameter
- parameters:
- - string:
- name: BUILD_DIRECTORY
- default: $WORKSPACE/build_output
- description: "Directory where the build artifact will be located upon the completion of the build."
- - string:
- name: GS_URL
- default: '$GS_BASE{gs-pathname}'
- description: "URL to Google Storage."
- - choice:
- name: COMPASS_OPENSTACK_VERSION
- choices:
- - 'newton'
- - string:
- name: DEPLOY_SCENARIO
- default: 'os-nosdn-openo-noha'
- - string:
- name: COMPASS_OS_VERSION
- default: 'xenial'
diff --git a/jjb/releng/testapi-automate.yml b/jjb/releng/testapi-automate.yml
index 47d217e51..b06072b73 100644
--- a/jjb/releng/testapi-automate.yml
+++ b/jjb/releng/testapi-automate.yml
@@ -4,8 +4,16 @@
- master:
branch: '{stream}'
gs-pathname: ''
+
+ phase:
+ - 'docker-update'
+ - 'docker-deploy':
+ slave-label: 'testresults'
+ - 'generate-doc'
+
jobs:
- 'testapi-automate-{stream}'
+ - 'testapi-automate-{phase}-{stream}'
- 'testapi-verify-{stream}'
project: 'releng'
@@ -15,6 +23,11 @@
slave-label: 'testresults'
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+
scm:
- git-scm
@@ -80,10 +93,20 @@
healthy: 50
unhealthy: 40
failing: 30
+ - 'email-publisher'
- job-template:
name: 'testapi-automate-{stream}'
+ project-type: multijob
+
+ properties:
+ - throttle:
+ enabled: true
+ max-total: 1
+ max-per-node: 1
+ option: 'project'
+
parameters:
- project-parameter:
project: '{project}'
@@ -97,6 +120,12 @@
scm:
- git-scm
+ wrappers:
+ - ssh-agent-wrapper
+ - timeout:
+ timeout: 360
+ fail: true
+
triggers:
- gerrit:
server-name: 'gerrit.opnfv.org'
@@ -112,12 +141,72 @@
branch-pattern: '**/{branch}'
file-paths:
- compare-type: 'ANT'
- pattern: 'utils/**'
+ pattern: 'utils/test/testapi/**'
+
+ builders:
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - multijob:
+ name: docker-update
+ condition: SUCCESSFUL
+ projects:
+ - name: 'testapi-automate-docker-update-{stream}'
+ current-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: docker-deploy
+ condition: SUCCESSFUL
+ projects:
+ - name: 'testapi-automate-docker-deploy-{stream}'
+ current-parameters: false
+ node-label-name: SLAVE_LABEL
+ node-label: testresults
+ kill-phase-on: FAILURE
+ abort-all-job: true
+ - multijob:
+ name: generate-doc
+ condition: SUCCESSFUL
+ projects:
+ - name: 'testapi-automate-generate-doc-{stream}'
+ current-parameters: true
+ kill-phase-on: FAILURE
+ abort-all-job: true
+
+ publishers:
+ - 'email-publisher'
+
+- job-template:
+ name: 'testapi-automate-{phase}-{stream}'
+
+ properties:
+ - throttle:
+ enabled: true
+ max-per-node: 1
+ option: 'project'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ branch: '{branch}'
+ - string:
+ name: DOCKER_TAG
+ default: "latest"
+ description: "Tag name for testapi docker image"
+
+ wrappers:
+ - ssh-agent-wrapper
+ - timeout:
+ timeout: 120
+ fail: true
+
+ scm:
+ - git-scm
builders:
- - docker-update
- - testapi-doc-build
- - upload-doc-artifact
+ - description-setter:
+ description: "Built on $NODE_NAME"
+ - 'testapi-automate-{phase}-macro'
################################
# job builders
@@ -129,25 +218,49 @@
bash ./jjb/releng/testapi-backup-mongodb.sh
- builder:
- name: run-unit-tests
+ name: 'run-unit-tests'
builders:
- shell: |
bash ./utils/test/testapi/run_test.sh
- builder:
- name: docker-update
+ name: 'testapi-automate-docker-update-macro'
builders:
- shell: |
bash ./jjb/releng/testapi-docker-update.sh
- builder:
- name: testapi-doc-build
+ name: 'testapi-automate-generate-doc-macro'
+ builders:
+ - 'testapi-doc-build'
+ - 'upload-doc-artifact'
+
+- builder:
+ name: 'testapi-doc-build'
builders:
- shell: |
bash ./utils/test/testapi/htmlize/doc-build.sh
- builder:
- name: upload-doc-artifact
+ name: 'upload-doc-artifact'
builders:
- shell: |
bash ./utils/test/testapi/htmlize/push-doc-artifact.sh
+
+- builder:
+ name: 'testapi-automate-docker-deploy-macro'
+ builders:
+ - shell: |
+ bash ./jjb/releng/testapi-docker-deploy.sh
+
+################################
+# job publishers
+################################
+
+- publisher:
+ name: 'email-publisher'
+ publishers:
+ - email:
+ recipients: rohitsakala@gmail.com serena.feng.711@gmail.com
+ notify-every-unstable-build: false
+ send-to-individuals: true
diff --git a/jjb/releng/testapi-docker-deploy.sh b/jjb/releng/testapi-docker-deploy.sh
new file mode 100644
index 000000000..04d71f76e
--- /dev/null
+++ b/jjb/releng/testapi-docker-deploy.sh
@@ -0,0 +1,81 @@
+#!/bin/bash
+
+function check() {
+
+ # Verify hosted
+ sleep 5
+ cmd=`curl -s --head --request GET http://testresults.opnfv.org/auto/swagger/spec | grep '200 OK' > /dev/null`
+ rc=$?
+ echo $rc
+
+ if [[ $rc == 0 ]]
+ then
+ return 0
+ else
+ return 1
+ fi
+
+}
+
+echo "Getting contianer Id of the currently running one"
+contId=$(sudo docker ps | grep "opnfv/testapi:latest" | awk '{print $1}')
+
+echo "Pulling the latest image"
+sudo docker pull opnfv/testapi:latest
+
+echo "Deleting old containers of opnfv/testapi:old"
+sudo docker ps -a | grep "opnfv/testapi" | grep "old" | awk '{print $1}' | xargs -r sudo docker rm -f
+
+echo "Deleting old images of opnfv/testapi:latest"
+sudo docker images | grep "opnfv/testapi" | grep "old" | awk '{print $3}' | xargs -r sudo docker rmi -f
+
+
+if [[ -z "$contId" ]]
+then
+ echo "No running testapi container"
+
+ echo "Removing stopped testapi containers in the previous iterations"
+ sudo docker ps -f status=exited | grep "opnfv_testapi" | awk '{print $1}' | xargs -r sudo docker rm -f
+else
+ echo $contId
+
+ echo "Get the image id of the currently running conatiner"
+ currImgId=$(sudo docker ps | grep "$contId" | awk '{print $2}')
+ echo $currImgId
+
+ if [[ -z "$currImgId" ]]
+ then
+ echo "No image id found for the container id"
+ exit 1
+ fi
+
+ echo "Changing current image tag to old"
+ sudo docker tag "$currImgId" opnfv/testapi:old
+
+ echo "Removing stopped testapi containers in the previous iteration"
+ sudo docker ps -f status=exited | grep "opnfv_testapi" | awk '{print $1}' | xargs -r sudo docker rm -f
+
+ echo "Renaming the running container name to opnfv_testapi as to identify it."
+ sudo docker rename $contId opnfv_testapi
+
+ echo "Stop the currently running container"
+ sudo docker stop $contId
+fi
+
+echo "Running a container with the new image"
+sudo docker run -dti -p "8711:8000" -e "mongodb_url=mongodb://172.17.0.1:27017" -e "swagger_url=http://testresults.opnfv.org/auto" opnfv/testapi:latest
+
+if check; then
+ echo "TestResults Hosted."
+else
+ echo "TestResults Hosting Failed"
+ if [[ $(sudo docker images | grep "opnfv/testapi" | grep "old" | awk '{print $3}') ]]; then
+ echo "Running old Image"
+ sudo docker run -dti -p "8711:8000" -e "mongodb_url=mongodb://172.17.0.1:27017" -e "swagger_url=http://testresults.opnfv.org/auto" opnfv/testapi:old
+ exit 1
+ fi
+fi
+
+# Echo Images and Containers
+sudo docker images
+sudo docker ps -a
diff --git a/jjb/yardstick/yardstick-project-jobs.yml b/jjb/yardstick/yardstick-project-jobs.yml
index 4b7ff6faf..bbfa152a2 100644
--- a/jjb/yardstick/yardstick-project-jobs.yml
+++ b/jjb/yardstick/yardstick-project-jobs.yml
@@ -108,19 +108,8 @@
set -o errexit
set -o pipefail
+ sudo apt-get install -y build-essential python-dev python3-dev
+
echo "Running unit tests..."
cd $WORKSPACE
- virtualenv $WORKSPACE/yardstick_venv
- source $WORKSPACE/yardstick_venv/bin/activate
-
- # install python packages
- sudo apt-get install -y build-essential python-dev python-pip python-pkg-resources
- easy_install -U setuptools==33.1.1
- easy_install -U pip
- pip install -r requirements.txt || pip install -r tests/ci/requirements.txt
- pip install -e .
-
- # unit tests
- ./run_tests.sh
-
- deactivate
+ tox
diff --git a/prototypes/bifrost/scripts/destroy-env.sh b/prototypes/bifrost/scripts/destroy-env.sh
index 9920046e1..14869b0e2 100755
--- a/prototypes/bifrost/scripts/destroy-env.sh
+++ b/prototypes/bifrost/scripts/destroy-env.sh
@@ -23,17 +23,14 @@ virsh undefine compute00.opnfvlocal || true
service ironic-conductor stop || true
-echo "removing from database"
+echo "removing ironic database"
if $(which mysql &> /dev/null); then
- mysql -u root ironic --execute "truncate table ports;"
- mysql -u root ironic --execute "delete from node_tags;"
- mysql -u root ironic --execute "delete from nodes;"
- mysql -u root ironic --execute "delete from conductors;"
+ mysql -u root ironic --execute "drop database ironic;"
fi
echo "removing leases"
[[ -e /var/lib/misc/dnsmasq/dnsmasq.leases ]] && > /var/lib/misc/dnsmasq/dnsmasq.leases
echo "removing logs"
-rm -rf /var/log/libvirt/baremetal_logs/*.log
+rm -rf /var/log/libvirt/baremetal_logs/*
# clean up dib images only if requested explicitly
CLEAN_DIB_IMAGES=${CLEAN_DIB_IMAGES:-false}
diff --git a/utils/test/testapi/htmlize/htmlize.py b/utils/test/testapi/htmlize/htmlize.py
index 075e31f79..70976d2bc 100644
--- a/utils/test/testapi/htmlize/htmlize.py
+++ b/utils/test/testapi/htmlize/htmlize.py
@@ -39,12 +39,12 @@ if __name__ == '__main__':
parser.add_argument('-ru', '--resource-listing-url',
type=str,
required=False,
- default='http://testresults.opnfv.org/test/swagger/spec.json',
+ default='http://testresults.opnfv.org/auto/swagger/spec.json',
help='Resource Listing Spec File')
parser.add_argument('-au', '--api-declaration-url',
type=str,
required=False,
- default='http://testresults.opnfv.org/test/swagger/spec',
+ default='http://testresults.opnfv.org/auto/swagger/spec',
help='API Declaration Spec File')
parser.add_argument('-o', '--output-directory',
required=True,
diff --git a/utils/test/vnfcatalogue/helpers/README.md b/utils/test/vnfcatalogue/helpers/README.md
new file mode 100644
index 000000000..6c0ca78c3
--- /dev/null
+++ b/utils/test/vnfcatalogue/helpers/README.md
@@ -0,0 +1,22 @@
+# Helper Directory
+
+## Helper to migrate database
+
+First make sure nodejs and mysql are installed. Then use
+
+```bash
+npm install bookshelf mysql knex when lodash --save
+```
+
+Create a database named **vnf_catalogue**.
+Enter the mysql credentials in migrate.js.
+
+Then use
+
+```bash
+node migrate
+```
+
+If successful, the script will return a success message. The current script is
+idempotent in nature; if run twice it will just return an error and write nothing.
+
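Editor's note: the credentials step in the README above edits the connection block at the top of migrate.js (shown in full in the next file). A minimal sketch of that edit, with placeholder credentials:

```javascript
// Connection settings read by knex in migrate.js. Replace user and
// password with real MySQL credentials (placeholders shown); the
// database name must match the vnf_catalogue database created above.
var knex = require('knex')({
    client: 'mysql',
    connection: {
        host    : 'localhost',
        user    : 'vnf_user',      // placeholder
        password: 'changeme',      // placeholder
        database: 'vnf_catalogue',
        charset : 'utf8'
    }
});
```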
diff --git a/utils/test/vnfcatalogue/helpers/migrate.js b/utils/test/vnfcatalogue/helpers/migrate.js
new file mode 100644
index 000000000..ec209053c
--- /dev/null
+++ b/utils/test/vnfcatalogue/helpers/migrate.js
@@ -0,0 +1,78 @@
+/*******************************************************************************
+ * Copyright (c) 2017 Kumar Rishabh(penguinRaider) and others.
+ *
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Apache License, Version 2.0
+ * which accompanies this distribution, and is available at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *******************************************************************************/
+
+var knex = require('knex')({
+ client: 'mysql',
+ connection: {
+ host : 'localhost',
+ user : '*',
+ password : '*',
+ database : 'vnf_catalogue',
+ charset : 'utf8'
+ }
+});
+var Schema = require('./schema');
+var sequence = require('when/sequence');
+var _ = require('lodash');
+function createTable(tableName) {
+ return knex.schema.createTable(tableName, function (table) {
+ var column;
+ var columnKeys = _.keys(Schema[tableName]);
+ _.each(columnKeys, function (key) {
+ if (Schema[tableName][key].type === 'text' && Schema[tableName][key].hasOwnProperty('fieldtype')) {
+ column = table[Schema[tableName][key].type](key, Schema[tableName][key].fieldtype);
+ }
+ else if (Schema[tableName][key].type === 'string' && Schema[tableName][key].hasOwnProperty('maxlength')) {
+ column = table[Schema[tableName][key].type](key, Schema[tableName][key].maxlength);
+ }
+ else {
+ column = table[Schema[tableName][key].type](key);
+ }
+ if (Schema[tableName][key].hasOwnProperty('nullable') && Schema[tableName][key].nullable === true) {
+ column.nullable();
+ }
+ else {
+ column.notNullable();
+ }
+ if (Schema[tableName][key].hasOwnProperty('primary') && Schema[tableName][key].primary === true) {
+ column.primary();
+ }
+ if (Schema[tableName][key].hasOwnProperty('unique') && Schema[tableName][key].unique) {
+ column.unique();
+ }
+ if (Schema[tableName][key].hasOwnProperty('unsigned') && Schema[tableName][key].unsigned) {
+ column.unsigned();
+ }
+ if (Schema[tableName][key].hasOwnProperty('references')) {
+ column.references(Schema[tableName][key].references);
+ }
+ if (Schema[tableName][key].hasOwnProperty('defaultTo')) {
+ column.defaultTo(Schema[tableName][key].defaultTo);
+ }
+ });
+ });
+}
+function createTables () {
+ var tables = [];
+ var tableNames = _.keys(Schema);
+ tables = _.map(tableNames, function (tableName) {
+ return function () {
+ return createTable(tableName);
+ };
+ });
+ return sequence(tables);
+}
+createTables()
+.then(function() {
+ console.log('Tables created!!');
+ process.exit(0);
+})
+.catch(function (error) {
+ throw error;
+});
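Editor's note: as a worked example of what createTable above produces (a sketch for illustration, not part of the commit), the photo table defined in schema.js below resolves to the following knex schema-builder calls:

```javascript
// Equivalent knex calls generated by createTable('photo') for the entries
// {type: 'increments', primary: true} and
// {type: 'string', maxlength: 254, nullable: false} in schema.js.
knex.schema.createTable('photo', function (table) {
    table.increments('photo_id').notNullable().primary();
    table.string('photo_url', 254).notNullable();
});
```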
diff --git a/utils/test/vnfcatalogue/helpers/schema.js b/utils/test/vnfcatalogue/helpers/schema.js
new file mode 100644
index 000000000..2aaf99ae2
--- /dev/null
+++ b/utils/test/vnfcatalogue/helpers/schema.js
@@ -0,0 +1,51 @@
+/*******************************************************************************
+ * Copyright (c) 2017 Kumar Rishabh(penguinRaider) and others.
+ *
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Apache License, Version 2.0
+ * which accompanies this distribution, and is available at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *******************************************************************************/
+var Schema = {
+ photo: {
+ photo_id: {type: 'increments', nullable: false, primary: true},
+ photo_url: {type: 'string', maxlength: 254, nullable: false}
+ },
+ user: {
+ user_id: {type: 'increments', nullable: false, primary: true},
+ user_name: {type: 'string', maxlength: 254, nullable: false},
+ password: {type: 'string', maxlength: 150, nullable: false},
+ email_id: {type: 'string', maxlength: 254, nullable: false, unique: true, validations: {isEmail: true}},
+ photo_id: {type: 'integer', nullable: true, unsigned: true, references: 'photo.photo_id'},
+ company: {type: 'string', maxlength: 254, nullable: false},
+ introduction: {type: 'string', maxlength: 510, nullable: false},
+ last_login: {type: 'dateTime', nullable: true},
+ created_at: {type: 'dateTime', nullable: false},
+ },
+ vnf: {
+ vnf_id: {type: 'increments', nullable: false, primary: true},
+ vnf_name: {type: 'string', maxlength: 254, nullable: false},
+ repo_url: {type: 'string', maxlength: 254, nullable: false},
+ photo_id: {type: 'integer', nullable: true, unsigned: true, references: 'photo.photo_id'},
+ submitter_id: {type: 'integer', nullable: false, unsigned: true, references: 'user.user_id'},
+ lines_of_code: {type: 'integer', nullable: true, unsigned: true},
+ versions: {type: 'integer', nullable: true, unsigned: true},
+ no_of_developers: {type: 'integer', nullable: true, unsigned: true},
+ },
+ tag: {
+ tag_id: {type: 'increments', nullable: false, primary: true},
+ name: {type: 'string', maxlength: 150, nullable: false}
+ },
+ vnf_tags: {
+ vnf_tag_id: {type: 'increments', nullable: false, primary: true},
+ tag_id: {type: 'integer', nullable: false, unsigned: true, references: 'tag.tag_id'},
+ vnf_id: {type: 'integer', nullable: false, unsigned: true, references: 'vnf.vnf_id'},
+ },
+ vnf_contributors: {
+ vnf_contributors_id: {type: 'increments', nullable: false, primary: true},
+ user_id: {type: 'integer', nullable: false, unsigned: true, references: 'user.user_id'},
+ vnf_id: {type: 'integer', nullable: false, unsigned: true, references: 'vnf.vnf_id'},
+ created_at: {type: 'dateTime', nullable: false},
+ }
+};
+module.exports = Schema;
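Editor's note: should another table be needed later, an entry in the same shape is enough; a hypothetical example (not part of this commit) using the keys that migrate.js understands (type, maxlength, nullable, primary, unique, unsigned, references, defaultTo):

```javascript
// Hypothetical table illustrating the schema conventions above;
// not part of this commit.
vnf_downloads: {
    vnf_download_id: {type: 'increments', nullable: false, primary: true},
    vnf_id: {type: 'integer', nullable: false, unsigned: true, references: 'vnf.vnf_id'},
    download_count: {type: 'integer', nullable: false, unsigned: true, defaultTo: 0},
    last_download: {type: 'dateTime', nullable: true}
}
```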