 deploy/config/bm_environment/zte-baremetal1/deploy.yml |  3
 deploy/config/schemas.py                                | 74
 deploy/config/vm_environment/zte-virtual1/deploy.yml    |  5
 deploy/config/vm_environment/zte-virtual2/deploy.yml    |  5
 deploy/deploy.py                                         | 24
 deploy/environment.py                                    | 11
 requirements.txt                                         |  1
 test-requirements.txt                                    |  6
 8 files changed, 106 insertions(+), 23 deletions(-)
diff --git a/deploy/config/bm_environment/zte-baremetal1/deploy.yml b/deploy/config/bm_environment/zte-baremetal1/deploy.yml
index 58516e88..d24a9569 100644
--- a/deploy/config/bm_environment/zte-baremetal1/deploy.yml
+++ b/deploy/config/bm_environment/zte-baremetal1/deploy.yml
@@ -1,3 +1,4 @@
+adapter: ipmi
hosts:
  - name: 'controller01'
    roles:
@@ -16,8 +17,6 @@ hosts:
      - 'COMPUTER'
disks:
  daisy: 50
-  controller: 50
-  compute: 50
daisy_passwd: 'r00tme'
daisy_ip: '10.20.7.3'
daisy_gateway: '10.20.7.1'
diff --git a/deploy/config/schemas.py b/deploy/config/schemas.py
new file mode 100644
index 00000000..52ded2b4
--- /dev/null
+++ b/deploy/config/schemas.py
@@ -0,0 +1,74 @@
+##############################################################################
+# Copyright (c) 2017 ZTE Corporation and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from jsonschema import Draft4Validator, FormatChecker
+
+
+MIN_DAISY_DISK_SIZE = 50
+# minimal size of root_lv_size is 102400 mega-bytes
+MIN_NODE_DISK_SIZE = 110
+
+hosts_schema = {
+    'type': 'array',
+    'items': {
+        'type': 'object',
+        'properties': {
+            'name': {'type': 'string', 'minLength': 1},
+            'roles': {
+                'type': 'array',
+                'items': {
+                    'type': 'string',
+                    'enum': ['COMPUTER', 'CONTROLLER_LB', 'CONTROLLER_HA']
+                }
+            }
+        }
+    }
+}
+
+disks_schema = {
+    'type': 'object',
+    'properties': {
+        'daisy': {'type': 'integer', 'minimum': MIN_DAISY_DISK_SIZE},
+        'controller': {'type': 'integer', 'minimum': MIN_NODE_DISK_SIZE},
+        'compute': {'type': 'integer', 'minimum': MIN_NODE_DISK_SIZE}
+    }
+}
+
+schema_mapping = {
+    'adapter': {'type': 'string', 'enum': ['ipmi', 'libvirt']},
+    'hosts': hosts_schema,
+    'disks': disks_schema,
+    'daisy_passwd': {'type': 'string'},
+    'daisy_ip': {'type': 'string', 'format': 'ip-address'},
+    'daisy_gateway': {'type': 'string', 'format': 'ip-address'},
+    'ceph_disk_name': {'type': 'string'},
+}
+
+deploy_schema = {
+    'type': 'object',
+    'properties': schema_mapping,
+    'required': ['hosts', 'daisy_passwd', 'daisy_ip', 'daisy_gateway']
+}
+
+
+def _validate(data, schema):
+    v = Draft4Validator(schema, format_checker=FormatChecker())
+    errors = sorted(v.iter_errors(data), key=lambda e: e.path)
+    return errors
+
+
+def item_validate(data, schema_type):
+    if schema_type not in schema_mapping:
+        return str('Schema Type %s does not exist' % schema_type)
+    else:
+        return _validate(data, schema_mapping.get(schema_type))
+
+
+def deploy_schema_validate(data):
+    return _validate(data, deploy_schema)
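
Note: a minimal usage sketch of the new validators (not part of this change). It assumes the interpreter runs from the deploy/ directory so that config.schemas resolves the same way it does for deploy.py; the file path and error formatting below are illustrative only.

import yaml

from config.schemas import deploy_schema_validate, item_validate

# Hypothetical path; deploy.py derives the real one from its own arguments.
with open('config/vm_environment/zte-virtual1/deploy.yml') as f:
    deploy_struct = yaml.safe_load(f)

# Validate the whole document against deploy_schema; no output means it passed.
for err in deploy_schema_validate(deploy_struct):
    print('deploy.yml invalid: %s' % err.message)

# Or validate a single top-level item against its entry in schema_mapping.
disk_errors = item_validate(deploy_struct.get('disks', {}), 'disks')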
diff --git a/deploy/config/vm_environment/zte-virtual1/deploy.yml b/deploy/config/vm_environment/zte-virtual1/deploy.yml
index 14508fae..0b3a2c52 100644
--- a/deploy/config/vm_environment/zte-virtual1/deploy.yml
+++ b/deploy/config/vm_environment/zte-virtual1/deploy.yml
@@ -1,3 +1,4 @@
+adapter: libvirt
hosts:
  - name: 'all_in_one'
    roles:
@@ -5,8 +6,8 @@ hosts:
      - 'COMPUTER'
disks:
  daisy: 50
-  controller: 50
-  compute: 50
+  controller: 110
+  compute: 110
daisy_passwd: 'r00tme'
daisy_ip: '10.20.11.2'
daisy_gateway: '10.20.11.1'
diff --git a/deploy/config/vm_environment/zte-virtual2/deploy.yml b/deploy/config/vm_environment/zte-virtual2/deploy.yml
index f2c8ac11..e086e7cf 100644
--- a/deploy/config/vm_environment/zte-virtual2/deploy.yml
+++ b/deploy/config/vm_environment/zte-virtual2/deploy.yml
@@ -1,3 +1,4 @@
+adapter: libvirt
hosts:
  - name: 'controller01'
    roles:
@@ -16,8 +17,8 @@ hosts:
      - 'COMPUTER'
disks:
  daisy: 50
-  controller: 50
-  compute: 50
+  controller: 110
+  compute: 110
daisy_passwd: 'r00tme'
daisy_ip: '10.20.11.2'
daisy_gateway: '10.20.11.1'
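
Note: the controller/compute sizes in the virtual PODs are raised from 50 to 110 GB so they satisfy MIN_NODE_DISK_SIZE in the new schema. A small illustrative check (assuming config.schemas is importable as above; the dicts are made up for the example):

from config.schemas import item_validate

# Old virtual-environment sizes versus the updated ones.
old_disks = {'daisy': 50, 'controller': 50, 'compute': 50}
new_disks = {'daisy': 50, 'controller': 110, 'compute': 110}

print([e.message for e in item_validate(old_disks, 'disks')])
# e.g. ["50 is less than the minimum of 110", "50 is less than the minimum of 110"]
print(item_validate(new_disks, 'disks'))  # [] -- no violations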
diff --git a/deploy/deploy.py b/deploy/deploy.py
index 23464b5a..5926a74d 100644
--- a/deploy/deploy.py
+++ b/deploy/deploy.py
@@ -20,10 +20,17 @@
import argparse
import yaml
+
+from config.schemas import (
+    MIN_DAISY_DISK_SIZE,
+    deploy_schema_validate
+)
from utils import (
    WORKSPACE,
    save_log_to_file,
    LI,
+    LE,
+    err_exit,
    log_bar,
    path_join,
    check_sudo_privilege,
@@ -31,7 +38,6 @@ from utils import (
    make_file_executable,
    confirm_dir_exists
)
-
from environment import (
    DaisyEnvironment,
)
@@ -64,6 +70,11 @@ class DaisyDeployment(object):
        self.pxe_bridge = pxe_bridge
        self.deploy_log = deploy_log
+        result = deploy_schema_validate(self.deploy_struct)
+        if result:
+            LE(result)
+            err_exit('Configuration deploy.yml check failed!')
+
        self.adapter = self._get_adapter_info()
        LI('The adapter is %s' % self.adapter)
@@ -84,19 +95,14 @@ class DaisyDeployment(object):
                                          self.storage_dir)

    def _get_adapter_info(self):
-        # TODO: specify the adapter info in deploy.yml
-        if 'adapter' in self.deploy_struct:
-            return self.deploy_struct['adapter']
-        elif self.pod_name and 'virtual' in self.pod_name:
-            return 'libvirt'
-        else:
-            return 'ipmi'
+        default_adapter = 'libvirt' if 'virtual' in self.pod_name else 'ipmi'
+        return self.deploy_struct.get('adapter', default_adapter)

    def _get_daisy_server_info(self):
        address = self.deploy_struct.get('daisy_ip', '10.20.11.2')
        gateway = self.deploy_struct.get('daisy_gateway', '10.20.11.1')
        password = self.deploy_struct.get('daisy_passwd', 'r00tme')
-        disk_size = self.deploy_struct.get('disks', {'daisy': 50})['daisy']
+        disk_size = self.deploy_struct.get('disks', {}).get('daisy', MIN_DAISY_DISK_SIZE)
        # TODO: get VM name of daisy server from deploy.yml or vm template
        name = 'daisy'
        image = path_join(self.storage_dir, name + '.qcow2')
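
Note: the chained .get() above also fixes a latent KeyError. With the old form, a deploy.yml that defines disks without a daisy entry bypasses the default dict and then fails on the ['daisy'] lookup. A standalone illustration (not code from the repository):

MIN_DAISY_DISK_SIZE = 50  # mirrors the constant imported from config.schemas
deploy_struct = {'disks': {'controller': 110, 'compute': 110}}  # no 'daisy' key

# Old form: 'disks' exists, so the default dict is ignored and this raises KeyError.
# deploy_struct.get('disks', {'daisy': 50})['daisy']

# New form: falls back to MIN_DAISY_DISK_SIZE when either level is missing.
disk_size = deploy_struct.get('disks', {}).get('daisy', MIN_DAISY_DISK_SIZE)
print(disk_size)  # 50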
diff --git a/deploy/environment.py b/deploy/environment.py
index 088e5008..21a0970a 100644
--- a/deploy/environment.py
+++ b/deploy/environment.py
@@ -11,6 +11,9 @@ import os
import shutil
import time
+from config.schemas import (
+    MIN_NODE_DISK_SIZE,
+)
from daisy_server import (
    DaisyServer
)
@@ -183,8 +186,9 @@ class VirtualEnvironment(DaisyEnvironmentBase):
    def create_virtual_node(self, node):
        name = node['name']
        roles = node['roles']
-        controller_size = self.deploy_struct.get('disks', {'controller': 200}).get('controller')
-        compute_size = self.deploy_struct.get('disks', {'compute': 200}).get('compute')
+        disks = self.deploy_struct.get('disks', {})
+        controller_size = disks.get('controller', MIN_NODE_DISK_SIZE)
+        compute_size = disks.get('compute', MIN_NODE_DISK_SIZE)
        LI('Begin to create virtual node %s, roles %s' % (name, roles))
        if 'CONTROLLER_LB' in roles:
@@ -201,9 +205,6 @@ class VirtualEnvironment(DaisyEnvironmentBase):
        if 'template' in node:
            template = node['template']
        disk_file = path_join(self.storage_dir, name + '.qcow2')
-        # TODO: modify the sizes in deploy.yml to more than 100G
-        if size < 200:
-            size = 200
        create_virtual_disk(disk_file, size)
        create_vm(template, name, disk_file)
diff --git a/requirements.txt b/requirements.txt
index 7cf1ddc9..7a329b1e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,5 @@
configobj
+jsonschema
paramiko
pyyaml
scp
diff --git a/test-requirements.txt b/test-requirements.txt
index ea421d4a..1def47c3 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,9 +1,9 @@
configobj
+jsonschema
paramiko
-pyyaml
-scp
pytest
pytest-cov
pytest-faker
pytest-mock
-
+pyyaml
+scp