##############################################################################
# Copyright (c) 2017 ZTE Corporation and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
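"""Helpers for installing and driving OPNFV Daisy on a remote server.

DaisyServer wraps an SSH/SCP session to the Daisy server and exposes the
individual deployment steps: copying the workspace and configuration files,
installing Daisy itself, preparing the cluster, hosts and PXE, installing
the (virtual) nodes and checking OS/OpenStack installation progress.
"""
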
import os
import paramiko
import scp
import tempfile
import time
import yaml
from utils import (
    WORKSPACE,
    LD,
    LI,
    LW,
    err_exit,
    log_bar,
    path_join,
    update_config
)

# Timeout, in seconds, applied to SSH command execution and SCP transfers.
TIMEOUT = 300
# Chunk size, in bytes, used when reading command output from the SSH channel.
BLOCK_SIZE = 1024


def log_from_stream(res, data, log_func):
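    """Log complete lines from a chunk of streamed output.

    ``res`` holds any partial line left over from the previous chunk and is
    prepended to the new data. Complete lines are passed to ``log_func``;
    a trailing partial line shorter than BLOCK_SIZE is returned so the
    caller can carry it over to the next chunk.
    """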
    lines = data.splitlines()
    res_data = res
    if res_data:
        lines[0] = res_data + lines[0]
        res_data = None
    if not data.endswith("\n"):
        res_data = lines[-1]
        del (lines[-1])
    for string in lines:
        log_func(string)
    if res_data and len(res_data) >= BLOCK_SIZE:
        log_func(res_data)
        res_data = None
    return res_data


LEN_OF_NAME_PART = 50
LEN_OF_SIZE_PART = 15


def log_scp(filename, size, send):
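    """SCP progress callback: log one line per fully transferred file.

    Called by scp.SCPClient with the file name, total size and bytes sent;
    nothing is logged until the transfer completes (size == send), then the
    size is reported in B/KB/MB alongside the file name.
    """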
    if size != send:
        return
    unit = " B"
    if size > 1024:
        size /= 1024
        unit = " KB"
    if size > 1024:
        size /= 1024
        unit = " MB"
    name_part = 'SCP: ' + filename + ' '
    size_part = ' ' + str(size) + unit + ' 100%'
    if len(name_part) <= LEN_OF_NAME_PART:
        LD(name_part.ljust(LEN_OF_NAME_PART, '.') + size_part.rjust(LEN_OF_SIZE_PART, '.'))
    else:
        LD(name_part)
        LD(" ".ljust(LEN_OF_NAME_PART, '.') + size_part.rjust(LEN_OF_SIZE_PART, '.'))


class DaisyServer(object):
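    """SSH/SCP front end for a remote Daisy server.

    Deployment steps are executed by copying the required files to the
    server and running the corresponding scripts there over SSH, logging
    their output locally as it is produced.
    """
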
    def __init__(self, name, address, password, remote_dir, bin_file,
                 adapter, scenario, deploy_file_name, net_file_name):
        self.name = name
        self.address = address
        self.password = password
        self.remote_dir = remote_dir
        self.bin_file = bin_file
        self.adapter = adapter
        self.ssh_client = None
        self.scenario = scenario
        self.deploy_file_name = deploy_file_name
        self.net_file_name = net_file_name

    def connect(self):
        """Establish an SSH session as root, retrying up to MAX_COUNT times."""
        LI('Try to connect to Daisy Server ...')
        self.ssh_client = paramiko.SSHClient()
        self.ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        count = 0
        MAX_COUNT = 120
        while count < MAX_COUNT:
            try:
                self.ssh_client.connect(hostname=self.address,
                                        username='root',
                                        password=self.password,
                                        timeout=TIMEOUT)
            except (paramiko.ssh_exception.SSHException,
                    paramiko.ssh_exception.NoValidConnectionsError):
                count += 1
                LD('Attempted SSH connection %d time(s)' % count)
                time.sleep(2)
            else:
                break
        if count >= MAX_COUNT:
            err_exit('SSH connect to Daisy Server failed')
        LI('SSH connection established')
        LI('Try ssh_run: ls -al')
        self.ssh_run('ls -al', check=True)

    def close(self):
        self.ssh_client.close()

    def ssh_exec_cmd(self, cmd):
        """Run a command, return its stdout and abort on any stderr output."""
        stdin, stdout, stderr = self.ssh_client.exec_command(cmd, timeout=TIMEOUT)
        response = stdout.read().strip()
        error = stderr.read().strip()
        if error:
            self.close()
            err_exit('SSH client error occurred')
        else:
            return response

    def ssh_run(self, cmd, check=False, exit_msg='Ssh_run failed'):
        """Run a command on a new channel and stream its output to the log.

        stdout is logged at info level and stderr at warning level as the
        data arrives. Returns the remote exit status; if check is True, a
        non-zero status aborts the deployment with exit_msg.
        """
        transport = self.ssh_client.get_transport()
        transport.set_keepalive(1)
        session = transport.open_session()
        res_data = None
        session.exec_command(cmd)
        while True:
            if session.recv_ready():
                data = session.recv(BLOCK_SIZE)
                while data:
                    res_data = log_from_stream(res_data, data, LI)
                    data = session.recv(BLOCK_SIZE)
                if res_data:
                    LI(res_data)
                    res_data = None
            if session.recv_stderr_ready():
                data = session.recv_stderr(BLOCK_SIZE)
                while data:
                    res_data = log_from_stream(res_data, data, LW)
                    data = session.recv_stderr(BLOCK_SIZE)
                if res_data:
                    LW(res_data)
                    res_data = None
            if session.exit_status_ready():
                break
        status = session.recv_exit_status()
        if check and status:
            err_exit(exit_msg)
        return status

    def scp_get(self, remote, local='.'):
        scp_client = scp.SCPClient(self.ssh_client.get_transport(),
                                   progress=log_scp,
                                   socket_timeout=TIMEOUT)
        scp_client.get(remote, local_path=local, recursive=True)

    def scp_put(self, local, remote='.'):
        scp_client = scp.SCPClient(self.ssh_client.get_transport(),
                                   progress=log_scp,
                                   socket_timeout=TIMEOUT)
        scp_client.put(local, remote_path=remote, recursive=True)

    def create_dir(self, remote_dir):
        cmd = 'mkdir -p %s' % remote_dir
        self.ssh_exec_cmd(cmd)

    def delete_dir(self, remote_dir):
        cmd = 'if [[ -f {DIR} || -d {DIR} ]]; then rm -fr {DIR}; fi'.format(DIR=remote_dir)
        self.ssh_exec_cmd(cmd)

    def prepare_files(self):
        """Copy the workspace, daisy.conf and, if needed, opnfv.bin to the server."""
        self.delete_dir(self.remote_dir)
        LI('Copy WORKSPACE directory to Daisy Server')
        self.scp_put(WORKSPACE, self.remote_dir)
        time.sleep(2)
        LI('Copy finished')
        self.create_dir('/home/daisy_install')
        LI('Write Daisy Server address into daisy.conf')
        update_config(path_join(WORKSPACE, 'deploy/daisy.conf'),
                      'daisy_management_ip',
                      self.address,
                      section='DEFAULT')
        LI('Copy daisy.conf to Daisy Server')
        self.scp_put(path_join(WORKSPACE, 'deploy/daisy.conf'), '/home/daisy_install/')
        if os.path.dirname(os.path.abspath(self.bin_file)) != WORKSPACE:
            LI('Copy opnfv.bin to Daisy Server')
            self.scp_put(self.bin_file, path_join(self.remote_dir, 'opnfv.bin'))

    def install_daisy(self):
        self.prepare_files()
        LI('Begin to install Daisy')
        status = self.ssh_run('%s install' % path_join(self.remote_dir, 'opnfv.bin'))
        log_bar('Daisy installation completed ! status = %s' % status)

    def prepare_configurations(self):
        if self.adapter != 'libvirt':
            return
        LI('Prepare some configuration files')
        cmd = 'export PYTHONPATH={python_path}; python {script} -nw {net_file} -b {is_bare}'.format(
            python_path=self.remote_dir,
            script=path_join(self.remote_dir, 'deploy/prepare/execute.py'),
            net_file=path_join(self.remote_dir, self.net_file_name),
            is_bare=1 if self.adapter == 'ipmi' else 0)
        self.ssh_run(cmd)

    def prepare_cluster(self, deploy_file, net_file):
        LI('Copy cluster configuration files to Daisy Server')
        self.scp_put(deploy_file, path_join(self.remote_dir, self.deploy_file_name))
        self.scp_put(net_file, path_join(self.remote_dir, self.net_file_name))
        self.prepare_configurations()
        LI('Prepare cluster and PXE')
        cmd = "python {script} --dha {deploy_file} --network {net_file} --cluster \'yes\'".format(
            script=path_join(self.remote_dir, 'deploy/tempest.py'),
            deploy_file=path_join(self.remote_dir, self.deploy_file_name),
            net_file=path_join(self.remote_dir, self.net_file_name))
        self.ssh_run(cmd, check=True)

    def copy_new_deploy_config(self, data):
        (dummy, conf_file) = tempfile.mkstemp()
        with open(conf_file, 'w') as fh:
            fh.write(yaml.safe_dump(data))
            fh.flush()
        self.scp_put(conf_file, path_join(self.remote_dir, self.deploy_file_name))

    def prepare_host_and_pxe(self):
        LI('Prepare host and PXE')
        cmd = "python {script} --dha {deploy_file} --network {net_file} --host \'yes\' --isbare {is_bare} --scenario {scenario}".format(
            script=path_join(self.remote_dir, 'deploy/tempest.py'),
            deploy_file=path_join(self.remote_dir, self.deploy_file_name),
            net_file=path_join(self.remote_dir, self.net_file_name),
            is_bare=1 if self.adapter == 'ipmi' else 0,
            scenario=self.scenario)
        self.ssh_run(cmd, check=True)

    def install_virtual_nodes(self):
        LI('Daisy install virtual nodes')
        cmd = "python {script} --dha {deploy_file} --network {net_file} --install \'yes\'".format(
            script=path_join(self.remote_dir, 'deploy/tempest.py'),
            deploy_file=path_join(self.remote_dir, self.deploy_file_name),
            net_file=path_join(self.remote_dir, self.net_file_name))
        self.ssh_run(cmd, check=True)

    def check_os_installation(self, nodes_num):
        LI('Check Operating System installation progress')
        cmd = '{script} -d {is_bare} -n {nodes_num}'.format(
            script=path_join(self.remote_dir, 'deploy/check_os_progress.sh'),
            is_bare=1 if self.adapter == 'ipmi' else 0,
            nodes_num=nodes_num)
        self.ssh_run(cmd, check=True)

    def check_openstack_installation(self, nodes_num):
        LI('Check OpenStack installation progress')
        cmd = '{script} -n {nodes_num}'.format(
            script=path_join(self.remote_dir, 'deploy/check_openstack_progress.sh'),
            nodes_num=nodes_num)
        self.ssh_run(cmd, check=True)

    def post_deploy(self):
        LI('Post deploy ...')
        cmd = 'export PYTHONPATH={python_path}; python {script} -nw {net_file}'.format(
            python_path=self.remote_dir,
            script=path_join(self.remote_dir, 'deploy/post/execute.py'),
            net_file=path_join(self.remote_dir, self.net_file_name))
        self.ssh_run(cmd, check=False)
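

# Illustrative usage only (not part of the original module): a minimal sketch
# of how a deployment driver might exercise this class on a libvirt setup.
# All argument values below are hypothetical placeholders.
#
#     server = DaisyServer('daisy', '10.20.11.2', 'r00tme', '/home/daisy',
#                          './opnfv.bin', 'libvirt', 'os-nosdn-nofeature-ha',
#                          'deploy.yml', 'network.yml')
#     server.connect()
#     server.install_daisy()
#     server.prepare_cluster('deploy.yml', 'network.yml')
#     server.install_virtual_nodes()
#     server.check_os_installation(nodes_num=5)
#     server.check_openstack_installation(nodes_num=5)
#     server.post_deploy()
#     server.close()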