| author | zhifeng.jiang <jiang.zhifeng@zte.com.cn> | 2016-07-03 23:04:40 +0800 |
|---|---|---|
| committer | zhifeng.jiang <jiang.zhifeng@zte.com.cn> | 2016-07-05 20:05:26 +0800 |
| commit | 4c7387f811c01bd74b5ae6e0d0cca4bc98d298e8 (patch) | |
| tree | 3ca8f61a152b196ca65ef526ae9659c0bddbb411 | |
| parent | 1560721b960531785a29d70a572f0ee791234fb0 (diff) | |
Add some UT test cases and fix pep8 errors for cli and env_setup.
JIRA:QTIP-89
Change-Id: I4a46898071001f679f1a032a560d605dffc8eb9f
Signed-off-by: zhifeng.jiang <jiang.zhifeng@zte.com.cn>
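
The refactor below is what makes the new CLI unit tests possible: cli.__init__ now takes an injectable argument list (defaulting to sys.argv[1:]) instead of calling parser.parse_args() on the real command line, so a test can drive it directly. A minimal sketch of that pattern, mirroring tests/cli_test.py (the 'no_such_lab' value is illustrative; the committed tests use 'zte' and 'zte-pod1'):

    import pytest
    from func.cli import cli

    # A bad lab name makes cli print an error and call sys.exit(0),
    # which surfaces to the caller as SystemExit.
    with pytest.raises(SystemExit):
        cli(['-l', 'no_such_lab', '-f', 'compute'])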
| -rw-r--r-- | func/cli.py | 121 |
| -rw-r--r-- | func/env_setup.py | 132 |
| -rw-r--r-- | requirements.txt | 1 |
| -rw-r--r-- | setup.py | 11 |
| -rw-r--r-- | tests/cli_test.py | 20 |
| -rw-r--r-- | tests/env_setup_test.py | 59 |
| -rw-r--r-- | tests/output/hosts | 3 |
| -rw-r--r-- | tests/test_case/bm_ping.yaml | 29 |
| -rw-r--r-- | tests/test_case/bm_with_proxy.yaml | 39 |
| -rw-r--r-- | tests/test_case/bm_without_proxy.yaml | 33 |
| -rw-r--r-- | tests/test_case/vm.yaml | 48 |
| -rw-r--r-- | tests/test_case/vm_error.yaml | 42 |
| -rw-r--r-- | tox.ini | 13 |
13 files changed, 418 insertions, 133 deletions
diff --git a/func/cli.py b/func/cli.py
index 129ab96c..5e8f02cf 100644
--- a/func/cli.py
+++ b/func/cli.py
@@ -15,94 +15,95 @@
 from func.spawn_vm import SpawnVM
 import argparse
-class cli():
-
-    def _getfile(self, filepath):
-
-        with open('test_list/'+filepath,'r') as finput:
-            _benchmarks=finput.readlines()
-            for items in range( len(_benchmarks)):
-                _benchmarks[items]=_benchmarks[items].rstrip()
+class cli:
+
+    @staticmethod
+    def _getfile(file_path):
+        with open('test_list/' + file_path, 'r') as fin_put:
+            _benchmarks = fin_put.readlines()
+            for items in range(len(_benchmarks)):
+                _benchmarks[items] = _benchmarks[items].rstrip()
         return _benchmarks
 
-    def _getsuite(self, filepath):
+    @staticmethod
+    def _getsuite(file_path):
 
-        return filepath
+        return file_path
 
-    def _checkTestList(self, filename):
+    @staticmethod
+    def _check_test_list(filename):
 
-        if os.path.isfile('test_list/'+filename):
+        if os.path.isfile('test_list/' + filename):
             return True
         else:
             return False
 
-    def _checkLabName(self, labname):
+    @staticmethod
+    def _check_lab_name(lab_name):
 
-        if os.path.isdir('test_cases/'+labname):
+        if os.path.isdir('test_cases/' + lab_name):
             return True
         else:
             return False
 
-    def _get_fname(self,file_name):
+    @staticmethod
+    def _get_f_name(file_name):
 
         return file_name[0: file_name.find('.')]
 
-    def __init__(self):
-
-        suite=[]
+    @staticmethod
+    def _parse_args(args):
         parser = argparse.ArgumentParser()
-        parser.add_argument('-l ', '--lab', help='Name of Lab on which being tested, These can' \
-                            'be found in the test_cases/ directory. Please ' \
-                            'ensure that you have edited the respective files '\
-                            'before using them. For testing other than through Jenkins'\
-                            ' The user should list default after -l . all the fields in'\
-                            ' the files are necessary and should be filled')
-        parser.add_argument('-f', '--file', help = 'File in test_list with the list of tests. there are three files' \
-                            '\n compute '\
-                            '\n storage '\
-                            '\n network '\
-                            'They contain all the tests that will be run. They are listed by suite.' \
-                            'Please ensure there are no empty lines')
-        args = parser.parse_args()
-
-        if not self._checkTestList(args.file):
+        parser.add_argument('-l ', '--lab', help='Name of Lab on which being tested, These can'
+                            'be found in the test_cases/ directory. Please '
+                            'ensure that you have edited the respective files '
+                            'before using them. For testing other than through Jenkins'
+                            ' The user should list default after -l . all the fields in'
+                            ' the files are necessary and should be filled')
+        parser.add_argument('-f', '--file', help='File in test_list with the list of tests. there are three files'
+                            '\n compute '
+                            '\n storage '
+                            '\n network '
+                            'They contain all the tests that will be run. They are listed by suite.'
+                            'Please ensure there are no empty lines')
+        return parser.parse_args(args)
+
+    def __init__(self, args=sys.argv[1:]):
+
+        suite = []
+        args = self._parse_args(args)
+
+        if not self._check_test_list(args.file):
             print '\n\n ERROR: Test File Does not exist in test_list/ please enter correct file \n\n'
             sys.exit(0)
 
-        if not self._checkLabName(args.lab):
-            print '\n\n You have specified a lab that is not present in test_cases/ please enter correct'\
-                  ' file. If unsure how to proceed, use -l default.\n\n'
+        if not self._check_lab_name(args.lab):
+            print '\n\n You have specified a lab that is not present in test_cases/ please enter correct \
+                  file. If unsure how to proceed, use -l default.\n\n'
             sys.exit(0)
 
         benchmarks = self._getfile(args.file)
         suite.append(args.file)
-        suite=self._getsuite(suite)
-        for items in range (len(benchmarks)):
-            if (suite and benchmarks):
-
-                roles=''
-                vm_info=''
-                benchmark_details=''
-                pip=''
-                obj=''
+        suite = self._getsuite(suite)
+        for items in range(len(benchmarks)):
+            if suite and benchmarks:
                 obj = Env_setup()
-                if os.path.isfile('./test_cases/'+args.lab.lower()+'/'+suite[0]+'/' +benchmarks[items]):
-                    [benchmark, roles, vm_info, benchmark_details, pip, proxy_info] = obj.parse('./test_cases/'
-                        +args.lab.lower()+'/'+suite[0]+'/'+benchmarks[items])
+                if os.path.isfile('./test_cases/' + args.lab.lower() + '/' + suite[0] + '/' + benchmarks[items]):
+                    [benchmark, vm_info, benchmark_details, proxy_info] = \
+                        obj.parse('./test_cases/' + args.lab.lower() + '/' + suite[0] + '/' + benchmarks[items])
                     if len(vm_info) != 0:
-                        vmObj =''
-                        vmObj = SpawnVM(vm_info)
-                    if obj.callpingtest():
-                        obj.callsshtest()
-                        obj.updateAnsible()
-                        dvr = Driver()
-                        dvr.drive_bench(benchmark,
-                                        obj.roles_dict.items(),
-                                        self._get_fname(benchmarks[items]),
-                                        benchmark_details,
-                                        obj.ip_pw_dict.items(),
-                                        proxy_info)
+                        SpawnVM(vm_info)
+                    obj.call_ping_test()
+                    obj.call_ssh_test()
+                    obj.update_ansible()
+                    dvr = Driver()
+                    dvr.drive_bench(benchmark,
+                                    obj.roles_dict.items(),
+                                    self._get_f_name(benchmarks[items]),
+                                    benchmark_details,
+                                    obj.ip_pw_dict.items(),
+                                    proxy_info)
                 else:
                     print (benchmarks[items], ' is not a Template in the Directory - \
                            Enter a Valid file name. or use qtip.py -h for list')
diff --git a/func/env_setup.py b/func/env_setup.py
index c1e2a003..9c0dadb3 100644
--- a/func/env_setup.py
+++ b/func/env_setup.py
@@ -13,19 +13,22 @@
 from collections import defaultdict
 import yaml
 import time
 import paramiko
-class Env_setup():
+import socket
+
+
+class Env_setup:
     roles_ip_list = []  # ROLE and its corresponding IP address list
     ip_pw_list = []  # IP and password, this will be used to ssh
     roles_dict = defaultdict(list)
     ip_pw_dict = defaultdict(list)
     ip_pip_list = []
     vm_parameters = defaultdict(list)
-    benchmark_details= defaultdict()
+    benchmark_details = defaultdict()
     benchmark = ''
 
     def __init__(self):
         print '\nParsing class initiated\n'
-        self.roles_ip_list[:]=[]
+        self.roles_ip_list[:] = []
         self.ip_pw_list[:] = []
         self.roles_dict.clear()
         self.ip_pw_dict.clear()
@@ -35,41 +38,44 @@ class Env_setup():
         self.benchmark_details.clear()
         self.benchmark = ''
 
-    def writeTofile(self, role):
-        fname2 = open('./data/hosts', 'w')
+    @staticmethod
+    def write_to_file(role):
+        f_name_2 = open('./data/hosts', 'w')
         print role.items()
         for k in role:
-            fname2.write('[' + k + ']\n')
+            f_name_2.write('[' + k + ']\n')
             num = len(role[k])
             for x in range(num):
-                fname2.write(role[k][x] + '\n')
-        fname2.close
+                f_name_2.write(role[k][x] + '\n')
+        f_name_2.close()
 
-    def sshtest(self, lister):
-        print 'list: ',lister
+    @staticmethod
+    def ssh_test(lister):
+        print 'list: ', lister
         for k, v in lister:
-            ipvar = k
-            pwvar = v
+            ip_var = k
             print '\nBeginning SSH Test!\n'
             if v != '':
-                print ('\nSSH->>>>> {0} {1}\n'.format(k,v))
+                print ('\nSSH->>>>> {0} {1}\n'.format(k, v))
                 time.sleep(2)
                 ssh_c = 'ssh-keyscan {0} >> ~/.ssh/known_hosts'.format(k)
                 os.system(ssh_c)
-                ssh_cmd = './data/qtip_creds.sh {0}'.format(ipvar)
+                ssh_cmd = './data/qtip_creds.sh {0}'.format(ip_var)
                 print ssh_cmd
-                res = os.system(ssh_cmd)
+                os.system(ssh_cmd)
                 for infinity in range(100):
-                    try :
+                    try:
                         ssh = paramiko.SSHClient()
                         ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-                        ssh.connect(k , key_filename= './data/QtipKey')
+                        ssh.connect(k, key_filename='./data/QtipKey')
                         stdin, stdout, stderr = ssh.exec_command('ls')
                         print('SSH successful')
+                        for line in stdout:
+                            print '... ' + line.strip('\n')
                         break
-                    except:
-                        print 'Retrying aSSH'
+                    except socket.error:
+                        print 'Retrying aSSH %s' % infinity
                         time.sleep(1)
             if v == '':
                 print ('SSH->>>>>', k)
@@ -79,99 +85,91 @@
                 os.system(ssh_c)
 
                 for infinity in range(10):
-                    try :
+                    try:
                         ssh = paramiko.SSHClient()
                         ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-                        ssh.connect(k, key_filename= './data/QtipKey')
+                        ssh.connect(k, key_filename='./data/QtipKey')
                         stdin, stdout, stderr = ssh.exec_command('ls')
+                        print('SSH successful')
+                        for line in stdout:
+                            print '... ' + line.strip('\n')
                         break
-                    except:
-                        print 'Retrying SSH'
+                    except socket.error:
+                        print 'Retrying SSH %s' % infinity
+
+    @staticmethod
+    def ping_test(lister):
 
-    def pingtest(self, lister):
-        pingFlag = 0
-        result = True
         for k, v in lister.iteritems():
             time.sleep(10)
             for val in v:
                 ipvar = val
                 ping_cmd = 'ping -D -c1 {0}'.format(ipvar)
-                while (os.system(ping_cmd) != 0) &(pingFlag <=20):
+                while os.system(ping_cmd) != 0:
                     print '\nWaiting for machine\n'
                     time.sleep(10)
-                    pingFlag = pingFlag+1
-                if pingFlag <= 2:
-                    print ('\n\n %s is UP \n\n ' % ipvar)
-                else:
-                    result = False
-        return result
-
+                print ('\n\n %s is UP \n\n ' % ipvar)
 
-    def GetHostMachineinfo(self, Hosttag):
+    def get_host_machine_info(self, host_tag):
 
-        num = len(Hosttag)
+        num = len(host_tag)
         offset = len(self.roles_ip_list)
         for x in range(num):
             hostlabel = 'machine_' + str(x + 1)
             self.roles_ip_list.insert(
-                offset, (Hosttag[hostlabel]['role'], Hosttag[hostlabel]['ip']))
+                offset, (host_tag[hostlabel]['role'], host_tag[hostlabel]['ip']))
             self.ip_pw_list.insert(
-                offset, (Hosttag[hostlabel]['ip'], Hosttag[hostlabel]['pw']))
+                offset, (host_tag[hostlabel]['ip'], host_tag[hostlabel]['pw']))
 
-    def GetVirtualMachineinfo(self, Virtualtag):
+    def get_virtual_machine_info(self, virtual_tag):
 
-        num = len(Virtualtag)
+        num = len(virtual_tag)
         for x in range(num):
-            hostlabel = 'virtualmachine_' + str(x + 1)
-            for k, v in Virtualtag[hostlabel].iteritems():
+            host_label = 'virtualmachine_' + str(x + 1)
+            for k, v in virtual_tag[host_label].iteritems():
                 self.vm_parameters[k].append(v)
 
-    def GetBenchmarkDetails(self, detail_dic):
+    def get_bench_mark_details(self, detail_dic):
         print detail_dic
-        for k,v in detail_dic.items():
-            self.benchmark_details[k]= v
+        for k, v in detail_dic.items():
+            self.benchmark_details[k] = v
 
-    def parse(self, configfilepath):
+    def parse(self, config_file_path):
         try:
-            fname = open(configfilepath, 'r+')
-            doc = yaml.load(fname)
-#           valid_file = validate_yaml.Validate_Yaml(doc)
-            fname.close()
-            for scenario in doc:
+            f_name = open(config_file_path, 'r+')
+            doc = yaml.load(f_name)
+            f_name.close()
+            if doc['Scenario']['benchmark']:
                 self.benchmark = doc['Scenario']['benchmark']
             if doc['Context']['Virtual_Machines']:
-                self.GetVirtualMachineinfo(doc['Context']['Virtual_Machines'])
+                self.get_virtual_machine_info(doc['Context']['Virtual_Machines'])
             if doc['Context']['Host_Machines']:
-                self.GetHostMachineinfo(doc['Context']['Host_Machines'])
-            if doc.get('Scenario',{}).get('benchmark_details',{}):
-                self.GetBenchmarkDetails(doc.get('Scenario',{}).get('benchmark_details',{}))
-            if 'Proxy_Environment' in doc['Context'].keys():
+                self.get_host_machine_info(doc['Context']['Host_Machines'])
+            if doc.get('Scenario', {}).get('benchmark_details', {}):
+                self.get_bench_mark_details(doc.get('Scenario', {}).get('benchmark_details', {}))
+            if 'Proxy_Environment' in doc['Context'].keys():
                 self.proxy_info['http_proxy'] = doc['Context']['Proxy_Environment']['http_proxy']
                 self.proxy_info['https_proxy'] = doc['Context']['Proxy_Environment']['https_proxy']
-                self.proxy_info['no_proxy'] = doc['Context']['Proxy_Environment']['no_proxy']
+                self.proxy_info['no_proxy'] = doc['Context']['Proxy_Environment']['no_proxy']
             for k, v in self.roles_ip_list:
                 self.roles_dict[k].append(v)
             for k, v in self.ip_pw_list:
                 self.ip_pw_dict[k].append(v)
             return (
                 self.benchmark,
-                self.roles_dict.items(),
                 self.vm_parameters,
                 self.benchmark_details.items(),
-                self.ip_pw_dict.items(),
                 self.proxy_info)
-
         except KeyboardInterrupt:
-            fname.close()
             print 'ConfigFile Closed: exiting!'
             sys.exit(0)
 
-    def updateAnsible(self):
-        self.writeTofile(self.roles_dict)
+    def update_ansible(self):
+        self.write_to_file(self.roles_dict)
 
-    def callpingtest(self):
-        self.pingtest(self.roles_dict)
+    def call_ping_test(self):
+        self.ping_test(self.roles_dict)
 
-    def callsshtest(self):
-        self.sshtest(self.ip_pw_list)
+    def call_ssh_test(self):
+        self.ssh_test(self.ip_pw_list)
diff --git a/requirements.txt b/requirements.txt
index cc048389..358b7d0d 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,5 @@
 pyyaml==3.10
+paramiko==1.16.0
 python-neutronclient==2.6.0
 python-novaclient==2.28.1
 python-glanceclient==1.1.0
diff --git a/setup.py b/setup.py
new file mode 100644
index 00000000..cb19e879
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+from distutils.core import setup
+
+setup(name='qtip',
+      py_modules=['qtip'],
+      version='1.0',
+      author='opnfv',
+      packages=['func','data.ref_results','data.report'],
+      )
+
diff --git a/tests/cli_test.py b/tests/cli_test.py
new file mode 100644
index 00000000..f12e8fed
--- /dev/null
+++ b/tests/cli_test.py
@@ -0,0 +1,20 @@
+import pytest
+from func.cli import cli
+
+
+class TestClass:
+    @pytest.mark.parametrize("test_input, expected", [
+        (['-l',
+          'zte',
+          '-f',
+          'compute'], "You have specified a lab that is not present in test_cases"),
+        (['-l',
+          'zte-pod1',
+          '-f',
+          'test'], "Test File Does not exist in test_list")
+    ])
+    def test_cli_error(self, capfd, test_input, expected):
+        with pytest.raises(SystemExit):
+            cli(test_input)
+        resout, reserr = capfd.readouterr()
+        assert expected in resout
diff --git a/tests/env_setup_test.py b/tests/env_setup_test.py
new file mode 100644
index 00000000..9112ff94
--- /dev/null
+++ b/tests/env_setup_test.py
@@ -0,0 +1,59 @@
+import pytest
+import filecmp
+from func.env_setup import Env_setup
+
+
+class TestClass:
+
+    @pytest.mark.parametrize("test_input, expected", [
+        ("tests/test_case/bm_with_proxy.yaml", ["dhrystone",
+                                                {},
+                                                [],
+                                                {'http_proxy': 'http://10.20.0.1:8118',
+                                                 'https_proxy': 'http://10.20.0.1:8118',
+                                                 'no_proxy': 'localhost,127.0.0.1,10.20.*,192.168.*'}]),
+        ("tests/test_case/bm_without_proxy.yaml", ["dhrystone",
+                                                   {},
+                                                   [],
+                                                   {}]),
+        ("tests/test_case/vm.yaml", ["iperf",
+                                     {'availability_zone': ['compute1', 'compute1'],
+                                      'OS_image': ['QTIP_CentOS', 'QTIP_CentOS'],
+                                      'public_network': ['admin-floating_net', 'admin-floating_net'],
+                                      'flavor': ['m1.large', 'm1.large'],
+                                      'role': ['1-server', '2-host']},
+                                     [('duration', 20), ('protocol', 'tcp'), ('bandwidthGbps', 0)],
+                                     {'http_proxy': 'http://10.20.0.1:8118',
+                                      'https_proxy': 'http://10.20.0.1:8118',
+                                      'no_proxy': 'localhost,127.0.0.1,10.20.*,192.168.*'}])
+    ])
+    def test_parse_success(self, test_input, expected):
+        print (test_input)
+        print (expected)
+        test_class = Env_setup()
+        benchmark, vm_para, details, proxy = \
+            test_class.parse(test_input)
+        assert benchmark == expected[0]
+        assert vm_para == expected[1]
+        assert sorted(details) == sorted(expected[2])
+        assert proxy == expected[3]
+
+    def test_parse_vm_error(self):
+        test_class = Env_setup()
+        with pytest.raises(KeyError) as excinfo:
+            test_class.parse("tests/test_case/vm_error.yaml")
+        assert "benchmark" in str(excinfo.value)
+
+    def test_update_ansible(self):
+        test_class = Env_setup()
+        test_class.parse("tests/test_case/bm_without_proxy.yaml")
+        test_class.update_ansible()
+        result = filecmp.cmp('tests/output/hosts', 'data/hosts')
+        assert result
+
+    def test_ping(self, capfd):
+        test_class = Env_setup()
+        test_class.parse("tests/test_case/bm_ping.yaml")
+        test_class.call_ping_test()
+        resout, reserr = capfd.readouterr()
+        assert '127.0.0.1 is UP' in resout
diff --git a/tests/output/hosts b/tests/output/hosts
new file mode 100644
index 00000000..9b47df0e
--- /dev/null
+++ b/tests/output/hosts
@@ -0,0 +1,3 @@
+[host]
+10.20.0.29
+10.20.0.28
diff --git a/tests/test_case/bm_ping.yaml b/tests/test_case/bm_ping.yaml
new file mode 100644
index 00000000..41d696e2
--- /dev/null
+++ b/tests/test_case/bm_ping.yaml
@@ -0,0 +1,29 @@
+
+Scenario:
+  benchmark: dhrystone
+  host: machine_1
+  server:
+
+Context:
+  Host_Machines:
+    machine_1:
+      ip: 127.0.0.1
+      pw:
+      role: host
+
+  Virtual_Machines:
+
+
+Test_Description:
+  Test_category: "Compute"
+  Benchmark: "dhrystone"
+  Overview: >
+    ''' This test will run the dhrystone benchmark in parallel on machine_1 and machine_2.\n
+    if you wish to add a virtual machine add the following information under the Virtual_Machine tag
+
+    virtualmachine_1:
+      availability_zone:
+      public_network:
+      OS_image:
+      flavor:
+      role: '''
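bm_ping.yaml points Env_setup at a single host entry, 127.0.0.1, which is what lets test_ping assert '127.0.0.1 is UP' without any lab hardware. A rough sketch of exercising it by hand, following what env_setup_test.py does:

    from func.env_setup import Env_setup

    setup = Env_setup()
    # parse() now returns a 4-tuple: benchmark name, VM parameters,
    # benchmark details and proxy info.
    benchmark, vm_info, details, proxy = setup.parse('tests/test_case/bm_ping.yaml')
    print benchmark          # dhrystone
    setup.call_ping_test()   # loops 'ping -D -c1 127.0.0.1' until it answers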
diff --git a/tests/test_case/bm_with_proxy.yaml b/tests/test_case/bm_with_proxy.yaml
new file mode 100644
index 00000000..1d73300b
--- /dev/null
+++ b/tests/test_case/bm_with_proxy.yaml
@@ -0,0 +1,39 @@
+
+Scenario:
+  benchmark: dhrystone
+  host: machine_1, machine_2
+  server:
+
+Context:
+  Host_Machines:
+    machine_1:
+      ip: 10.20.0.28
+      pw:
+      role: host
+    machine_2:
+      ip: 10.20.0.29
+      pw:
+      role: host
+
+  Virtual_Machines:
+
+  Proxy_Environment:
+    http_proxy: http://10.20.0.1:8118
+    https_proxy: http://10.20.0.1:8118
+    no_proxy: localhost,127.0.0.1,10.20.*,192.168.*
+
+
+
+Test_Description:
+  Test_category: "Compute"
+  Benchmark: "dhrystone"
+  Overview: >
+    ''' This test will run the dhrystone benchmark in parallel on machine_1 and machine_2.\n
+    if you wish to add a virtual machine add the following information under the Virtual_Machine tag
+
+    virtualmachine_1:
+      availability_zone:
+      public_network:
+      OS_image:
+      flavor:
+      role: '''
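The Proxy_Environment block above is copied verbatim into Env_setup's proxy_info dict by parse(); the expectations hard-coded in env_setup_test.py spell out the mapping. A minimal sketch:

    from func.env_setup import Env_setup

    _, _, _, proxy = Env_setup().parse('tests/test_case/bm_with_proxy.yaml')
    # Values as asserted by the committed test:
    assert proxy['http_proxy'] == 'http://10.20.0.1:8118'
    assert proxy['https_proxy'] == 'http://10.20.0.1:8118'
    assert proxy['no_proxy'] == 'localhost,127.0.0.1,10.20.*,192.168.*'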
diff --git a/tests/test_case/bm_without_proxy.yaml b/tests/test_case/bm_without_proxy.yaml
new file mode 100644
index 00000000..a9ae3b71
--- /dev/null
+++ b/tests/test_case/bm_without_proxy.yaml
@@ -0,0 +1,33 @@
+
+Scenario:
+  benchmark: dhrystone
+  host: machine_1, machine_2
+  server:
+
+Context:
+  Host_Machines:
+    machine_1:
+      ip: 10.20.0.28
+      pw:
+      role: host
+    machine_2:
+      ip: 10.20.0.29
+      pw:
+      role: host
+
+  Virtual_Machines:
+
+
+Test_Description:
+  Test_category: "Compute"
+  Benchmark: "dhrystone"
+  Overview: >
+    ''' This test will run the dhrystone benchmark in parallel on machine_1 and machine_2.\n
+    if you wish to add a virtual machine add the following information under the Virtual_Machine tag
+
+    virtualmachine_1:
+      availability_zone:
+      public_network:
+      OS_image:
+      flavor:
+      role: '''
diff --git a/tests/test_case/vm.yaml b/tests/test_case/vm.yaml
new file mode 100644
index 00000000..4c8453ca
--- /dev/null
+++ b/tests/test_case/vm.yaml
@@ -0,0 +1,48 @@
+Scenario:
+  benchmark: iperf
+  topology: Client and Server on ONE compute
+  server : virtualmachine_1
+  client: virtualmachine_2
+  description: 'Leave the bandwidth as 0 to throttle maximum traffic'
+  benchmark_details:
+    duration: 20
+    protocol: tcp
+    bandwidthGbps: 0
+
+Context:
+  Host_Machines:
+
+  Virtual_Machines:
+    virtualmachine_1:
+      availability_zone: compute1
+      OS_image: QTIP_CentOS
+      public_network: 'admin-floating_net'
+      role: 1-server
+      flavor: m1.large
+
+    virtualmachine_2:
+      availability_zone: compute1
+      OS_image: QTIP_CentOS
+      public_network: 'admin-floating_net'
+      role: 2-host
+      flavor: m1.large
+
+  Proxy_Environment:
+    http_proxy: http://10.20.0.1:8118
+    https_proxy: http://10.20.0.1:8118
+    no_proxy: localhost,127.0.0.1,10.20.*,192.168.*
+
+Test_Description:
+  Test_category: "network"
+  Benchmark: "iperf"
+  Overview: >
+    '''This test will run the IPERF benchmark on virutalmachine_1 and virtualmachine_2. On the\n
+    same compute node
+    if you wish to add a host machine add the following information under the Host_Machine tag
+
+    machine_1:
+      ip:
+      pw:
+      role:
+    '''
+
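For vm.yaml, parse() folds each virtualmachine_* block into the vm_parameters defaultdict (one list per key) and flattens benchmark_details into (key, value) pairs, which is why test_parse_success compares sorted(details) rather than the raw list. Roughly:

    from func.env_setup import Env_setup

    benchmark, vm_info, details, _ = Env_setup().parse('tests/test_case/vm.yaml')
    assert benchmark == 'iperf'
    assert vm_info['role'] == ['1-server', '2-host']   # one entry per VM block
    assert sorted(details) == [('bandwidthGbps', 0), ('duration', 20), ('protocol', 'tcp')]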
diff --git a/tests/test_case/vm_error.yaml b/tests/test_case/vm_error.yaml
new file mode 100644
index 00000000..f13d3a00
--- /dev/null
+++ b/tests/test_case/vm_error.yaml
@@ -0,0 +1,42 @@
+Scenario:
+  topology: Client and Server on ONE compute
+  server : virtualmachine_1
+  client: virtualmachine_2
+  description: 'Leave the bandwidth as 0 to throttle maximum traffic'
+  benchmark_details:
+    duration: 20
+    protocol: tcp
+    bandwidthGbps: 0
+
+Context:
+  Host_Machines:
+
+  Virtual_Machines:
+    virtualmachine_1:
+      availability_zone: compute1
+      OS_image: QTIP_CentOS
+      public_network: 'admin-floating_net'
+      role: 1-server
+      flavor: m1.large
+
+    virtualmachine_2:
+      availability_zone: compute1
+      OS_image: QTIP_CentOS
+      public_network: 'admin-floating_net'
+      role: 2-host
+      flavor: m1.large
+
+Test_Description:
+  Test_category: "network"
+  Benchmark: "iperf"
+  Overview: >
+    '''This test will run the IPERF benchmark on virutalmachine_1 and virtualmachine_2. On the\n
+    same compute node
+    if you wish to add a host machine add the following information under the Host_Machine tag
+
+    machine_1:
+      ip:
+      pw:
+      role:
+    '''
+
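vm_error.yaml is identical to vm.yaml except that the Scenario block omits the benchmark key, so parse() fails on doc['Scenario']['benchmark'] with a KeyError; test_parse_vm_error pins that behaviour down:

    import pytest
    from func.env_setup import Env_setup

    with pytest.raises(KeyError) as excinfo:
        Env_setup().parse('tests/test_case/vm_error.yaml')
    assert 'benchmark' in str(excinfo.value)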
diff --git a/tox.ini b/tox.ini
--- a/tox.ini
+++ b/tox.ini
@@ -8,24 +8,25 @@ envlist = py27
 skipsdist = True
 
 [testenv]
-changedir=tests
-deps =
+usedevelop = True
+install_command = pip install -U {opts} {packages}
+deps =
   -r{toxinidir}/requirements.txt
   -r{toxinidir}/test-requirements.txt
 commands=
   py.test \
-    --basetemp={envtmpdir} \  # py.test tempdir setting
-    {posargs}                 # substitute with tox' positional arguments
+    --basetemp={envtmpdir} \
+    {posargs} tests
 
 [testenv:pep8]
 deps = flake8
-commands = flake8 {toxinidir}
+commands = flake8 {toxinidir}
 
 [flake8]
 # H803 skipped on purpose per list discussion.
 # E123, E125 skipped as they are invalid PEP-8.
 show-source = True
-ignore = E123,E125,H803
+ignore = E123,E125,H803,E501
 builtins = _
 exclude=.venv,.git,.tox,dist,doc,build
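With changedir=tests gone and usedevelop = True, tox now installs qtip into the virtualenv via the new setup.py and runs py.test against tests/ from the project root. Something close to the [testenv] command can be reproduced from Python directly; the basetemp path here is illustrative:

    import pytest

    # Equivalent in spirit to: py.test --basetemp={envtmpdir} tests
    raise SystemExit(pytest.main(['--basetemp=/tmp/qtip-envtmp', 'tests']))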