Diffstat (limited to 'verigraph/service/src/tests/j-verigraph-generator')
-rw-r--r--  verigraph/service/src/tests/j-verigraph-generator/README.rst                 54
-rw-r--r--  verigraph/service/src/tests/j-verigraph-generator/__init__.py                  8
-rw-r--r--  verigraph/service/src/tests/j-verigraph-generator/batch_generator.py         186
-rw-r--r--  verigraph/service/src/tests/j-verigraph-generator/code_generator.py           59
-rw-r--r--  verigraph/service/src/tests/j-verigraph-generator/config.py                   88
-rw-r--r--  verigraph/service/src/tests/j-verigraph-generator/json_generator.py          261
-rw-r--r--  verigraph/service/src/tests/j-verigraph-generator/routing_generator.py        80
-rw-r--r--  verigraph/service/src/tests/j-verigraph-generator/test_class_generator.py    399
-rw-r--r--  verigraph/service/src/tests/j-verigraph-generator/test_generator.py          160
-rw-r--r--  verigraph/service/src/tests/j-verigraph-generator/utility.py                 257
10 files changed, 1552 insertions, 0 deletions
diff --git a/verigraph/service/src/tests/j-verigraph-generator/README.rst b/verigraph/service/src/tests/j-verigraph-generator/README.rst
new file mode 100644
index 0000000..c796af7
--- /dev/null
+++ b/verigraph/service/src/tests/j-verigraph-generator/README.rst
@@ -0,0 +1,54 @@
+.. This work is licensed under a Creative Commons Attribution 4.0 International License.
+.. http://creativecommons.org/licenses/by/4.0
+
+CODE\_GENERATOR Java serializer and formatter
+
+UTILITY Contains utility methods used by other modules
+
+JSON\_GENERATOR Interactive module that generates the configuration files
+(default names are "chains.json" and "config.json"). "chains.json"
+describes all the chains of nodes belonging to a given scenario.
+
+TEST\_CLASS\_GENERATOR Generates one or more test scenarios from the two
+configuration files above (default names are "chains.json" and
+"config.json"). All test scenarios have to be placed in the examples
+folder (i.e. under "j-verigraph/service/src/tests/examples"). Here is
+the script help:
+
+test\_class\_generator.py -c <chain_file> -f <conf_file> -o <output_name>
+
+Assuming the module is executed from the project root directory (i.e.
+"j-verigraph"), a sample command is the following:
+
+service/src/tests/j-verigraph-generator/test\_class\_generator.py -c
+"service/src/tests/j-verigraph-generator/examples/budapest/chains.json"
+-f
+"service/src/tests/j-verigraph-generator/examples/budapest/config.json"
+-o "service/src/tests/examples/Scenario"
+
+Keep in mind that in the previous command "Scenario" is a prefix which
+will be followed by an underscore and an incremental number starting
+from 1; this number identifies the n-th scenario described by the
+previously mentioned "chains.json" file (this file can indeed contain
+multiple chains).
+
+TEST\_GENERATOR Generates a file which performs the verification test
+through Z3 (a theorem prover from Microsoft Research) for a scenario
+generated with the command above. All the test modules have to be
+placed under the "tests" directory (i.e. under
+"j-verigraph/service/src/tests"). Here is the module help:
+
+test\_generator.py -i <inputfile> -o <outputfile> -s <source> -d <destination>
+
+Assuming the module is executed from the project root directory (i.e.
+"j-verigraph"), a sample command for the previously generated scenario
+is the following:
+
+service/src/tests/j-verigraph-generator/test\_generator.py -i
+service/src/tests/examples/Scenario\_1.java -o
+service/src/tests/Test.java -s user1 -d webserver
+
+The aforementioned "Test.java" file can be compiled and executed
+normally. Its output will be either "SAT" or "UNSAT". To collect
+statistics, the test is repeated 10 times and the average execution
+time in seconds is printed to the console.
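
As a point of reference, here is a minimal sketch of the two configuration files described above. It follows the structure validated by check_chains_integrity and check_config_integrity in utility.py and uses functional types taken from config.py; node names, addresses and configuration entries are illustrative only:

    import json

    # chains.json: a list of chains, each with an id, a flowspace and a list
    # of nodes carrying a name, a functional_type and an address
    chains = {"chains": [{
        "id": 1,
        "flowspace": "tcp=80",
        "nodes": [
            {"name": "client_1", "functional_type": "webclient", "address": "ip_client_1"},
            {"name": "firewall_1", "functional_type": "firewall", "address": "ip_firewall_1"},
            {"name": "server_1", "functional_type": "webserver", "address": "ip_server_1"},
        ],
    }]}

    # config.json: one entry per node, with an id, a description and a
    # configuration holding strings and/or maps (e.g. a firewall ACL entry
    # is an address-to-address map)
    configuration = {"nodes": [
        {"id": "client_1", "description": "web client", "configuration": []},
        {"id": "firewall_1", "description": "firewall",
         "configuration": [{"ip_server_1": "ip_client_1"}]},
        {"id": "server_1", "description": "web server", "configuration": []},
    ]}

    with open("chains.json", "w") as f:
        json.dump(chains, f)
    with open("config.json", "w") as f:
        json.dump(configuration, f)
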
diff --git a/verigraph/service/src/tests/j-verigraph-generator/__init__.py b/verigraph/service/src/tests/j-verigraph-generator/__init__.py
new file mode 100644
index 0000000..d8a620f
--- /dev/null
+++ b/verigraph/service/src/tests/j-verigraph-generator/__init__.py
@@ -0,0 +1,8 @@
+##############################################################################
+# Copyright (c) 2017 Politecnico di Torino and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
diff --git a/verigraph/service/src/tests/j-verigraph-generator/batch_generator.py b/verigraph/service/src/tests/j-verigraph-generator/batch_generator.py
new file mode 100644
index 0000000..517bdf7
--- /dev/null
+++ b/verigraph/service/src/tests/j-verigraph-generator/batch_generator.py
@@ -0,0 +1,186 @@
+#!/usr/bin/python
+
+##############################################################################
+# Copyright (c) 2017 Politecnico di Torino and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import json
+from pprint import pprint
+import sys, getopt
+import commands
+import os
+from config import *
+from utility import *
+import subprocess
+
+#global variables
+chains = {}
+chains["chains"] = []
+routing = {}
+routing["routing_table"] = []
+configuration = {}
+configuration["nodes"] = []
+#end of global variables
+
+
+#generates json file describing the chains (default chains.json)
+def generate_chains(curr_dir, multiplier, flowspace):
+ filename = "chains.json"
+
+ multiplier = int(multiplier)
+ number_of_chains = multiplier*multiplier
+ for i in range(0, int(number_of_chains)):
+ chains["chains"].insert(i, {})
+ chains["chains"][i]["id"] = i+1
+ chains["chains"][i]["flowspace"] = flowspace
+ chain_nodes = multiplier
+ chains["chains"][i]["nodes"] = []
+ #set attributes for nth client
+ chains["chains"][i]["nodes"].insert(0, {})
+ node_name = "client_" + str((i%multiplier)+1)
+ chains["chains"][i]["nodes"][0]["name"] = node_name
+ node_type = "web_client"
+ chains["chains"][i]["nodes"][0]["functional_type"] = node_type
+ node_address = "ip_web_client_" + str((i%multiplier)+1)
+ chains["chains"][i]["nodes"][0]["address"] = node_address
+ #set attributes for chain of firewalls
+ for j in range(1, chain_nodes+1):
+ chains["chains"][i]["nodes"].insert(j, {})
+ node_name = "firewall_" + str(j)
+ chains["chains"][i]["nodes"][j]["name"] = node_name
+ node_type = "firewall"
+ chains["chains"][i]["nodes"][j]["functional_type"] = node_type
+ node_address = "ip_firewall_" + str(j)
+ chains["chains"][i]["nodes"][j]["address"] = node_address
+ #set attributes for nth web server
+ chains["chains"][i]["nodes"].insert(chain_nodes+1, {})
+ node_name = "server_" + str((i%multiplier)+1)
+ chains["chains"][i]["nodes"][chain_nodes+1]["name"] = node_name
+ node_type = "web_server"
+ chains["chains"][i]["nodes"][chain_nodes+1]["functional_type"] = node_type
+ node_address = "ip_web_server_" + str((i%multiplier)+1)
+ chains["chains"][i]["nodes"][chain_nodes+1]["address"] = node_address
+ #pprint(chains)
+ with smart_open(curr_dir + "/" + filename) as f:
+ print >>f, json.dumps(chains)
+ return filename
+
+#generates json file describing the node configurations (default config.json)
+def generate_config(curr_dir):
+ chains_file = "chains.json"
+
+ chains = parse_chains(curr_dir + "/" + chains_file)
+
+ print "Chains read from file:"
+ pprint(chains)
+ chains_id = []
+
+ for chain_id, chain in chains.items():
+ chains_id.append(chain_id)
+ print "Chain #" + str(chain_id) + " has " + str(len(chain)) + " elements"
+ for node_name in chain.keys():
+ print node_name + " ",
+ print ""
+
+
+ filename = "config.json"
+
+ config_names = []
+
+ i = -1
+
+ for number_of_chain in chains_id:
+ number_of_nodes = len(chains[number_of_chain].keys())
+
+ # for i in range(0, number_of_nodes):
+
+ for node_name, node_map in chains[number_of_chain].items():
+ if node_name in config_names:
+ continue
+ config_names.append(node_name)
+ i += 1
+ configuration["nodes"].insert(i, {})
+ # node_id = raw_input("Node #" + str(i+1) + " id? -->")
+ # configuration["nodes"][i]["id"] = node_id
+ configuration["nodes"][i]["id"] = node_name
+
+ name_split = node_name.split("_")
+
+ #init = raw_input("Any parameter for inizialization of node " + node_name + "? (N/Y)-->")
+ init_list = devices_initialization[node_map["functional_type"]]
+ if init_list != []:
+ for init_item in init_list:
+ init_param = "ip_" + init_item + "_" + name_split[1]
+ configuration["nodes"][i][init_item] = init_param
+
+ node_description = name_split[0] + " denies any traffic from web_client #" + name_split[1] + " to web_server #" + name_split[1]
+ configuration["nodes"][i]["description"] = node_description
+ while(True):
+ #node_configuration_type = raw_input("Node " + node_id +"'s configuration type (list, maps)? (L/M) -->")
+ #n = search_node_in_chains(node_id)
+
+ node_configuration_type = devices_configuration_methods[node_map["functional_type"]]
+ if node_configuration_type == "list":
+ #list
+ configuration["nodes"][i]["configuration"] = []
+
+ break
+ if node_configuration_type == "maps":
+ #maps
+ configuration["nodes"][i]["configuration"] = []
+ n_entries = 1
+
+ for m in range(0, n_entries):
+ configuration["nodes"][i]["configuration"].insert(m, {})
+
+ map_elements = 1
+
+ for n in range(0, map_elements):
+ key = "ip_web_server_" + name_split[1]
+ value = "ip_web_client_" + name_split[1]
+ configuration["nodes"][i]["configuration"][m][key] = value
+ break
+ else:
+ print "Invalid config, please edit the config file"
+ #pprint(configuration)
+ with smart_open(curr_dir + "/" + filename) as f:
+ print >>f, json.dumps(configuration)
+ return filename
+
+def main(argv):
+ #exit if any command line argument is missing
+ if len(argv) < 4:
+ print 'batch_generator.py -m <multiplier> -o <output_directory>'
+ sys.exit(2)
+ #initialize json file names
+ chains_file = ''
+ configuration_file = ''
+ output_dir = ''
+ multiplier = ''
+ #parse command line arguments and exit if there is an error
+ try:
+ opts, args = getopt.getopt(argv,"hm:o:",["multiplier=","help","output="])
+ except getopt.GetoptError as err:
+ print str(err)
+ print 'batch_generator.py -m <multiplier> -o <output_directory>'
+ sys.exit(2)
+ for opt, arg in opts:
+ if opt in ("-h", "--help"):
+ print 'batch_generator.py -m <multiplier> -o <output_directory>'
+ sys.exit()
+ elif opt in ("-o", "--output"):
+ output_dir = arg
+ elif opt in ("-m", "--multiplier"):
+ multiplier = arg
+
+ generate_chains(output_dir, multiplier, "tcp=80")
+ generate_config(output_dir)
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
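
For an integer multiplier N, generate_chains above builds N*N chains, each made of one web client, a row of N firewalls and one web server, and generate_config then derives one configuration entry per node from the freshly written chains.json; both files are written into the output directory, which must already exist. Run from the project root, a sample invocation (the output directory is illustrative) is the following:

    service/src/tests/j-verigraph-generator/batch_generator.py -m 2 -o /tmp/batch_example

With -m 2 this describes 4 chains of 2 firewalls each.
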
diff --git a/verigraph/service/src/tests/j-verigraph-generator/code_generator.py b/verigraph/service/src/tests/j-verigraph-generator/code_generator.py
new file mode 100644
index 0000000..5b9834f
--- /dev/null
+++ b/verigraph/service/src/tests/j-verigraph-generator/code_generator.py
@@ -0,0 +1,59 @@
+#!/usr/bin/python
+
+##############################################################################
+# Copyright (c) 2017 Politecnico di Torino and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import sys, string
+
+class CodeGeneratorBackend:
+
+ def begin(self, tab="\t"):
+ self.code = []
+ self.tab = tab
+ self.level = 0
+
+ def end(self):
+ return string.join(self.code, "")
+
+ def write(self, string):
+ self.code.append(self.tab * self.level + string)
+
+ def writeln(self, string):
+ self.code.append(self.tab * self.level + string + "\n")
+
+ def append(self, string):
+ self.code.append(string)
+
+ def indent(self):
+ self.level = self.level + 1
+
+ def dedent(self):
+ if self.level == 0:
+ raise SyntaxError, "internal error in code generator"
+ self.level = self.level - 1
+
+ def write_list(self, data, delimiter=True, wrapper="'"):
+ if delimiter == True:
+ self.code.append("{")
+ first = True
+ for element in data:
+ if (first == False):
+ self.code.append(", ")
+ else:
+ first = False
+ if wrapper == "'":
+ self.code.append("'" + str(element) + "'")
+ elif wrapper == "\"":
+ self.code.append("\"" + str(element) + "\"")
+ elif wrapper == "b":
+ self.code.append("(" + str(element) + ")")
+ else:
+ self.code.append(str(element))
+ if delimiter == True:
+ self.code.append("}")
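
The generators below use this class as a small string builder: begin() sets the indentation unit, write() and writeln() emit text at the current indentation level, append() adds raw text, indent()/dedent() move the level, and end() joins the buffer into a single string. A minimal sketch of that usage (the emitted Java is illustrative):

    from code_generator import CodeGeneratorBackend

    c = CodeGeneratorBackend()
    c.begin(tab="    ")
    c.writeln("public class Demo {")
    c.indent()
    c.writeln("int x = 0;")
    c.dedent()
    c.writeln("}")
    print c.end()
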
diff --git a/verigraph/service/src/tests/j-verigraph-generator/config.py b/verigraph/service/src/tests/j-verigraph-generator/config.py
new file mode 100644
index 0000000..3fe5d6c
--- /dev/null
+++ b/verigraph/service/src/tests/j-verigraph-generator/config.py
@@ -0,0 +1,88 @@
+#!/usr/bin/python
+
+##############################################################################
+# Copyright (c) 2017 Politecnico di Torino and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+devices_to_classes = { "webclient" : "PolitoWebClient",
+ "webserver" : "PolitoWebServer",
+ "cache" : "PolitoCache",
+ "nat" : "PolitoNat",
+ "firewall" : "AclFirewall",
+ "mailclient" : "PolitoMailClient",
+ "mailserver" : "PolitoMailServer",
+ "antispam" : "PolitoAntispam",
+ "endpoint": "EndHost",
+ "dpi": "PolitoIDS",
+ "endhost": "PolitoEndHost",
+ "vpnaccess":"PolitoVpnAccess",
+ "vpnexit":"PolitoVpnExit",
+ "fieldmodifier":"PolitoFieldModifier"
+ }
+devices_to_configuration_methods = {"webclient" : "",
+ "webserver" : "",
+ "cache" : "installCache",
+ "nat" : "setInternalAddress",
+ "firewall" : "addAcls",
+ "mailclient" : "",
+ "mailserver" : "",
+ "antispam" : "",
+ "endpoint": "",
+ "dpi": "installIDS",
+ "endhost": "installEndHost",
+ "vpnaccess":"vpnAccessModel",
+ "vpnexit":"vpnAccessModel",
+ "fieldmodifier":"installFieldModifier"
+ }
+devices_initialization = { "webclient" : ["webserver"],
+ "webserver" : [],
+ "cache" : [],
+ "nat" : [],
+ "firewall" : [],
+ "mailclient" : ["mailserver"],
+ "mailserver" : [],
+ "antispam" : [],
+ "endpoint": [],
+ "dpi":[] ,
+ "endhost":[],
+ "vpnaccess":[],
+ "vpnexit":[],
+ "fieldmodifier":[]
+ }
+
+convert_configuration_property_to_ip = { "webclient" : ["value"],
+ "webserver" : [],
+ "cache" : ["value"],
+ "nat" : ["value"],
+ "firewall" : ["key", "value"],
+ "mailclient" : ["value"],
+ "mailserver" : [],
+ "antispam" : [],
+ "endpoint": [],
+ "dpi": [],
+ "endhost": [],
+ "vpnaccess": ["value"],
+ "vpnexit": ["value"],
+ "fieldmodifier": []
+ }
+
+devices_configuration_fields = { "webclient" : "",
+ "webserver" : "",
+ "cache" : "cached address",
+ "nat" : "natted address",
+ "firewall" : "acl entry",
+ "mailclient" : "",
+ "mailserver" : "",
+ "antispam" : "",
+ "endpoint": "",
+ "dpi":"words blacklist",
+ "endhost":"",
+ "vpnaccess":"vpn access",
+ "vpnexit":"vpn exit",
+ "fieldmodifier":"field modifier"
+ }
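
These tables are keyed by the functional_type values used in chains.json: devices_to_classes gives the mcnet.netobjs Java class emitted for a node, devices_to_configuration_methods the configuration call invoked on it, devices_initialization any extra constructor parameters, devices_configuration_fields the prompt text used by json_generator.py, and convert_configuration_property_to_ip marks which configuration keys and values are rewritten as "ip_"-prefixed addresses by test_class_generator.py. For example:

    from config import devices_to_classes, devices_to_configuration_methods

    print devices_to_classes["firewall"]                # AclFirewall
    print devices_to_configuration_methods["firewall"]  # addAcls
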
diff --git a/verigraph/service/src/tests/j-verigraph-generator/json_generator.py b/verigraph/service/src/tests/j-verigraph-generator/json_generator.py
new file mode 100644
index 0000000..d65ea43
--- /dev/null
+++ b/verigraph/service/src/tests/j-verigraph-generator/json_generator.py
@@ -0,0 +1,261 @@
+#!/usr/bin/python
+
+##############################################################################
+# Copyright (c) 2017 Politecnico di Torino and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import json
+from pprint import pprint
+import sys
+import commands
+import os
+from config import *
+from utility import *
+import batch_generator
+import subprocess
+
+#global variables
+chains = {}
+chains["chains"] = []
+routing = {}
+routing["routing_table"] = []
+configuration = {}
+configuration["nodes"] = []
+#end of global variables
+
+
+
+#generates json file describing the chains (default chains.json)
+def generate_chains(curr_dir):
+ filename = "chains.json"
+ fn = raw_input("Please enter a file name for the json file describing the nodes chains (default \"chains.json\") -->")
+ if fn != "":
+ filename = fn
+
+ number_of_chains = check_input_is_int("Please enter the number of chains you wish to simulate: -->")
+ for i in range(0, int(number_of_chains)):
+ chains["chains"].insert(i, {})
+ #decomment the following 2 lines to make chain id an arbitrary integer
+ #chain_id = check_input_is_int("Chain #" + str(i+1) + " id? -->")
+ #chains["chains"][i]["id"] = chain_id
+ chains["chains"][i]["id"] = i+1
+ flowspace = raw_input("Chain #" + str(i+1) + " flowspace? -->")
+ chains["chains"][i]["flowspace"] = flowspace
+ chain_nodes = check_input_is_int("How many nodes does the chain #" + str(i+1) + " have? -->")
+ chains["chains"][i]["nodes"] = []
+ for j in range(0, chain_nodes):
+ chains["chains"][i]["nodes"].insert(j, {})
+ node_name = raw_input("Node #" + str(j+1) + " name? -->")
+ chains["chains"][i]["nodes"][j]["name"] = node_name
+ print "Available functional types are:"
+ for device in devices_to_classes.keys():
+ print device + " ",
+ while True:
+ node_type = raw_input("Node #" + str(j+1) + " functional_type (see valid options above)? -->")
+ if node_type in devices_to_classes.keys():
+ break
+ chains["chains"][i]["nodes"][j]["functional_type"] = node_type
+ node_address = raw_input("Node #" + str(j+1) + " address? -->")
+ chains["chains"][i]["nodes"][j]["address"] = node_address
+ #pprint(chains)
+ with smart_open(curr_dir + "/" + filename) as f:
+ print >>f, json.dumps(chains)
+ return filename
+
+#generates json file describing the node configurations (default config.json)
+def generate_config(curr_dir):
+ chains_file = "chains.json"
+ while True:
+ list_files(curr_dir)
+ fn = raw_input("Please enter the file name of the json file containing the chains (default \"chains.json\") -->")
+ if fn != "":
+ chains_file = fn
+ try:
+ chains = parse_chains(curr_dir + "/" + chains_file)
+ except:
+ print "Chains file is not valid"
+ continue
+ break
+ print "Chains read from file:"
+ pprint(chains)
+ chains_id = []
+
+ for chain_id, chain in chains.items():
+ chains_id.append(chain_id)
+ print "Chain #" + str(chain_id) + " has " + str(len(chain)) + " elements"
+ for node_name in chain.keys():
+ print node_name + " ",
+ print ""
+
+ while True:
+ number_of_chain = check_input_is_int("Please enter the number of the chain you wish to configure: -->")
+ if number_of_chain in chains_id:
+ break
+ else:
+ print "Please enter a valid chain id (see options above)"
+
+ filename = "config.json"
+ fn = raw_input("Please enter a file name for the json file describing the nodes configuration (default \"config.json\") -->")
+ if fn != "":
+ filename = fn
+
+ number_of_nodes = len(chains[number_of_chain].keys())
+
+# for i in range(0, number_of_nodes):
+ i = -1
+ for node_name, node_map in chains[number_of_chain].items():
+ i += 1
+ configuration["nodes"].insert(i, {})
+# node_id = raw_input("Node #" + str(i+1) + " id? -->")
+# configuration["nodes"][i]["id"] = node_id
+ configuration["nodes"][i]["id"] = node_name
+ #init = raw_input("Any parameter for inizialization of node " + node_name + "? (N/Y)-->")
+ init_list = devices_initialization[node_map["functional_type"]]
+ if init_list != []:
+ for init_item in init_list:
+ init_param = raw_input("Please enter the IP address of parameter \"" + init_item + "\" for node " + node_name + ": -->")
+ configuration["nodes"][i][init_item] = init_param
+
+ node_description = raw_input("Node " + node_name +"'s configuration description? -->")
+ configuration["nodes"][i]["description"] = node_description
+ while(True):
+ #node_configuration_type = raw_input("Node " + node_id +"'s configuration type (list, maps)? (L/M) -->")
+ #n = search_node_in_chains(node_id)
+
+ node_configuration_type = devices_configuration_methods[node_map["functional_type"]]
+ if node_configuration_type == "list":
+ #list
+ configuration["nodes"][i]["configuration"] = []
+ config_elements = check_input_is_int("How many configuration elements for node " + node_name + "? (type 0 to skip configuration) -->")
+ for e in range(0, config_elements):
+ element = raw_input("\tPlease enter " + devices_configuration_fields[node_map["functional_type"]] + "#" + str(e+1) + " -->")
+ configuration["nodes"][i]["configuration"].append(element)
+ break
+ elif node_configuration_type == "maps":
+ #maps
+ configuration["nodes"][i]["configuration"] = []
+ n_entries = check_input_is_int("How many maps for the configuration of node " + node_name + "? (type 0 to skip configuration) -->")
+
+ for m in range(0, n_entries):
+ configuration["nodes"][i]["configuration"].insert(m, {})
+
+ map_elements = check_input_is_int("How many elements for map #" + str(m+1) + "? -->")
+
+ for n in range(0, map_elements):
+ key = raw_input("\tKey for " + devices_configuration_fields[node_map["functional_type"]] + "#" + str(n+1) + ": -->")
+ value = raw_input("\tValue for " + devices_configuration_fields[node_map["functional_type"]] + "#" + str(n+1) + ": -->")
+ configuration["nodes"][i]["configuration"][m][key] = value
+ break
+ else:
+ print "Invalid config, please edit the config file"
+ #pprint(configuration)
+ with smart_open(curr_dir + "/" + filename) as f:
+ print >>f, json.dumps(configuration)
+ return filename
+
+
+def main():
+
+ chains_file = ""
+ configuration_file = ""
+ routing_file = ""
+ curr_dir = os.getcwd()
+ current_path = curr_dir
+
+ set_dir = raw_input("Change working directory? (" + curr_dir + ") (N/Y) -->")
+ if set_dir == "Y" or set_dir == "y":
+ print "List of subdirectories:"
+ print list_directories(curr_dir)
+ while True:
+ curr_dir = os.path.abspath(raw_input("Enter working path (relative or absolute path are supported) -->"))
+ if os.path.exists(curr_dir):
+ current_path = curr_dir
+ break
+ else:
+ print "Please enter a valid path!"
+
+ directory = raw_input("Do you want to create a new test directory? (N/Y) -->")
+ if directory == "Y" or directory =="y":
+ directory_name = raw_input("Directory name? -->")
+ print commands.getoutput("mkdir -v " + curr_dir + "/" + directory_name)
+ current_path = curr_dir + "/" + directory_name
+
+ print "Files will be created at " + current_path
+
+ firewall_chain = False
+
+ while True:
+ choice = raw_input("""CHAINS?\n
+ Choose one of the following options:\n
+ 1) Automatic generation of chains.json and config.json for an N-firewall chain
+ 2) Generate step-by-step
+ 3) Verify the integrity of an existing json file
+ 4) Skip step\n-->""")
+ try:
+ if int(choice) == 1:
+ multiplier = check_input_is_int("Please enter N -->")
+ arguments = ["-m", str(multiplier), "-o", current_path]
+ batch_generator.main(arguments)
+ firewall_chain = True
+ break
+ elif int(choice) == 2:
+ chains_file = generate_chains(current_path)
+ break
+ elif int(choice) == 3:
+ chains_file = raw_input("Input file for CHAINS? -->")
+ if(check_chains_integrity(current_path + "/" + chains_file)) == True:
+ break
+ else:
+ print "Input json file for CHAINS not well formed, please try again!"
+ elif int(choice) == 4:
+ break
+ else:
+ print "Invalid choice, please try again!"
+ except ValueError, e:
+ print "Invalid choice, please try again!"
+ continue
+
+ while True:
+
+ if firewall_chain == True:
+ chains_file = "chains.json"
+ configuration_file = "config.json"
+ routing_file = ""
+ break
+
+ choice = raw_input("""CONFIGURATION?\n
+ Choose one of the following options:\n
+ 1) Generate step-by-step
+ 2) Verify the integrity of an existing json file
+ 3) Skip step\n-->""")
+ try:
+ if int(choice) == 1:
+ configuration_file = generate_config(current_path)
+ break
+ elif int(choice) == 2:
+ configuration_file = raw_input("Input file for CONFIGURATION? -->")
+ if(check_config_integrity(current_path + "/" + configuration_file)) == True:
+ break
+ else:
+ print "Input json file for CONFIGURATION not well formed, please try again!"
+ elif int(choice) == 3:
+ break
+ else:
+ print "Invalid choice, please try again!"
+ except ValueError, e:
+ print "Invalid choice, please try again!"
+ continue
+
+ print "All done, you are ready to launch the test generator like so:"
+ print "test_class_generator.py -c " + chains_file + " -f " + configuration_file + " -o <output_file>"
+
+ return chains_file, configuration_file, routing_file, current_path
+
+
+if __name__ == "__main__":
+ main()
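
Option 1 of the CHAINS menu above delegates to batch_generator.main() with the entered multiplier; a non-interactive sketch of the same call (the output directory is illustrative):

    import batch_generator

    # equivalent of choosing option 1 with N = 2 in the interactive menu
    batch_generator.main(["-m", "2", "-o", "/tmp/batch_example"])
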
diff --git a/verigraph/service/src/tests/j-verigraph-generator/routing_generator.py b/verigraph/service/src/tests/j-verigraph-generator/routing_generator.py
new file mode 100644
index 0000000..c8956f2
--- /dev/null
+++ b/verigraph/service/src/tests/j-verigraph-generator/routing_generator.py
@@ -0,0 +1,80 @@
+#!/usr/bin/python
+
+##############################################################################
+# Copyright (c) 2017 Politecnico di Torino and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from pprint import pprint
+import sys, getopt
+import os
+from utility import *
+
+# used by test_class_generator
+def generate_routing_from_chain(chain):
+ routing = {}
+ routing["routing_table"] = {}
+
+ chain = chain["nodes"]
+ for i in range(0, len(chain)):
+ routing["routing_table"][chain[i]["name"]] = {}
+ for j in range(i-1, -1, -1):
+ routing["routing_table"][chain[i]["name"]][chain[j]["address"]] = chain[i-1]["name"]
+ for k in range (i+1, len(chain)):
+ routing["routing_table"][chain[i]["name"]][chain[k]["address"]] = chain[i+1]["name"]
+ pprint(routing)
+ return routing
+
+def generate_routing_from_chains_file(chains_file, chain_number):
+ routing = {}
+ routing["routing_table"] = {}
+
+ chains = convert_unicode_to_ascii(parse_json_file(chains_file))
+ chain = None
+ for chn in chains["chains"]:
+ if chn["id"] == chain_number:
+ chain = chn["nodes"]
+ break
+ if chain == None:
+ return routing
+
+ for i in range(0, len(chain)):
+ routing["routing_table"][chain[i]["name"]] = {}
+ for j in range(i-1, -1, -1):
+ routing["routing_table"][chain[i]["name"]][chain[j]["address"]] = chain[i-1]["name"]
+ for k in range (i+1, len(chain)):
+ routing["routing_table"][chain[i]["name"]][chain[k]["address"]] = chain[i+1]["name"]
+ pprint(routing)
+ return routing
+
+def main(argv):
+ if len(argv) < 4:
+ print 'routing_generator.py -c <chains_file> -n <chain_number>'
+ sys.exit(2)
+ chains_file = ""
+ chain_number = ""
+ try:
+ opts, args = getopt.getopt(argv,"hc:n:",["chains=","id="])
+ except getopt.GetoptError:
+ print 'routing_generator.py -c <chains_file> -n <chain_number>'
+ sys.exit(2)
+ for opt, arg in opts:
+ if opt == '-h':
+ print 'routing_generator.py -c <chains_file> -n <chain_number>'
+ sys.exit()
+ elif opt in ("-c", "--chains"):
+ chains_file = arg
+ elif opt in ("-n", "--id"):
+ chain_number = arg
+
+ print "Chains file is " + chains_file
+ print "Chain id is " + chain_number
+
+ return generate_routing_from_chains_file(chains_file, int(chain_number))
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
\ No newline at end of file
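
To make the routing logic above concrete: for every node, the addresses of nodes that appear earlier in the chain are routed to the node's immediate predecessor, and the addresses of later nodes to its immediate successor. A minimal sketch with an illustrative three-node chain:

    from routing_generator import generate_routing_from_chain

    chain = {"nodes": [
        {"name": "client_1", "address": "ip_client_1"},
        {"name": "firewall_1", "address": "ip_firewall_1"},
        {"name": "server_1", "address": "ip_server_1"},
    ]}

    routing = generate_routing_from_chain(chain)
    # routing["routing_table"]["firewall_1"] is
    #   {"ip_client_1": "client_1", "ip_server_1": "server_1"}
    # while both remote addresses seen by client_1 resolve to firewall_1
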
diff --git a/verigraph/service/src/tests/j-verigraph-generator/test_class_generator.py b/verigraph/service/src/tests/j-verigraph-generator/test_class_generator.py
new file mode 100644
index 0000000..7bf446c
--- /dev/null
+++ b/verigraph/service/src/tests/j-verigraph-generator/test_class_generator.py
@@ -0,0 +1,399 @@
+#!/usr/bin/python
+
+##############################################################################
+# Copyright (c) 2017 Politecnico di Torino and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from pprint import pprint
+from pprint import pformat
+import sys, getopt
+from code_generator import CodeGeneratorBackend
+import os, errno
+from config import *
+from utility import *
+from routing_generator import *
+import logging
+
+#generates a custom test file
+def generate_test_file(chain, number, configuration, output_file="test_class"):
+
+ route = {}
+ config = {}
+ chn = {}
+
+ #initialize the config dictionary for each node
+ for node in chain["nodes"]:
+ config[node["name"]] = {}
+
+ #initialize the route dictionary for each node
+ for node in chain["nodes"]:
+ route[node["name"]] = {}
+
+ #initialize the chn dictionary for each node
+ for node in chain["nodes"]:
+ chn[node["name"]] = {}
+
+ #set chn values: chn[name][key] = value
+ for node in chain["nodes"]:
+ for key, value in node.items():
+ try:
+ #name key is redundant in map
+ if key != "name":
+ chn[node["name"]][key] = value
+ except KeyError, e:
+ logging.debug("Field " + str(key) + " not found for node " + str(node["name"]))
+ logging.debug("Continuing...")
+ continue
+
+ #debug print of chn
+ logging.debug(pformat((chn)))
+
+ routing = generate_routing_from_chain(chain)
+
+ for node_name, node_rt in routing["routing_table"].items():
+ route[node_name] = node_rt
+
+ #debug print of route
+ logging.debug(pformat((route)))
+
+ #set config: config[node_name][key] = value
+ for node in configuration["nodes"]:
+ for key, value in node.items():
+ #id field is redundant
+ if key != "id":
+ try:
+ if key == "configuration":
+ #init config[node_name][key] with an empty array
+ config[node["id"]][key] = []
+
+ for value_item in value:
+ change_key = "key" in convert_configuration_property_to_ip[chn[node["id"]]["functional_type"]]
+ change_value = "value" in convert_configuration_property_to_ip[chn[node["id"]]["functional_type"]]
+ if (change_key==False and change_value==False):
+ config[node["id"]][key].append(value_item)
+ continue
+ # config[node_name][configuration] is a dictionary
+ if isinstance(value_item, dict):
+ for config_item_key, config_item_value in value_item.items():
+ new_key = config_item_key
+ changed_key = False
+ changed_value = False
+ if change_key and config_item_key in chn.keys():
+ changed_key = True
+ new_key = "ip_" + str(config_item_key)
+ value_item[new_key] = str(config_item_value)
+ del value_item[config_item_key]
+ if change_value and config_item_value in chn.keys():
+ changed_value = True
+ new_value = "ip_" + str(config_item_value)
+ value_item[new_key] = new_value
+ if(change_key==changed_key) and (change_value==changed_value):
+ config[node["id"]][key].append(value_item)
+ else:
+ if change_value:
+ if value_item in chn.keys():
+ new_value = "ip_" + str(value_item)
+ config[node["id"]][key].append(new_value)
+ else:
+ config[node["id"]][key].append(str(value_item))
+ else:
+ config[node["id"]][key] = value
+ except KeyError, e:
+ #node not found in current chain
+ logging.debug("Field '" + key + "' not found for node '" + str(node["id"]) + "'")
+ logging.debug(key + " probably doesn't belong to the current chain, thus it will be skipped")
+ #sys.exit(1)
+ continue
+
+ # debug print of config
+ logging.debug(pformat((config)))
+
+ #prepare a few more helpful data structures
+ nodes_names = []
+ nodes_types = []
+ nodes_addresses = []
+ nodes_ip_mappings = []
+ nodes_rt = {}
+
+ #initialize vectors for node names and routing tables
+ for name in chn.keys():
+ nodes_names.append(name)
+ nodes_rt[name] = []
+
+ #add functional types, addresses and ip mapping to vectors
+ for node, field in chn.items():
+ nodes_types.append(field["functional_type"])
+ nodes_addresses.append(field["address"])
+ nodes_ip_mappings.append(field["address"])
+
+ for node, rt in route.items():
+ for dest, next_hop in rt.items():
+ row = "nctx.am.get(\"" + dest + "\"), " + next_hop
+ try:
+ nodes_rt[node].append(row)
+ except KeyError, e:
+ #node not found, notify and exit
+ logging.debug("Node " + node + " not found!")
+ sys.exit(1)
+
+ #begin file generation
+ logging.debug("* instantiating chain #" + str(number))
+ dirname = os.path.dirname(output_file)
+ basename = os.path.basename(output_file)
+ basename = os.path.splitext(basename)[0]
+ basename = basename[0].upper() + basename[1:]
+ with smart_open(dirname + "/" + basename + "_" + str(number) + ".java") as f:
+ c = CodeGeneratorBackend()
+ c.begin(tab=" ")
+
+ c.writeln("package tests.scenarios;")
+
+ #imports here
+ c.writeln("import java.util.ArrayList;")
+ c.writeln("import com.microsoft.z3.Context;")
+ c.writeln("import com.microsoft.z3.DatatypeExpr;")
+
+ c.writeln("import mcnet.components.Checker;")
+ c.writeln("import mcnet.components.NetContext;")
+ c.writeln("import mcnet.components.Network;")
+ c.writeln("import mcnet.components.NetworkObject;")
+ c.writeln("import mcnet.components.Tuple;")
+ c.writeln("import mcnet.netobjs.PacketModel;")
+
+ #import components
+ #for i in range(0, len(nodes_names)):
+ # c.writeln("import mcnet.netobjs." + devices_to_classes[str(nodes_types[i])] + ";")
+
+ for key, value in devices_to_classes.items():
+ c.writeln("import mcnet.netobjs." + value + ";")
+
+ c.writeln("public class " + basename + "_" + str(number) + "{")
+
+ c.indent()
+ c.writeln("public Checker check;")
+ # declare components
+ for i in range(0, len(nodes_names)):
+ c.writeln("public " + devices_to_classes[str(nodes_types[i])] + " " + str(nodes_names[i]) + ";")
+
+ # method setDevices
+ c.writeln("private void setDevices(Context ctx, NetContext nctx, Network net){")
+ c.indent()
+ for i in range(0, len(nodes_names)):
+ c.write(str(nodes_names[i]) + " = new " + devices_to_classes[str(nodes_types[i])] + "(ctx, new Object[]{nctx.nm.get(\"" + nodes_names[i] + "\"), net, nctx")
+ if devices_initialization[nodes_types[i]] != [] :
+ for param in devices_initialization[nodes_types[i]]:
+ print "configuring node " + nodes_names[i]
+ for config_param in config[nodes_names[i]]["configuration"]:
+ if param in config_param:
+ c.append(", nctx.am.get(\"" + config_param[param] + "\")")
+ c.append("});\n")
+ c.dedent()
+ c.writeln("}")
+ # end method setDevices
+
+ # method doMappings
+ c.writeln("private void doMappings(NetContext nctx, ArrayList<Tuple<NetworkObject,ArrayList<DatatypeExpr>>> adm){")
+ c.indent()
+ for i in range(0, len(nodes_names)):
+ c.writeln("ArrayList<DatatypeExpr> al" + str(i) + " = new ArrayList<DatatypeExpr>();")
+ c.writeln("al" + str(i) + ".add(nctx.am.get(\"" + nodes_ip_mappings[i] + "\"));")
+ c.writeln("adm.add(new Tuple<>((NetworkObject)" + nodes_names[i] + ", al" + str(i) + "));")
+ c.dedent()
+ c.writeln("}")
+ # end method doMappings
+
+ # for each node methods setRouting and configureDevice
+ for i in range(0, len(nodes_names)):
+ # method setRouting
+ c.writeln("private void setRouting" + nodes_names[i] + "(NetContext nctx, Network net, ArrayList<Tuple<DatatypeExpr,NetworkObject>> rt_" + nodes_names[i] + "){")
+ c.indent()
+ for row in nodes_rt[nodes_names[i]]:
+ c.writeln("rt_" + nodes_names[i] + ".add(new Tuple<DatatypeExpr,NetworkObject>(" + row + "));")
+ c.writeln("net.routingTable(" + nodes_names[i] + ", rt_" + nodes_names[i] + ");")
+ c.dedent()
+ c.writeln("}")
+ # end method setRouting
+ # method configureDevice
+ c.writeln("private void configureDevice" + nodes_names[i] + "(NetContext nctx) {")
+ c.indent()
+ #configure middle-box only if its configuration is not empty
+ if config[nodes_names[i]]["configuration"] != [] :
+ if nodes_types[i] == "cache":
+ c.write(nodes_names[i] + "." + devices_to_configuration_methods[nodes_types[i]] + "(new NetworkObject[]")
+ cache_ips = config[nodes_names[i]]["configuration"]
+ cache_hosts = []
+ for cache_ip in cache_ips:
+ i = -1
+ for host in nodes_addresses:
+ i += 1
+ if host == cache_ip:
+ cache_hosts.append(nodes_names[i])
+ c.write_list(formatted_list_from_list_of_maps(cache_hosts), wrapper="")
+ c.append(");\n")
+ elif nodes_types[i] == "nat":
+ c.writeln("ArrayList<DatatypeExpr> ia" + str(i) +" = new ArrayList<DatatypeExpr>();")
+ config_elements = []
+ config_elements = formatted_list_from_list_of_maps(config[nodes_names[i]]["configuration"])
+ for address in config_elements:
+ c.writeln("ia" + str(i) + ".add(nctx.am.get(\"" + address + "\"));")
+ c.writeln(nodes_names[i] + ".natModel(nctx.am.get(\"ip_" + nodes_names[i] + "\"));")
+ c.writeln(nodes_names[i] + "." + devices_to_configuration_methods[nodes_types[i]] + "(ia" + str(i) +");")
+ elif nodes_types[i] == "firewall":
+ c.writeln("ArrayList<Tuple<DatatypeExpr,DatatypeExpr>> acl" + str(i) + " = new ArrayList<Tuple<DatatypeExpr,DatatypeExpr>>();")
+ for config_element in config[nodes_names[i]]["configuration"]:
+ if isinstance(config_element,dict):
+ for key, value in config_element.items():
+ if key in nodes_addresses and value in nodes_addresses:
+ c.writeln("acl" + str(i) + ".add(new Tuple<DatatypeExpr,DatatypeExpr>(nctx.am.get(\"" + key + "\"),nctx.am.get(\"" + value + "\")));")
+ c.writeln(nodes_names[i] + "." + devices_to_configuration_methods[nodes_types[i]] + "(acl" + str(i) + ");")
+ elif nodes_types[i] == "antispam":
+ c.write(nodes_names[i] + "." + devices_to_configuration_methods[nodes_types[i]] + "(new int[]")
+ c.write_list(formatted_list_from_list_of_maps(config[nodes_names[i]]["configuration"]))
+ c.append(");\n")
+ elif nodes_types[i] == "dpi":
+ for index in range(0, len(config[nodes_names[i]]["configuration"])):
+ config[nodes_names[i]]["configuration"][index] = "String.valueOf(\"" + str(config[nodes_names[i]]["configuration"][index]) + "\").hashCode()"
+ c.write(nodes_names[i] + "." + devices_to_configuration_methods[nodes_types[i]] + "(new int[]")
+ c.write_list(formatted_list_from_list_of_maps(config[nodes_names[i]]["configuration"]), wrapper="")
+ c.append(");\n")
+ elif nodes_types[i] == "endhost":
+ c.writeln("PacketModel pModel" + str(i) + " = new PacketModel();")
+ if "body" in config[nodes_names[i]]["configuration"][0]:
+ c.writeln("pModel" + str(i) + ".setBody(String.valueOf(\"" + config[nodes_names[i]]["configuration"][0]["body"] + "\").hashCode());")
+ if "sequence" in config[nodes_names[i]]["configuration"][0]:
+ c.writeln("pModel" + str(i) + ".setSeq(" + config[nodes_names[i]]["configuration"][0]["sequence"] + ");")
+ if "protocol" in config[nodes_names[i]]["configuration"][0]:
+ c.writeln("pModel" + str(i) + ".setProto(nctx." + config[nodes_names[i]]["configuration"][0]["protocol"] + ");")
+ if "email_from" in config[nodes_names[i]]["configuration"][0]:
+ c.writeln("pModel" + str(i) + ".setEmailFrom(String.valueOf(\"" + config[nodes_names[i]]["configuration"][0]["email_from"] + "\").hashCode());")
+ if "url" in config[nodes_names[i]]["configuration"][0]:
+ c.writeln("pModel" + str(i) + ".setUrl(String.valueOf(\"" + config[nodes_names[i]]["configuration"][0]["url"] + "\").hashCode());")
+ if "options" in config[nodes_names[i]]["configuration"][0]:
+ c.writeln("pModel" + str(i) + ".setOptions(String.valueOf(\"" + config[nodes_names[i]]["configuration"][0]["options"] + "\").hashCode());")
+ if "destination" in config[nodes_names[i]]["configuration"][0]:
+ c.writeln("pModel" + str(i) + ".setIp_dest(nctx.am.get(\"" + config[nodes_names[i]]["configuration"][0]["destination"] + "\"));")
+
+ c.writeln(nodes_names[i] + "." + devices_to_configuration_methods[nodes_types[i]] + "(pModel" + str(i) + ");")
+ elif nodes_types[i] == "vpnaccess":
+ c.writeln(nodes_names[i] + "." + devices_to_configuration_methods[nodes_types[i]] + "(nctx.am.get(\"" + nodes_addresses[i] + "\"), nctx.am.get(\"" + config[nodes_names[i]]["configuration"][0]["vpnexit"] + "\"));")
+ elif nodes_types[i] == "vpnexit":
+ c.writeln(nodes_names[i] + "." + devices_to_configuration_methods[nodes_types[i]] + "(nctx.am.get(\"" + config[nodes_names[i]]["configuration"][0]["vpnaccess"] + "\"), nctx.am.get(\"" + nodes_addresses[i] + "\"));")
+
+ # config is empty but configure device anyway
+ elif nodes_types[i] == "fieldmodifier":
+ c.writeln(nodes_names[i] + "." + devices_to_configuration_methods[nodes_types[i]] + "();")
+ c.dedent()
+ c.writeln("}")
+ # end method configureDevice
+
+
+ c.writeln("public " + basename + "_" + str(number) + "(Context ctx){")
+ c.indent()
+ c.write("NetContext nctx = new NetContext (ctx,new String[]")
+ c.write_list(nodes_names, wrapper="\"")
+ c.append(", new String[]")
+ c.write_list(nodes_addresses, wrapper="\"")
+ c.append(");\n")
+ c.writeln("Network net = new Network (ctx,new Object[]{nctx});")
+ # call method setDevices
+ c.writeln("setDevices(ctx, nctx, net);")
+
+ #SET ADDRESS MAPPINGS
+ c.writeln("ArrayList<Tuple<NetworkObject,ArrayList<DatatypeExpr>>> adm = new ArrayList<Tuple<NetworkObject,ArrayList<DatatypeExpr>>>();")
+ # call doMappings
+ c.writeln("doMappings(nctx, adm);")
+ c.writeln("net.setAddressMappings(adm);")
+
+ #CONFIGURE ROUTING TABLE
+ for i in range(0, len(nodes_names)):
+ c.writeln("ArrayList<Tuple<DatatypeExpr,NetworkObject>> rt_" + nodes_names[i] + " = new ArrayList<Tuple<DatatypeExpr,NetworkObject>>(); ")
+ c.writeln("setRouting" + nodes_names[i] + "(nctx, net, rt_" + nodes_names[i] + ");")
+
+ #ATTACH DEVICES
+ c.write("net.attach(")
+ c.write_list(nodes_names, delimiter = False, wrapper="")
+ c.append(");\n")
+
+ #CONFIGURE MIDDLE-BOXES
+ for i in range(0, len(nodes_names)):
+ c.writeln("configureDevice" + nodes_names[i] + "(nctx);")
+
+ c.writeln("check = new Checker(ctx,nctx,net);")
+
+ c.dedent()
+ c.writeln("}")
+
+ c.dedent()
+ c.writeln("}")
+
+ #write c object to file
+ print >>f, c.end()
+
+ logging.debug("wrote test file " + os.path.abspath(dirname + "/" + basename + "_" + str(number)) + ".java" + " successfully!")
+
+
+def main(argv):
+ #exit if any command line argument is missing
+ if len(argv) < 6:
+ print 'test_class_generator.py -c <chain_file> -f <conf_file> -o <output_name>'
+ sys.exit(1)
+
+ #initialize json file names
+ chains_file = ''
+ configuration_file = ''
+ output_file = ''
+
+ #parse command line arguments and exit in case of any error
+ try:
+ opts, args = getopt.getopt(argv,"hc:f:r:o:",["help","chain=","config=","route=","ofile="])
+ except getopt.GetoptError as err:
+ print str(err)
+ print 'test_class_generator.py -c <chain_file> -f <conf_file> -o <output_name>'
+ sys.exit(2)
+ for opt, arg in opts:
+ if opt in ("-h", "--help"):
+ print 'test_class_generator.py -c <chain_file> -f <conf_file> -o <output_name>'
+ sys.exit()
+ elif opt in ("-c", "--chain"):
+ chains_file = arg
+ elif opt in ("-f", "--config"):
+ configuration_file = arg
+ elif opt in ("-o", "--ofile"):
+ output_file = arg
+
+ #set logging
+ logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
+
+ #parse chains file
+ chains = convert_unicode_to_ascii(parse_json_file(chains_file))
+
+ #parse configuration file
+ configuration = convert_unicode_to_ascii(parse_json_file(configuration_file))
+
+ logging.debug(pformat((chains)))
+ logging.debug(pformat((configuration)))
+
+ #custom formatted prints
+ print_chains(chains)
+ print_configuration(configuration)
+
+ #counter for the number of chains
+ number_of_chains = 0
+
+ #generate test classes
+ for chain in chains["chains"]:
+ #increment the number of chains
+ number_of_chains += 1
+ #generate test files
+ generate_test_file(chain, number_of_chains, configuration, output_file)
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
+
diff --git a/verigraph/service/src/tests/j-verigraph-generator/test_generator.py b/verigraph/service/src/tests/j-verigraph-generator/test_generator.py
new file mode 100644
index 0000000..f4629bb
--- /dev/null
+++ b/verigraph/service/src/tests/j-verigraph-generator/test_generator.py
@@ -0,0 +1,160 @@
+#!/usr/bin/python
+
+##############################################################################
+# Copyright (c) 2017 Politecnico di Torino and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from pprint import pprint
+from code_generator import CodeGeneratorBackend
+import sys, getopt
+import contextlib
+import os
+from utility import *
+import logging
+
+def main(argv):
+ if len(argv) < 8:
+ print 'test_generator.py -i <inputfile> -o <outputfile> -s <source> -d <destination>'
+ sys.exit(2)
+ #initialize command line arguments values
+ inputfile = ''
+ outputfile = ''
+ source = ''
+ destination = ''
+ #parse command line arguments and exit if there is an error
+ try:
+ opts, args = getopt.getopt(argv,"hi:o:s:d:",["ifile=","ofile=","source=","destination="])
+ except getopt.GetoptError:
+ print 'test_generator.py -i <inputfile> -o <outputfile> -s <source> -d <destination>'
+ sys.exit(2)
+ for opt, arg in opts:
+ if opt == '-h':
+ print 'test_generator.py -i <inputfile> -o <outputfile> -s <source> -d <destination>'
+ sys.exit()
+ elif opt in ("-i", "--ifile"):
+ inputfile = arg
+ elif opt in ("-o", "--ofile"):
+ outputfile = arg
+ elif opt in ("-s", "--source"):
+ source = arg
+ elif opt in ("-d", "--destination"):
+ destination = arg
+ #set logging
+ logging.basicConfig(stream=sys.stderr, level=logging.INFO)
+ #capitalize output filename
+ dirname = os.path.dirname(outputfile)
+ basename = os.path.basename(outputfile)
+ basename = os.path.splitext(basename)[0]
+ basename = basename[0].upper() + basename[1:]
+
+ #print arguments
+ logging.debug('Input file is %s', inputfile)
+ logging.debug('Output file is %s', dirname + "/" + basename)
+ logging.debug('Source node is %s', source)
+ logging.debug('Destination node is %s', destination)
+
+ #begin file generation
+ with smart_open(dirname + "/" + basename + ".java") as f:
+ c = CodeGeneratorBackend()
+ c.begin(tab=" ")
+ c.writeln("package tests;")
+ c.writeln("import java.util.Calendar;")
+ c.writeln("import java.util.Date;")
+ c.writeln("import java.util.HashMap;")
+
+ c.writeln("import com.microsoft.z3.Context;")
+ c.writeln("import com.microsoft.z3.FuncDecl;")
+ c.writeln("import com.microsoft.z3.Model;")
+ c.writeln("import com.microsoft.z3.Status;")
+ c.writeln("import com.microsoft.z3.Z3Exception;")
+ c.writeln("import mcnet.components.IsolationResult;")
+
+
+ inputfile = os.path.basename(inputfile)
+ c.writeln("import tests.scenarios." + os.path.splitext(inputfile)[0] + ";")
+ c.writeln("public class " + basename + "{")
+
+ c.indent()
+ c.writeln("Context ctx;")
+
+ c.write("public void resetZ3() throws Z3Exception{\n\
+ HashMap<String, String> cfg = new HashMap<String, String>();\n\
+ cfg.put(\"model\", \"true\");\n\
+ ctx = new Context(cfg);\n\
+ \r\t}\n")
+
+ c.write("public void printVector (Object[] array){\n\
+ int i=0;\n\
+ System.out.println( \"*** Printing vector ***\");\n\
+ for (Object a : array){\n\
+ i+=1;\n\
+ System.out.println( \"#\"+i);\n\
+ System.out.println(a);\n\
+ System.out.println( \"*** \"+ i+ \" elements printed! ***\");\n\
+ }\n\
+ \r\t}\n")
+
+ c.write("public void printModel (Model model) throws Z3Exception{\n\
+ for (FuncDecl d : model.getFuncDecls()){\n\
+ System.out.println(d.getName() +\" = \"+ d.toString());\n\
+ System.out.println(\"\");\n\
+ }\n\
+ \r\t}\n")
+
+ c.writeln("public int run() throws Z3Exception{")
+ c.indent()
+
+ c.writeln(basename + " p = new " + basename + "();")
+
+ #adding time estimation
+ #c.writeln("int k = 0;")
+ #c.writeln("long t = 0;")
+
+ #c.writeln("for(;k<1;k++){")
+ #c.indent()
+
+ c.writeln("p.resetZ3();")
+
+ c.write(os.path.splitext(inputfile)[0] + " model = new " + os.path.splitext(inputfile)[0] + "(p.ctx);\n")
+
+ #c.writeln("Calendar cal = Calendar.getInstance();")
+ #c.writeln("Date start_time = cal.getTime();")
+
+ c.write("IsolationResult ret =model.check.checkIsolationProperty(model.")
+ c.append(source + ", model." + destination + ");\n")
+ #c.writeln("Calendar cal2 = Calendar.getInstance();")
+ #c.writeln("t = t+(cal2.getTime().getTime() - start_time.getTime());")
+
+ c.writeln("if (ret.result == Status.UNSATISFIABLE){\n\
+ System.out.println(\"UNSAT\");\n\
+ return -1;\n\
+ }else if (ret.result == Status.SATISFIABLE){\n\
+ System.out.println(\"SAT\");\n\
+ return 0;\n\
+ }else{\n\
+ System.out.println(\"UNKNOWN\");\n\
+ return -2;\n\
+ \r\t\t}")
+
+ #c.dedent()
+ #c.writeln("}")
+
+ #c.writeln("")
+ #c.writeln("System.out.printf(\"Mean execution time " + source + " -> " + destination + ": %.16f\", ((float) t/(float)1000)/k);")
+
+ c.dedent()
+ c.writeln("}")
+
+ c.dedent()
+ c.writeln("}")
+
+ print >>f, c.end()
+ logging.debug("File " + os.path.abspath(dirname + "/" + basename + ".java") + " has been successfully generated!!")
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
diff --git a/verigraph/service/src/tests/j-verigraph-generator/utility.py b/verigraph/service/src/tests/j-verigraph-generator/utility.py
new file mode 100644
index 0000000..47d0180
--- /dev/null
+++ b/verigraph/service/src/tests/j-verigraph-generator/utility.py
@@ -0,0 +1,257 @@
+#!/usr/bin/python
+
+##############################################################################
+# Copyright (c) 2017 Politecnico di Torino and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import json
+import contextlib
+import sys
+import os
+import subprocess
+from pprint import pprint
+
+#manages output easily (can either write to file or to stdout)
+@contextlib.contextmanager
+def smart_open(filename=None):
+ if filename and filename != '-':
+ fh = open(filename, 'w')
+ else:
+ fh = sys.stdout
+ try:
+ yield fh
+ finally:
+ if fh is not sys.stdout:
+ fh.close()
+
+def check_input_is_int(text):
+ while True:
+ data = raw_input(text)
+ try:
+ int_value = int(data)
+ except ValueError:
+ print "Please enter a valid number!"
+ continue
+ return int_value
+
+#parses a json file into a unicode dictionary
+def parse_json_file(filename):
+ with open(filename) as json_file:
+ return json.load(json_file)
+
+#returns an ascii dictionary from a unicode one
+def convert_unicode_to_ascii(input):
+ if isinstance(input, dict):
+ return {convert_unicode_to_ascii(key): convert_unicode_to_ascii(value) for key, value in input.iteritems()}
+ elif isinstance(input, list):
+ return [convert_unicode_to_ascii(element) for element in input]
+ elif isinstance(input, unicode):
+ return input.encode('utf-8')
+ else:
+ return input
+
+#parses a chains file
+def parse_chains(chains_file):
+ chains_json = convert_unicode_to_ascii(parse_json_file(chains_file))
+
+ chains = {}
+
+ for chn in chains_json["chains"]:
+ try:
+ chains[chn["id"]] = {}
+ #initialize the config dictionary for each node
+ for node in chn["nodes"]:
+ chains[chn["id"]][node["name"]] = {}
+ except:
+ raise KeyError("Chains file is not valid!")
+
+ for chn in chains_json["chains"]:
+ try:
+ #set chn values ---> chn(name, (field, value))
+ for node in chn["nodes"]:
+ for key, value in node.items():
+ #name key is redundant in map
+ if key != "name":
+ chains[chn["id"]][node["name"]][key] = value
+ except:
+ raise KeyError("Chains file is not valid!")
+ return chains
+
+def check_chains_integrity(filename):
+ print "Checking input file..."
+ try:
+ chains = convert_unicode_to_ascii(parse_json_file(filename))
+ print "File correctly parsed"
+ if isinstance(chains["chains"], list) == False:
+ print "Child of chains is not a list!"
+ return False
+ for chain in chains["chains"]:
+ print "Chain found, checking its fields..."
+ print "Checking chain id field... "
+ chain["id"]
+ print "OK!"
+ print "Checking chain flowspace field... "
+ chain["flowspace"]
+ print "OK!"
+ if isinstance(chain["nodes"], list) == False:
+ print "Chain #" + str(chain["id"]) + " does not have a list of nodes!"
+ return False
+ for node in chain["nodes"]:
+ print "Node found, checking its fields..."
+ print "Checking node name... "
+ node["name"]
+ print "OK!"
+ print "Checking node functional_type field... "
+ node["functional_type"]
+ print "OK!"
+ print "Checking node address field... "
+ node["address"]
+ print "OK!"
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except:
+ print "One or more required fields are missing!"
+ return False
+ print filename + " validated successfully!"
+ return True
+
+def check_config_integrity(filename):
+ print "Checking input file..."
+ try:
+ config = convert_unicode_to_ascii(parse_json_file(filename))
+ pprint(config)
+ print "File correctly parsed"
+ if isinstance(config["nodes"], list) == False:
+ print "Child of nodes is not a list!"
+ return False
+ for node in config["nodes"]:
+ print "Node found, checking its fields..."
+ print "Checking id field... "
+ node["id"]
+ print "OK!"
+ print "Checking description field... "
+ node["description"]
+ print "OK!"
+ print "Checking configuration field... "
+ node["configuration"]
+ print "OK!"
+ if isinstance(node["configuration"], list) == False:
+ print "Checking if node configuration is a list..."
+ print "Node with id " + node["id"] + " does not have a configuration list!"
+ return False
+ for c in node["configuration"]:
+ print "Checking if node configuration element is a string or a dictionary..."
+ if (isinstance(c, str) == False and isinstance(c, dict) == False):
+ print "At least one element of node with id " + node["id"] + " has an invalid configuration (it is neither a string nor a map)"
+ return False
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except:
+ print "One or more required fields are missing!"
+ return False
+ print filename + " validated successfully!"
+ return True
+
+def check_routing_integrity(filename):
+ print "Checking input file..."
+ try:
+ routing = convert_unicode_to_ascii(parse_json_file(filename))
+ print "File correctly parsed"
+ if isinstance(routing["routing_table"], list) == False:
+ print "Child of routing_table is not a list!"
+ return False
+ for node in routing["routing_table"]:
+ if isinstance(node, dict) == False:
+ print "Child of routing_table is not a map!"
+ return False
+ for n, rt in node.items():
+ if isinstance(rt, list) == False:
+ print "Routing table of element " + n + " is not a list!"
+ return False
+ for entry in rt:
+ if isinstance(entry, dict) == False:
+ print "Invalid entry for node " + n + " (not a map)!"
+ return False
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except:
+ print "One or more required fields are missing!"
+ return False
+ return True
+
+#prints every node for each input chain
+def print_chains(chains):
+ for chain in chains["chains"]:
+ print "CHAIN #" + str(chain["id"])
+ for node in chain["nodes"]:
+ print "Name: " + str(node["name"])
+ print "Functional type: " + str(node["functional_type"])
+ print "Address: " + str(node["address"])
+ print "-----------------------------------"
+ print ""
+
+#prints every node's configuration
+def print_configuration(configuration):
+ print "NODES CONFIGURATION"
+ for node in configuration["nodes"]:
+ print "Name: " + str(node["id"])
+ print "Description: " + str(node["description"])
+ print "Configuration: "
+ pprint(node["configuration"])
+ print "-----------------------------------"
+ print ""
+
+#print every node's routing table
+def print_routing_table(routing):
+ print "ROUTING"
+ for table in routing["routing_table"]:
+ for node,rt in table.items():
+ print "Name: " + str(node)
+ pprint(rt)
+ print "-----------------------------------"
+ print ""
+
+#returns a list of tuple [(k1, v1), (k2, v2)] from a list of maps like [{k1 : v1},{k2 : v2}]
+def formatted_list_from_list_of_maps(maps):
+ l = []
+ for map in maps:
+ if isinstance(map, dict):
+ for k, v in map.items():
+ #l.append("(ctx." + str(k) + ", ctx." + str(v) + ")")
+ l.append(str(k))
+ l.append(str(v))
+ else:
+ #l.append("ctx." + map)
+ l.append(map)
+ return l
+
+def list_directories(dir):
+ #output = subprocess.call(["ls", "-d", "*/"])
+ output = subprocess.call(["find", dir, "-type", "d"])
+
+ #TREE VERSION
+ #find = subprocess.Popen(["find", ".", "-type", "d"], stdout=subprocess.PIPE)
+ #output = subprocess.check_output(["sed", "-e", "s/[^-][^\/]*\// |/g", "-e", "s/|\([^ ]\)/|-\1/"], stdin=find.stdout)
+ #find.wait()
+
+# ps = subprocess.Popen(('ps', '-A'), stdout=subprocess.PIPE)
+# output = subprocess.check_output(('grep', 'process_name'), stdin=ps.stdout)
+# ps.wait()
+ return output
+
+def list_files(dir):
+ output = subprocess.call(["find", dir, "-type", "f"])
+ return output
+
+def search_node_in_chains(n):
+ found = []
+ for chain in chains["chains"]:
+ for node in chain["nodes"]:
+ if node["name"] == n:
+ found.append(node)
+ return found
\ No newline at end of file
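
The two helpers the other modules rely on most are smart_open, which writes to the given file or to stdout when the name is empty or "-", and parse_chains, which flattens a chains.json file into a {chain_id: {node_name: {field: value}}} dictionary. A minimal sketch (the file name is illustrative):

    from utility import smart_open, parse_chains

    with smart_open("-") as out:
        print >>out, "goes to stdout"

    chains = parse_chains("chains.json")
    for chain_id, nodes in chains.items():
        for name, fields in nodes.items():
            print chain_id, name, fields["functional_type"], fields["address"]
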