--- /dev/null
+n_packets;n_packets
+n_bytes;n_bytes
+arp;arp
+duration;duration
+barrier;barrier
+strict;strict
+flow-name;flow-name
+flags;flags
+id;id
+cookie_mask;cookie_mask
+dl_type;dl_type
+++ /dev/null
-in_port;in_port
-dl_vlan;dl_vlan
-dl_vlan_pcp;dl_vlan_pcp
-dl_src;dl_src
-dl_dst;dl_dst
-dl_type;dl_type
-ipv4-source;nw_src
-ipv4-destination;nw_dst
-nw_proto;nw_proto
-nw_tos;nw_tos
-nw_ecn;nw_ecn
-nw_ttl;nw_ttl
-tp_src;tp_src
-tp_dst;tp_dst
-icmp_type;icmp_type
-icmp_code;icmp_code
-table;table
-ip;ip
-icmp;icmp
-tcp;tcp
-udp;udp
-arp;arp
-vlan_tci;vlan_tci
-ip_frag;ip_frag
-arp_sha;arp_sha
-arp_tha;arp_tha
-ipv6_src;ipv6_src
-ipv6_dst;ipv6_dst
-ipv6_label;ipv6_label
-nd_target;nd_target
-nd_sll;nd_sll
-nd_tll;nd_tll
-tun_id;tun_id
-reg;reg
-ipv6;ipv6
-tcp6;tcp6
-udp6;udp6
-icmp6;icmp6
-apply_actions;actions
-exit;exit
-cookie;cookie
-priority;priority
-idle-timeout;idle_timeout
-hard-timeout;hard_timeout
-out_port;out_port
-duration;duration
-table_id;table
-priority;priority
-n_packets;n_packets
-n_bytes;n_bytes
+++ /dev/null
-import os
-import sys
-import time
-import logging
-import argparse
-import unittest
-import requests
-import xml.dom.minidom as md
-from xml.etree import ElementTree as ET
-from netaddr import IPNetwork
-from string import lower
-
-import mininet.node
-import mininet.topo
-import mininet.net
-import mininet.util
-
-from mininet.node import RemoteController
-from mininet.node import OVSKernelSwitch
-
-def create_network(controller_ip, controller_port):
- """Create topology and mininet network."""
- topo = mininet.topo.Topo()
-
- topo.addSwitch('s1')
- topo.addHost('h1')
- topo.addHost('h2')
-
- topo.addLink('h1', 's1')
- topo.addLink('h2', 's1')
-
- switch=mininet.util.customConstructor(
- {'ovsk':OVSKernelSwitch}, 'ovsk,protocols=OpenFlow13')
-
- controller=mininet.util.customConstructor(
- {'remote': RemoteController}, 'remote,ip=%s:%s' % (controller_ip,
- controller_port))
-
-
- net = mininet.net.Mininet(topo=topo, switch=switch, controller=controller)
-
- return net
-
-
-def get_flows(net):
- """Get list of flows from network's first switch.
-
- Return list of all flows on switch, sorted by duration (newest first)
- One flow is a dictionary with all flow's attribute:value pairs. Matches
- are stored under 'matches' key as another dictionary.
- Example:
-
- {
- 'actions': 'drop',
- 'cookie': '0xa,',
- 'duration': '3.434s,',
- 'hard_timeout': '12,',
- 'idle_timeout': '34,',
- 'matches': {
- 'ip': None,
- 'nw_dst': '10.0.0.0/24'
- },
- 'n_bytes': '0,',
- 'n_packets': '0,',
- 'priority': '2',
- 'table': '1,'
- }
-
- """
- log = logging.getLogger(__name__)
- def parse_matches(flow, matches):
- flow['matches'] = {}
-
- for match in matches:
- split_match = match.split('=', 1)
- if len(split_match) == 1:
- flow['matches'][split_match[0]] = None
- else:
- flow['matches'][split_match[0]] = split_match[1].rstrip(',')
-
- switch = net.switches[0]
- output = switch.cmdPrint(
- 'ovs-ofctl -O OpenFlow13 dump-flows %s' % switch.name)
-# output = switch.cmdPrint(
-# 'ovs-ofctl -F openflow10 dump-flows %s' % switch.name)
-
- log.debug('switch flow table: {}'.format(output))
-
- flows = []
-
- for line in output.splitlines()[1:]:
- flow = {}
- for word in line.split():
- word.rstrip(',')
- try:
- key, value = word.split('=', 1)
- except ValueError:
- #TODO: need to figure out what to do here?
- continue
-
- if key == 'priority':
- values = value.split(',')
- flow[key] = values[0]
- parse_matches(flow, values[1:])
- else:
- flow[key] = value.rstrip(',')
-
- flows.append(flow)
-
- # sort by duration
- return sorted(flows, key=lambda x: x['duration'].rstrip('s'))
-
-
-def translate_to_flow(flow, name, dictionary):
- switch_flow_name = dictionary[name]
-
- key_err = '{} needs to be present in flow definition. Flow definition ' \
- 'was: {}.'.format(switch_flow_name, flow)
- assert switch_flow_name in flow, key_err
- return switch_flow_name
-
-
-def get_text_value(element):
- return element.childNodes[0].nodeValue
-
-
-def compare_elements(expected_match, actual_match, kw, comparators, default):
- for child in expected_match.childNodes:
- if child.nodeType is expected_match.TEXT_NODE:
- continue
-
- comparator = comparators.get(child.nodeName, default)
- comparator(child, actual_match, kw)
-
-
-def fallback_comparator(xml_element, switch_flow, kw):
- # print 'fallback_comparator-xml_element', xml_element.toxml()
- # print 'fallback_comparator: switch_flow', switch_flow
- # print 'fallback_comparator: kw', kws
-
- name = translate_to_flow(switch_flow, xml_element.nodeName, kw)
-
- actual = switch_flow[name]
- expected = xml_element.childNodes[0].nodeValue
-
- data = xml_element.toxml(), name, actual
- # print 'fallback_comparator: data', data
-
- assert expected == actual, 'xml part: %s && switch %s=%s' % data
-
-
-def default_comparator(xml_element, switch_flow):
- fallback_comparator(xml_element, switch_flow, keywords)
-
-
-def integer_comparator(expected, actual, kw, base):
- expected_value = int(expected.childNodes[0].data)
-
- name = kw.get(expected.nodeName)
- actual_value = int(actual[name], base)
-
- data = expected.toxml(), name, actual
- assert expected_value == actual_value, \
- 'xml value: %s && actual value %s=%s' % data
-
-
-def cookie_comparator(cookie, switch_flow):
- integer_comparator(cookie, switch_flow, keywords, 16)
-
-
-def ethernet_address_comparator(child, actual_match, kw):
- expected_address = child.getElementsByTagName("address")[0].childNodes[0].data
- actual_address = actual_match[kw.get(child.nodeName)]
-
- data = child.toxml(), kw.get(child.nodeName), actual_address
-
- assert lower(expected_address) == lower(actual_address), \
- 'xml address: %s && actual address %s=%s' % data
-
-
-def proto_match_comparator(expected_match, actual_match, kw):
-
- def compare_base10_integer(expected_match, actual_match, kw):
- integer_comparator(expected_match, actual_match, kw, 10)
-
- def compare_vlan_id(expected_match, actual_match, kw):
- integer_comparator(expected_match.getElementsByTagName('vlan-id')[0], \
- actual_match, kw, 10)
-
- PROTO_COMPARATORS = {
- 'vlan-id': compare_vlan_id,
- }
-
- # print 'ethernet_match_comparator-expected_match:', expected_match.toxml()
- # print 'ethernet_match_comparator-actual_match:', actual_match
-
- compare_elements(expected_match, actual_match, kw, \
- PROTO_COMPARATORS, compare_base10_integer)
-
-
-def metadata_match_comparator(child, actual_match, kw):
- emd = int(child.getElementsByTagName("metadata")[0].childNodes[0].data)
-
- name = kw.get(child.nodeName)
- data = child.toxml(), name, actual_match
-
- amd = int(actual_match[kw.get(name)], 16)
-
- emasks = child.getElementsByTagName("metadata-mask")
- if len(emasks) != 0:
- print 'metadata-mask present'
-
- assert emd == amd, 'metadata: expected %s && actual %s=%s' % data
-
-
-
-def ethernet_match_comparator(expected_match, actual_match, kw):
- def compare_etype(child, actual_match, kw):
- expected_etype = int(child.getElementsByTagName("type")[0].childNodes[0].data)
- name = kw.get(child.nodeName)
- data = child.toxml(), name, actual_match
-
- if expected_etype == 2048: # IP
- assert ((actual_match.get('ip', 'IP Not-present') is None) or \
- (actual_match.get('tcp', 'TCP Not-present') is None) or \
- (actual_match.get('sctp', 'SCTP Not-present') is None) or \
- (actual_match.get('udp', 'UDP Not-present') is None)), \
- 'Expected etype %s && actual etype %s=%s' % data
-
- elif expected_etype == 2054: # ARP
- assert actual_match.get('arp', 'ARP Not-present') is None, \
- 'Expected etype %s && actual etype %s=%s' % data
-
- elif expected_etype == 34887: # MPLS
- assert actual_match.get('mpls', 'MPLS Not-present') is None, \
- 'Expected etype %s && actual etype %s=%s' % data
- else:
- actual_etype = int(actual_match[name], 16)
-
- assert expected_etype == actual_etype, \
- 'xml etype: %s && actual etype %s=%s' % data
-
-
- ETH_COMPARATORS = {
- 'ethernet-type': compare_etype,
- 'ethernet-source': ethernet_address_comparator,
- 'ethernet-destination': ethernet_address_comparator,
- }
-
- # print 'ethernet_match_comparator-expected_match:', expected_match.toxml()
- # print 'ethernet_match_comparator-actual_match:', actual_match
-
- compare_elements(expected_match, actual_match, kw, \
- ETH_COMPARATORS, fallback_comparator)
-
-
-def ipv4_comparator(expected_match, actual_match, kw):
- # print 'ip_v4_comparator:', expected_match.toxml(), actual_match
- # print 'ip_v4_comparator-actual_match:', actual_match
-
- expected_value = expected_match.childNodes[0].data
- actual_value = actual_match[kw.get(expected_match.nodeName)]
-
- data = expected_match.toxml(), kw.get(expected_match.nodeName), actual_value
-
- assert IPNetwork(expected_value) == IPNetwork(actual_value),\
- 'xml part: %s && address %s=%s' % data
-
-
-def ip_match_comparator(expected_match, actual_match, kw):
- def compare_proto(child, actual_match, kw):
- print 'compare_proto:', child.toxml(), actual_match
- expected_proto = int(child.childNodes[0].data)
-
- name = child.nodeName
- data = expected_match.toxml(), name, actual_match
-
- if expected_proto == 6: # TCP
- assert actual_match.get('tcp', 'TCP Not-present') is None, \
- 'ip protocol type: expected %s, actual %s=%s' % data
-
- elif expected_proto == 17: #UDP
- assert actual_match.get('udp', 'UDP Not-present') is None, \
- 'ip protocol type: expected %s, actual %s=%s' % data
-
- elif expected_proto == 132: #SCTP
- assert actual_match.get('sctp', 'SCTP Not-present') is None, \
- 'ip protocol type: expected %s, actual %s=%s' % data
-
- else:
- fallback_comparator(child, actual_match, kw)
-
-
- def compare_dscp(child, actual_match, kw):
- # print 'compare_dscp:', child.toxml(), actual_match
-
- expected_dscp = int(child.childNodes[0].data)
- name = kw.get(child.nodeName)
- actual_dscp = int(actual_match[name])
-
- data = child.toxml(), name, actual_match
-
- assert (expected_dscp * 4) == actual_dscp, 'dscp: expected %s, actual %s=%s' % data
-
-
- IP_MATCH_COMPARATORS = {
- 'ip-protocol': compare_proto,
- 'ip-dscp': compare_dscp,
- }
-
- # print 'ip_match_comparator:', expected_match.toxml(), actual_match
- compare_elements(expected_match, actual_match, kw, \
- IP_MATCH_COMPARATORS, fallback_comparator)
-
-
-def match_comparator(expected_match, switch_flow):
- MATCH_COMPARATORS = {
- 'arp-source-hardware-address': ethernet_address_comparator,
- 'arp-target-hardware-address': ethernet_address_comparator,
- 'protocol-match-fields': proto_match_comparator,
- 'metadata': metadata_match_comparator,
- 'vlan-match': proto_match_comparator,
- 'ethernet-match': ethernet_match_comparator,
- 'ip-match': ip_match_comparator,
- 'ipv4-destination': ipv4_comparator,
- 'ipv4-source': ipv4_comparator,
- }
-
- actual_match = switch_flow['matches']
-
- # print 'match_comparator-expected_match:', expected_match.toxml()
- # print 'match_comparator-actual_match:', actual_match
- # print 'match_comparator: keywords', keywords
-
- compare_elements(expected_match, actual_match, match_keywords, \
- MATCH_COMPARATORS, fallback_comparator)
-
-
-def actions_comparator(actions, switch_flow):
- # print 'actions_comparator:', actions, switch_flow
-
- actual_actions = switch_flow['actions'].split(",")
- # print 'actions_comparator:', actual_actions
-
- for action in actions.childNodes:
- if action.nodeType is actions.TEXT_NODE:
- continue
-
- action_name = action.childNodes[3].nodeName
- expected_action = action_keywords.get(action_name)
-
- data = action.toxml(), expected_action
- # print 'actions_comparator:', data
-
- assert expected_action in actual_actions, 'xml part:\n%s\n expected action: %s' % data
-
-
-def null_comparator(element, switch_flow):
- pass
-
-
-def instructions_comparator(instructions_element, switch_flow):
- INSTRUCTION_COMPARATORS = {
- 'apply-actions': actions_comparator,
- 'default': null_comparator,
- }
- # print 'instructions_comparator:', instructions_element, switch_flow
-
- instructions = instructions_element.childNodes
-
- for instruction in instructions_element.childNodes:
- if instruction.nodeType is instructions_element.TEXT_NODE:
- continue
-
- for itype in instruction.childNodes:
- if itype.nodeType is itype.TEXT_NODE:
- continue
-
- comparator = INSTRUCTION_COMPARATORS.get(itype.nodeName,
- INSTRUCTION_COMPARATORS['default'])
- comparator(itype, switch_flow)
-
-
-COMPARATORS = {
- 'cookie': cookie_comparator,
- 'instructions': instructions_comparator,
- 'match': match_comparator,
- 'default': default_comparator,
-}
-
-def all_nodes(xml_root):
- """
- Generates every non-text nodes.
- """
- current_nodes = [xml_root]
- next_nodes = []
-
- while len(current_nodes) > 0:
- for node in current_nodes:
- if node.nodeType != xml_root.TEXT_NODE:
- yield node
- next_nodes.extend(node.childNodes)
-
- current_nodes, next_nodes = next_nodes, []
-
-
-def check_elements(xmlstr, keywords):
- # namespace = 'urn:opendaylight:flow:inventory'
- tree = md.parseString(xmlstr)
-
- for element in all_nodes(tree.documentElement):
- # switch flow object contains only some data from xml
- if element.nodeName not in keywords:
- # print 'check_elements: element.nodeName', element.nodeName, 'NOT in keywords'
- continue
-
- yield element
-
- raise StopIteration()
-
-
-class TestOpenFlowXMLs(unittest.TestCase):
- @classmethod
- def setUpClass(cls):
- cls.net = create_network(cls.host, cls.mn_port)
- cls.net.start()
- time.sleep(15)
-
- @classmethod
- def tearDownClass(cls):
- cls.net.stop()
-
-
-def get_values(node, *tags):
- result = {tag: None for tag in tags}
- for node in all_nodes(node):
- if node.nodeName in result and len(node.childNodes) > 0:
- result[node.nodeName] = node.childNodes[0].nodeValue
- return result
-
-
-def generate_tests_from_xmls(path, xmls=None):
- # generate test function from path to request xml
- def generate_test(path_to_xml):
- xml_string = ''
- with open(path_to_xml) as f:
- xml_string = f.read()
-
- tree = md.parseString(xml_string)
- ids = get_values(tree.documentElement, 'table_id', 'id')
-
- def new_test(self):
- log = logging.getLogger(__name__)
- # send request throught RESTCONF
- data = (self.host, self.port, ids['table_id'], ids['id'])
- url = 'http://%s:%d/restconf/config/opendaylight-inventory:nodes' \
- '/node/openflow:1/table/%s/flow/%s' % data
- headers = {
- 'Content-Type': 'application/xml',
- 'Accept': 'application/xml',
- }
- log.info('sending request to url: {}'.format(url))
- rsp = requests.put(url, auth=('admin', 'admin'), data=xml_string,
- headers=headers)
- log.info('received status code: {}'.format(rsp.status_code))
- log.debug('received content: {}'.format(rsp.text))
- assert rsp.status_code == 204 or rsp.status_code == 200, 'Status' \
- ' code returned %d' % rsp.status_code
-
- # check request content against restconf's datastore
- response = requests.get(url, auth=('admin', 'admin'),
- headers={'Accept': 'application/xml'})
- assert response.status_code == 200
- req = ET.tostring(ET.fromstring(xml_string))
- res = ET.tostring(ET.fromstring(response.text))
- assert req == res, 'uploaded and stored xml, are not the same\n' \
- 'uploaded: %s\nstored:%s' % (req, res)
-
- # collect flow table state on switch
- switch_flows = get_flows(self.net)
- assert len(switch_flows) > 0
-
- # compare requested object and flow table state
- for important_element in check_elements(xml_string, keywords):
- # log.info('important element: {}'.format(important_element.nodeName))
- comparator = COMPARATORS.get(important_element.nodeName,
- COMPARATORS['default'])
-
- comparator(important_element, switch_flows[0])
-
- return new_test
-
- # generate list of available xml requests
- xmlfiles = None
- if xmls is not None:
- xmlfiles = ('f%d.xml' % fid for fid in xmls)
- else:
- xmlfiles = (xml for xml in os.listdir(path) if xml.endswith('.xml'))
-
- # define key getter for sorting
- def get_test_number(test_name):
- return int(test_name[1:-4])
-
- for xmlfile in xmlfiles:
- test_name = 'test_xml_%04d' % get_test_number(xmlfile)
- setattr(TestOpenFlowXMLs,
- test_name,
- generate_test(os.path.join(path, xmlfile)))
-
-
-if __name__ == '__main__':
- # set up logging
- logging.basicConfig(level=logging.DEBUG)
-
- # parse cmdline arguments
- parser = argparse.ArgumentParser(description='Run switch <-> ODL tests '
- 'defined by xmls.')
- parser.add_argument('--odlhost', default='127.0.0.1', help='host where '
- 'odl controller is running')
- parser.add_argument('--odlport', type=int, default=8080, help='port on '
- 'which odl\'s RESTCONF is listening')
- parser.add_argument('--mnport', type=int, default=6653, help='port on '
- 'which odl\'s controller is listening')
- parser.add_argument('--xmls', default=None, help='generete tests only '
- 'from some xmls (i.e. 1,3,34) ')
- args = parser.parse_args()
-
- # set host and port of ODL controller for test cases
- TestOpenFlowXMLs.port = args.odlport
- TestOpenFlowXMLs.host = args.odlhost
- TestOpenFlowXMLs.mn_port = args.mnport
-
- keywords = None
- with open('keywords.csv') as f:
- keywords = dict(line.strip().split(';') for line in f
- if not line.startswith('#'))
-
- match_keywords = None
- with open('match-keywords.csv') as f:
- match_keywords = dict(line.strip().split(';') for line in f
- if not line.startswith('#'))
-
- action_keywords = None
- with open('action-keywords.csv') as f:
- action_keywords = dict(line.strip().split(';') for line in f
- if not line.startswith('#'))
-
- # fix arguments for unittest
- del sys.argv[1:]
-
- # generate tests for TestOpenFlowXMLs
- if args.xmls is not None:
- xmls = map(int, args.xmls.split(','))
- generate_tests_from_xmls('xmls', xmls)
- else:
- generate_tests_from_xmls('xmls')
-
- # run all tests
- unittest.main()
import os
+import re
import sys
import time
import logging
from mininet.node import OVSKernelSwitch
import xmltodict
+from xmlvalidator import XMLValidator
+
+class TestOpenFlowXml_Base(unittest.TestCase):
+
+ def __init__(self, methodName='runTest', path_to_xml=None):
+ super(TestOpenFlowXml_Base, self).__init__(methodName)
+ self.path_to_xml = path_to_xml
+
+ @staticmethod
+ def load_file_name(clazz, path_to_xml=None):
+ testloader = unittest.TestLoader()
+ testnames = testloader.getTestCaseNames(clazz)
+ suite = unittest.TestSuite()
+ for name in testnames:
+ suite.addTest(clazz(name, path_to_xml=path_to_xml))
+ return suite
+
+
+class ConvertorTools():
+
+ CONVERTORS = {
+ 'cookie': hex,
+ }
+
+ @staticmethod
+ def base_tag_values_conversion(key, value):
+ convertor = ConvertorTools.CONVERTORS.get(key, None)
+ return convertor(int(value)) if convertor > None else value
+
+
+class ParseTools():
+
+ @staticmethod
+ def get_element_alias_by_key(element,key_dict):
+ return key_dict.get(element.tag) if (key_dict.get(element.tag, None) > None) else None
+
+ @staticmethod
+ def sort_ordered_dict_to_array(x_dict=None):
+ if (x_dict > None):
+ out_put = None
+ for val in map(lambda val: x_dict.get(val), sorted(x_dict.keys())) :
+ if (out_put > None) :
+ out_put += ', %s' %val
+ else :
+ out_put = val
+ return out_put
+ return
+
+ @staticmethod
+ def get_element_value(element):
+ return (re.sub('[\s]+', '', element.text, count=1)).lower() if element.text > None else ''
+
+ @staticmethod
+ def __parse_ordered_tags_from_xml(element, kwd, p_elm_n=None, ikwd=None, ord_value=None):
+ a_dict = {}
+ if (element > None) :
+ elm_n = ParseTools.get_element_alias_by_key(element, kwd)
+ if ((element.getchildren() > None) & (len(element.getchildren()) > 0)) :
+ sub_dict ={}
+ for child in element.getchildren() :
+ if (child.tag == 'order') :
+ ord_value = ParseTools.get_element_value(child)
+ else :
+ sub_dict.update(ParseTools.__parse_ordered_tags_from_xml(child, kwd, p_elm_n, ikwd))
+
+ a_value = ParseTools.sort_ordered_dict_to_array(sub_dict)
+ if (ord_value > None) :
+ order = ord_value if (len(ord_value) > 0) else '0'
+ else :
+ order = '0'
+ a_dict[order]=a_value
+
+ else :
+ if (ord_value > None) :
+ order = ord_value if ((len(ord_value) > 0)) else '0'
+ else :
+ order = '0'
+ a_val = elm_n if elm_n > None else element.tag
+ a_dict[order] = a_val
+
+ return a_dict
+ @staticmethod
+ def __parse_tags_from_xml(element, flow_dict, kwd, p_elm_n=None, ikwd=None):
+ if element > None :
+ # find and translate element.tag in key_word_dictionary
+ elm_n = ParseTools.get_element_alias_by_key(element, kwd)
+ if ((element.getchildren() > None) & (len(element.getchildren()) > 0)) :
+ for child in element.getchildren() :
+ new_p_elm_n = elm_n if elm_n > None else p_elm_n
+ ParseTools.__parse_tags_from_xml(child, flow_dict, kwd, new_p_elm_n, ikwd)
+ else :
+ # prefer parent element_name before elment_name and element_name before element.tag
+ a_key = elm_n if elm_n > None else p_elm_n if (p_elm_n > None) else element.tag
+ a_value = ParseTools.get_element_value(element)
+ # Don't continue for ignore tags
+ if (ikwd > None) :
+ if (ikwd.get(a_key, None) > None) :
+ # TODO add check for cookie_mask (mask has to have same or more length as cookie if is more as 0)
+ return
+ flow_dict[a_key] = ConvertorTools.base_tag_values_conversion(a_key, a_value)
-class ParseTools():
+ @staticmethod
+ def get_switchflow_from_xml(xml_string, key_dict=None, action_key_dict=None, match_key_dict=None, ignore_key_dict=None):
+ if xml_string > None :
+ # remove namespace
+ xml_string = re.sub(' xmlns="[^"]+"', '', xml_string, count=1)
+ tree = ET.fromstring(xml_string)
+
+ flow_dict = {}
+
+ if (tree > None) :
+ if (tree.getchildren() > None) :
+ for child in tree.getchildren() :
+ if (child.tag == 'match') :
+ ParseTools.__parse_tags_from_xml(child, flow_dict, match_key_dict, ikwd=ignore_key_dict)
+ elif (child.tag == 'instructions') :
+ x_dict = ParseTools.__parse_ordered_tags_from_xml(child, action_key_dict, ikwd=ignore_key_dict)
+ flow_dict['actions'] = ParseTools.sort_ordered_dict_to_array(x_dict)
+ else :
+ ParseTools.__parse_tags_from_xml(child, flow_dict, key_dict, ikwd=ignore_key_dict)
+
+ return flow_dict
+ @staticmethod
+ def get_switchflow_dict(switch_dict, ignore_key_dict=None):
+ x_dict={}
+ for sw_key in switch_dict.keys() :
+ if (ignore_key_dict.get(sw_key,None) is None):
+ x_dict[sw_key] = switch_dict.get(sw_key)
+
+ return x_dict
+
@staticmethod
def all_nodes(xml_root):
"""
net = mininet.net.Mininet(topo=topo, switch=switch, controller=controller)
return net
+
@staticmethod
- def get_flows(net):
+ def get_flows(net, ikwd={}):
"""Get list of flows from network's first switch.
Return list of all flows on switch, sorted by duration (newest first)
log.debug('switch flow table: {}'.format(output))
- flows = []
-
- for line in output.splitlines()[1:]:
- flows.append(ParseTools.dump_string_to_dict(line))
+ flows = {}
- # sort by duration
- return sorted(flows, key=lambda x: x['duration'].rstrip('s'))
+ for line in output.splitlines()[1:] :
+ output = line;
+
+ action = re.split('actions=',output,1)[1]
+ output= re.split('actions=',output,1)[0]
+ for elem in output.split(',') :
+ elm_d = elem.split('=')
+ a_key = (elm_d[0]).strip()
+ if (ikwd.get(a_key, None) is None) :
+ a_value = elm_d[1] if (len(elm_d) > 1) else None
+ flows[a_key] = a_value.strip() if isinstance(a_value,str) else (str(a_value)).strip()
+
+ flows['actions'] = action.split(',')
+
+ return flows
+# for line in output.splitlines()[1:]:
+# flows.append(ParseTools.dump_string_to_dict(line))
+#
+# # sort by duration
+# return sorted(flows, key=lambda x: x['duration'].rstrip('s'))
-class Loader():
- log = logging.getLogger('Loader')
+class FileLoaderTools():
- @staticmethod
- def get_xml_test_path(test_id, path='xmls'):
- return os.path.join(path, 'f%d.xml' % test_id)
+ log = logging.getLogger('FileLoaderTools')
@staticmethod
- def get_mn_test_path(test_id, path='ofctl'):
- return os.path.join(path, 't%d' % test_id)
-
- @staticmethod
- def get_xml_test_path(path, test_id):
- return os.path.join(path, 'f%d.xml' % test_id)
-
- @staticmethod
- def load_test_file_to_string(path_to_file):
+ def load_file_to_string(path_to_file):
output_string = None
try:
with open(path_to_file) as f:
output_string = f.read()
except IOError, e:
- Loader.log.error('cannot find {}: {}'.format(path_to_file, e.strerror), exc_info=True)
+ FileLoaderTools.log.error('cannot find {}: {}'.format(path_to_file, e.strerror), exc_info=True)
return output_string
@staticmethod
- def get_xml_dict(test_id):
- xml_string = Loader.load_test_file_to_string(Loader.get_xml_test_path(test_id))
- return xmltodict(xml_string)
-
- @staticmethod
- def get_mn_dict(test_id):
- mn_string = Loader.load_test_file_to_string(Loader.get_mn_test_path(test_id))
- return ParseTools.dump_string_to_dict(mn_string)
+ def load_file_to_dict(path_to_file):
+ dictionary = None
+
+ try :
+ with open(path_to_file) as f:
+ dictionary = dict(line.strip().split(';') for line in f
+ if not line.startswith('#'))
+ except IOError, e:
+ FileLoaderTools.log.error('cannot find {}: {}'.format(path_to_file, e.strerror), exc_info=True)
+ return dictionary
 class Comparator():
     # VD - what should we do in this case
     # NOTE(review): empty placeholder — result comparison currently happens
     # inline in TestOpenFlowXml.test_xml; keep or remove once that settles.
     pass
-class TestOpenFlowXMLs(unittest.TestCase):
- @classmethod
- def setUpClass(cls):
- cls.net = MininetTools.create_network(cls.host, cls.mn_port)
- cls.net.start()
- time.sleep(15)
-
- @classmethod
- def tearDownClass(cls):
- cls.net.stop()
-
-def generate_tests_from_xmls(path, xmls=None):
- # generate test function from path to request xml
- def generate_test(path_to_xml, path_to_md):
- xml_string = Loader.load_test_file_to_string(path_to_xml)
- mn_string = Loader.load_test_file_to_string(path_to_md)
+class TestOpenFlowXml(TestOpenFlowXml_Base):
+
+    def test_xml(self):
+        # End-to-end check for one flow XML: push it to the controller via
+        # RESTCONF, verify the datastore echoes it back, then compare the
+        # switch's flow table against the expectation parsed from the XML.
+        # self.path_to_xml looks like 'xmls/fN.xml', so [6:-4] is N.
+        test_number = int(self.path_to_xml[6:-4])
+        log = logging.getLogger('test_xml_%04d' %test_number)
+
+        xml_string = FileLoaderTools.load_file_to_string(self.path_to_xml)
         tree = md.parseString(xml_string)
+
+        # Expected switch state, derived from the request XML itself.
+        switch_etalon = ParseTools.get_switchflow_from_xml(xml_string,
+                                                           key_dict = keywords,
+                                                           action_key_dict = action_keywords,
+                                                           match_key_dict = match_keywords,
+                                                           ignore_key_dict = ignore_keywords)
+        print (switch_etalon)
         ids = ParseTools.get_values(tree.documentElement, 'table_id', 'id')
-
-        def new_test(self):
-            log = logging.getLogger(__name__)
-            # send request throught RESTCONF
-            data = (self.host, self.port, ids['table_id'], ids['id'])
-            url = 'http://%s:%d/restconf/config/opendaylight-inventory:nodes' \
-                '/node/openflow:1/table/%s/flow/%s' % data
-            headers = {
-                'Content-Type': 'application/xml',
-                'Accept': 'application/xml',
-            }
-            log.info('sending request to url: {}'.format(url))
-            rsp = requests.put(url, auth=('admin', 'admin'), data=xml_string,
-                               headers=headers)
-            log.info('received status code: {}'.format(rsp.status_code))
-            log.debug('received content: {}'.format(rsp.text))
-            assert rsp.status_code == 204 or rsp.status_code == 200, 'Status' \
-                ' code returned %d' % rsp.status_code
-            try:
-                # check request content against restconf's datastore
-                response = requests.get(url, auth=('admin', 'admin'),
-                                        headers={'Accept': 'application/xml'})
-                assert response.status_code == 200
-                req = (xmltodict.parse(ET.tostring(ET.fromstring(xml_string))))
-                res = (xmltodict.parse(ET.tostring(ET.fromstring(response.text))))
-                assert req == res, 'uploaded and stored xml, are not the same\n' \
-                    'uploaded: %s\nstored:%s' % (req, res)
-
-                # collect flow table state on switch
-                switch_flows = MininetTools.get_flows(self.net)
-                assert len(switch_flows) > 0
-
-                # compare requested object and flow table state
-                if mn_string is not None:
-                    #log.info('running tests')
-                    Comparator.compare_results(switch_flows[0], ParseTools.dump_string_to_dict(mn_string))
-                else:
-                    log.error('cannot find test results - comparison skipped')
-            finally:
-                response = requests.delete(url, auth=('admin', 'admin'),
+
+
+        data = (self.host, self.port, ids['table_id'], ids['id'])
+        url = 'http://%s:%d/restconf/config/opendaylight-inventory:nodes' \
+            '/node/openflow:1/table/%s/flow/%s' % data
+        # send request via RESTCONF
+        headers = {
+            'Content-Type': 'application/xml',
+            'Accept': 'application/xml',
+        }
+        log.info('sending request to url: {}'.format(url))
+        rsp = requests.put(url, auth=('admin', 'admin'), data=xml_string,
+                           headers=headers)
+        log.info('received status code: {}'.format(rsp.status_code))
+        log.debug('received content: {}'.format(rsp.text))
+        assert rsp.status_code == 204 or rsp.status_code == 200, 'Status' \
+            ' code returned %d' % rsp.status_code
+        try:
+            # check request content against restconf's datastore
+            response = requests.get(url, auth=('admin', 'admin'),
                                     headers={'Accept': 'application/xml'})
-            assert response.status_code == 200
-            print '\n\n\n'
-
-        return new_test
-
-    # generate list of available xml requests
+            assert response.status_code == 200
+            req = (xmltodict.parse(ET.tostring(ET.fromstring(xml_string))))
+            res = (xmltodict.parse(ET.tostring(ET.fromstring(response.text))))
+            assert req == res, 'uploaded and stored xml, are not the same\n' \
+                'uploaded: %s\nstored:%s' % (req, res)
+
+            # collect flow table state on switch
+            switch_flows = MininetTools.get_flows(net, ignore_keywords)
+#            switch_flows_actions = re.sub('[\s]', '', switch_flows, count=1))
+            assert len(switch_flows) > 0
+            # NOTE(review): direct equality assumes get_switchflow_from_xml and
+            # MininetTools.get_flows normalise keys/values identically —
+            # confirm if a test fails only on formatting differences.
+            assert switch_etalon == switch_flows, 'expected and stored switch settings are not the same \n'\
+                'expected: %s\nstored: %s' %(switch_etalon,switch_flows)
+
+            # compare requested object and flow table state
+            ## TODO look at action parsing separatly from a flow
+#            switch_flow_dict = ParseTools.get_switchflow_dict(switch_flows[0], ignore_keywords)
+#            assert switch_etalon == switch_flow_dict, 'expected and stored switch settings are not the same \n'\
+#                'expected: %s\nstored: %s' %(switch_etalon,switch_flow_dict)
+#            if mn_string is not None:
+#                #log.info('running tests')
+#                Comparator.compare_results(switch_flows[0], ParseTools.dump_string_to_dict(mn_string))
+#            else:
+#                log.error('cannot find test results - comparison skipped')
+        finally:
+            # always remove the flow so the next test starts clean
+            response = requests.delete(url, auth=('admin', 'admin'),
+                                       headers={'Accept': 'application/xml'})
+            assert response.status_code == 200
+            print '\n\n\n'
+
+def suite(path='xmls', test_class='TestOpenFlowXml_Base') :
+    # Build a unittest.TestSuite with one TestOpenFlowXml case per flow XML.
+    # Reads the module-level 'args' namespace: when args.xmls is set (e.g.
+    # "1,3,34") only those f<id>.xml files are used, otherwise every *.xml
+    # under 'path'. Each XML is pre-validated (log-only) before being added.
+    suite = unittest.TestSuite()
+    # BUG FIX: initialise to None so the check below works even when no
+    # explicit xml list was given (previously 'xmls' was unbound and the
+    # else-branch raised NameError when args.xmls was None).
+    xmls = None
+    if args.xmls is not None:
+        xmls = map(int, args.xmls.split(','))
+
     xmlfiles = None
     if xmls is not None:
         xmlfiles = ('f%d.xml' % fid for fid in xmls)
     else:
         xmlfiles = (xml for xml in os.listdir(path) if xml.endswith('.xml'))
-    # define key getter for sorting
-    def get_test_number(test_name):
-        return int(test_name[1:-4])
+    #create xml validator
+    validator = XMLValidator(keywords, action_keywords, match_keywords, logging.ERROR)
+    validator.fill_fields()
     for xmlfile in xmlfiles:
-        test_number = get_test_number(xmlfile)
-        test_name = 'test_xml_%04d' % test_number
-        setattr(TestOpenFlowXMLs,
-                test_name,
-                generate_test(os.path.join(path, xmlfile), os.path.join('ofctl', 't{}'.format(test_number))))
+        #fill validator with data from xml and validate them - just logging to hint what can be wrong, test wont be stopped by invalid xml
+        validator.create_dictionaries(os.path.join(path, xmlfile))
+        validator.validate()
+        suite.addTest(TestOpenFlowXml_Base.load_file_name(test_class, path_to_xml=os.path.join(path, xmlfile)))
+    return suite
 if __name__ == '__main__':
     # set up logging
                         'from some xmls (i.e. 1,3,34) ')
     args = parser.parse_args()
-    # set host and port of ODL controller for test cases
-    TestOpenFlowXMLs.port = args.odlport
-    TestOpenFlowXMLs.host = args.odlhost
-    TestOpenFlowXMLs.mn_port = args.mnport
-
-    keywords = None
-    with open('keywords.csv') as f:
-        keywords = dict(line.strip().split(';') for line in f
-                        if not line.startswith('#'))
-
-    match_keywords = None
-    with open('match-keywords.csv') as f:
-        match_keywords = dict(line.strip().split(';') for line in f
-                              if not line.startswith('#'))
-
-    action_keywords = None
-    with open('action-keywords.csv') as f:
-        action_keywords = dict(line.strip().split(';') for line in f
-                               if not line.startswith('#'))
-
-    # fix arguments for unittest
-    del sys.argv[1:]
-
-    # generate tests for TestOpenFlowXMLs
-    if args.xmls is not None:
-        xmls = map(int, args.xmls.split(','))
-        generate_tests_from_xmls('xmls', xmls)
-    else:
-        generate_tests_from_xmls('xmls')
-
-    # run all tests
-    unittest.main()
+    # set and start mininet
+    net = MininetTools.create_network(args.odlhost, args.mnport)
+    net.start()
+    # give the switch time to connect to the controller before testing
+    time.sleep(15)
+
+    try:
+        # set host and port of ODL controller for test cases
+        TestOpenFlowXml.port = args.odlport
+        TestOpenFlowXml.host = args.odlhost
+        TestOpenFlowXml.mn_port = args.mnport
+
+        # set keyword dictionaries
+        keywords = FileLoaderTools.load_file_to_dict('keywords.csv')
+        match_keywords = FileLoaderTools.load_file_to_dict('match-keywords.csv')
+        action_keywords = FileLoaderTools.load_file_to_dict('action-keywords.csv')
+        ignore_keywords = FileLoaderTools.load_file_to_dict('ignore-keywords.csv')
+
+#        # fix arguments for unittest
+#        del sys.argv[1:]
+#
+        # build and run the suite; the finally block stops mininet even when
+        # a test or the suite construction fails
+        odl_suite = suite(test_class=TestOpenFlowXml)
+        unittest.TextTestRunner().run(odl_suite)
+    finally:
+        # stop mininet
+        net.stop()
+++ /dev/null
-cookie=0xa, duration=1.451s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=dec_ttl
+++ /dev/null
-cookie=0xa, duration=1.754s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=push_vlan
+++ /dev/null
-cookie=0xa, duration=1.753s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=dec_ttl
+++ /dev/null
-cookie=0xa, duration=1.557s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_src=10.0.0.1 actions=drop
+++ /dev/null
- cookie=0xa, duration=17.581s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop
+++ /dev/null
- cookie=0xa, duration=14.319s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop
+++ /dev/null
- cookie=0xa, duration=12.954s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop
+++ /dev/null
- cookie=0xa, duration=16.708s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop
+++ /dev/null
- cookie=0xa, duration=38.123s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=write_metadata:0xa/0xc
+++ /dev/null
- cookie=0xa, duration=20.251s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=strip_vlan
+++ /dev/null
- cookie=0xa, duration=17.729s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop
+++ /dev/null
- cookie=0xa, duration=14.874s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop
+++ /dev/null
- cookie=0xa, duration=23.141s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop
+++ /dev/null
- cookie=0xa, duration=35.125s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop
+++ /dev/null
- cookie=0xa, duration=69.848s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=set_queue:1
+++ /dev/null
-cookie=0xa, duration=3.098s, table=2, n_packets=0, n_bytes=0, priority=2,ip,dl_src=00
+++ /dev/null
- cookie=0xa, duration=19.763s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop
+++ /dev/null
- cookie=0xa, duration=93.115s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop
+++ /dev/null
- cookie=0xa, duration=19.913s, table=2, n_packets=0, n_bytes=0, priority=2,metadata=0x100/0xffffff0000000101 actions=drop
+++ /dev/null
- cookie=0xa, duration=45.249s, table=2, n_packets=0, n_bytes=0, priority=2,tun_id=0x100/0xffffff0000000101 actions=goto_table:2
+++ /dev/null
-cookie=0xa, duration=2.095s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=drop
+++ /dev/null
-cookie=0xa, duration=4.08s, table=2, n_packets=0, n_bytes=0, priority=2,ip,nw_dst=10.0.0.0/24 actions=goto_table
+++ /dev/null
-from xml.dom import minidom
-
-xmldoc=minidom.parse('./xmls/f1.xml')
-flow=xmldoc.childNodes[0]
-match=flow.childNodes[15]
-
-ipv4dst = match.getElementsByTagName("ipv4-destination")
-print ipv4dst[0].childNodes[0].data
-
-cvok = match.getElementsByTagName("cvok")
-print cvok
-
-print "ethernet type", match.getElementsByTagName("type")[0].childNodes[0].data
-# print "ethernet cvok", match.getElementsByTagName("cvok")[0].childNodes[0].data
+++ /dev/null
-from xml.dom import minidom
-
-xmldoc=minidom.parse('./xmls/f7.xml')
-flow=xmldoc.childNodes[0]
-match=flow.childNodes[11]
-
-print match.toxml()
-
-ip_match = match.getElementsByTagName("ip-match")
-print 'ip-match:', ip_match
-
-
-# print "ethernet type", match.getElementsByTagName("type")[0].childNodes[0].data
-# print "ethernet cvok", match.getElementsByTagName("cvok")[0].childNodes[0].data
--- /dev/null
+import logging
+import os
+import re
+from xml.etree import ElementTree as ET
+
+class Loader():
+
+    @staticmethod
+    def loadXml(file_name):
+        # Read an XML file, strip the first default-namespace declaration so
+        # tags can be matched without a namespace prefix, and return both the
+        # parsed ElementTree root and the raw (stripped) string.
+        path_to_xml = os.path.join('', file_name)
+        with open(path_to_xml) as f:
+            xml_string = f.read()
+            xml_string = re.sub(' xmlns="[^"]+"', '', xml_string, count=1)
+
+        tree = ET.fromstring(xml_string)
+        return tree, xml_string
+
+    @staticmethod
+    def buildXmlDocDictionaryForComarableElements(element, flow_dict, p_elm_name=None, kwd=None, akwd=None, mkwd=None):
+        # Recursively flatten comparable leaf elements of a flow XML into
+        # flow_dict, switching to the match keyword dict under <match> and
+        # the action keyword dict under <actions>.
+        # BUG FIX: the original used ordering comparisons with None
+        # ('x > None') and the non-short-circuit bitwise '&'; both are
+        # replaced with 'is not None' / a plain length check, which behave
+        # identically on Python 2 and do not raise TypeError on Python 3.
+        act_key_dict = kwd if (kwd is not None) else akwd if (akwd is not None) else mkwd if (mkwd is not None) else None
+        if element is not None :
+            elm_alias = element.tag if (act_key_dict.get(element.tag, None) is not None) else None
+            if len(element.getchildren()) > 0:
+                for child in element.getchildren() :
+                    if (element.tag == 'match') :
+                        Loader.buildXmlDocDictionaryForComarableElements(child, flow_dict, mkwd=mkwd)
+                    elif (element.tag == 'actions') :
+                        Loader.buildXmlDocDictionaryForComarableElements(child, flow_dict, akwd=akwd)
+                    else :
+                        Loader.buildXmlDocDictionaryForComarableElements(child, flow_dict, elm_alias, kwd, akwd, mkwd)
+            else :
+                if element.text is not None :
+                    # collapse the first run of whitespace (e.g. pretty-print
+                    # indentation) out of the text value
+                    text = re.sub( '[\s]+','', element.text, count=1)
+                    a_key = p_elm_name if (p_elm_name is not None) else element.tag
+                    flow_dict[a_key] = text
+        return
+
+# Value-format discriminators for Field.value_type; they select which
+# size/format check XMLValidator.check_size applies to a field's value.
+type_int = 1
+type_boolean = 2
+type_ethernet = 3
+type_ipv4 = 4
+type_ipv6 = 5
+
+class Field():
+    """
+    fields to check, arguments:
+    key: element tag from keywords and xml
+    bits: expected length in bits
+    prerequisites: dictionary of elements tag from xml which are required for this field and their values in list
+    or [None] if value is undefined or it's irrelevant (we just need to check if tag is set)
+    convert_from: format in which is value, that is checked against prerequisite values stored in xml
+
+    e.g.:
+    key:'ipv4-source'
+    bits:32
+    prerequisites: {'ethernet-type': [2048]}
+    convert_from: 10
+
+    OF_IPV4_SRC = Field('ipv4-source', 32, {'ethernet-type': [2048]}, 10)
+    IN_PHY_PORT = Field('in-phy-port', 32, {'in-port': [None]}, 10)
+    """
+
+    def __init__(self, key, bits, prerequisites=None, convert_from=10, value_type=type_int):
+        # key is coerced to str; prerequisites are copied so a dict shared by
+        # the caller cannot be mutated through this Field instance.
+        self.key = str(key)
+        self.bits = bits
+        if prerequisites is not None:
+            self.prerequisites = dict(prerequisites)
+        else:
+            self.prerequisites = None
+        self.convert_from = convert_from
+        # selects the check in XMLValidator.check_size (type_int/boolean/
+        # ethernet/ipv4/ipv6 module constants)
+        self.value_type = value_type
+
+    def __str__(self):
+        return "Field: {}, size: {}, prerequisites: {}"\
+            .format(self.key, self.bits, self.prerequisites)
+
+
+class XMLValidator():
+    # Log-only validator for flow XML files: checks field sizes/formats and
+    # OpenFlow match prerequisites. Problems are logged and recorded in
+    # self.xml_ok; validate() never raises.
+
+    log = logging.getLogger('XMLValidator')
+    log.propagate=False
+    channel = logging.StreamHandler()
+    log.addHandler(channel)
+
+    def __init__(self, kwd, akwd, mkwd, loglevel=logging.INFO):
+
+        self.test_name = 'No test loaded'
+        XMLValidator.log.setLevel(loglevel)
+
+        # xml_ok accumulates the verdict; any failed check flips it to False
+        self.xml_ok = True
+        self.fields = list()
+        self.flow_dict = dict()
+
+        # keyword dicts: flow-level (kwd), action (akwd) and match (mkwd)
+        self.kwd = kwd
+        self.akwd = akwd
+        self.mkwd = mkwd
+
+    def create_dictionaries(self, file_name):
+        # Load one XML file and flatten it into self.flow_dict for validation.
+        self.test_name = file_name
+
+        formatter = logging.Formatter('TEST {}: %(levelname)s: %(message)s'.format(self.test_name))
+        XMLValidator.channel.setFormatter(formatter)
+
+        self.flow_dict = dict()
+        treeXml1, self.xml_string = Loader.loadXml(file_name)
+        Loader.buildXmlDocDictionaryForComarableElements(treeXml1, self.flow_dict, kwd=self.kwd, akwd=self.akwd, mkwd=self.mkwd)
+        XMLValidator.log.debug('loaded dict from xml: {}'.format(self.flow_dict))
+
+
+    def fill_fields(self):
+        # Register the standard OpenFlow match fields (see Matchers).
+        Matchers.fill_validator(self)
+
+    def add_field(self, fields):
+        self.fields.append(fields)
+
+    def integer_check(self, value, bits, convert_from=10):
+        # Fail when the value does not fit into 'bits' bits.
+        XMLValidator.log.debug('validating integer: {}'.format(value))
+        if (int(value, convert_from) / 2**bits) > 0:
+            XMLValidator.log.error('value: {} is larger than expected: {}'.format(value, 2**bits))
+            raise StandardError
+
+    def boolean_check(self, value, bits):
+        XMLValidator.log.debug('validating boolean: {}'.format(value))
+        if bits < 1:
+            XMLValidator.log.error('value: {} is larger than expected: {}'.format(value, 2**bits))
+            raise StandardError
+
+    def ethernet_check(self, a):
+        # Validate each colon-separated octet of a MAC address.
+        XMLValidator.log.debug('validating ethernet address: {}'.format(a))
+        numbers = a.split(':')
+        max_range = (2**8) - 1
+
+        for n in numbers:
+            if int(n, 16) > max_range:
+                XMLValidator.log.error('octet: {} in ethernet address: {} larger than: {}'.format(n, a, max_range))
+                raise StandardError
+
+    def ipv4_check(self, a):
+        # Validate dotted-quad octets; a trailing /mask is ignored.
+        XMLValidator.log.debug('validating ipv4 address: {}'.format(a))
+        mask_pos = a.find('/')
+        if mask_pos > 0:
+            a = a[:mask_pos]
+
+        numbers = a.split('.')
+        max_range = (2**8) - 1
+
+        for n in numbers:
+            if int(n) > max_range:
+                XMLValidator.log.error('octet: {} in ipv4 address: {} larger than: {}'.format(n, a, max_range))
+                raise StandardError
+
+    def ipv6_check(self, a):
+        # Validate each 16-bit group; a trailing /mask is ignored.
+        XMLValidator.log.debug('validating ipv6 address: {}'.format(a))
+        mask_pos = a.find('/')
+        if mask_pos > 0:
+            a = a[:mask_pos]
+
+        numbers = a.split(':')
+        max_range = (2**16) - 1
+
+        for n in numbers:
+            #if n == '' then the number is 0000 which is always smaller than max_range
+            if n != '' and int(n, 16) > max_range:
+                XMLValidator.log.error('number: {} in ipv6 address: {} larger than: {}'.format(n, a, max_range))
+                raise StandardError
+
+    def check_size(self, value, bits, value_type, convert_from=10):
+        # Dispatch to the format-specific check; failures only mark xml_ok.
+        XMLValidator.log.debug('checking value: {}, size should be {} bits'.format(value, bits))
+        ipv6_regexp = re.compile("^[0-9,A-F,a-f]{0,4}(:[0-9,A-F,a-f]{0,4}){1,7}(/[0-9]{1,3})?$")
+        ipv4_regexp = re.compile("^([0-9]{1,3}\.){3}[0-9]{1,3}(/[0-9]{1,2})?$")
+        ethernet_regexp = re.compile("^[0-9,A-F,a-f]{2}(:[0-9,A-F,a-f]{2}){5}$")
+
+        try:
+            if value_type == type_boolean and value in ['true', 'false']: #boolean values
+                self.boolean_check(value, bits)
+            elif value_type == type_ethernet and ethernet_regexp.match(value): #ethernet address
+                self.ethernet_check(value)
+            elif value_type == type_ipv4 and ipv4_regexp.match(value): #IPV4 address
+                self.ipv4_check(value)
+            elif value_type == type_ipv6 and ipv6_regexp.match(value): #IPV6 address
+                self.ipv6_check(value)
+            elif value_type == type_int: #integer values
+                self.integer_check(value, bits, convert_from)
+            else:
+                raise StandardError
+
+            XMLValidator.log.info('size of: {} < 2^{} validated successfully'.format(value, bits))
+
+        except ValueError:
+            XMLValidator.log.error('problem converting value to int or IP addresses: {}'.format(value))
+            self.xml_ok = False
+
+        except TypeError:
+            XMLValidator.log.error('problem converting value: {}, TypeError'.format(value))
+            self.xml_ok = False
+
+        except StandardError:
+            XMLValidator.log.error('problem checking size for value: {}'.format(value))
+            self.xml_ok = False
+
+
+    def has_prerequisite(self, key, values, convert_from, flow_dict):
+        # Check that a prerequisite element is present in flow_dict and, when
+        # 'values' is not [None], that its (converted) value is one of them.
+        XMLValidator.log.debug('checking prerequisite: {} - {}'.format(key, values))
+        try:
+            flow_value_raw = flow_dict[key]
+
+            #if prerequisites values are [None] we don't care about actual value
+            if values != [None]:
+                flow_value = int(flow_value_raw, convert_from)
+
+                if flow_value not in values:
+                    raise StandardError()
+
+            XMLValidator.log.info('prerequisite {}: {} to value {} validated successfully'.format(key, values, flow_value_raw))
+
+        except KeyError:
+            XMLValidator.log.error('can\'t find element: {} in xml {} or in keywords {}'.format(key, self.xml_string, self.mkwd.keys()))
+            self.xml_ok = False
+
+        # BUG FIX: was 'except ValueError or TypeError:', which evaluates to
+        # 'except ValueError:' only and silently missed TypeError.
+        except (ValueError, TypeError):
+            # flow_value_raw is string that cannot be converted to decimal or hex number or None
+            if flow_value_raw not in values:
+                XMLValidator.log.error('can\'t find element: {} with value value: {} '
+                                       'in expected values {}'.format(key, flow_value_raw, values))
+                self.xml_ok = False
+            else:
+                XMLValidator.log.info('prerequisite {}: {} to value {} validated successfully'.format(key, values, flow_value_raw))
+
+        except StandardError:
+            XMLValidator.log.error('can\'t find element: {} with value value: {} '
+                                   'in expected values {}'.format(key, flow_value, values))
+            self.xml_ok = False
+
+    def check_all_prerequisites(self, prerequisites_dict, convert_from, flow_dict):
+        XMLValidator.log.debug('checking prerequisites: {}'.format(prerequisites_dict))
+        for k, v in prerequisites_dict.items():
+            self.has_prerequisite(k, v, convert_from, flow_dict)
+
+    def check_single_field(self, field, flow_dict):
+        """
+        @type field MatchField
+        @type flow_dict dict
+        """
+
+        if field.key not in flow_dict:
+            XMLValidator.log.debug('{} is not set in XML, skipping validation'.format(field.key))
+            return
+        else:
+            XMLValidator.log.info('validating: {}'.format(field))
+
+        if field.bits is not None:
+            self.check_size(flow_dict[field.key], field.bits, field.value_type, field.convert_from)
+
+        if field.prerequisites is not None:
+            self.check_all_prerequisites(field.prerequisites, field.convert_from, flow_dict)
+
+    def validate_fields(self):
+        # Re-arm the verdict, then run every registered field check.
+        self.xml_ok = True
+        XMLValidator.log.info('validating against flow: {}'.format(self.flow_dict))
+        for field in self.fields:
+            self.check_single_field(field, self.flow_dict)
+
+    def validate_misc_values(self):
+        # Flow-level keywords (cookie, priority, timeouts, ...) must be
+        # non-negative integers when present.
+        for kw in self.kwd.keys():
+            if kw in self.flow_dict.keys():
+                XMLValidator.log.info('validating: {}: {}'.format(kw, self.flow_dict[kw]))
+                try:
+                    value = int(self.flow_dict[kw])
+                    if value < 0:
+                        XMLValidator.log.error('value: {}: {} should be non-negative'.format(kw, self.flow_dict[kw]))
+                        self.xml_ok = False
+                    else:
+                        XMLValidator.log.info('value: {}: {} validated successfully'.format(kw, self.flow_dict[kw]))
+                except StandardError:
+                    XMLValidator.log.error('can\'t convert value: {}: {} to integer'.format(kw, self.flow_dict[kw]))
+                    self.xml_ok = False
+            else:
+                XMLValidator.log.debug('{} is not set in XML, skipping validation'.format(kw))
+
+    def validate(self):
+        # Returns True when every check passed; details are in the log.
+        self.validate_fields()
+        self.validate_misc_values()
+
+        XMLValidator.log.info('XML valid: {}'.format(self.xml_ok))
+
+        return self.xml_ok
+
+class Matchers():
+    # Field definitions for every supported OpenFlow 1.3 match element,
+    # with bit widths and match prerequisites per the OF 1.3 spec.
+
+    IN_PORT = Field('in-port', 32)
+    IN_PHY_PORT = Field('in-phy-port', 32, {'in-port': [None]})
+    METADATA = Field('metadata', 64, convert_from=16)
+
+    # BUG FIX: the two constants below had their keys swapped
+    # (ETH_DST was 'ethernet-source' and vice versa); validator behaviour is
+    # unchanged since both fields are registered, but the names now match.
+    ETH_DST = Field('ethernet-destination', 48, value_type=type_ethernet)
+    ETH_SRC = Field('ethernet-source', 48, value_type=type_ethernet)
+    ETH_TYPE = Field('ethernet-type', 16)
+
+    VLAN_VID = Field('vlan-id', 13)
+    VLAN_PCP = Field('vlan-pcp', 3, {'vlan-id': [None]})
+
+    IP_DSCP = Field('ip-dscp', 6, {'ethernet-type': [2048, 34525]})
+    IP_ENC = Field('ip-ecn', 2, {'ethernet-type': [2048, 34525]})
+    IP_PROTO = Field('ip-protocol', 8, {'ethernet-type': [2048, 34525]})
+
+    IPV4_SRC = Field('ipv4-source', 32, {'ethernet-type': [2048]}, value_type=type_ipv4)
+    IPV4_DST = Field('ipv4-destination', 32, {'ethernet-type': [2048]}, value_type=type_ipv4)
+
+    TCP_SRC = Field('tcp-source-port', 16, {'ip-protocol': [6]})
+    TCP_DST = Field('tcp-destination-port', 16, {'ip-protocol': [6]})
+    UDP_SRC = Field('udp-source-port', 16, {'ip-protocol': [17]})
+    UDP_DST = Field('udp-destination-port', 16, {'ip-protocol': [17]})
+    SCTP_SRC = Field('sctp-source-port', 16, {'ip-protocol': [132]})
+    SCTP_DST = Field('sctp-destination-port', 16, {'ip-protocol': [132]})
+    ICMPV4_TYPE = Field('icmpv4-type', 8, {'ip-protocol': [1]})
+    ICMPV4_CODE = Field('icmpv4-code', 8, {'ip-protocol': [1]})
+
+    ARP_OP = Field('arp-op', 16, {'ethernet-type': [2054]})
+    ARP_SPA = Field('arp-source-transport-address', 32, {'ethernet-type': [2054]}, value_type=type_ipv4)
+    ARP_TPA = Field('arp-target-transport-address', 32, {'ethernet-type': [2054]}, value_type=type_ipv4)
+    ARP_SHA = Field('arp-source-hardware-address', 48, {'ethernet-type': [2054]}, value_type=type_ethernet)
+    ARP_THA = Field('arp-target-hardware-address', 48, {'ethernet-type': [2054]}, value_type=type_ethernet)
+
+    IPV6_SRC = Field('ipv6-source', 128, {'ethernet-type': [34525]}, value_type=type_ipv6)
+    IPV6_DST = Field('ipv6-destination', 128, {'ethernet-type': [34525]}, value_type=type_ipv6)
+    IPV6_FLABEL = Field('ipv6-flabel', 20, {'ethernet-type': [34525]})
+
+    ICMPV6_TYPE = Field('icmpv6-type', 8, {'ip-protocol': [58]})
+    ICMPV6_CODE = Field('icmpv6-code', 8, {'ip-protocol': [58]})
+
+    IPV6_ND_TARGET = Field('ipv6-nd-target', 128, {'icmpv6-type': [135, 136]}, value_type=type_ipv6)
+    IPV6_ND_SLL = Field('ipv6-nd-sll', 48, {'icmpv6-type': [135]}, value_type=type_ethernet)
+    IPV6_ND_TLL = Field('ipv6-nd-tll', 48, {'icmpv6-type': [136]}, value_type=type_ethernet)
+
+    MPLS_LABEL = Field('mpls-label', 20, {'ethernet-type': [34887, 34888]})
+    MPLS_TC = Field('mpls-tc', 3, {'ethernet-type': [34887, 34888]})
+    MPLS_BOS = Field('mpls-bos', 1, {'ethernet-type': [34887, 34888]})
+
+    PBB_ISID = Field('pbb-isid', 24, {'ethernet-type': [35047]})
+    TUNNEL_ID = Field('tunnel-id', 64)
+    IPV6_EXTHDR = Field('ipv6-exthdr', 9, {'ethernet-type': [34525]})
+
+
+    @staticmethod
+    def fill_validator(validator):
+        """
+        @type validator XMLValidator
+        """
+
+        validator.add_field(Matchers.IN_PORT)
+        validator.add_field(Matchers.IN_PHY_PORT)
+        validator.add_field(Matchers.METADATA)
+        validator.add_field(Matchers.ETH_DST)
+        validator.add_field(Matchers.ETH_SRC)
+        validator.add_field(Matchers.ETH_TYPE)
+        #validator.add_field(Matchers.VLAN_VID) - incorrenct XML parsing, if vlan-id-present is present its overriden by it, need to fix loader
+        validator.add_field(Matchers.VLAN_PCP)
+        validator.add_field(Matchers.IP_DSCP)
+        validator.add_field(Matchers.IP_ENC)
+        validator.add_field(Matchers.IP_PROTO)
+        validator.add_field(Matchers.IPV4_SRC)
+        validator.add_field(Matchers.IPV4_DST)
+        validator.add_field(Matchers.TCP_SRC)
+        validator.add_field(Matchers.TCP_DST)
+        validator.add_field(Matchers.UDP_SRC)
+        validator.add_field(Matchers.UDP_DST)
+        validator.add_field(Matchers.SCTP_SRC)
+        validator.add_field(Matchers.SCTP_DST)
+        validator.add_field(Matchers.ICMPV4_TYPE)
+        validator.add_field(Matchers.ICMPV4_CODE)
+        validator.add_field(Matchers.ARP_OP)
+        validator.add_field(Matchers.ARP_SPA)
+        validator.add_field(Matchers.ARP_TPA)
+        validator.add_field(Matchers.ARP_SHA)
+        validator.add_field(Matchers.ARP_THA)
+        validator.add_field(Matchers.IPV6_SRC)
+        validator.add_field(Matchers.IPV6_DST)
+        validator.add_field(Matchers.IPV6_FLABEL)
+        validator.add_field(Matchers.ICMPV6_TYPE)
+        validator.add_field(Matchers.ICMPV6_CODE)
+        validator.add_field(Matchers.IPV6_ND_TARGET)
+        validator.add_field(Matchers.IPV6_ND_SLL)
+        validator.add_field(Matchers.IPV6_ND_TLL)
+        validator.add_field(Matchers.MPLS_LABEL)
+        validator.add_field(Matchers.MPLS_TC)
+        validator.add_field(Matchers.MPLS_BOS)
+        validator.add_field(Matchers.PBB_ISID)
+        validator.add_field(Matchers.TUNNEL_ID)
+        validator.add_field(Matchers.IPV6_EXTHDR)
+
+
+if __name__ == '__main__':
+
+    # Standalone mode: load the keyword CSVs and validate xmls/f1.xml through
+    # xmls/f49.xml, logging problems only (no tests are executed).
+    keywords = None
+    with open('keywords.csv') as f:
+        keywords = dict(line.strip().split(';') for line in f if not line.startswith('#'))
+
+    #print keywords
+
+    match_keywords = None
+    with open('match-keywords.csv') as f:
+        match_keywords = dict(line.strip().split(';') for line in f if not line.startswith('#'))
+
+    #print match_keywords
+
+    action_keywords = None
+    with open('action-keywords.csv') as f:
+        action_keywords = dict(line.strip().split(';') for line in f if not line.startswith('#'))
+
+    paths_to_xml = list()
+    for i in range(1, 50):
+        #paths_to_xml = ['xmls/f5.xml', 'xmls/f14.xml', 'xmls/f23.xml', 'xmls/f25.xml']
+        paths_to_xml.append('xmls/f%d.xml' % i)
+
+    # ERROR level keeps the output down to genuine problems
+    validator = XMLValidator(keywords, action_keywords, match_keywords, logging.ERROR)
+    validator.fill_fields()
+
+    for path in paths_to_xml:
+        validator.create_dictionaries(path)
+        validator.validate()
+