Refactor odltools to use commands
author     Sam Hague <shague@redhat.com>
           Fri, 20 Apr 2018 16:43:53 +0000 (09:43 -0700)
committer  Sam Hague <shague@redhat.com>
           Sun, 22 Apr 2018 17:33:38 +0000 (13:33 -0400)
- changed the package layout to be more modular
- refactored logging to use a common root logging config

Change-Id: I88cc012e5599ddc29d75940bea396a4861403097
Signed-off-by: Sam Hague <shague@redhat.com>
38 files changed:
resources/tools/odl/csit/logfile.py [deleted file]
resources/tools/odl/csit/test_logfile.py [deleted file]
resources/tools/odl/mdsal/test_request.py [deleted file]
resources/tools/odl/ovs/flows.py [deleted file]
resources/tools/odl/ovs/test_flows.py [deleted file]
resources/tools/odl/ovs/test_request.py [deleted file]
resources/tools/odltools/csit/__init__.py [moved from resources/tools/odl/__init__.py with 100% similarity]
resources/tools/odltools/csit/robotfiles.py [moved from resources/tools/odl/csit/robotfiles.py with 80% similarity]
resources/tools/odltools/csit/tests/test_robotfiles.py [moved from resources/tools/odl/csit/test_robot_files.py with 71% similarity]
resources/tools/odltools/mdsal/__init__.py [moved from resources/tools/odl/mdsal/__init__.py with 100% similarity]
resources/tools/odltools/mdsal/itm_state.py [moved from resources/tools/odl/mdsal/itm_state.py with 89% similarity]
resources/tools/odltools/mdsal/models.py [moved from resources/tools/odl/mdsal/models.py with 82% similarity]
resources/tools/odltools/mdsal/request.py [moved from resources/tools/odl/mdsal/request.py with 69% similarity]
resources/tools/odltools/mdsal/tests/resources/itm-state_dpn-endpoints.json [moved from resources/tools/odl/mdsal/itm-state_dpn-endpoints.json with 100% similarity]
resources/tools/odltools/mdsal/tests/test_itm_state.py [moved from resources/tools/odl/mdsal/test_itm_state.py with 73% similarity]
resources/tools/odltools/mdsal/tests/test_request.py [new file with mode: 0644]
resources/tools/odltools/netvirt/__init__.py [moved from resources/tools/odl/netvirt/__init__.py with 100% similarity]
resources/tools/odltools/netvirt/constants.py [moved from resources/tools/odl/netvirt/constants.py with 100% similarity]
resources/tools/odltools/netvirt/ds_analyze.py [moved from resources/tools/odl/netvirt/ds_analyze.py with 100% similarity]
resources/tools/odltools/netvirt/ds_get_data.py [moved from resources/tools/odl/netvirt/ds_get_data.py with 100% similarity]
resources/tools/odltools/netvirt/flow_parser.py [moved from resources/tools/odl/netvirt/flow_parser.py with 100% similarity]
resources/tools/odltools/netvirt/netvirt_utils.py [moved from resources/tools/odl/netvirt/netvirt_utils.py with 100% similarity]
resources/tools/odltools/netvirt/ovs_get_data.py [moved from resources/tools/odl/netvirt/ovs_get_data.py with 100% similarity]
resources/tools/odltools/netvirt/showOvsdbMdsal.py [moved from resources/tools/odl/netvirt/showOvsdbMdsal.py with 100% similarity]
resources/tools/odltools/odltools/__init__.py [moved from resources/tools/odl/ovs/__init__.py with 100% similarity]
resources/tools/odltools/odltools/logg.py [new file with mode: 0644]
resources/tools/odltools/odltools/odltools.py [new file with mode: 0644]
resources/tools/odltools/odltools/tests/test_odltools.py [new file with mode: 0644]
resources/tools/odltools/ovs/__init__.py [new file with mode: 0644]
resources/tools/odltools/ovs/flows.py [new file with mode: 0644]
resources/tools/odltools/ovs/request.py [moved from resources/tools/odl/ovs/request.py with 50% similarity]
resources/tools/odltools/ovs/tables.py [moved from resources/tools/odl/ovs/tables.py with 100% similarity]
resources/tools/odltools/ovs/tests/resources/flow_dumps.1.txt [moved from resources/tools/odl/ovs/flow_dumps.1.txt with 100% similarity]
resources/tools/odltools/ovs/tests/resources/flow_dumps.2.txt [moved from resources/tools/odl/ovs/flow_dumps.2.txt with 100% similarity]
resources/tools/odltools/ovs/tests/resources/flow_dumps.3.txt [new file with mode: 0644]
resources/tools/odltools/ovs/tests/test_flows.py [new file with mode: 0644]
resources/tools/odltools/ovs/tests/test_request.py [new file with mode: 0644]
resources/tools/odltools/ovs/tests/test_tables.py [new file with mode: 0644]

diff --git a/resources/tools/odl/csit/logfile.py b/resources/tools/odl/csit/logfile.py
deleted file mode 100644 (file)
index 48fc8af..0000000
+++ /dev/null
@@ -1,54 +0,0 @@
-import logging
-import os
-import re
-from subprocess import Popen
-
-
-LOG = logging.getLogger(__name__)
-
-
-class LogFile():
-    TMP = "/tmp"
-    LOGFILE = "log.html"
-
-    def __init__(self, logpath, jobpath, job):
-        if jobpath is None:
-            jobpath = self.TMP
-        self.jobpath = "{}/{}".format(jobpath, job)
-        self.logpath = logpath
-
-    def unzip_log_file0(self):
-        Popen("gunzip -fk {}".format(self.logpath), shell=True).wait()
-
-    def unzip_log_file1(self):
-        Popen("gunzip -kc {} > {}".format(self.logpath, self.jobpath + "/log.html"), shell=True).wait()
-
-    def unzip_log_file(self):
-        Popen("gunzip -cfk {} > {}".format(self.logpath, self.jobpath + "/" + self.LOGFILE), shell=True).wait()
-
-    def mkdir_job_path(self):
-        try:
-            os.makedirs(self.jobpath)
-        except OSError:
-            if not os.path.isdir(self.jobpath):
-                raise
-
-    def read_chunks(self, fp):
-        while True:
-            data = fp.read(64000)
-            if not data:
-                break
-            yield data
-
-    def parse_log(self, log):
-        # logfile = "/tmp/log.s2.html"
-        logfile = "/tmp/testjob/log.html"
-        # re_st = re.compile(r"ROBOT MESSAGE: Starting test")
-        re_st = re.compile(r"dump-flows")
-        cnt = 0
-        with open(logfile, 'rb') as fp:
-            for chunk in self.read_chunks(fp):
-                for m in re_st.finditer(chunk):
-                    print('%02d-%02d: %s' % (m.start(), m.end(), m.group(0)))
-                    cnt += 1
-        print "total matches: {}".format(cnt)
diff --git a/resources/tools/odl/csit/test_logfile.py b/resources/tools/odl/csit/test_logfile.py
deleted file mode 100644 (file)
index 687e484..0000000
+++ /dev/null
@@ -1,28 +0,0 @@
-import os
-import unittest
-from logfile import LogFile
-
-
-class TestLogFile(unittest.TestCase):
-    LOGPATH = "/tmp/log.html.gz"
-    JOBPATH = "/tmp"
-    JOB = "testjob"
-
-    def setUp(self):
-        self.logfile = LogFile(self.LOGPATH, self.JOBPATH, self.JOB)
-
-    def test_mkdir_log_path(self):
-        self.logfile.mkdir_job_path()
-        self.assertTrue(os.path.isdir(self.logfile.jobpath))
-
-    def test_unzip_log_file(self):
-        self.logfile.mkdir_job_path()
-        self.logfile.unzip_log_file()
-        fname = "{}/log.html".format(self.logfile.jobpath)
-        self.assertTrue(os.path.isfile(fname))
-
-    def test_parse_log(self):
-        self.logfile.parse_log(None)
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/resources/tools/odl/mdsal/test_request.py b/resources/tools/odl/mdsal/test_request.py
deleted file mode 100644 (file)
index 4614d84..0000000
+++ /dev/null
@@ -1,16 +0,0 @@
-import logging
-import unittest
-import request
-
-
-class TestRequest(unittest.TestCase):
-    def setUp(self):
-        self.filename = "./itm-state_dpn-endpoints.json"
-
-    def test_get_from_file(self):
-        request.logger.setLevel(logging.DEBUG)
-        self.data = request.get_from_file(self.filename)
-        self.assertIsNotNone(self.data)
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/resources/tools/odl/ovs/flows.py b/resources/tools/odl/ovs/flows.py
deleted file mode 100644 (file)
index 506ac1f..0000000
+++ /dev/null
@@ -1,155 +0,0 @@
-import logging
-from pprint import pformat
-import re
-
-import tables
-import request
-
-
-logging.basicConfig(format="%(levelname)-8s []%(name)s] [%(module)s:%(lineno)d] %(message)s",
-                    level=logging.DEBUG)
-logger = logging.getLogger(__name__)
-
-
-# TODO:
-# metadata decoder
-# mac to port
-# REG6 decoder
-# group decoder
-# curl -s -u admin:admin -X GET 127.0.0.1:8080/restconf/operational/odl-l3vpn:learnt-vpn-vip-to-port-data
-# - check if external ip is resolved, devstack uses port 8087
-
-class Flows:
-    COOKIE = "cookie"
-    DURATION = "duration"
-    TABLE = "table"
-    N_PACKETS = "n_packets"
-    N_BYTES = "n_bytes"
-    MATCHES = "matches"
-    ACTIONS = "actions"
-    IDLE_TIMEOUT = "idle_timeout"
-    SEND_FLOW_REMOVED = "send_flow_rem"
-    PRIORITY = "priority"
-    GOTO = "goto"
-    RESUBMIT = "resubmit"
-
-    def __init__(self, data, level=logging.INFO):
-        self.pdata = []
-        self.fdata = []
-        self.data = data
-        print "level: {}".format(level)
-        logger.setLevel(level)
-        if level is not logging.INFO:
-            logger.info("effective: %d", logger.getEffectiveLevel())
-        self.process_data()
-        self.format_data()
-        logger.info("data has been processed and parsed")
-
-    def set_log_level(self, level):
-        logger.setLevel(level)
-        logger.info("effective: %d", logger.getEffectiveLevel())
-
-    def process_data(self):
-        """
-        Process the dump-flows data into a map.
-
-        The processing will tokenize the parts in each line of the flow dump.
-
-        :return: A list of dictionaries of parsed tokens per line
-        """
-        # cookie=0x805138a, duration=193.107s, table=50, n_packets=119, n_bytes=11504, idle_timeout=300,
-        #  send_flow_rem priority=20,metadata=0x2138a000000/0xfffffffff000000,dl_src=fa:16:3e:15:a8:66
-        #  actions=goto_table:51
-
-        self.pdata = []
-        if len(self.data) == 0:
-            logger.warn("There is no data to process")
-            return self.pdata
-
-        # skip the header if present
-        if "OFPST_FLOW" in self.data[0]:
-            start = 1
-        else:
-            start = 0
-        if "jenkins" in self.data[-1]:
-            end = len(self.data) - 2
-        else:
-            end = len(self.data) - 1
-
-        # Parse each line of the data. Create a dictionary of all tokens and append to a list.
-        for line in self.data[start:end]:
-            pline = {}
-            tokens = line.split(" ")
-            for token in tokens:
-                # most lines are key=value so look for that pattern
-                splits = token.split("=", 1)
-                if len(splits) == 2:
-                    pline[splits[0]] = splits[1].rstrip(",")
-                elif token == Flows.SEND_FLOW_REMOVED:
-                    # send_flow_rem is a single token without a value
-                    pline[token] = token
-            self.pdata.append(pline)
-        logger.info("Processed %d lines, skipped %d", len(self.pdata), start)
-        logger.debug("Processed data: %s", pformat(self.pdata))
-        return self.pdata
-
-    def re_table(self, match):
-        """
-        regex function to add the table name to table lines
-
-        :param match: The regex match
-        :return: The new line with table name
-        :rtype: str
-        """
-        if match.group(Flows.GOTO) is not None:
-            table_id = int(match.group(Flows.GOTO))
-        elif match.group(Flows.RESUBMIT) is not None:
-            table_id = int(match.group(Flows.RESUBMIT))
-        else:
-            table_id = 256
-
-        rep = "{}({})".format(match.group(), tables.get_table_name(table_id))
-        return rep
-
-    def format_data(self):
-        if len(self.pdata) == 0:
-            self.logger.warn("There is no data to process")
-            return self.pdata
-        header = "{:9} {:8} {:13}     {:6} {:12} {}... {}... {} {}\n" \
-            .format(Flows.COOKIE, Flows.DURATION, Flows.TABLE, "n_pack", Flows.N_BYTES, Flows.MATCHES, Flows.ACTIONS,
-                    Flows.IDLE_TIMEOUT, Flows.DURATION)
-        header_under = "--------- -------- -------------     ------ ------------ ---------- ---------- --------" \
-                       "---- --------\n"
-
-        # Match goto_table: nnn or resubmit(,nnn) and return as goto or resubmit match group
-        re_gt = re.compile(r"goto_table:(?P<goto>\d{1,3})|"
-                           r"resubmit\(,(?P<resubmit>\d{1,3})\)")
-        self.fdata = [header, header_under]
-        for line in self.pdata:
-            if Flows.SEND_FLOW_REMOVED in line:
-                send_flow_rem = " {} ".format(line[Flows.SEND_FLOW_REMOVED])
-            else:
-                send_flow_rem = ""
-            if Flows.IDLE_TIMEOUT in line:
-                idle_timeo = " {}={}".format(Flows.IDLE_TIMEOUT, line[Flows.IDLE_TIMEOUT])
-            else:
-                idle_timeo = ""
-            if Flows.ACTIONS in line:
-                nactions = re_gt.sub(self.re_table, line[Flows.ACTIONS])
-            else:
-                logger.warn("Missing actions in %s", line)
-                nactions = ""
-
-            logger.debug("line: %s", line)
-
-            fline = "{:9} {:8} {:3} {:13} {:6} {:12} priority={} actions={}{}{}" \
-                .format(line[Flows.COOKIE], line[Flows.DURATION],
-                        line[Flows.TABLE], tables.get_table_name(int(line[Flows.TABLE])),
-                        line[Flows.N_PACKETS], line[Flows.N_BYTES],
-                        line[Flows.PRIORITY], nactions,
-                        idle_timeo, send_flow_rem, )
-            self.fdata.append(fline)
-        return self.fdata
-
-    def write_fdata(self, filename):
-        request.write_file(filename, self.fdata)
diff --git a/resources/tools/odl/ovs/test_flows.py b/resources/tools/odl/ovs/test_flows.py
deleted file mode 100644 (file)
index 899dac4..0000000
+++ /dev/null
@@ -1,31 +0,0 @@
-import logging
-from pprint import pformat
-import unittest
-import flows
-from flows import Flows
-import request
-import tables
-
-
-class TestFlows(unittest.TestCase):
-    def setUp(self):
-        self.filename = "./flow_dumps.1.txt"
-        self.data = request.get_from_file(self.filename)
-        self.flows = Flows(self.data, logging.DEBUG)
-
-    def test_process_data(self):
-        pdata = self.flows.process_data()
-        # print "parsed data:\n{}".format(pformat(pdata))
-
-    def test_format_data(self):
-        fdata = self.flows.format_data()
-        # print "parsed data:\n{}".format(pformat(fdata))
-
-    def test_write_file(self):
-        self.flows.write_fdata("/tmp/flow_dumps.out.txt")
-
-    def test_get_table_name(self):
-        print "table: {} is the {} table".format(17, tables.get_table_name(17))
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/resources/tools/odl/ovs/test_request.py b/resources/tools/odl/ovs/test_request.py
deleted file mode 100644 (file)
index 98c301b..0000000
+++ /dev/null
@@ -1,16 +0,0 @@
-import logging
-import unittest
-import request
-
-
-class TestRequest(unittest.TestCase):
-    def setUp(self):
-        self.filename = "./flow_dumps.1.txt"
-
-    def test_get_from_file(self):
-        request.logger.setLevel(logging.DEBUG)
-        self.data = request.get_from_file(self.filename)
-        self.assertEquals(len(self.data), 76)
-
-if __name__ == '__main__':
-    unittest.main()
similarity index 80%
rename from resources/tools/odl/csit/robotfiles.py
rename to resources/tools/odltools/csit/robotfiles.py
index ea2218ef7db8a5bbe57a3b2ef07cc2f331dd0181..f4992d2cfd61660cc5b5f7b4b1e4c515572eea60 100644 (file)
@@ -1,54 +1,31 @@
-import argparse
 import logging
-import sys
 import os
-from os import path
 import re
 import xml.etree.cElementTree as ET
 from subprocess import Popen
-#sys.path.append('../')
-#sys.path.append('../ovs')
-#sys.path.append(path.dirname( path.dirname( path.abspath(__file__))))
+from ovs import flows
 
 
-# logging.basicConfig(level=logging.DEBUG)
-# logger = logging.getLogger(__name__)
-logger = logging.getLogger("robotfiles")
-logger.setLevel(logging.INFO)
-formatter = logging.Formatter('%(asctime)s - %(levelname).3s - %(name)s - %(lineno)04d - %(message)s')
-ch = logging.StreamHandler()
-ch.setLevel(logging.INFO)
-ch.setFormatter(formatter)
-logger.addHandler(ch)
-fh = logging.FileHandler("/tmp/robotfiles.txt", "w")
-fh.setLevel(logging.DEBUG)
-fh.setFormatter(formatter)
-logger.addHandler(fh)
+logger = logging.getLogger("csit.robotfiles")
 
 
 class RobotFiles:
-    JOBTAG = "job"
-    TMP = "/tmp"
+    OUTDIR = "/tmp/robotjob"
     CHUNK_SIZE = 65536
     DUMP_FLOWS = "sudo ovs-ofctl dump-flows br-int -OOpenFlow13"
+    TMP = "/tmp"
 
-    def __init__(self, infile, outdir, jobtag):
+    def __init__(self, infile, outdir):
         if outdir is None:
-            outdir = RobotFiles.TMP
-        if jobtag is None:
-            jobtag = RobotFiles.JOBTAG
-        self.outdir = "{}/{}".format(outdir, jobtag)
+            self.outdir = RobotFiles.TMP
+        else:
+            self.outdir = outdir
         self.datafilepath = infile
         self.pdata = {}
-        self.set_log_level(logging.INFO)
         self.re_normalize_text = re.compile(r"( \n)|(\[A\[C.*)")
         self.re_uri = re.compile(r"uri=(?P<uri>.*),")
         self.re_model = re.compile(r"uri=(?P<uri>.*),")
 
-    @staticmethod
-    def set_log_level(level):
-        ch.setLevel(level)
-
     def gunzip_output_file(self):
         infile = self.datafilepath
         basename = os.path.splitext(os.path.basename(self.datafilepath))[0]
@@ -62,7 +39,7 @@ class RobotFiles:
             if not os.path.isdir(path):
                 raise
 
-    def mkdir_job_path(self):
+    def mk_outdir(self):
         self.mkdir(self.outdir)
 
     def read_chunks(self, fp):
@@ -78,9 +55,9 @@ class RobotFiles:
         with open(self.datafilepath, 'rb') as fp:
             for chunk in self.read_chunks(fp):
                 for m in re_st.finditer(chunk):
-                    print('%02d-%02d: %s' % (m.start(), m.end(), m.group(0)))
+                    logger.info("%02d-%02d: %s", m.start(), m.end(), m.group(0))
                     cnt += 1
-        print "total matches: {}".format(cnt)
+        logger.info("total matches: %d", cnt)
 
     class State:
         def __init__(self):
@@ -108,7 +85,7 @@ class RobotFiles:
     def print_config(self):
         logger.info("datafilepath: %s, outdir: %s", self.datafilepath, self.outdir)
 
-    # scan until test id= is seen that indicates the start of a new test -> state=test
+    # scan until test id= is seen. This indicates the start of a new test -> state=test
     # - scan until Get Test Teardown Debugs -> state=debugs
     #   - scan until Get DumpFlows And Ovsconfig -> state=nodes
     #   - scan until Get Model Dump -> state=models
@@ -229,7 +206,7 @@ class RobotFiles:
                 element.clear()
                 # debugging code to stop after the named test case is processed
                 # if "s1-t2" in self.pdata:
-                #    break
+                #     break
             root.clear()
 
     def write_pdata(self):
@@ -259,11 +236,12 @@ class RobotFiles:
                 ndir = tdir + "/" + nodeid
                 if RobotFiles.DUMP_FLOWS not in node:
                     continue
-                filename = ndir + "/" + self.fix_command_names(RobotFiles.DUMP_FLOWS) + ".f.txt"
+                filename = ndir + "/" + self.fix_command_names(RobotFiles.DUMP_FLOWS)
                 logger.info("Processing: %s", filename)
+                filename = filename + ".f.txt"
                 dump_flows = node[RobotFiles.DUMP_FLOWS]
-                flows = Flows(dump_flows, logging.DEBUG)
-                flows.write_fdata(filename)
+                fls = flows.Flows(dump_flows)
+                fls.write_fdata(filename)
 
     def fix_command_names(self, cmd):
         return cmd.replace(" ", "_")
@@ -273,47 +251,27 @@ class RobotFiles:
 
 
 def run(args):
-    robotfile = RobotFiles(args.infile, args.outdir, args.jobtag)
+    robotfile = RobotFiles(args.infile, args.outdir)
+    robotfile.print_config()
+    robotfile.mk_outdir()
+    if args.gunzip:
+        robotfile.gunzip_output_file()
     robotfile.print_config()
     robotfile.parse_xml_data_file()
-    robotfile.mkdir_job_path()
     robotfile.write_pdata()
-
-
-def create_parser():
-    parser = argparse.ArgumentParser(description="OpenStack CLI Mock")
-    parser.add_argument("-i", "--infile")
-    parser.add_argument("-o", "--outdir")
-    parser.add_argument("-j", "--jobtag")
-    parser.add_argument("-g", "--gunzip", action="store_true")
-    parser.add_argument("-v", "--verbose", dest="verbose", action="count", default=logging.INFO,
-                        help="Output more information about what's going on")
-    parser.add_argument("-V", "--version", action="version",
-                        version="%s version %s" %
-                                (os.path.split(sys.argv[0])[-1], 0.1))
+    if args.dump:
+        robotfile.write_debug_pdata()
+
+
+def add_parser(subparsers):
+    parser = subparsers.add_parser("csit")
+    parser.add_argument("infile",
+                        help="XML output from a Robot test, e.g. output_01_l2.xml.gz")
+    parser.add_argument("outdir",
+                        help="the directory that the parsed data is written into")
+    parser.add_argument("-g", "--gunzip", action="store_true",
+                        help="unzip the infile")
+    parser.add_argument("-d", "--dump", action="store_true",
+                        help="dump extra debugging, e.g. ovs metadata")
     parser.set_defaults(func=run)
 
-    return parser
-
-
-def parse_args():
-    parser = create_parser()
-    args = parser.parse_args()
-
-    return args
-
-
-def main():
-    args = parse_args()
-    args.func(args)
-
-if __name__ == "__main__":
-    if __package__ is None:
-        import sys
-        from os import path
-
-        sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
-        from ovs.flows import Flows
-    else:
-        from ..ovs.flows import Flows
-    main()
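
The replacement of the per-file main()/parse_args() boilerplate above with add_parser(subparsers) is the "commands" pattern this change introduces: each tool registers one subcommand and binds its run() entry point with set_defaults, while only odltools.py (further down) builds the top-level parser. A minimal standalone sketch of the dispatch, using the hypothetical command name "mytool":

    import argparse

    def run(args):
        print("would process {} into {}".format(args.infile, args.outdir))

    def add_parser(subparsers):
        # a command module registers itself and binds its entry point;
        # the top-level parser never needs to know its options
        parser = subparsers.add_parser("mytool")
        parser.add_argument("infile")
        parser.add_argument("outdir")
        parser.set_defaults(func=run)

    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest="command")
    add_parser(subparsers)
    args = parser.parse_args(["mytool", "in.xml", "/tmp/out"])
    args.func(args)  # dispatches to run()
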
similarity index 71%
rename from resources/tools/odl/csit/test_robot_files.py
rename to resources/tools/odltools/csit/tests/test_robotfiles.py
index e602a2fa3f7b03aeb8de918e75a8136414b08b4c..2fa282ee9bbd81d2fb26d23aab3468507b6dbcb4 100644 (file)
@@ -1,6 +1,8 @@
+import logging
 import os
 import unittest
-from robotfiles import RobotFiles
+from csit.robotfiles import RobotFiles
+from odltools import logg
 
 
 # Requirements
@@ -9,23 +11,27 @@ from robotfiles import RobotFiles
 
 class TestRobotFiles(unittest.TestCase):
     DATAPATH = "/tmp/output_01_l2.xml.gz"
-    JOBPATH = "/tmp"
-    JOBTAG = "testjob435"
+    OUTPATH = "/tmp/robotjob"
 
-    def test_mkdir_job_path(self):
-        self.robotfile = RobotFiles(self.DATAPATH, self.JOBPATH, self.JOBTAG)
-        self.robotfile.mkdir_job_path()
+    def setUp(self):
+        logg.Logger()
+
+    def test_mk_outdir(self):
+        self.robotfile = RobotFiles(self.DATAPATH, self.OUTPATH)
+        self.robotfile.mk_outdir()
         self.assertTrue(os.path.isdir(self.robotfile.outdir))
 
     def test_gunzip_xml_data_file(self):
-        self.robotfile = RobotFiles(self.DATAPATH, self.JOBPATH, self.JOBTAG)
-        self.robotfile.mkdir_job_path()
+        self.robotfile = RobotFiles(self.DATAPATH, self.OUTPATH)
+        self.robotfile.mk_outdir()
         self.robotfile.gunzip_output_file()
         self.assertTrue(os.path.isfile(self.robotfile.datafilepath))
 
     def test_parse_xml_data_file(self):
-        self.robotfile = RobotFiles("/tmp/testjob435/output_01_l2.xml", self.JOBPATH, self.JOBTAG)
+        self.robotfile = RobotFiles(self.DATAPATH, self.OUTPATH)
         self.robotfile.print_config()
+        self.robotfile.mk_outdir()
+        self.robotfile.gunzip_output_file()
         self.robotfile.parse_xml_data_file()
 
         print "tests: {}".format(len(self.robotfile.pdata))
@@ -35,7 +41,7 @@ class TestRobotFiles(unittest.TestCase):
             self.fail("wrong test_id")
         pdata = self.robotfile.pdata[test_id]
         print "\n{} test id = {} - {}".format(1, test_id, pdata['name'])
-        if 1:
+        if 0:
             for nindex, (node, ndata) in enumerate(pdata['nodes'].items()):
                 print "{}: node = {}".format(nindex, node)
                 for cindex, (command, cdata) in enumerate(ndata.items()):
@@ -44,9 +50,8 @@ class TestRobotFiles(unittest.TestCase):
             for mindex, (model, mdata) in enumerate(sorted(pdata['models'].items())):
                 print "{}: model = {} - {}".format(mindex, model, mdata)
 
-        self.robotfile.mkdir_job_path()
         self.robotfile.write_pdata()
-        self.robotfile.write_debug_pdata()
+        self.robotfile.write_debug_pdata()
 
 if __name__ == '__main__':
     unittest.main()
similarity index 89%
rename from resources/tools/odl/mdsal/itm_state.py
rename to resources/tools/odltools/mdsal/itm_state.py
index 8fd01c9b0b2c111fc92f2c7ab42c37c2bdb430fa..db83ca93d18af5624b1a4989a5f7fc5bdda99d2b 100644 (file)
@@ -1,7 +1,7 @@
 from models import Model
 
 
-name = "itm-state"
+NAME = "itm-state"
 
 
 class DpnEndpoints(Model):
@@ -11,6 +11,7 @@ class DpnEndpoints(Model):
     TUNNEL_END_POINTS = "tunnel-end-points"
     IP_ADDRESS = "ip-address"
 
+    # not currently used, backup method to get_kv
     def item_generator(self, json_input, lookup_key):
         if isinstance(json_input, dict):
             for k, v in json_input.iteritems():
@@ -47,5 +48,5 @@ class DpnEndpoints(Model):
         return tunnel_endpoints[0][self.IP_ADDRESS]
 
 
-def dpn_endpoints(store, ip, port, debug=0):
-    return DpnEndpoints(name, DpnEndpoints.CONTAINER, store, ip, port, debug)
+def dpn_endpoints(store, ip, port):
+    return DpnEndpoints(NAME, DpnEndpoints.CONTAINER, store, ip, port)
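
From the constants and accessors in this model, the itm-state:dpn-endpoints JSON that get_dpn_ids()/get_ip_address() walk has roughly the shape below. The key spellings for the container and teps-info list are inferred from the constant names, not copied from the resource file; the dpn-id and ip-address values are the ones the tests further down assert:

    data = {
        "dpn-endpoints": {
            "DPN-TEPs-info": [
                {
                    "DPN-ID": 13878168265586,
                    "tunnel-end-points": [
                        {"ip-address": "10.29.13.165"}
                    ]
                }
            ]
        }
    }
    # get_ip_address(dpn_id) returns the first tunnel endpoint's
    # "ip-address" for the matching DPN id
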
similarity index 82%
rename from resources/tools/odl/mdsal/models.py
rename to resources/tools/odltools/mdsal/models.py
index 34d6e8bbfbd7f15b188b51287616cbcb461bcac2..d96bd2b6fe6fdba6fc49398534c11e8420b98e81 100644 (file)
@@ -8,19 +8,15 @@ class Model:
     USER = "admin"
     PW = "admin"
 
-    def __init__(self, name, container, store, ip, port, debug=0):
+    def __init__(self, name, container, store, ip, port):
         self.name = name
         self.CONTAINER = container
         self.store = store
         self.ip = ip
         self.port = port
-        self.debug = debug
         self.data = None
         self.url = self.make_url()
 
-    def set_debug(self, level):
-        self.debug = level
-
     def set_odl_address(self, ip, port):
         self.ip = ip
         self.port = port
@@ -34,8 +30,8 @@ class Model:
         self.data = request.get(self.url, self.USER, self.PW)
         return self.data
 
-    def get_from_file(self, filename):
-        self.data = request.get_from_file(filename)
+    def read_file(self, filename):
+        self.data = request.read_file(filename)
         return self.data
 
     def pretty_format(self, data=None):
@@ -44,6 +40,13 @@ class Model:
         return json.dumps(data, indent=4, separators=(',', ': '))
 
     def get_kv(self, k, v, values):
+        """
+        Return a list of values for the given key
+        :param k: the key to search for
+        :param v: the JSON fragment (dict, list or leaf) to search
+        :param values: the accumulator list that matching values are appended to
+        :return: the list of values found for the key
+        """
         if type(v) is dict:
             for jsonkey in v:
                 if jsonkey == k:
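
Only the first lines of get_kv are visible in this hunk; the full method evidently recurses through nested dicts and lists, collecting every value stored under the key. A self-contained sketch of that style of search, reconstructed from the visible fragment rather than copied from the file:

    def get_kv(k, v, values):
        # depth-first search of parsed JSON: append every value found
        # under key k to the values accumulator and return it
        if isinstance(v, dict):
            for jsonkey, jsonvalue in v.items():
                if jsonkey == k:
                    values.append(jsonvalue)
                elif isinstance(jsonvalue, (dict, list)):
                    get_kv(k, jsonvalue, values)
        elif isinstance(v, list):
            for item in v:
                if isinstance(item, (dict, list)):
                    get_kv(k, item, values)
        return values

    # get_kv("ip-address", data, []) -> ["10.29.13.165"]
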
similarity index 69%
rename from resources/tools/odl/mdsal/request.py
rename to resources/tools/odltools/mdsal/request.py
index 69b2f063621a1d5ed33c555e7d9f667e4de0cd7d..423dccfa608ca99defaae614dd1b148e912e968c 100644 (file)
@@ -3,16 +3,11 @@ import logging
 import requests
 
 
-logging.basicConfig(format="%(levelname)-8s [%(module)s:%(lineno)d] %(message)s",
-                    level=logging.INFO)
-logger = logging.getLogger(__name__)
-
-
-def set_log_level(level):
-    logger.setLevel(level)
+logger = logging.getLogger("mdsal.request")
 
 
 def debug_print(text1, data):
+    logger.info("request: %s: processed %d lines", text1, len(data))
     logger.debug("request: %s", text1)
     logger.debug("%s", json.dumps(data))
     logger.debug("%s", json.dumps(data, indent=4, separators=(',', ': ')))
@@ -26,7 +21,7 @@ def get(url, user, pw):
     return data
 
 
-def get_from_file(filename):
+def read_file(filename):
     with open(filename) as json_file:
         data = json.load(json_file)
     debug_print(filename, data)
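
The body of get() is not shown in this hunk; given the requests import and the (url, user, pw) signature, it plausibly reduces to an authenticated GET that returns parsed JSON, along the lines of the sketch below (an assumption, not the file's exact code):

    import requests

    def get(url, user, pw):
        # HTTP basic-auth GET against the RESTCONF URL; requests
        # decodes the JSON body
        resp = requests.get(url, auth=(user, pw))
        resp.raise_for_status()
        return resp.json()
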
similarity index 73%
rename from resources/tools/odl/mdsal/test_itm_state.py
rename to resources/tools/odltools/mdsal/tests/test_itm_state.py
index 00f81c54afeee80bf3f18bc8dd3919f598fb8dd1..d086c5e69f20cd13a99377ef69977a71b8cd0b98 100644 (file)
@@ -1,8 +1,8 @@
 import unittest
-import itm_state
-from models import Model
-from itm_state import DpnEndpoints
-
+from mdsal import itm_state
+from mdsal.models import Model
+from mdsal.itm_state import DpnEndpoints
+from odltools import logg
 
 ip = "127.0.0.1"
 port = "8080"
@@ -10,10 +10,11 @@ port = "8080"
 
 class TestItmState(unittest.TestCase):
     def setUp(self):
-        self.dpn_endpoints = itm_state.DpnEndpoints(self, DpnEndpoints.CONTAINER, Model.CONFIG, ip, port, 1)
-        self.data = self.dpn_endpoints.get_from_file("itm-state_dpn-endpoints.json")
+        logg.Logger()
+        self.dpn_endpoints = itm_state.dpn_endpoints(Model.CONFIG, ip, port)
+        self.data = self.dpn_endpoints.read_file("./resources/itm-state_dpn-endpoints.json")
 
-    def test_get_from_file(self):
+    def test_read_file(self):
         print "dpn-endpoints: {}".format(self.data)
         print "dpn-endpoints: \n{}".format(self.dpn_endpoints.pretty_format(self.data))
 
@@ -22,6 +23,8 @@ class TestItmState(unittest.TestCase):
         dpn_id = dpn_ids[0]
         ip_address = self.dpn_endpoints.get_ip_address(dpn_id)
         print "dpn_id: {}, ip_address: {}".format(dpn_id, ip_address)
+        self.assertEqual(dpn_id, "13878168265586")
+        self.assertEqual(ip_address, "10.29.13.165")
 
     def test_get_all(self):
         print "dpn-endpoints: {}".format(self.data)
@@ -34,6 +37,7 @@ class TestItmState(unittest.TestCase):
 
         ip_address = self.dpn_endpoints.get_ip_address(dpn_id)
         print "ip_address: {}".format(ip_address)
+        self.assertEqual(ip_address, "10.29.13.165")
 
         self.get_info(DpnEndpoints.CONTAINER)
         self.get_info(DpnEndpoints.DPN_TEPS_INFO)
diff --git a/resources/tools/odltools/mdsal/tests/test_request.py b/resources/tools/odltools/mdsal/tests/test_request.py
new file mode 100644 (file)
index 0000000..d360043
--- /dev/null
@@ -0,0 +1,16 @@
+import unittest
+from mdsal import request
+from odltools import logg
+
+
+class TestRequest(unittest.TestCase):
+    def setUp(self):
+        logg.Logger()
+        self.filename = "./resources/itm-state_dpn-endpoints.json"
+
+    def test_read_file(self):
+        data = request.read_file(self.filename)
+        self.assertEqual(len(data), 1)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/resources/tools/odltools/odltools/logg.py b/resources/tools/odltools/odltools/logg.py
new file mode 100644 (file)
index 0000000..3a25883
--- /dev/null
@@ -0,0 +1,16 @@
+import logging
+
+
+class Logger:
+    def __init__(self):
+        logger = logging.getLogger()
+        formatter = logging.Formatter('%(asctime)s - %(levelname).3s - %(name)-20s - %(lineno)04d - %(message)s')
+        ch = logging.StreamHandler()
+        ch.setLevel(logging.INFO)
+        ch.setFormatter(formatter)
+        logger.addHandler(ch)
+        fh = logging.FileHandler("/tmp/odltools.txt", "w")
+        fh.setLevel(logging.DEBUG)
+        fh.setFormatter(formatter)
+        logger.addHandler(fh)
+        logger.setLevel(min([ch.level, fh.level]))
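
This is the "common root config" from the commit message: Logger() attaches a console handler (INFO) and a file handler (DEBUG) to the root logger exactly once, so individual modules no longer configure handlers and simply request a named logger whose records propagate up to the root. A minimal self-contained sketch of the pattern:

    import logging

    # what logg.Logger() does once at startup, reduced to one handler
    root = logging.getLogger()
    root.addHandler(logging.StreamHandler())
    root.setLevel(logging.DEBUG)

    # all any module does; records propagate up to the root handlers
    logging.getLogger("ovs.flows").info("no per-module handler setup needed")
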
diff --git a/resources/tools/odltools/odltools/odltools.py b/resources/tools/odltools/odltools/odltools.py
new file mode 100644 (file)
index 0000000..46659fc
--- /dev/null
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+import argparse
+import csit.robotfiles
+import logg
+
+__version__ = "0.1"
+
+
+def create_parser():
+    parser = argparse.ArgumentParser(description="OpenDaylight Troubleshooting Tools")
+    parser.add_argument("-v", "--verbose", dest="verbose", action="count", default=0,
+                        help="verbosity (-v, -vv)")
+    parser.add_argument("-V", "--version", action="version",
+                        version="%(prog)s (version {version})".format(version=__version__))
+    subparsers = parser.add_subparsers(dest="command")
+    csit.robotfiles.add_parser(subparsers)
+
+    return parser
+
+
+def parse_args():
+    parser = create_parser()
+    args = parser.parse_args()
+
+    return args
+
+
+def main():
+    args = parse_args()
+    args.func(args)
+
+if __name__ == "__main__":
+    logg.Logger()
+    main()
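
With the wiring above, the tool is driven through a subcommand, for example (the paths mirror those used by the tests below; -g gunzips the Robot output first):

    python odltools.py csit /tmp/output_01_l2.xml.gz /tmp/robotjob -g
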
diff --git a/resources/tools/odltools/odltools/tests/test_odltools.py b/resources/tools/odltools/odltools/tests/test_odltools.py
new file mode 100644 (file)
index 0000000..7b9aa1f
--- /dev/null
@@ -0,0 +1,33 @@
+import unittest
+from odltools import odltools
+from csit import robotfiles
+
+
+class TestOdltools(unittest.TestCase):
+    DATAPATH = "/tmp/output_01_l2.xml.gz"
+    OUTPATH = "/tmp/robotjob"
+
+    def test_parser_empty(self):
+        parser = odltools.create_parser()
+        with self.assertRaises(SystemExit) as cm:
+            parser.parse_args([])
+        self.assertEqual(cm.exception.code, 2)
+
+    def test_parser_help(self):
+        parser = odltools.create_parser()
+        with self.assertRaises(SystemExit) as cm:
+            parser.parse_args(['-h'])
+        self.assertEqual(cm.exception.code, 0)
+
+    def test_robotfiles_run(self):
+        parser = odltools.create_parser()
+        args = parser.parse_args(['csit', self.DATAPATH, self.OUTPATH, '-g'])
+        robotfiles.run(args)
+
+    def test_csit(self):
+        parser = odltools.create_parser()
+        args = parser.parse_args(['csit', self.DATAPATH, self.OUTPATH, '-g', '-d'])
+        robotfiles.run(args)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/resources/tools/odltools/ovs/__init__.py b/resources/tools/odltools/ovs/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/resources/tools/odltools/ovs/flows.py b/resources/tools/odltools/ovs/flows.py
new file mode 100644 (file)
index 0000000..576e667
--- /dev/null
@@ -0,0 +1,186 @@
+import logging
+from pprint import pformat
+import re
+import tables
+import request
+
+
+logger = logging.getLogger("ovs.flows")
+
+
+# TODO:
+# metadata decoder
+# mac to port
+# REG6 decoder
+# group decoder
+# curl -s -u admin:admin -X GET 127.0.0.1:8080/restconf/operational/odl-l3vpn:learnt-vpn-vip-to-port-data
+# - check if external ip is resolved, devstack uses port 8087
+class Flows:
+    COOKIE = "cookie"
+    DURATION = "duration"
+    TABLE = "table"
+    N_PACKETS = "n_packets"
+    N_BYTES = "n_bytes"
+    MATCHES = "matches"
+    ACTIONS = "actions"
+    IDLE_TIMEOUT = "idle_timeout"
+    SEND_FLOW_REMOVED = "send_flow_rem"
+    PRIORITY = "priority"
+    GOTO = "goto"
+    RESUBMIT = "resubmit"
+
+    def __init__(self, data):
+        self.pdata = []
+        self.fdata = []
+        if type(data) is str:
+            self.data = data.splitlines()
+        elif type(data) is list:
+            self.data = data
+        else:
+            logger.error("init: data is not a supproted type")
+            return
+        self.start = 0
+        logger.info("init: Copied %d lines", len(self.data))
+        self.process_data()
+        self.format_data()
+        logger.info("init: data has been processed and formatted")
+
+    def pretty_print(self, data):
+        return "{}".format(pformat(data))
+
+    def process_data(self):
+        """
+        Process the dump-flows data into a map.
+
+        The processing will tokenize the parts in each line of the flow dump.
+
+        :return: A list of dictionaries of parsed tokens per line
+        """
+        # cookie=0x805138a, duration=193.107s, table=50, n_packets=119, n_bytes=11504, idle_timeout=300,
+        #  send_flow_rem priority=20,metadata=0x2138a000000/0xfffffffff000000,dl_src=fa:16:3e:15:a8:66
+        #  actions=goto_table:51
+
+        self.pdata = []
+        if len(self.data) == 0:
+            logger.warn("There is no data to process")
+            return self.pdata
+
+        # skip the header if present
+        if "OFPST_FLOW" in self.data[0]:
+            self.start = 1
+            logger.debug("process_data: will skip first line: OFPST_FLOW line")
+        else:
+            self.start = 0
+        if "jenkins" in self.data[-1]:
+            end = len(self.data) - 1
+            logger.debug("process_data: will skip last line: jenkins line")
+        else:
+            end = len(self.data)
+
+        # Parse each line of the data. Each line is a single flow.
+        # Create a dictionary of all tokens in that flow.
+        # Append this flow dictionary to a list of flows.
+        for line in self.data[self.start:end]:
+            pline = {}
+            pline[Flows.IDLE_TIMEOUT] = "---"
+            pline[Flows.SEND_FLOW_REMOVED] = "-"
+            tokens = line.split(" ")
+            for token in tokens:
+                # most lines are key=value so look for that pattern
+                splits = token.split("=", 1)
+                if len(splits) == 2:
+                    if Flows.PRIORITY in splits[0]:
+                        splitp = splits[1].split(",", 1)
+                        if len(splitp) == 2:
+                            pline[Flows.PRIORITY] = splitp[0]
+                            pline[Flows.MATCHES] = splitp[1]
+                        else:
+                            pline[Flows.PRIORITY] = splitp[0]
+                            pline[Flows.MATCHES] = ""
+                    else:
+                        pline[splits[0]] = splits[1].rstrip(",")
+                elif token == Flows.SEND_FLOW_REMOVED:
+                    # send_flow_rem is a single token without a value
+                    pline[token] = token
+            self.pdata.append(pline)
+            logger.debug("process_data: Processed line %d into: \n%s",
+                         self.start + len(self.pdata), pformat(pline))
+        logger.info("process_data: Processed %d lines, skipped %d", len(self.pdata),
+                    self.start + len(self.data) - end)
+
+        return self.pdata
+
+    def re_table(self, match):
+        """
+        regex function to add the table name to table lines
+
+        :param match: The regex match
+        :return: The new line with table name
+        :rtype: str
+        """
+        if match.group(Flows.GOTO) is not None:
+            table_id = int(match.group(Flows.GOTO))
+        elif match.group(Flows.RESUBMIT) is not None:
+            table_id = int(match.group(Flows.RESUBMIT))
+        else:
+            table_id = 256
+
+        rep = "{}({})".format(match.group(), tables.get_table_name(table_id))
+        return rep
+
+    def format_data(self):
+        if len(self.pdata) == 0:
+            logger.warn("There is no data to process")
+            return self.pdata
+        header = "{:3} {:9} {:8} {:13}     {:6} {:12} {:1} {:3} {:5}\n" \
+                 "    {}\n" \
+                 "    {}\n" \
+            .format("nnn", Flows.COOKIE, Flows.DURATION, Flows.TABLE, "n_pack", Flows.N_BYTES,
+                    "S", "ito", "prio",
+                    Flows.MATCHES,
+                    Flows.ACTIONS)
+        header_under = "--- --------- -------- -------------     ------ ------------ - --- -----\n"
+
+        # Match goto_table: nnn or resubmit(,nnn) and return as goto or resubmit match group
+        re_gt = re.compile(r"goto_table:(?P<goto>\d{1,3})|"
+                           r"resubmit\(,(?P<resubmit>\d{1,3})\)")
+
+        # Add the header as the first two lines of formatted data
+        self.fdata = [header, header_under]
+
+        # Format each line of parsed data
+        for i, line in enumerate(self.pdata):
+            logger.debug("format_data: processing line %d: %s", self.start + i + 1, line)
+
+            #if Flows.SEND_FLOW_REMOVED in line:
+            #    send_flow_rem = " {} ".format(line[Flows.SEND_FLOW_REMOVED])
+            #else:
+            #    send_flow_rem = ""
+
+            #if Flows.IDLE_TIMEOUT in line:
+            #    idle_timeo = " {}={}".format(Flows.IDLE_TIMEOUT, line[Flows.IDLE_TIMEOUT])
+            #else:
+            #    idle_timeo = ""
+
+            if Flows.ACTIONS in line:
+                nactions = re_gt.sub(self.re_table, line[Flows.ACTIONS])
+            else:
+                logger.warn("Missing actions in %s", line)
+                nactions = ""
+
+            fline = "{:3} {:9} {:8} {:3} {:13} {:6} {:12} {:1} {:3} {:5}\n" \
+                    "    matches={}\n" \
+                    "    actions={}\n" \
+                .format(i+1, line[Flows.COOKIE], line[Flows.DURATION],
+                        line[Flows.TABLE], tables.get_table_name(int(line[Flows.TABLE])),
+                        line[Flows.N_PACKETS], line[Flows.N_BYTES],
+                        line[Flows.SEND_FLOW_REMOVED][0], line[Flows.IDLE_TIMEOUT],
+                        line[Flows.PRIORITY],
+                        line[Flows.MATCHES],
+                        nactions)
+            self.fdata.append(fline)
+            logger.debug("format_data: formatted line %d: %s", self.start + i + 1, fline)
+        return self.fdata
+
+    def write_fdata(self, filename):
+        request.write_file(filename, self.fdata)
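
To make the tokenizing in process_data concrete: the example flow line from its docstring breaks down as in the standalone rehearsal below, which repeats the split logic inline rather than calling into the class:

    line = ("cookie=0x805138a, duration=193.107s, table=50, n_packets=119, "
            "n_bytes=11504, idle_timeout=300, send_flow_rem "
            "priority=20,metadata=0x2138a000000/0xfffffffff000000,"
            "dl_src=fa:16:3e:15:a8:66 actions=goto_table:51")

    pline = {}
    for token in line.split(" "):
        splits = token.split("=", 1)
        if len(splits) == 2:
            if "priority" in splits[0]:
                # the value after priority= carries the match fields
                prio, _, matches = splits[1].partition(",")
                pline["priority"] = prio
                pline["matches"] = matches
            else:
                pline[splits[0]] = splits[1].rstrip(",")
        elif token == "send_flow_rem":
            pline[token] = token

    # pline["table"] == "50", pline["priority"] == "20",
    # pline["matches"] == "metadata=0x2138a000000/0xfffffffff000000,dl_src=fa:16:3e:15:a8:66",
    # pline["actions"] == "goto_table:51"
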
similarity index 50%
rename from resources/tools/odl/ovs/request.py
rename to resources/tools/odltools/ovs/request.py
index 21d16a5cce8ae86cbfdd8902e962da79f36ca5a0..030ace3d32c3f1863795d5acced187b6bcef46d5 100644 (file)
@@ -1,32 +1,25 @@
 import logging
-# from pprint import pformat
 
 
 # TODO:
 # - requests to get flow dumps via ovs-vsctl, ssh
 # - group processing
 
-logging.basicConfig(format="%(levelname)-8s [%(module)s:%(lineno)d] %(message)s",
-                    level=logging.INFO)
-logger = logging.getLogger(__name__)
+logger = logging.getLogger("ovs.request")
 
 
-def set_log_level(level):
-    logger.setLevel(level)
-
-
-def get_from_file(filename):
+def read_file(filename):
     lines = []
     with open(filename, 'r') as fp:
         for line in fp:
             # strip leading spaces; by default every flow line has a leading space: " cookie=..."
             lines.append(line.lstrip())
-    logger.info("File: %s: processed %d lines", filename, len(lines))
-    logger.debug("\n%s", "".join(lines))
-    # logger.debug("\n%s", pformat(lines))
+    logger.info("read_file: File: %s: processed %d lines", filename, len(lines))
+    # logger.debug("read_file: lines:\n%s", "".join(lines))
     return lines
 
 
 def write_file(filename, lines):
     with open(filename, 'w') as fp:
         fp.writelines(lines)
+    logger.info("write_file: File: %s: wrote %d lines", filename, len(lines))
diff --git a/resources/tools/odltools/ovs/tests/resources/flow_dumps.3.txt b/resources/tools/odltools/ovs/tests/resources/flow_dumps.3.txt
new file mode 100644 (file)
index 0000000..adb3b36
--- /dev/null
@@ -0,0 +1,68 @@
+OFPST_FLOW reply (OF1.3) (xid=0x2):
+ cookie=0x8000001, duration=649.639s, table=0, n_packets=203, n_bytes=19461, priority=5,in_port=2 actions=write_metadata:0x20000000001/0xfffff0000000001,goto_table:36
+ cookie=0x8000001, duration=596.674s, table=0, n_packets=186, n_bytes=17869, priority=5,in_port=3 actions=write_metadata:0x60000000001/0xfffff0000000001,goto_table:36
+ cookie=0x8000000, duration=676.843s, table=17, n_packets=0, n_bytes=0, priority=0,metadata=0x8000000000000000/0xf000000000000000 actions=write_metadata:0x9000000000000000/0xf000000000000000,goto_table:80
+ cookie=0x6800000, duration=676.790s, table=18, n_packets=0, n_bytes=0, priority=0 actions=goto_table:38
+ cookie=0x8220015, duration=676.694s, table=19, n_packets=0, n_bytes=0, priority=100,arp,arp_op=1 actions=resubmit(,17)
+ cookie=0x8220016, duration=676.694s, table=19, n_packets=0, n_bytes=0, priority=100,arp,arp_op=2 actions=resubmit(,17)
+ cookie=0x1080000, duration=676.730s, table=19, n_packets=0, n_bytes=0, priority=0 actions=resubmit(,17)
+ cookie=0x1030000, duration=676.731s, table=20, n_packets=0, n_bytes=0, priority=0 actions=goto_table:80
+ cookie=0x8000004, duration=676.695s, table=22, n_packets=0, n_bytes=0, priority=0 actions=CONTROLLER:65535
+ cookie=0x1080000, duration=676.695s, table=23, n_packets=0, n_bytes=0, priority=0 actions=resubmit(,17)
+ cookie=0x1080000, duration=676.437s, table=24, n_packets=0, n_bytes=0, priority=0 actions=resubmit(,17)
+ cookie=0x822002d, duration=676.061s, table=43, n_packets=0, n_bytes=0, priority=100,arp,arp_op=1 actions=group:5000
+ cookie=0x822002e, duration=676.061s, table=43, n_packets=0, n_bytes=0, priority=100,arp,arp_op=2 actions=CONTROLLER:65535,resubmit(,48)
+ cookie=0x8220000, duration=676.437s, table=43, n_packets=0, n_bytes=0, priority=0 actions=goto_table:48
+ cookie=0x4000000, duration=676.791s, table=45, n_packets=0, n_bytes=0, priority=0 actions=resubmit(,17)
+ cookie=0x8500000, duration=676.695s, table=48, n_packets=0, n_bytes=0, priority=0 actions=resubmit(,49),resubmit(,50)
+ cookie=0x8050001, duration=676.437s, table=50, n_packets=0, n_bytes=0, priority=10,reg4=0x1 actions=goto_table:51
+ cookie=0x8050000, duration=676.695s, table=50, n_packets=0, n_bytes=0, priority=0 actions=CONTROLLER:65535,learn(table=49,hard_timeout=10,priority=0,cookie=0x8600000,NXM_OF_ETH_SRC[],NXM_NX_REG1[0..19],load:0x1->NXM_NX_REG4[0..7]),goto_table:51
+ cookie=0x8030000, duration=676.437s, table=51, n_packets=0, n_bytes=0, priority=15,dl_dst=01:80:c2:00:00:00/ff:ff:ff:ff:ff:f0 actions=drop
+ cookie=0x8030000, duration=676.437s, table=51, n_packets=0, n_bytes=0, priority=0 actions=goto_table:52
+ cookie=0x6800000, duration=676.791s, table=60, n_packets=0, n_bytes=0, priority=0 actions=resubmit(,17)
+ cookie=0x1030000, duration=676.695s, table=80, n_packets=0, n_bytes=0, priority=0 actions=resubmit(,17)
+ cookie=0x8220000, duration=676.437s, table=81, n_packets=0, n_bytes=0, priority=0 actions=drop
+ cookie=0x4000001, duration=676.791s, table=90, n_packets=0, n_bytes=0, priority=0 actions=resubmit(,17)
+ cookie=0x6900000, duration=676.341s, table=210, n_packets=0, n_bytes=0, priority=63009,arp actions=drop
+ cookie=0x6900000, duration=676.341s, table=210, n_packets=0, n_bytes=0, priority=61009,ipv6 actions=drop
+ cookie=0x6900000, duration=676.341s, table=210, n_packets=0, n_bytes=0, priority=61009,ip actions=drop
+ cookie=0x6900000, duration=676.341s, table=210, n_packets=0, n_bytes=0, priority=0 actions=drop
+ cookie=0x6900000, duration=676.341s, table=211, n_packets=0, n_bytes=0, priority=100,tcp6 actions=write_metadata:0/0x2,goto_table:212
+ cookie=0x6900000, duration=676.341s, table=211, n_packets=0, n_bytes=0, priority=100,udp6 actions=write_metadata:0/0x2,goto_table:212
+ cookie=0x6900000, duration=676.341s, table=211, n_packets=0, n_bytes=0, priority=100,icmp actions=write_metadata:0/0x2,goto_table:212
+ cookie=0x6900000, duration=676.341s, table=211, n_packets=0, n_bytes=0, priority=100,tcp actions=write_metadata:0/0x2,goto_table:212
+ cookie=0x6900000, duration=676.341s, table=211, n_packets=0, n_bytes=0, priority=100,udp actions=write_metadata:0/0x2,goto_table:212
+ cookie=0x6900000, duration=676.341s, table=211, n_packets=0, n_bytes=0, priority=100,icmp6 actions=write_metadata:0/0x2,goto_table:212
+ cookie=0x6900000, duration=676.341s, table=211, n_packets=0, n_bytes=0, priority=0 actions=write_metadata:0x2/0x2,goto_table:214
+ cookie=0x6900000, duration=676.341s, table=212, n_packets=0, n_bytes=0, priority=0 actions=drop
+ cookie=0x6900000, duration=676.341s, table=213, n_packets=0, n_bytes=0, priority=0 actions=goto_table:214
+ cookie=0x6900000, duration=676.341s, table=214, n_packets=0, n_bytes=0, priority=62030,ct_state=-new-est+rel-inv+trk,ct_mark=0x1/0x1 actions=ct_clear,resubmit(,17)
+ cookie=0x6900000, duration=676.341s, table=214, n_packets=0, n_bytes=0, priority=62030,ct_state=-new+est-rel-inv+trk,ct_mark=0x1/0x1 actions=ct_clear,resubmit(,17)
+ cookie=0x6900000, duration=676.341s, table=214, n_packets=0, n_bytes=0, priority=0 actions=drop
+ cookie=0x6900000, duration=676.341s, table=215, n_packets=0, n_bytes=0, priority=0 actions=resubmit(,214)
+ cookie=0x6900000, duration=676.341s, table=216, n_packets=0, n_bytes=0, priority=0 actions=resubmit(,214)
+ cookie=0x6900000, duration=676.341s, table=217, n_packets=0, n_bytes=0, priority=0 actions=drop
+ cookie=0x8000007, duration=649.278s, table=220, n_packets=0, n_bytes=0, priority=9,reg6=0x200 actions=output:2
+ cookie=0x8000007, duration=596.202s, table=220, n_packets=0, n_bytes=0, priority=9,reg6=0x600 actions=output:3
+ cookie=0x6900000, duration=676.341s, table=239, n_packets=0, n_bytes=0, priority=100,ct_state=+trk,ipv6 actions=ct_clear,goto_table:240
+ cookie=0x6900000, duration=676.341s, table=239, n_packets=0, n_bytes=0, priority=100,ct_state=+trk,ip actions=ct_clear,goto_table:240
+ cookie=0x6900000, duration=676.341s, table=239, n_packets=0, n_bytes=0, priority=0 actions=goto_table:240
+ cookie=0x6900000, duration=676.341s, table=240, n_packets=0, n_bytes=0, priority=61010,ip,dl_dst=ff:ff:ff:ff:ff:ff,nw_dst=255.255.255.255 actions=goto_table:241
+ cookie=0x6900000, duration=676.341s, table=240, n_packets=0, n_bytes=0, priority=61005,dl_dst=ff:ff:ff:ff:ff:ff actions=resubmit(,220)
+ cookie=0x6900000, duration=676.341s, table=240, n_packets=0, n_bytes=0, priority=0 actions=drop
+ cookie=0x6900000, duration=676.341s, table=241, n_packets=0, n_bytes=0, priority=100,icmp actions=write_metadata:0/0x2,goto_table:242
+ cookie=0x6900000, duration=676.341s, table=241, n_packets=0, n_bytes=0, priority=100,tcp6 actions=write_metadata:0/0x2,goto_table:242
+ cookie=0x6900000, duration=676.341s, table=241, n_packets=0, n_bytes=0, priority=100,udp6 actions=write_metadata:0/0x2,goto_table:242
+ cookie=0x6900000, duration=676.341s, table=241, n_packets=0, n_bytes=0, priority=100,icmp6 actions=write_metadata:0/0x2,goto_table:242
+ cookie=0x6900000, duration=676.341s, table=241, n_packets=0, n_bytes=0, priority=100,tcp actions=write_metadata:0/0x2,goto_table:242
+ cookie=0x6900000, duration=676.341s, table=241, n_packets=0, n_bytes=0, priority=100,udp actions=write_metadata:0/0x2,goto_table:242
+ cookie=0x6900000, duration=676.341s, table=241, n_packets=0, n_bytes=0, priority=0 actions=write_metadata:0x2/0x2,goto_table:244
+ cookie=0x6900000, duration=676.341s, table=242, n_packets=0, n_bytes=0, priority=0 actions=drop
+ cookie=0x6900000, duration=676.341s, table=243, n_packets=0, n_bytes=0, priority=0 actions=goto_table:244
+ cookie=0x6900000, duration=676.341s, table=244, n_packets=0, n_bytes=0, priority=62030,ct_state=-new+est-rel-inv+trk,ct_mark=0x1/0x1 actions=ct_clear,resubmit(,220)
+ cookie=0x6900000, duration=676.341s, table=244, n_packets=0, n_bytes=0, priority=62030,ct_state=-new-est+rel-inv+trk,ct_mark=0x1/0x1 actions=ct_clear,resubmit(,220)
+ cookie=0x6900000, duration=676.341s, table=244, n_packets=0, n_bytes=0, priority=0 actions=drop
+ cookie=0x6900000, duration=676.341s, table=245, n_packets=0, n_bytes=0, priority=0 actions=resubmit(,244)
+ cookie=0x6900000, duration=676.341s, table=246, n_packets=0, n_bytes=0, priority=0 actions=resubmit(,244)
+ cookie=0x6900000, duration=676.341s, table=247, n_packets=0, n_bytes=0, priority=0 actions=drop
+[jenkins@releng-13754-435-2-devstack-pike-0 ~]>
\ No newline at end of file
diff --git a/resources/tools/odltools/ovs/tests/test_flows.py b/resources/tools/odltools/ovs/tests/test_flows.py
new file mode 100644 (file)
index 0000000..96e69ad
--- /dev/null
@@ -0,0 +1,24 @@
+import unittest
+from odltools import logg
+from ovs import flows
+from ovs import request
+
+
+class TestFlows(unittest.TestCase):
+    def setUp(self):
+        logg.Logger()
+        self.filename = "./resources/flow_dumps.3.txt"
+        self.data = request.read_file(self.filename)
+        self.flows = flows.Flows(self.data)
+
+    def test_process_data(self):
+        print "pretty_print:\n{}".format(self.flows.pretty_print(self.flows.pdata))
+
+    def test_format_data(self):
+        print "pretty_print:\n{}".format(self.flows.pretty_print(self.flows.fdata))
+
+    def test_write_file(self):
+        self.flows.write_fdata("/tmp/flow_dumps.3.out.txt")
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/resources/tools/odltools/ovs/tests/test_request.py b/resources/tools/odltools/ovs/tests/test_request.py
new file mode 100644 (file)
index 0000000..cec294c
--- /dev/null
@@ -0,0 +1,21 @@
+import unittest
+from odltools import logg
+from ovs import request
+
+
+class TestRequest(unittest.TestCase):
+    def setUp(self):
+        logg.Logger()
+        self.filename = "./resources/flow_dumps.1.txt"
+
+    def test_read_file(self):
+        data = request.read_file(self.filename)
+        self.assertEqual(len(data), 76)
+
+    def test_write_file(self):
+        data = request.read_file(self.filename)
+        self.assertEqual(len(data), 76)
+        request.write_file("/tmp/flow_dumps.1.out.txt", data)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/resources/tools/odltools/ovs/tests/test_tables.py b/resources/tools/odltools/ovs/tests/test_tables.py
new file mode 100644 (file)
index 0000000..018080c
--- /dev/null
@@ -0,0 +1,14 @@
+import unittest
+from odltools import logg
+from ovs import tables
+
+
+class TestTables(unittest.TestCase):
+    def setUp(self):
+        logg.Logger()
+
+    def test_get_table_name(self):
+        self.assertEqual(tables.get_table_name(17), "DISPATCHER")
+
+if __name__ == '__main__':
+    unittest.main()