Move files around to fit pypi better
[netvirt.git] / resources/tools/odltools/odltools/ovs/flows.py
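"""Parse and pretty-print ovs-ofctl dump-flows output."""
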
import logging
import re
from pprint import pformat

import request
import tables

logger = logging.getLogger("ovs.flows")


# TODO:
# metadata decoder
# mac to port
# REG6 decoder
# group decoder
# curl -s -u admin:admin -X GET 127.0.0.1:8080/restconf/operational/odl-l3vpn:learnt-vpn-vip-to-port-data
# - check if external ip is resolved, devstack uses port 8087
class Flows:
    COOKIE = "cookie"
    DURATION = "duration"
    TABLE = "table"
    N_PACKETS = "n_packets"
    N_BYTES = "n_bytes"
    MATCHES = "matches"
    ACTIONS = "actions"
    IDLE_TIMEOUT = "idle_timeout"
    SEND_FLOW_REMOVED = "send_flow_rem"
    PRIORITY = "priority"
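    # named regex groups used by re_table() to pull the destination table id
    # out of goto_table:<n> and resubmit(,<n>) actions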
    GOTO = "goto"
    RESUBMIT = "resubmit"

    def __init__(self, data):
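        """Accept dump-flows output either as one string or as a list of lines."""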
        self.pdata = []
        self.fdata = []
        if isinstance(data, str):
            self.data = data.splitlines()
        elif isinstance(data, list):
            self.data = data
        else:
            logger.error("init: data is not a supported type")
            return
        self.start = 0
        logger.info("init: Copied %d lines", len(self.data))
        self.process_data()
        self.format_data()
        logger.info("init: data has been processed and formatted")

    def pretty_print(self, data):
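        """Return data pretty-printed with pprint.pformat."""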
        return pformat(data)

    def process_data(self):
        """
        Process the dump-flows data into a map.

        Tokenize each line of the flow dump into key=value pairs.

        :return: A list of dictionaries of parsed tokens, one per flow
        """
        # cookie=0x805138a, duration=193.107s, table=50, n_packets=119, n_bytes=11504, idle_timeout=300,
        #  send_flow_rem priority=20,metadata=0x2138a000000/0xfffffffff000000,dl_src=fa:16:3e:15:a8:66
        #  actions=goto_table:51

        self.pdata = []
        if len(self.data) == 0:
            logger.warning("There is no data to process")
            return self.pdata

        # skip the header line if present
        if "OFPST_FLOW" in self.data[0]:
            self.start = 1
            logger.debug("process_data: will skip first line: OFPST_FLOW line")
        else:
            self.start = 0
        if "jenkins" in self.data[-1]:
            end = len(self.data) - 1
            logger.debug("process_data: will skip last line: jenkins line")
        else:
            end = len(self.data)

        # Parse each line of the data. Each line is a single flow.
        # Create a dictionary of all tokens in that flow.
        # Append this flow dictionary to a list of flows.
        for line in self.data[self.start:end]:
            pline = {}
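            # defaults for optional fields so format_data() can rely on them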
            pline[Flows.IDLE_TIMEOUT] = "---"
            pline[Flows.SEND_FLOW_REMOVED] = "-"
            tokens = line.split(" ")
            for token in tokens:
                # most tokens are key=value so look for that pattern
                splits = token.split("=", 1)
                if len(splits) == 2:
                    if Flows.PRIORITY in splits[0]:
                        splitp = splits[1].split(",", 1)
                        if len(splitp) == 2:
                            pline[Flows.PRIORITY] = splitp[0]
                            pline[Flows.MATCHES] = splitp[1]
                        else:
                            pline[Flows.PRIORITY] = splitp[0]
                            pline[Flows.MATCHES] = ""
                    else:
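                        # plain key=value token; strip the trailing comma separator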
                        pline[splits[0]] = splits[1].rstrip(",")
                elif token == Flows.SEND_FLOW_REMOVED:
                    # send_flow_rem is a single token without a value
                    pline[token] = token
            self.pdata.append(pline)
            logger.debug("process_data: Processed line %d into:\n%s",
                         self.start + len(self.pdata), pformat(pline))
        logger.info("process_data: Processed %d lines, skipped %d", len(self.pdata),
                    self.start + len(self.data) - end)

        return self.pdata

    def re_table(self, match):
        """
        Regex substitution callback that appends the table name to
        goto_table and resubmit actions.

        :param match: The regex match
        :return: The matched text with the table name appended
        :rtype: str
        """
        if match.group(Flows.GOTO) is not None:
            table_id = int(match.group(Flows.GOTO))
        elif match.group(Flows.RESUBMIT) is not None:
            table_id = int(match.group(Flows.RESUBMIT))
        else:
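            # 256 is outside the valid OpenFlow table-id range (0-255),
            # used here as a "no table" sentinel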
            table_id = 256

        rep = "{}({})".format(match.group(), tables.get_table_name(table_id))
        return rep

    def format_data(self):
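        """
        Format the parsed flow data into columnar, human-readable lines.

        :return: A list of formatted lines, starting with a two-line header
        """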
        if len(self.pdata) == 0:
            logger.warning("There is no data to process")
            return self.pdata
        header = "{:3} {:9} {:8} {:13}     {:6} {:12} {:1} {:3} {:5}\n" \
                 "    {}\n" \
                 "    {}\n" \
            .format("nnn", Flows.COOKIE, Flows.DURATION, Flows.TABLE, "n_pack", Flows.N_BYTES,
                    "S", "ito", "prio",
                    Flows.MATCHES,
                    Flows.ACTIONS)
        header_under = "--- --------- -------- -------------     ------ ------------ - --- -----\n"

        # Match goto_table:nnn or resubmit(,nnn) and capture the table id
        # in the goto or resubmit named group
        re_gt = re.compile(r"goto_table:(?P<goto>\d{1,3})|"
                           r"resubmit\(,(?P<resubmit>\d{1,3})\)")

        # Add the header as the first two lines of formatted data
        self.fdata = [header, header_under]

        # Format each line of parsed data
        for i, line in enumerate(self.pdata):
            logger.debug("format_data: processing line %d: %s", self.start + i + 1, line)

            if Flows.ACTIONS in line:
                nactions = re_gt.sub(self.re_table, line[Flows.ACTIONS])
            else:
                logger.warning("Missing actions in %s", line)
                nactions = ""

            fline = "{:3} {:9} {:8} {:3} {:13} {:6} {:12} {:1} {:3} {:5}\n" \
                    "    matches={}\n" \
                    "    actions={}\n" \
                .format(i + 1, line[Flows.COOKIE], line[Flows.DURATION],
                        line[Flows.TABLE], tables.get_table_name(int(line[Flows.TABLE])),
                        line[Flows.N_PACKETS], line[Flows.N_BYTES],
                        line[Flows.SEND_FLOW_REMOVED][0], line[Flows.IDLE_TIMEOUT],
                        line[Flows.PRIORITY],
                        line[Flows.MATCHES],
                        nactions)
            self.fdata.append(fline)
            logger.debug("format_data: formatted line %d: %s", self.start + i + 1, fline)
        return self.fdata

    def write_fdata(self, filename):
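        """Write the formatted flow data to the given file."""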
        request.write_file(filename, self.fdata)
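

# Example usage, as a minimal sketch ("dump.txt" is a hypothetical file
# holding the output of "ovs-ofctl dump-flows br-int"):
#
#     with open("dump.txt") as f:
#         flows = Flows(f.read())
#     print(flows.pretty_print(flows.pdata))
#     flows.write_fdata("flows.formatted.txt")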