# Migrate GET request invocations (libraries)
# Source: integration/test.git — tools/odl-mdsal-clustering-tests/clustering-performance-test/inventory_crawler.py
1 #!/usr/bin/python
2 import argparse
3 import requests
4 import re
5 import json
6
7
8 __author__ = "Jan Medved"
9 __copyright__ = "Copyright(c) 2014, Cisco Systems, Inc."
10 __license__ = "New-style BSD"
11 __email__ = "jmedved@cisco.com"
12
13
class InventoryCrawler(object):
    """Walks the opendaylight-inventory RESTCONF tree and accumulates
    summary statistics: node, table and flow counts, plus the list of
    nodes whose table statistics were unavailable.
    """

    # Class-level defaults kept for backward compatibility; per-instance
    # copies are created in __init__ so instances never share mutable
    # state (a shared class-level list is a classic Python pitfall).
    reported_flows = 0
    found_flows = 0
    nodes = 0

    INVENTORY_URL = "restconf/%s/opendaylight-inventory:nodes"
    hdr = {"Accept": "application/json"}
    OK, ERROR = range(2)
    table_stats_unavailable = 0
    table_stats_fails = []

    def __init__(self, host, port, plevel, datastore, auth, debug):
        """
        :param host: controller host name or IP address
        :param port: RESTCONF port (as a string)
        :param plevel: print verbosity, 0 (summary only) .. 3 (flow details)
        :param datastore: 'operational' or 'config'
        :param auth: use basic auth ('admin'/'admin') when True
        :param debug: stored for callers; not used by the crawler itself
        """
        self.url = "http://" + host + ":" + port + "/" + self.INVENTORY_URL % datastore
        self.plevel = plevel
        self.auth = auth
        self.debug = debug
        # Per-instance state; also reset at the start of crawl_inventory().
        self.reported_flows = 0
        self.found_flows = 0
        self.nodes = 0
        self.table_stats_unavailable = 0
        self.table_stats_fails = []

    def crawl_flows(self, flows):
        """
        Collects and prints summary information for all flows in a table.

        :param flows: list of flow dicts as returned by RESTCONF
        """
        self.found_flows += len(flows)
        if self.plevel > 1:
            print("             Flows found: %d\n" % len(flows))
            if self.plevel > 2:
                for f in flows:
                    s = json.dumps(f, sort_keys=True, indent=4, separators=(",", ": "))
                    # Strip the outermost JSON braces and re-indent the
                    # remaining lines for aligned console output.
                    s = s.strip()
                    s = s.lstrip("{")
                    s = s.rstrip("}")
                    s = s.replace("\n", "\n            ")
                    s = s.lstrip("\n")
                    print("             Flow %s:" % (f["id"]))
                    print(s)

    def crawl_table(self, table):
        """
        Collects and prints summary statistics information about a single
        table. Depending on the print level (plevel), it also invokes
        crawl_flows for the table's flows.

        :param table: table dict as returned by RESTCONF
        """
        try:
            stats = table["opendaylight-flow-table-statistics:flow-table-statistics"]
            active_flows = int(stats["active-flows"])

            if active_flows > 0:
                self.reported_flows += active_flows
                if self.plevel > 1:
                    print("        Table %s:" % table["id"])
                    s = json.dumps(
                        stats, sort_keys=True, indent=12, separators=(",", ": ")
                    )
                    s = s.replace("{\n", "")
                    s = s.replace("}", "")
                    print(s)
        except KeyError:
            # Table statistics are optional in the inventory model; count
            # the miss so crawl_node can record the offending node.
            if self.plevel > 1:
                print("        Stats for Table '%s' not available." % (table["id"]))
            self.table_stats_unavailable += 1

        try:
            self.crawl_flows(table["flow"])
        except KeyError:
            # No flows present in this table; nothing to count.
            pass

    def crawl_node(self, node):
        """
        Collects and prints summary information about a single node.

        :param node: node dict as returned by RESTCONF
        """
        self.table_stats_unavailable = 0
        self.nodes += 1

        if self.plevel > 1:
            print("\nNode '%s':" % ((node["id"])))
        elif self.plevel > 0:
            print("%s" % ((node["id"])))

        try:
            tables = node["flow-node-inventory:table"]
            if self.plevel > 1:
                print("    Tables: %d" % len(tables))

            for t in tables:
                self.crawl_table(t)

            if self.table_stats_unavailable > 0:
                self.table_stats_fails.append(node["id"])

        except KeyError:
            if self.plevel > 1:
                print("    Data for tables not available.")

    def crawl_inventory(self):
        """
        Collects and prints summary information about all openflow nodes
        in a data store (either operational or config).
        """
        self.nodes = 0
        self.found_flows = 0
        self.reported_flows = 0
        self.table_stats_unavailable = 0
        self.table_stats_fails = []

        s = requests.Session()
        try:
            if not self.auth:
                r = s.get(self.url, headers=self.hdr, stream=False)
            else:
                r = s.get(
                    self.url, headers=self.hdr, stream=False, auth=("admin", "admin")
                )

            if r.status_code == 200:
                try:
                    inv = json.loads(r.content)["nodes"]["node"]
                    # Keep only openflow nodes, sorted by their numeric id.
                    sinv = [n for n in inv if re.search("openflow", n["id"]) is not None]
                    sinv = sorted(
                        sinv, key=lambda k: int(re.findall(r"\d+", k["id"])[0])
                    )

                    for node in sinv:
                        try:
                            self.crawl_node(node)
                        except Exception:
                            print("Can not crawl %s" % node["id"])

                except KeyError:
                    print("Could not retrieve inventory, response not in JSON format")
            else:
                print("Could not retrieve inventory, HTTP error %d" % r.status_code)
        finally:
            # Always release the session, even if the GET itself raises.
            s.close()

    def set_plevel(self, plevel):
        """Changes the print verbosity level for subsequent crawls."""
        self.plevel = plevel
151
if __name__ == "__main__":
    # Command-line front end: parse options, run a single inventory crawl
    # and print the aggregate statistics.
    parser = argparse.ArgumentParser(description="Restconf test program")
    parser.add_argument(
        "--host",
        default="127.0.0.1",
        help="host where the controller is running; default 127.0.0.1",
    )
    parser.add_argument(
        "--port",
        default="8181",
        help="port on which odl's RESTCONF is listening; default 8181",
    )
    parser.add_argument(
        "--plevel",
        type=int,
        default=0,
        help="Print Level: 0 - Summary (stats only); 1 - Node names; 2 - Node details;3 - Flow details",
    )
    parser.add_argument(
        "--datastore",
        choices=["operational", "config"],
        default="operational",
        help="Which data store to crawl; default operational",
    )
    parser.add_argument(
        "--no-auth",
        dest="auth",
        action="store_false",
        default=False,
        help="Do not use authenticated access to REST (default)",
    )
    parser.add_argument(
        "--auth",
        dest="auth",
        action="store_true",
        help="Use authenticated access to REST (username: 'admin', password: 'admin').",
    )
    parser.add_argument(
        "--debug",
        dest="debug",
        action="store_true",
        default=False,
        help="List nodes that have not provided proper statistics data",
    )

    args = parser.parse_args()

    crawler = InventoryCrawler(
        args.host, args.port, args.plevel, args.datastore, args.auth, args.debug
    )

    print("Crawling '%s'" % (crawler.url))
    crawler.crawl_inventory()

    # Aggregate report.
    print("\nTotals:")
    print("    Nodes:          %d" % crawler.nodes)
    print("    Reported flows: %d" % crawler.reported_flows)
    print("    Found flows:    %d" % crawler.found_flows)

    if args.debug:
        missing = len(crawler.table_stats_fails)
        if missing > 0:
            print("\nMissing table stats (%d nodes):" % missing)
            print("%s\n" % (", ".join(crawler.table_stats_fails)))