# Module metadata for this OpenDaylight inventory-crawling script
# (the crawler queries restconf/.../opendaylight-inventory:nodes below).
__author__ = "Jan Medved"
__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc."
__license__ = "New-style BSD"
__email__ = "jmedved@cisco.com"
class InventoryCrawler(object):
    """Crawl the OpenDaylight inventory over RESTCONF and print table/flow statistics.

    NOTE(review): this view of the file is elided — several statements
    (loop/conditional headers, try/except frames, docstring delimiters)
    are missing between the visible lines.  The code is documented as-is;
    elision points are marked with NOTE(review) comments below.
    """

    # RESTCONF path template; '%s' is filled with the datastore name
    # ('operational' or 'config') in __init__.
    INVENTORY_URL = 'restconf/%s/opendaylight-inventory:nodes'
    # Request JSON responses from the controller.
    hdr = {'Accept': 'application/json'}
    # Count of tables whose statistics were missing for the current node.
    table_stats_unavailable = 0
    # Node ids for which table statistics were unavailable (class-level
    # mutable — shared across instances; reset in crawl_inventory).
    table_stats_fails = []

    def __init__(self, host, port, plevel, datastore, auth, debug):
        # Build the full inventory URL for the chosen datastore.
        # NOTE(review): plevel/auth/debug are presumably stored on self by
        # elided statements — confirm against the full file.
        self.url = 'http://' + host + ":" + port + '/' + self.INVENTORY_URL % datastore

    # Collect and print summary information for all flows in one table.
    def crawl_flows(self, flows):
        # NOTE(review): docstring delimiters elided around the next line.
        Collects and prints summary information for all flows in a table
        self.found_flows += len(flows)
        # NOTE(review): a print-level guard is elided before this line.
        print ' Flows found: %d\n' % len(flows)
        # NOTE(review): 'f' is bound by a 'for f in flows:' loop elided here.
        s = json.dumps(f, sort_keys=True, indent=4, separators=(',', ': '))
        # s = s.replace('{\n', '')
        # s = s.replace('}', '')
        # Re-indent the pretty-printed JSON so it nests under the flow header.
        s = s.replace('\n', '\n ')
        print " Flow %s:" % f['id']

    # Collect and print summary statistics for a single table; depending on
    # the print level (plevel) it also invokes crawl_flows.
    def crawl_table(self, table):
        # NOTE(review): docstring delimiters elided around the next two lines.
        Collects and prints summary statistics information about a single table. Depending on the print level
        (plevel), it also invokes the crawl_flows
        # NOTE(review): a try frame is elided here — the stats key may be
        # absent, handled by the 'not available' branch below.
        stats = table['opendaylight-flow-table-statistics:flow-table-statistics']
        active_flows = int(stats['active-flows'])
        self.reported_flows += active_flows
        print ' Table %s:' % table['id']
        # Pretty-print the stats block, stripping the outer JSON braces.
        s = json.dumps(stats, sort_keys=True, indent=12, separators=(',', ': '))
        s = s.replace('{\n', '')
        s = s.replace('}', '')
        # NOTE(review): the following two lines belong to an elided
        # except/KeyError branch in the original file.
        print " Stats for Table '%s' not available." % table['id']
        self.table_stats_unavailable += 1
        # NOTE(review): a plevel guard and try frame are elided here; tables
        # without a 'flow' list are presumably skipped.
        flows_in_table = table['flow']
        self.crawl_flows(flows_in_table)

    # Collect and print summary information about a single openflow node.
    def crawl_node(self, node):
        # NOTE(review): docstring delimiters elided around the next line.
        Collects and prints summary information about a single node
        # Reset the per-node counter before crawling this node's tables.
        self.table_stats_unavailable = 0
        # NOTE(review): plevel guards elided — one of these two prints is
        # selected by print level in the original file.
        print "\nNode '%s':" % (node['id'])
        print "%s" % (node['id'])
        # NOTE(review): a try frame is elided; nodes without tables fall
        # through to the 'Data for tables not available' branch below.
        tables = node['flow-node-inventory:table']
        print ' Tables: %d' % len(tables)
        # NOTE(review): the per-table crawl loop is elided here.
        if self.table_stats_unavailable > 0:
            self.table_stats_fails.append(node['id'])
        print ' Data for tables not available.'

    # Crawl every openflow node in the configured datastore and accumulate
    # the summary counters (reported_flows, found_flows, table_stats_fails).
    def crawl_inventory(self):
        # NOTE(review): docstring delimiters elided around the next line.
        Collects and prints summary information about all openflow nodes in a data store (either operational or config)
        # Reset accumulated statistics before each crawl.
        self.reported_flows = 0
        self.table_stats_unavailable = 0
        self.table_stats_fails = []
        s = requests.Session()
        # NOTE(review): an 'if not self.auth:'/'else:' pair is elided —
        # exactly one of these two GETs runs per crawl.
        r = s.get(self.url, headers=self.hdr, stream=False)
        r = s.get(self.url, headers=self.hdr, stream=False, auth=('admin', 'admin'))
        if r.status_code == 200:
            # NOTE(review): a try frame (JSON decode) is elided around this
            # branch; decode failures reach the 'not in JSON format' print.
            inv = json.loads(r.content)['nodes']['node']
            # Keep only openflow nodes (ids matching 'openflow').
            for n in range(len(inv)):
                if re.search('openflow', inv[n]['id']) is not None:
            # Sort nodes numerically by the first integer in their id,
            # e.g. 'openflow:10' sorts after 'openflow:2'.
            # NOTE(review): 'sinv' is built by elided append statements above.
            sinv = sorted(sinv, key=lambda k: int(re.findall('\d+', k['id'])[0]))
            for n in range(len(sinv)):
                self.crawl_node(sinv[n])
                # NOTE(review): elided except branch for a node that fails.
                print 'Can not crawl %s' % sinv[n]['id']
            # NOTE(review): elided 'except ValueError' branch.
            print 'Could not retrieve inventory, response not in JSON format'
        # NOTE(review): elided 'else:' — non-200 responses land here.
        print 'Could not retrieve inventory, HTTP error %d' % r.status_code

    # Setter for the print level; body elided in this view.
    def set_plevel(self, plevel):
if __name__ == "__main__":
    # Command-line driver: parse options, crawl the chosen datastore's
    # inventory, and print a summary.  NOTE(review): several statements are
    # elided in this view — the closing argument of the --plevel help call,
    # the tail of the InventoryCrawler constructor call, the actual
    # ic.crawl_inventory() invocation, and the guard around the
    # missing-table-stats report.
    parser = argparse.ArgumentParser(description='Restconf test program')
    parser.add_argument('--host', default='127.0.0.1', help='host where '
                        'the controller is running; default 127.0.0.1')
    parser.add_argument('--port', default='8181', help='port on '
                        'which odl\'s RESTCONF is listening; default 8181')
    parser.add_argument('--plevel', type=int, default=0,
                        help='Print Level: 0 - Summary (stats only); 1 - Node names; 2 - Node details;'
    parser.add_argument('--datastore', choices=['operational', 'config'],
                        default='operational', help='Which data store to crawl; default operational')
    # --no-auth / --auth share dest='auth'; unauthenticated is the default.
    parser.add_argument('--no-auth', dest='auth', action='store_false', default=False,
                        help="Do not use authenticated access to REST (default)")
    parser.add_argument('--auth', dest='auth', action='store_true',
                        help="Use authenticated access to REST (username: 'admin', password: 'admin').")
    parser.add_argument('--debug', dest='debug', action='store_true', default=False,
                        help="List nodes that have not provided proper statistics data")

    in_args = parser.parse_args()

    # NOTE(review): constructor call is truncated here in this view
    # (in_args.debug and the closing paren are elided).
    ic = InventoryCrawler(in_args.host, in_args.port, in_args.plevel, in_args.datastore, in_args.auth,

    print "Crawling '%s'" % ic.url
    # Summary counters accumulated by crawl_inventory().
    print ' Nodes: %d' % ic.nodes
    print ' Reported flows: %d' % ic.reported_flows
    print ' Found flows: %d' % ic.found_flows

    # Report nodes whose table statistics were unavailable.
    # NOTE(review): an 'if n_missing > 0:' (or debug) guard is elided
    # before the two prints below.
    n_missing = len(ic.table_stats_fails)
    print '\nMissing table stats (%d nodes):' % n_missing
    print "%s\n" % ", ".join([x for x in ic.table_stats_fails])