found_flows = 0
nodes = 0
- INVENTORY_URL = 'restconf/%s/opendaylight-inventory:nodes'
- hdr = {'Accept': 'application/json'}
+ INVENTORY_URL = "restconf/%s/opendaylight-inventory:nodes"
+ hdr = {"Accept": "application/json"}
OK, ERROR = range(2)
table_stats_unavailable = 0
table_stats_fails = []
def __init__(self, host, port, plevel, datastore, auth, debug):
- self.url = 'http://' + host + ":" + port + '/' + self.INVENTORY_URL % datastore
+ self.url = "http://" + host + ":" + port + "/" + self.INVENTORY_URL % datastore
self.plevel = plevel
self.auth = auth
self.debug = debug
"""
self.found_flows += len(flows)
if self.plevel > 1:
- print(' Flows found: %d\n' % len(flows))
+ print(" Flows found: %d\n" % len(flows))
if self.plevel > 2:
for f in flows:
- s = json.dumps(f, sort_keys=True, indent=4, separators=(',', ': '))
+ s = json.dumps(f, sort_keys=True, indent=4, separators=(",", ": "))
# s = s.replace('{\n', '')
# s = s.replace('}', '')
s = s.strip()
- s = s.lstrip('{')
- s = s.rstrip('}')
- s = s.replace('\n', '\n ')
- s = s.lstrip('\n')
- print(" Flow %s:" % (f['id']))
+ s = s.lstrip("{")
+ s = s.rstrip("}")
+ s = s.replace("\n", "\n ")
+ s = s.lstrip("\n")
+ print(" Flow %s:" % (f["id"]))
print(s)
def crawl_table(self, table):
(plevel), it also invokes the crawl_flows
"""
try:
- stats = table['opendaylight-flow-table-statistics:flow-table-statistics']
- active_flows = int(stats['active-flows'])
+ stats = table["opendaylight-flow-table-statistics:flow-table-statistics"]
+ active_flows = int(stats["active-flows"])
if active_flows > 0:
self.reported_flows += active_flows
if self.plevel > 1:
- print(' Table %s:' % table['id'])
- s = json.dumps(stats, sort_keys=True, indent=12, separators=(',', ': '))
- s = s.replace('{\n', '')
- s = s.replace('}', '')
+ print(" Table %s:" % table["id"])
+ s = json.dumps(
+ stats, sort_keys=True, indent=12, separators=(",", ": ")
+ )
+ s = s.replace("{\n", "")
+ s = s.replace("}", "")
print(s)
except KeyError:
if self.plevel > 1:
- print(" Stats for Table '%s' not available." % (table['id']))
+ print(" Stats for Table '%s' not available." % (table["id"]))
self.table_stats_unavailable += 1
pass
try:
- flows_in_table = table['flow']
+ flows_in_table = table["flow"]
self.crawl_flows(flows_in_table)
except KeyError:
pass
self.nodes += 1
if self.plevel > 1:
- print("\nNode '%s':" % ((node['id'])))
+ print("\nNode '%s':" % ((node["id"])))
elif self.plevel > 0:
- print("%s" % ((node['id'])))
+ print("%s" % ((node["id"])))
try:
- tables = node['flow-node-inventory:table']
+ tables = node["flow-node-inventory:table"]
if self.plevel > 1:
- print(' Tables: %d' % len(tables))
+ print(" Tables: %d" % len(tables))
for t in tables:
self.crawl_table(t)
if self.table_stats_unavailable > 0:
- self.table_stats_fails.append(node['id'])
+ self.table_stats_fails.append(node["id"])
except KeyError:
if self.plevel > 1:
- print(' Data for tables not available.')
+ print(" Data for tables not available.")
def crawl_inventory(self):
"""
if not self.auth:
r = s.get(self.url, headers=self.hdr, stream=False)
else:
- r = s.get(self.url, headers=self.hdr, stream=False, auth=('admin', 'admin'))
+ r = s.get(self.url, headers=self.hdr, stream=False, auth=("admin", "admin"))
if r.status_code == 200:
try:
- inv = json.loads(r.content)['nodes']['node']
+ inv = json.loads(r.content)["nodes"]["node"]
sinv = []
for n in range(len(inv)):
- if re.search('openflow', inv[n]['id']) is not None:
+ if re.search("openflow", inv[n]["id"]) is not None:
sinv.append(inv[n])
- sinv = sorted(sinv, key=lambda k: int(re.findall('\d+', k['id'])[0]))
+        sinv = sorted(sinv, key=lambda k: int(re.findall(r"\d+", k["id"])[0]))
for n in range(len(sinv)):
try:
self.crawl_node(sinv[n])
except Exception:
- print('Can not crawl %s' % sinv[n]['id'])
+ print("Can not crawl %s" % sinv[n]["id"])
except KeyError:
- print('Could not retrieve inventory, response not in JSON format')
+ print("Could not retrieve inventory, response not in JSON format")
else:
- print('Could not retrieve inventory, HTTP error %d' % r.status_code)
+ print("Could not retrieve inventory, HTTP error %d" % r.status_code)
s.close()
if __name__ == "__main__":
- parser = argparse.ArgumentParser(description='Restconf test program')
- parser.add_argument('--host', default='127.0.0.1', help='host where '
- 'the controller is running; default 127.0.0.1')
- parser.add_argument('--port', default='8181', help='port on '
- 'which odl\'s RESTCONF is listening; default 8181')
- parser.add_argument('--plevel', type=int, default=0,
- help='Print Level: 0 - Summary (stats only); 1 - Node names; 2 - Node details;'
- '3 - Flow details')
- parser.add_argument('--datastore', choices=['operational', 'config'],
- default='operational', help='Which data store to crawl; default operational')
- parser.add_argument('--no-auth', dest='auth', action='store_false', default=False,
- help="Do not use authenticated access to REST (default)")
- parser.add_argument('--auth', dest='auth', action='store_true',
- help="Use authenticated access to REST (username: 'admin', password: 'admin').")
- parser.add_argument('--debug', dest='debug', action='store_true', default=False,
- help="List nodes that have not provided proper statistics data")
+ parser = argparse.ArgumentParser(description="Restconf test program")
+ parser.add_argument(
+ "--host",
+ default="127.0.0.1",
+ help="host where " "the controller is running; default 127.0.0.1",
+ )
+ parser.add_argument(
+ "--port",
+ default="8181",
+ help="port on " "which odl's RESTCONF is listening; default 8181",
+ )
+ parser.add_argument(
+ "--plevel",
+ type=int,
+ default=0,
+ help="Print Level: 0 - Summary (stats only); 1 - Node names; 2 - Node details;"
+ "3 - Flow details",
+ )
+ parser.add_argument(
+ "--datastore",
+ choices=["operational", "config"],
+ default="operational",
+ help="Which data store to crawl; default operational",
+ )
+ parser.add_argument(
+ "--no-auth",
+ dest="auth",
+ action="store_false",
+ default=False,
+ help="Do not use authenticated access to REST (default)",
+ )
+ parser.add_argument(
+ "--auth",
+ dest="auth",
+ action="store_true",
+ help="Use authenticated access to REST (username: 'admin', password: 'admin').",
+ )
+ parser.add_argument(
+ "--debug",
+ dest="debug",
+ action="store_true",
+ default=False,
+ help="List nodes that have not provided proper statistics data",
+ )
in_args = parser.parse_args()
- ic = InventoryCrawler(in_args.host, in_args.port, in_args.plevel, in_args.datastore, in_args.auth,
- in_args.debug)
+ ic = InventoryCrawler(
+ in_args.host,
+ in_args.port,
+ in_args.plevel,
+ in_args.datastore,
+ in_args.auth,
+ in_args.debug,
+ )
print("Crawling '%s'" % (ic.url))
ic.crawl_inventory()
- print('\nTotals:')
- print(' Nodes: %d' % ic.nodes)
- print(' Reported flows: %d' % ic.reported_flows)
- print(' Found flows: %d' % ic.found_flows)
+ print("\nTotals:")
+ print(" Nodes: %d" % ic.nodes)
+ print(" Reported flows: %d" % ic.reported_flows)
+ print(" Found flows: %d" % ic.found_flows)
if in_args.debug:
n_missing = len(ic.table_stats_fails)
if n_missing > 0:
- print('\nMissing table stats (%d nodes):' % n_missing)
+ print("\nMissing table stats (%d nodes):" % n_missing)
print("%s\n" % (", ".join([x for x in ic.table_stats_fails])))