X-Git-Url: https://git.opendaylight.org/gerrit/gitweb?a=blobdiff_plain;f=test%2Ftools%2Fodl-mdsal-clustering-tests%2Fclustering-performance-test%2Finventory_crawler.py;h=2712314b399204085b16e983b18269e1a9c021ba;hb=072f6e3a8d1bdf8f4c663843589c22d93ba07791;hp=b69f901f1fd52f69015c1f1753a9318a5e9dfbca;hpb=74e4eaecc76a7e0838bfc1299ffefe2685b629af;p=integration%2Ftest.git

diff --git a/test/tools/odl-mdsal-clustering-tests/clustering-performance-test/inventory_crawler.py b/test/tools/odl-mdsal-clustering-tests/clustering-performance-test/inventory_crawler.py
old mode 100644
new mode 100755
index b69f901f1f..2712314b39
--- a/test/tools/odl-mdsal-clustering-tests/clustering-performance-test/inventory_crawler.py
+++ b/test/tools/odl-mdsal-clustering-tests/clustering-performance-test/inventory_crawler.py
@@ -27,7 +27,6 @@ class InventoryCrawler(object):
         self.auth = auth
         self.debug = debug
 
-
     def crawl_flows(self, flows):
         """
         Collects and prints summary information for all flows in a table
@@ -48,7 +47,6 @@ class InventoryCrawler(object):
                     print " Flow %s:" % f['id']
                     print s
 
-
     def crawl_table(self, table):
         """
         Collects and prints summary statistics information about a single table. Depending on the print level
@@ -78,7 +76,6 @@ class InventoryCrawler(object):
         except KeyError:
             pass
 
-
     def crawl_node(self, node):
         """
         Collects and prints summary information about a single node
@@ -106,11 +103,11 @@ class InventoryCrawler(object):
             if self.plevel > 1:
                 print ' Data for tables not available.'
 
-
     def crawl_inventory(self):
         """
         Collects and prints summary information about all openflow nodes in a data store (either operational or config)
         """
+        self.nodes = 0
         self.found_flows = 0
         self.reported_flows = 0
         self.table_stats_unavailable = 0
@@ -145,25 +142,21 @@
 
         s.close()
 
-
     def set_plevel(self, plevel):
         self.plevel = plevel
 
-
-
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(description='Restconf test program')
 
-    parser.add_argument('--odlhost', default='127.0.0.1', help='host where '
-                        'odl controller is running (default is 127.0.0.1)')
-    parser.add_argument('--odlport', default='8181', help='port on '
-                        'which odl\'s RESTCONF is listening (default is 8181)')
+    parser.add_argument('--host', default='127.0.0.1', help='host where '
+                        'the controller is running; default 127.0.0.1')
+    parser.add_argument('--port', default='8181', help='port on '
+                        'which odl\'s RESTCONF is listening; default 8181')
     parser.add_argument('--plevel', type=int, default=0,
                         help='Print Level: 0 - Summary (stats only); 1 - Node names; 2 - Node details;'
                              '3 - Flow details')
     parser.add_argument('--datastore', choices=['operational', 'config'],
-                        default='operational', help='Which data store to crawl; '
-                        'default operational')
+                        default='operational', help='Which data store to crawl; default operational')
    parser.add_argument('--no-auth', dest='auth', action='store_false', default=False,
                         help="Do not use authenticated access to REST (default)")
     parser.add_argument('--auth', dest='auth', action='store_true',
@@ -173,7 +166,7 @@ if __name__ == "__main__":
 
     in_args = parser.parse_args()
 
-    ic = InventoryCrawler(in_args.odlhost, in_args.odlport, in_args.plevel, in_args.datastore, in_args.auth,
+    ic = InventoryCrawler(in_args.host, in_args.port, in_args.plevel, in_args.datastore, in_args.auth,
                           in_args.debug)
 
     print "Crawling '%s'" % ic.url
@@ -189,5 +182,3 @@ if __name__ == "__main__":
     if n_missing > 0:
         print '\nMissing table stats (%d nodes):' % n_missing
         print "%s\n" % ", ".join([x for x in ic.table_stats_fails])
-
-
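
For reference, a sketch of how the crawler would be invoked after this option rename (an illustrative command only; the option values shown are simply the defaults declared above, and the script's print statements require a Python 2 interpreter):

    python inventory_crawler.py --host 127.0.0.1 --port 8181 --datastore operational --plevel 1 --auth

Because --auth and --no-auth are store_true/store_false actions sharing dest='auth', whichever appears last on the command line takes effect; with neither flag given, unauthenticated REST access is used, as the --no-auth help text states.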