:return: None
"""
total_delay = 0
- print('Waiting for stats to catch up:')
+ print("Waiting for stats to catch up:")
with Timer() as t:
while True:
crawler.crawl_inventory()
- print(' %d, %d' % (crawler.reported_flows, crawler.found_flows))
+ print(" %d, %d" % (crawler.reported_flows, crawler.found_flows))
if crawler.found_flows == exp_found or total_delay > timeout:
break
total_delay += delay
time.sleep(delay)
if total_delay < timeout:
- print('Stats collected in %d seconds.' % t.secs)
+ print("Stats collected in %d seconds." % t.secs)
else:
- print('Stats collection did not finish in %d seconds. Aborting...' % total_delay)
+ print(
+ "Stats collection did not finish in %d seconds. Aborting..." % total_delay
+ )
if __name__ == "__main__":
# flow_config_blaster 'delete' method)
############################################################################
- parser = argparse.ArgumentParser(description='Flow programming performance test: First adds and then deletes flows '
- 'into the config tree, as specified by optional parameters.')
-
- parser.add_argument('--host', default='127.0.0.1',
- help='Host where odl controller is running (default is 127.0.0.1)')
- parser.add_argument('--port', default='8181',
- help='Port on which odl\'s RESTCONF is listening (default is 8181)')
- parser.add_argument('--cycles', type=int, default=1,
- help='Number of flow add/delete cycles; default 1. Both Flow Adds and Flow Deletes are '
- 'performed in cycles. <THREADS> worker threads are started in each cycle and the cycle '
- 'ends when all threads finish. Another cycle is started when the previous cycle finished.')
- parser.add_argument('--threads', type=int, default=1,
- help='Number of request worker threads to start in each cycle; default=1. '
- 'Each thread will add/delete <FLOWS> flows.')
- parser.add_argument('--flows', type=int, default=10,
- help='Number of flows that will be added/deleted by each worker thread in each cycle; '
- 'default 10')
- parser.add_argument('--fpr', type=int, default=1,
- help='Flows-per-Request - number of flows (batch size) sent in each HTTP request; '
- 'default 1')
- parser.add_argument('--delay', type=int, default=2,
- help='Time (seconds) to between inventory polls when waiting for stats to catch up; default=1')
- parser.add_argument('--timeout', type=int, default=100,
- help='The maximum time (seconds) to wait between the add and delete cycles; default=100')
- parser.add_argument('--delete', dest='delete', action='store_true', default=True,
- help='Delete all added flows one by one, benchmark delete '
- 'performance.')
- parser.add_argument('--bulk-delete', dest='bulk_delete', action='store_true', default=False,
- help='Delete all flows in bulk; default=False')
- parser.add_argument('--auth', dest='auth', action='store_true',
- help="Use authenticated access to REST (username: 'admin', password: 'admin'); default=False")
- parser.add_argument('--startflow', type=int, default=0,
- help='The starting Flow ID; default=0')
- parser.add_argument('--file', default='',
- help='File from which to read the JSON flow template; default: no file, use a built in '
- 'template.')
+ parser = argparse.ArgumentParser(
+ description="Flow programming performance test: First adds and then deletes flows "
+ "into the config tree, as specified by optional parameters."
+ )
+
+ parser.add_argument(
+ "--host",
+ default="127.0.0.1",
+ help="Host where odl controller is running (default is 127.0.0.1)",
+ )
+ parser.add_argument(
+ "--port",
+ default="8181",
+ help="Port on which odl's RESTCONF is listening (default is 8181)",
+ )
+ parser.add_argument(
+ "--cycles",
+ type=int,
+ default=1,
+ help="Number of flow add/delete cycles; default 1. Both Flow Adds and Flow Deletes are "
+ "performed in cycles. <THREADS> worker threads are started in each cycle and the cycle "
+ "ends when all threads finish. Another cycle is started when the previous cycle finished.",
+ )
+ parser.add_argument(
+ "--threads",
+ type=int,
+ default=1,
+ help="Number of request worker threads to start in each cycle; default=1. "
+ "Each thread will add/delete <FLOWS> flows.",
+ )
+ parser.add_argument(
+ "--flows",
+ type=int,
+ default=10,
+ help="Number of flows that will be added/deleted by each worker thread in each cycle; "
+ "default 10",
+ )
+ parser.add_argument(
+ "--fpr",
+ type=int,
+ default=1,
+ help="Flows-per-Request - number of flows (batch size) sent in each HTTP request; "
+ "default 1",
+ )
+ parser.add_argument(
+ "--delay",
+ type=int,
+ default=2,
+ help="Time (seconds) between inventory polls when waiting for stats to catch up; default=2",
+ )
+ parser.add_argument(
+ "--timeout",
+ type=int,
+ default=100,
+ help="The maximum time (seconds) to wait between the add and delete cycles; default=100",
+ )
+ parser.add_argument(
+ "--delete",
+ dest="delete",
+ action="store_true",
+ default=True,
+ help="Delete all added flows one by one, benchmark delete performance.",
+ )
+ parser.add_argument(
+ "--bulk-delete",
+ dest="bulk_delete",
+ action="store_true",
+ default=False,
+ help="Delete all flows in bulk; default=False",
+ )
+ parser.add_argument(
+ "--auth",
+ dest="auth",
+ action="store_true",
+ help="Use authenticated access to REST (username: 'admin', password: 'admin'); default=False",
+ )
+ parser.add_argument(
+ "--startflow", type=int, default=0, help="The starting Flow ID; default=0"
+ )
+ parser.add_argument(
+ "--file",
+ default="",
+ help="File from which to read the JSON flow template; default: no file, use a built in "
+ "template.",
+ )
in_args = parser.parse_args()
# Initialize
- if in_args.file != '':
+ if in_args.file != "":
flow_template = get_json_from_file(in_args.file)
else:
flow_template = None
- ic = InventoryCrawler(in_args.host, in_args.port, 0, 'operational', in_args.auth, False)
-
- fct = FlowConfigBlaster(in_args.host, in_args.port, in_args.cycles, in_args.threads, in_args.fpr,
- 16, in_args.flows, in_args.startflow, in_args.auth)
+ ic = InventoryCrawler(
+ in_args.host, in_args.port, 0, "operational", in_args.auth, False
+ )
+
+ fct = FlowConfigBlaster(
+ in_args.host,
+ in_args.port,
+ in_args.cycles,
+ in_args.threads,
+ in_args.fpr,
+ 16,
+ in_args.flows,
+ in_args.startflow,
+ in_args.auth,
+ )
# Get the baseline stats. Required in Step 3 to validate if the delete
# function gets the controller back to the baseline
ic.crawl_inventory()
reported = ic.reported_flows
found = ic.found_flows
- print('Baseline:')
- print(' Reported flows: %d' % reported)
- print(' Found flows: %d' % found)
+ print("Baseline:")
+ print(" Reported flows: %d" % reported)
+ print(" Found flows: %d" % found)
# Run through <CYCLES> add cycles, where <THREADS> threads are started in
# each cycle and <FLOWS> flows are added from each thread
fct.add_blaster()
- print('\n*** Total flows added: %d' % fct.get_ok_flows())
- print(' HTTP[OK] results: %d\n' % fct.get_ok_rqsts())
+ print("\n*** Total flows added: %d" % fct.get_ok_flows())
+ print(" HTTP[OK] results: %d\n" % fct.get_ok_rqsts())
# Wait for stats to catch up
wait_for_stats(ic, found + fct.get_ok_flows(), in_args.timeout, in_args.delay)
# in each cycle and <FLOWS> flows previously added in an add cycle are
# deleted in each thread
if in_args.bulk_delete:
- print('\nDeleting all flows in bulk:')
+ print("\nDeleting all flows in bulk:")
sts = cleanup_config_odl(in_args.host, in_args.port, in_args.auth)
if sts != 200:
- print(' Failed to delete flows, code %d' % sts)
+ print(" Failed to delete flows, code %d" % sts)
else:
- print(' All flows deleted.')
+ print(" All flows deleted.")
else:
- print('\nDeleting flows one by one\n ',)
+ print("\nDeleting flows one by one\n ")
fct.delete_blaster()
- print('\n*** Total flows deleted: %d' % fct.get_ok_flows())
- print(' HTTP[OK] results: %d\n' % fct.get_ok_rqsts())
+ print("\n*** Total flows deleted: %d" % fct.get_ok_flows())
+ print(" HTTP[OK] results: %d\n" % fct.get_ok_rqsts())
# Wait for stats to catch up back to baseline
wait_for_stats(ic, found, in_args.timeout, in_args.delay)