- flow_add_delete_test.py:
Adds/deletes ("blasts") flows into ODL's config space. Similar to the
- flow_config_blaster (in fact, flow_config_blaster is used in this test),
- but has more advanced handling of the add/delete cycles. The test
- executes in three steps:
+ flow_config_blaster (in fact, the flow_config_blaster is used in this
+ test), but has more advanced handling of the add/delete cycles. The
+ test executes in three steps:
1. The specified number of flows is added in the 'add cycle' (uses
flow_config_blaster to blast flows)
2. The network is polled for flow statistics from the network
3. The flows are deleted in the 'delete cycle' (either in bulk using the
   config_cleanup script or one by one using the flow_config_blaster)
+Prerequisites:
+- requests
+- netaddr
The Inventory Crawler:
======================
def wait_for_stats(crawler, exp_found, timeout, delay):
+ """
+ Waits for the ODL stats manager to catch up. Polls ODL inventory every <delay> seconds and compares the
+ retrieved stats to the expected values. If stats collection has not finished within <timeout> seconds,
+ it gives up.
+
+ :param crawler: Inventory crawler object
+ :param exp_found: Expected value for flows found in the network
+ :param timeout: Max number of seconds to wait for stats collector to collect all stats
+ :param delay: poll interval for inventory
+ :return: None
+ """
total_delay = 0
print 'Waiting for stats to catch up:'
while True:
if __name__ == "__main__":
+ ########################################################################################
+ # This program executes an ODL performance test. The test is executed in three steps:
+ #
+ # 1. The specified number of flows is added in the 'add cycle' (uses flow_config_blaster to blast flows)
+ #
+ # 2. The network is polled for flow statistics from the network (using the inventory_crawler) to make sure
+ # that all flows have been properly programmed into the network and the ODL statistics collector can
+ # properly read them
+ #
+ # 3. The flows are deleted in the flow cycle. Deletion happens either in 'bulk' (using the config_cleanup)
+ # script or one by one (using the flow_config_blaster 'delete' method)
+ ########################################################################################
JSON_FLOW_MOD1 = '''{
"flow-node-inventory:flow": [
fct = FlowConfigBlaster(in_args.host, in_args.port, in_args.cycles, in_args.threads, in_args.nodes,
in_args.flows, in_args.startflow, in_args.auth, flow_template)
- # Get baseline stats
+ # Get the baseline stats. Required in Step 3 to validate if the delete function gets the controller back to
+ # the baseline
ic.crawl_inventory()
reported = ic.reported_flows
found = ic.found_flows
print ' Reported nodes: %d' % reported
print ' Found nodes: %d' % found
- # Run through <cycles>, where <threads> are started in each cycle and <flows> are added from each thread
+ # Run through <CYCLES> add cycles, where <THREADS> threads are started in each cycle and <FLOWS> flows are
+ # added from each thread
fct.add_blaster()
print '\n*** Total flows added: %s' % fct.get_total_flows()
# Wait for stats to catch up
wait_for_stats(ic, found + fct.get_ok_flows(), in_args.timeout, in_args.delay)
- # Run through <cycles>, where <threads> are started in each cycle and <flows> previously added in an add cycle are
- # deleted in each thread
+ # Run through <CYCLES> delete cycles, where <THREADS> threads are started in each cycle and <FLOWS> flows
+ # previously added in an add cycle are deleted in each thread
if in_args.bulk_delete:
print '\nDeleting all flows in bulk:'
sts = cleanup_config_odl(in_args.host, in_args.port, in_args.auth)
print '\nDeleting flows one by one\n ',
fct.delete_blaster()
- # Wait for stats to catch up
+ # Wait for stats to catch up back to baseline
wait_for_stats(ic, found, in_args.timeout, in_args.delay)
def run_cycle(self, function):
"""
Runs an add or delete cycle. Starts a number of worker threads that each add a bunch of flows. Work is done
- in context of the worker threads
+ in context of the worker threads.
"""
for c in range(self.ncycles):
with self.print_lock:
print ' Total success rate: %.2f, Total rate: %.2f' % (
self.ok_rate.value, self.total_rate.value)
- measured_rate = self.nthreads * self.nflows * self.ncycles / t.secs
+ measured_rate = (self.nthreads * self.nflows) / t.secs
print ' Measured rate: %.2f (%.2f%% of Total success rate)' % \
(measured_rate, measured_rate / self.total_rate.value * 100)
self.threads_done = 0
def get_json_from_file(filename):
+ """
+ Get a flow programming template from a file
+ :param filename: File from which to get the template
+ :return: The json flow template (string)
+ """
with open(filename, 'r') as f:
read_data = f.read()
return read_data
if __name__ == "__main__":
-
JSON_FLOW_MOD1 = '''{
"flow-node-inventory:flow": [
{
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Restconf test program')
- parser.add_argument('--odlhost', default='127.0.0.1', help='host where '
+ parser.add_argument('--host', default='127.0.0.1', help='host where '
'odl controller is running (default is 127.0.0.1)')
- parser.add_argument('--odlport', default='8181', help='port on '
+ parser.add_argument('--port', default='8181', help='port on '
'which odl\'s RESTCONF is listening (default is 8181)')
parser.add_argument('--plevel', type=int, default=0,
help='Print Level: 0 - Summary (stats only); 1 - Node names; 2 - Node details;'
in_args = parser.parse_args()
- ic = InventoryCrawler(in_args.odlhost, in_args.odlport, in_args.plevel, in_args.datastore, in_args.auth,
+ ic = InventoryCrawler(in_args.host, in_args.port, in_args.plevel, in_args.datastore, in_args.auth,
in_args.debug)
print "Crawling '%s'" % ic.url