import argparse
import requests
+import sys
+getheaders = {'Accept': 'application/json'}
-def cleanup_config(host, port, auth):
- CONFIGURL = 'restconf/config/opendaylight-inventory:nodes'
- getheaders = {'Accept': 'application/json'}
+# Clear all static flows on a Floodlight controller via its
+# staticflowentrypusher 'clear all' REST endpoint. Returns the raw
+# HTTP status code; the caller treats 204 as success for Floodlight.
+def cleanup_config_fl(host, port):
+ global getheaders
- url = 'http://' + host + ":" + port + '/' + CONFIGURL
- s = requests.Session()
+ # NOTE(review): the clear endpoint is invoked with GET here --
+ # confirm against the Floodlight StaticFlowEntryPusher API docs.
+ url = 'http://' + host + ":" + port + '/wm/staticflowentrypusher/clear/all/json'
+ r = requests.get(url, headers=getheaders)
+ return r.status_code
- if not auth:
- r = s.delete(url, headers=getheaders)
- else:
- r = s.delete(url, headers=getheaders, auth=('admin', 'admin'))
- s.close()
+# Delete all nodes from the OpenDaylight config datastore
+# (restconf/config/opendaylight-inventory:nodes), i.e. remove every
+# configured flow. Returns the raw HTTP status code; the caller
+# treats 200 as success for ODL.
+def cleanup_config_odl(host, port, auth):
+ global getheaders
+
+ url = 'http://' + host + ":" + port + '/restconf/config/opendaylight-inventory:nodes'
- if r.status_code != 200:
- print 'Failed to delete nodes in the config space, code %d' % r.status_code
+ # When auth is requested, use ODL's default admin/admin credentials
+ # (matches the --auth help text in __main__).
+ if not auth:
+ r = requests.delete(url, headers=getheaders)
 else:
- print 'Nodes in config space deleted.'
+ r = requests.delete(url, headers=getheaders, auth=('admin', 'admin'))
+
+ return r.status_code
if __name__ == "__main__":
parser.add_argument('--auth', dest='auth', action='store_true', default=False,
help="Use authenticated access to REST "
"(username: 'admin', password: 'admin').")
+ parser.add_argument('--controller', choices=['odl', 'floodlight'], default='odl',
+ help='Controller type (ODL or Floodlight); default odl (OpenDaylight)')
in_args = parser.parse_args()
- cleanup_config(in_args.host, in_args.port, in_args.auth)
+
+ if in_args.controller == 'odl':
+ sts = cleanup_config_odl(in_args.host, in_args.port, in_args.auth)
+ exp = 200
+ elif in_args.controller == 'floodlight':
+ sts = cleanup_config_fl(in_args.host, in_args.port)
+ exp = 204
+ else:
+ print 'Unknown controller type'
+ sys.exit(-1)
+
+ if sts != exp:
+ print 'Failed to delete nodes in the config space, code %d' % sts
+ else:
+ print 'Nodes in config space deleted.'
+
import time
from flow_config_blaster import FlowConfigBlaster, get_json_from_file
from inventory_crawler import InventoryCrawler
-from config_cleanup import cleanup_config
+from config_cleanup import cleanup_config_odl
+
+
+# Poll crawler.crawl_inventory() every <delay> seconds until the crawler
+# sees exactly exp_found flows or roughly <timeout> seconds of delay have
+# accumulated. Prints progress on each poll; returns nothing.
+def wait_for_stats(crawler, exp_found, timeout, delay):
+ total_delay = 0
+ print 'Waiting for stats to catch up:'
+ while True:
+ crawler.crawl_inventory()
+ print ' %d, %d' % (crawler.reported_flows, crawler.found_flows)
+ # Stop on an exact flow-count match, or once the accumulated
+ # delay has exceeded the timeout.
+ if crawler.found_flows == exp_found or total_delay > timeout:
+ break
+ total_delay += delay
+ time.sleep(delay)
+
+ # NOTE(review): success is inferred from elapsed time (total_delay <
+ # timeout), not from found_flows -- a match that lands with
+ # total_delay == timeout would be reported as a failure; confirm
+ # whether that edge case matters.
+ if total_delay < timeout:
+ print 'Stats collected in %d seconds.' % total_delay
+ else:
+ print 'Stats collection did not finish in %d seconds. Aborting...' % total_delay
if __name__ == "__main__":
"flow-node-inventory:flow": [
{
"flow-node-inventory:cookie": %d,
- "flow-node-inventory:cookie_mask": 65535,
+ "flow-node-inventory:cookie_mask": 4294967295,
"flow-node-inventory:flow-name": "%s",
"flow-node-inventory:hard-timeout": %d,
"flow-node-inventory:id": "%s",
print ' HTTP[OK] results: %d\n' % fct.get_ok_flows()
# Wait for stats to catch up
- total_delay = 0
- exp_found = found + fct.get_ok_flows()
- exp_reported = reported + fct.get_ok_flows()
-
- print 'Waiting for stats to catch up:'
- while True:
- ic.crawl_inventory()
- print ' %d, %d' % (ic.reported_flows, ic.found_flows)
- if ic.found_flows == exp_found or total_delay > in_args.timeout:
- break
- total_delay += in_args.delay
- time.sleep(in_args.delay)
-
- if total_delay < in_args.timeout:
- print 'Stats collected in %d seconds.' % total_delay
- else:
- print 'Stats collection did not finish in %d seconds. Aborting...' % total_delay
+ wait_for_stats(ic, found + fct.get_ok_flows(), in_args.timeout, in_args.delay)
# Run through <cycles>, where <threads> are started in each cycle and <flows> previously added in an add cycle are
# deleted in each thread
if in_args.bulk_delete:
- print '\nDeleting all flows in bulk:\n ',
- cleanup_config(in_args.host, in_args.port, in_args.auth)
+ print '\nDeleting all flows in bulk:'
+ sts = cleanup_config_odl(in_args.host, in_args.port, in_args.auth)
+ if sts != 200:
+ print ' Failed to delete flows, code %d' % sts
+ else:
+ print ' All flows deleted.'
else:
print '\nDeleting flows one by one\n ',
fct.delete_blaster()
# Wait for stats to catch up
- total_delay = 0
-
- print '\nWaiting for stats to catch up:'
- while True:
- ic.crawl_inventory()
- if ic.found_flows == found or total_delay > in_args.timeout:
- break
- total_delay += in_args.delay
- print ' %d, %d' % (ic.reported_flows, ic.found_flows)
- time.sleep(in_args.delay)
-
- if total_delay < in_args.timeout:
- print 'Stats collected in %d seconds.' % total_delay
- else:
- print 'Stats collection did not finish in %d seconds. Aborting...' % total_delay
+ wait_for_stats(ic, found, in_args.timeout, in_args.delay)