from random import randrange
import json
import argparse
-import requests
import time
import threading
import re
+
+import requests
import netaddr
self.ip_addr = Counter(int(netaddr.IPAddress('10.0.0.1')) + startflow)
-
self.print_lock = threading.Lock()
self.cond = threading.Condition()
self.threads_done = 0
for i in range(self.nthreads):
self.flows[i] = {}
-
def get_num_nodes(self, session):
"""
Determines the number of OF nodes in the connected mininet network. If mininet is not connected, the default
return nodes
-
- def add_flow(self, session, tid, node, flow_id, ipaddr):
+ def add_flow(self, session, node, flow_id, ipaddr):
"""
Adds a single flow to the config data store via REST
"""
- flow_data = self.json_template % (tid + flow_id, 'TestFlow-%d' % flow_id, 65000,
- str(flow_id), 65000, str(netaddr.IPAddress(ipaddr)))
+ flow_data = self.json_template % (flow_id, 'TestFlow-%d' % flow_id, 65000, str(flow_id), 65000,
+ str(netaddr.IPAddress(ipaddr)))
# print flow_data
flow_url = self.url_template % (node, flow_id)
# print flow_url
return r.status_code
-
def add_flows(self, start_flow, tid):
"""
Adds flows into the ODL config space. This function is executed by a worker thread
node_id = randrange(1, n_nodes + 1)
flow_id = tid * (self.ncycles * self.nflows) + flow + start_flow + self.startflow
self.flows[tid][flow_id] = node_id
- sts = self.add_flow(s, tid, node_id, flow_id, self.ip_addr.increment())
+ sts = self.add_flow(s, node_id, flow_id, self.ip_addr.increment())
try:
add_res[sts] += 1
except KeyError:
with self.cond:
self.cond.notifyAll()
-
def delete_flow(self, session, node, flow_id):
"""
Deletes a single flow from the ODL config data store via REST
-
- :param session:
- :param url_template:
- :param node:
- :param flow_id:
- :return:
"""
flow_url = self.url_template % (node, flow_id)
return r.status_code
-
def delete_flows(self, start_flow, tid):
"""
        Deletes flows from the ODL config space that have been added using the 'add_flows()' function. This function is
with self.cond:
self.cond.notifyAll()
-
def run_cycle(self, function):
"""
Runs an add or delete cycle. Starts a number of worker threads that each add a bunch of flows. Work is done
- in context of the worker threads
+ in the context of the worker threads.
"""
for c in range(self.ncycles):
with self.print_lock:
print ' Total success rate: %.2f, Total rate: %.2f' % (
- self.ok_rate.value, self.total_rate.value)
- measured_rate = self.nthreads * self.nflows * self.ncycles / t.secs
+ self.ok_rate.value, self.total_rate.value)
+ measured_rate = (self.nthreads * self.nflows) / t.secs
print ' Measured rate: %.2f (%.2f%% of Total success rate)' % \
(measured_rate, measured_rate / self.total_rate.value * 100)
+ print ' Measured time: %.2fs' % t.secs
self.threads_done = 0
self.ok_rate.value = 0
self.total_rate.value = 0
-
def add_blaster(self):
self.run_cycle(self.add_flows)
def get_json_from_file(filename):
+ """
+ Get a flow programming template from a file
+ :param filename: File from which to get the template
+ :return: The json flow template (string)
+ """
with open(filename, 'r') as f:
read_data = f.read()
return read_data
if __name__ == "__main__":
-
JSON_FLOW_MOD1 = '''{
"flow-node-inventory:flow": [
{
"flow-node-inventory:cookie": %d,
- "flow-node-inventory:cookie_mask": 65535,
+ "flow-node-inventory:cookie_mask": 4294967295,
"flow-node-inventory:flow-name": "%s",
"flow-node-inventory:hard-timeout": %d,
"flow-node-inventory:id": "%s",