#!/usr/bin/python

__author__ = "Jan Medved"
__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc."
__license__ = "New-style BSD"
__email__ = "jmedved@cisco.com"

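# Measures end-to-end flow programming performance against an OpenDaylight
# controller over RESTCONF: record a baseline flow count, blast flow adds from
# worker threads, wait for the operational inventory stats to catch up, then
# delete the flows (one by one or in bulk) and wait for the inventory to drain
# back to the baseline.
#
# Example invocation (hypothetical values; assumes a local controller with the
# default 'admin'/'admin' credentials when --auth is used):
#   ./flow_add_delete_test.py --cycles 2 --threads 5 --flows 1000 --auth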
import argparse
import time
from flow_config_blaster import FlowConfigBlaster, get_json_from_file
from inventory_crawler import InventoryCrawler
from config_cleanup import cleanup_config


if __name__ == "__main__":

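    # Built-in default flow template. The %d/%s placeholders (cookie, flow name,
    # hard timeout, flow ID, idle timeout, IPv4 destination) are expected to be
    # filled in per flow by FlowConfigBlaster; a custom template can be supplied
    # with --file instead.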
    JSON_FLOW_MOD1 = '''{
        "flow-node-inventory:flow": [
            {
                "flow-node-inventory:cookie": %d,
                "flow-node-inventory:cookie_mask": 65535,
                "flow-node-inventory:flow-name": "%s",
                "flow-node-inventory:hard-timeout": %d,
                "flow-node-inventory:id": "%s",
                "flow-node-inventory:idle-timeout": %d,
                "flow-node-inventory:installHw": false,
                "flow-node-inventory:instructions": {
                    "flow-node-inventory:instruction": [
                        {
                            "flow-node-inventory:apply-actions": {
                                "flow-node-inventory:action": [
                                    {
                                        "flow-node-inventory:drop-action": {},
                                        "flow-node-inventory:order": 0
                                    }
                                ]
                            },
                            "flow-node-inventory:order": 0
                        }
                    ]
                },
                "flow-node-inventory:match": {
                    "flow-node-inventory:ipv4-destination": "%s/32",
                    "flow-node-inventory:ethernet-match": {
                        "flow-node-inventory:ethernet-type": {
                            "flow-node-inventory:type": 2048
                        }
                    }
                },
                "flow-node-inventory:priority": 2,
                "flow-node-inventory:strict": false,
                "flow-node-inventory:table_id": 0
            }
        ]
    }'''

    parser = argparse.ArgumentParser(description='Flow programming performance test: first adds flows into the config '
                                                 'tree and then deletes them, as specified by optional parameters.')

    parser.add_argument('--host', default='127.0.0.1',
                        help='Host where the odl controller is running (default is 127.0.0.1)')
    parser.add_argument('--port', default='8181',
                        help='Port on which odl\'s RESTCONF is listening (default is 8181)')
    parser.add_argument('--cycles', type=int, default=1,
                        help='Number of flow add/delete cycles; default 1. Both Flow Adds and Flow Deletes are '
                             'performed in cycles. <THREADS> worker threads are started in each cycle and the cycle '
                             'ends when all threads finish. Another cycle is started when the previous cycle finishes.')
    parser.add_argument('--threads', type=int, default=1,
                        help='Number of request worker threads to start in each cycle; default=1. '
                             'Each thread will add/delete <FLOWS> flows.')
    parser.add_argument('--flows', type=int, default=10,
                        help='Number of flows that will be added/deleted by each worker thread in each cycle; '
                             'default 10')
    parser.add_argument('--nodes', type=int, default=16,
                        help='Number of nodes if mininet is not connected; default=16. If mininet is connected, '
                             'flows will be evenly distributed (programmed) into connected nodes.')
    parser.add_argument('--delay', type=int, default=2,
                        help='Time (in seconds) between inventory polls when waiting for stats to catch up; default=2')
    parser.add_argument('--timeout', type=int, default=100,
                        help='The maximum time (seconds) to wait between the add and delete cycles; default=100')
    parser.add_argument('--delete', dest='delete', action='store_true', default=True,
                        help='Delete all added flows one by one and benchmark delete performance; default=True')
    parser.add_argument('--bulk-delete', dest='bulk_delete', action='store_true', default=False,
                        help='Delete all flows in bulk; default=False')
    parser.add_argument('--auth', dest='auth', action='store_true',
                        help="Use authenticated access to REST (username: 'admin', password: 'admin'); default=False")
    parser.add_argument('--startflow', type=int, default=0,
                        help='The starting Flow ID; default=0')
    parser.add_argument('--file', default='',
                        help='File from which to read the JSON flow template; default: no file, use a built-in '
                             'template.')

    in_args = parser.parse_args()

    # Initialize
    if in_args.file != '':
        flow_template = get_json_from_file(in_args.file)
    else:
        flow_template = JSON_FLOW_MOD1

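    # Inventory crawler for the operational data store; it establishes the
    # baseline flow count and is reused below to poll for stats while flows
    # are added and deleted.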
    ic = InventoryCrawler(in_args.host, in_args.port, 0, 'operational', in_args.auth, False)

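    # The blaster issues the actual RESTCONF add/delete requests: <cycles> cycles
    # of <threads> worker threads, each programming <flows> flows starting at
    # --startflow, spread over <nodes> nodes.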
    fct = FlowConfigBlaster(in_args.host, in_args.port, in_args.cycles, in_args.threads, in_args.nodes,
                            in_args.flows, in_args.startflow, in_args.auth, flow_template)

    # Get baseline stats
    ic.crawl_inventory()
    reported = ic.reported_flows
    found = ic.found_flows

    print 'Baseline:'
    print '   Reported flows: %d' % reported
    print '   Found flows:    %d' % found

    # Run through <cycles>, where <threads> are started in each cycle and <flows> are added from each thread
    fct.add_blaster()

    print '\n*** Total flows added: %s' % fct.get_total_flows()
    print '    HTTP[OK] results:  %d\n' % fct.get_ok_flows()

    # Wait for stats to catch up
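    # Poll the operational inventory every <delay> seconds until the found flow
    # count reaches baseline + successful adds, or the timeout expires.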
    total_delay = 0
    exp_found = found + fct.get_ok_flows()
    exp_reported = reported + fct.get_ok_flows()

    print 'Waiting for stats to catch up:'
    while True:
        ic.crawl_inventory()
        print '   %d, %d' % (ic.reported_flows, ic.found_flows)
        if ic.found_flows == exp_found or total_delay > in_args.timeout:
            break
        total_delay += in_args.delay
        time.sleep(in_args.delay)

    if total_delay < in_args.timeout:
        print 'Stats collected in %d seconds.' % total_delay
    else:
        print 'Stats collection did not finish in %d seconds. Aborting...' % total_delay

    # Run through <cycles>, where <threads> are started in each cycle and <flows> previously added in an add cycle are
    # deleted in each thread
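    # Bulk delete removes all configured flows via cleanup_config(); otherwise
    # the blaster threads delete the flows they added one by one.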
    if in_args.bulk_delete:
        print '\nDeleting all flows in bulk:\n   ',
        cleanup_config(in_args.host, in_args.port, in_args.auth)
    else:
        print '\nDeleting flows one by one\n   ',
        fct.delete_blaster()

    # Wait for stats to catch up
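    # This time, poll until the found flow count drops back to the pre-test
    # baseline or the timeout expires.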
    total_delay = 0

    print '\nWaiting for stats to catch up:'
    while True:
        ic.crawl_inventory()
        if ic.found_flows == found or total_delay > in_args.timeout:
            break
        total_delay += in_args.delay
        print '   %d, %d' % (ic.reported_flows, ic.found_flows)
        time.sleep(in_args.delay)

    if total_delay < in_args.timeout:
        print 'Stats collected in %d seconds.' % total_delay
    else:
        print 'Stats collection did not finish in %d seconds. Aborting...' % total_delay