Merge "check-in scripts (passed by Jan Medved) related to datastore performance testi...
[integration/test.git] / test / tools / odl-mdsal-clustering-tests / clustering-performance-test / inventory_crawler.py
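"""Crawl the opendaylight-inventory RESTCONF tree and count nodes and flows.

Example:
    python inventory_crawler.py --odlhost 127.0.0.1 --odlport 8080 --plevel 1
"""
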
__author__ = "Jan Medved"
__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc."
__license__ = "New-style BSD"
__email__ = "jmedved@cisco.com"

import argparse
import json
import re

import requests

# The '%s' placeholder selects the datastore ('operational' or 'config')
INVENTORY_URL = 'restconf/%s/opendaylight-inventory:nodes'
hdr = {'Accept': 'application/json'}

# Inventory totals
reported_flows = 0  # flows claimed by the tables' flow statistics
found_flows = 0     # flows actually present in the tables' flow lists
nodes = 0


def crawl_flows(flows):
    """Count the flows in a table's flow list; print them at plevel > 2."""
    global found_flows

    found_flows += len(flows)
    if in_args.plevel > 1:
        print '             Flows found: %d' % len(flows)
        if in_args.plevel > 2:
            for f in flows:
                s = json.dumps(f, sort_keys=True, indent=4, separators=(',', ': '))
                # Strip the outer braces and re-indent for nicer printing
                s = s.strip().lstrip('{').rstrip('}')
                s = s.replace('\n', '\n            ').lstrip('\n')
                print "             Flow %s:" % f['flow-node-inventory:id']
                print s


def crawl_table(table):
    """Add the table's reported flow count to the totals and crawl its flows."""
    global reported_flows

    try:
        stats = table['opendaylight-flow-table-statistics:flow-table-statistics']
        active_flows = stats['opendaylight-flow-table-statistics:active-flows']

        if active_flows > 0:
            reported_flows += active_flows
            if in_args.plevel > 1:
                print '        Table %s:' % table['flow-node-inventory:id']
                s = json.dumps(stats, sort_keys=True, indent=12, separators=(',', ': '))
                s = s.replace('{\n', '')
                s = s.replace('}', '')
                print s
    except KeyError:
        print "        Stats for Table '%s' not available." % \
              table['flow-node-inventory:id']

    try:
        flows_in_table = table['flow-node-inventory:flow']
        crawl_flows(flows_in_table)
    except KeyError:
        pass


def crawl_node(node):
    """Count the node and crawl all of its flow tables."""
    global nodes
    nodes += 1
    if in_args.plevel > 1:
        print "\nNode '%s':" % node['id']
    elif in_args.plevel > 0:
        print "%s" % node['id']

    try:
        tables = node['flow-node-inventory:table']
        if in_args.plevel > 1:
            print '    Tables: %d' % len(tables)

        for t in tables:
            crawl_table(t)
    except KeyError:
        print '    Data for tables not available.'


def crawl_inventory(url):
    """GET the inventory from RESTCONF and crawl every openflow node in it."""
    s = requests.Session()
    r = s.get(url, headers=hdr, stream=False)

    if r.status_code == 200:
        try:
            inv = json.loads(r.content)['nodes']['node']
            sinv = [node for node in inv
                    if re.search('openflow', node['id']) is not None]

            # Sort nodes by the numeric part of their id (e.g. 'openflow:17' -> 17)
            try:
                sinv = sorted(sinv, key=lambda k: int(re.findall(r'\d+', k['id'])[0]))
            except (IndexError, ValueError):
                print 'Could not sort nodes by numeric id; crawling them unsorted'

            for node in sinv:
                crawl_node(node)

        except (ValueError, KeyError):
            print 'Could not retrieve inventory, response not in expected JSON format'
    else:
        print 'Could not retrieve inventory, HTTP error %d' % r.status_code


if __name__ == "__main__":

    parser = argparse.ArgumentParser(description='Restconf test program')
    parser.add_argument('--odlhost', default='127.0.0.1', help='host where '
                        'odl controller is running (default is 127.0.0.1)')
    parser.add_argument('--odlport', default='8080', help='port on '
                        "which odl's RESTCONF is listening (default is 8080)")
    parser.add_argument('--plevel', type=int, default=0, help='Print level: '
                        '0 - Summary (just stats); 1 - Node names; 2 - Node details; '
                        '3 - Flow details')
    parser.add_argument('--datastore', choices=['operational', 'config'],
                        default='operational', help='Which data store to crawl; '
                        'default operational')

    in_args = parser.parse_args()

    url = 'http://' + in_args.odlhost + ":" + in_args.odlport + '/' + \
          INVENTORY_URL % in_args.datastore
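    # e.g. http://127.0.0.1:8080/restconf/operational/opendaylight-inventory:nodes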

    print "Crawling '%s'" % url

    crawl_inventory(url)

    print '\nTotals:'
    print '    Nodes:          %d' % nodes
    print '    Reported flows: %d' % reported_flows
    print '    Found flows:    %d' % found_flows