2 # -*- coding: utf-8 -*-
4 # @License EPL-1.0 <http://spdx.org/licenses/EPL-1.0>
5 ##############################################################################
6 # Copyright (c) 2017 Raghuram Vadapalli, Jaspreet Singh and others.
8 # All rights reserved. This program and the accompanying materials
9 # are made available under the terms of the Eclipse Public License v1.0
10 # which accompanies this distribution, and is available at
11 # http://www.eclipse.org/legal/epl-v10.html
12 ##############################################################################
15 This script is used to parse logs, construct JSON BODY and push
18 Usage: python construct_json.py host:port
20 JSON body similar to following is \
21 constructed from robot files, jenkins environment
22 and plot files available in workspace available post-build.
24     "project": "opendaylight", <- fixed string for ODL project
25     "subject": "test", <- fixed string for ODL test
26 "test-type": "performance", <- if there are csv files, \
27 otherwise "functional"
28 "jenkins-silo": "releng" <- from Jenkins $SILO
29 "test-name": "openflowplugin-csit-1node-periodic \
30 -bulkomatic-perf-daily-only-carbon", <- from Jenkins $JOB_NAME
31 "test-run": 289, <- from Jenkins $BUILD_NUMBER
32 "start-time": "20170612 16:50:04 GMT-07:00", <- from robot log
33 "duration": "00:01:05.942", <- from robot log
34 "pass-tests": 9, <- from robot log
35 "fail-tests": 0, <- from robot log
37 "rate": { <- csv filename
38 "Config DS": 5816.99726601, <- from csv file
39 "OVS Switch": 5757.05238918, <- from csv file
40 "Operational DS": 2654.49139945 <- from csv file
42 "time": { <- csv filename
43 "Config DS": 17.191, <- from csv file
44 "OVS Switch": 17.37, <- from csv file
45 "Operational DS": 37.672 <- from csv file
52 from datetime import datetime
58 import xml.etree.ElementTree as ET
61 from elasticsearch import Elasticsearch, RequestsHttpConnection, exceptions
65 import generate_visState as vis_gen
66 import generate_dashVis as dash_gen
# NOTE(review): the enclosing helper header (e.g. 'def p(x):') is elided in
# this extract; the print below is the body of a small pretty-print helper.
70 print(json.dumps(x, indent=6, sort_keys=True))
71 # ELK DB host and port to be passed as ':' separated argument
75 if ':' in sys.argv[1]:
76 ELK_DB_HOST = sys.argv[1].split(':')[0]
77 ELK_DB_PORT = sys.argv[1].split(':')[1]
# NOTE(review): an 'else:' branch appears to be elided here — the usage
# message below should run only when the argument lacks a ':' (and the
# script presumably exits afterwards; confirm against the full file).
79 print('Usage: python push_to_elk.py host:port')
80 print('Unable to publish data to ELK. Exiting.')
# Build the Elasticsearch client from the parsed host/port.
# NOTE(review): the 'try:' and 'es = Elasticsearch(' opener lines are elided;
# these two lines are keyword arguments of that constructor call.
89 hosts=[{'host': ELK_DB_HOST, 'port': int(ELK_DB_PORT)}],
91 connection_class=RequestsHttpConnection
93 except Exception as e:
94 print('Unexpected Error Occurred. Exiting')
# Document timestamp, formatted as ISO-8601 with a literal 'Z' suffix.
# NOTE(review): the assignment target (presumably 'formatted_ts = ...') and
# the origin of 'ts' are elided above — verify against the full file.
100 datetime.fromtimestamp(ts).strftime('%Y-%m-%dT%H:%M:%S.%fZ')
101 BODY['@timestamp'] = formatted_ts
103 # Plots are obtained from csv files ( in archives directory in $WORKSPACE).
105 csv_files = glob.glob('archives/*.csv')
106 BODY['project'] = 'opendaylight'
107 BODY['subject'] = 'test'
109 # If there are no csv files, then it is a functional test.
110 # Parse csv files and fill performance parameter values
112 if len(csv_files) == 0:
113 BODY['test-type'] = 'functional'
# NOTE(review): the 'else:' line, a 'BODY[\'plots\'] = {}' initializer and the
# 'for f in csv_files:' loop header appear elided between these statements.
115 BODY['test-type'] = 'performance'
# Plot name = csv basename without its 4-char '.csv' extension.
118 key = (f.split('/')[-1])[:-4]
119 BODY['plots'][key] = {}
120 with open(f) as file:
121 lines = file.readlines()
# Row 0 holds the metric names, row 1 the values; exactly three
# columns are read per file — TODO confirm all csvs have 3 columns.
122 props = lines[0].strip().split(',')
123 vals = lines[1].strip().split(',')
124 BODY['plots'][key][props[0]] = float(vals[0])
125 BODY['plots'][key][props[1]] = float(vals[1])
126 BODY['plots'][key][props[2]] = float(vals[2])
128 # Fill the required parameters whose values are obtained from environment.
130 BODY['jenkins-silo'] = os.environ['SILO']
131 BODY['test-name'] = os.environ['JOB_NAME']
132 BODY['test-run'] = int(os.environ['BUILD_NUMBER'])
134 # Parsing robot log for stats on start-time, pass/fail tests and duration.
136 robot_log = os.environ['WORKSPACE'] + '/output.xml'
137 tree = ET.parse(robot_log)
# Unique document id: "<job-name>-<build-number>".
138 BODY['id'] = '{}-{}'.format(os.environ['JOB_NAME'],
139 os.environ['BUILD_NUMBER'])
140 BODY['start-time'] = tree.getroot().attrib['generated']
# NOTE(review): [0][1] indexes into the <statistics> element by position —
# assumes the second child of the first stats group holds the overall
# pass/fail totals; verify against the Robot output.xml schema in use.
141 BODY['pass-tests'] = int(tree.getroot().find('statistics')[0][1].get('pass'))
142 BODY['fail-tests'] = int(tree.getroot().find('statistics')[0][1].get('fail'))
143 endtime = tree.getroot().find('suite').find('status').get('endtime')
144 starttime = tree.getroot().find('suite').find('status').get('starttime')
# Duration = suite end minus start, parsed with Robot's timestamp format.
145 elap_time = datetime.strptime(endtime, '%Y%m%d %H:%M:%S.%f') \
146 - datetime.strptime(starttime, '%Y%m%d %H:%M:%S.%f')
147 BODY['duration'] = str(elap_time)
# Re-wrap BODY so the document is keyed by its test type, e.g.
# {'type': 'performance', 'performance': {...original BODY...}}.
# NOTE(review): the dict-literal opener (presumably "BODY = {") is elided
# above these two entries — confirm against the full file.
150 'type': BODY['test-type'],
151 BODY['test-type']: BODY
154 print(json.dumps(BODY, indent=4))
156 # Try to send request to ELK DB.
# NOTE(review): the 'try:' header for the except on line 165 is elided.
# Index name is "<project>-<subject>" (e.g. "opendaylight-test").
159 index = '{}-{}'.format(BODY[BODY['type']]['project'],
160 BODY[BODY['type']]['subject'])
# Document id: "<type>:<test-name>-<test-run>".
161 ES_ID = '{}:{}-{}'.format(BODY['type'], BODY[BODY['type']]
162 ['test-name'], BODY[BODY['type']]['test-run'])
163 res = es.index(index=index, doc_type='doc', id=ES_ID, body=BODY)
164 print(json.dumps(res, indent=4))
165 except Exception as e:
167 print('Unable to push data to ElasticSearch')
171 # Function to convert JSON object to string.
172 # Python puts 'true' as 'True' etc. which need handling.
def JSONToString(jobj):
    """Convert a Python object (typically a dict) to a JSON-style string.

    ``str()`` on a dict emits Python literals — single quotes, ``True``,
    ``False``, ``None`` and spaces after ``:``/``,`` — which are not valid
    JSON; rewrite each into its JSON equivalent.

    Bug fix: the visible body used ``retval`` before assignment and never
    returned — restore ``retval = str(jobj)`` and the final ``return``.

    :param jobj: object to serialize (usually a nested dict of str keys).
    :return: JSON-compatible string representation of ``jobj``.
    """
    retval = str(jobj)
    retval = retval.replace('\'', '"')
    retval = retval.replace(': ', ':')
    retval = retval.replace(', ', ',')
    retval = retval.replace('True', 'true')
    retval = retval.replace('False', 'false')
    retval = retval.replace('None', 'null')
    return retval
186 # Create and push index-pattern to be used by visualizations
188 INDEX_PATTERN_BODY = {
189 "type": "index-pattern",
# NOTE(review): intermediate lines of this dict (an "index-pattern" sub-dict
# opener, closing braces) appear elided in this extract.
191 "timeFieldName": "performance.@timestamp",
192 "title": '{}-{}'.format(BODY[BODY['type']]['project'],
193 BODY[BODY['type']]['subject'])
# Kibana saved 'config' object: default index pattern, a 5-year default
# time window, and the x-pack monitoring banner disabled.
198 KIBANA_CONFIG = {'config': {
199 'defaultIndex': 'pattern-for-{}-{}'.format(BODY[BODY['type']]['project'],
200 BODY[BODY['type']]['subject']),
201 'timepicker:timeDefaults': '{\n "from": "now-5y",\n \
202 "to": "now",\n "mode": "quick"\n}',
203 'xPackMonitoring:showBanner': False},
# NOTE(review): a 'try:' header appears elided before each es.index call
# below (there is an 'except' on line 219 with no visible 'try').
# 'config:6.2.4' pins the Kibana config doc to Kibana version 6.2.4.
207 res = es.index(index='.kibana', doc_type='doc',
208 id='config:6.2.4', body=KIBANA_CONFIG)
213 ES_ID = 'index-pattern:pattern-for-{}-{}'.format(
214 BODY[BODY['type']]['project'], BODY[BODY['type']]['subject'])
215 res = es.index(index=index, doc_type='doc',
216 id=ES_ID, body=INDEX_PATTERN_BODY)
217 p(json.dumps(INDEX_PATTERN_BODY, indent=4))
218 print(json.dumps(res, indent=4))
219 except Exception as e:
222 print('Unable to push data to ElasticSearch')
224 # Create and push visualizations
# Locate YAML templates shipped with the job; the error prints below suggest
# elided guard lines (likely empty-glob checks followed by an exit).
226 viz_config_path = glob.glob('**/dashboard/viz_config.yaml')[0]
228 print('Visualization template file not found!')
232 dash_config_path = glob.glob('**/dashboard/dash_config.yaml')[0]
234 print('Dashboard configuration file not found!')
237 with open(dash_config_path, 'r') as f:
238 dash_config = yaml.safe_load(f)
240 with open(viz_config_path, 'r') as f:
241 viz_config = yaml.safe_load(f)
# Skeleton searchSourceJSON payload; 'index' is filled per visualization.
243 SEARCH_SOURCE = {"index": None, "filter": [],
244 "query": {"language": "lucene", "query": ""}}
# One saved-object push per visualization described in the dashboard config.
246 for _, i in dash_config['dashboard']['viz'].items():
247 intermediate_format, visState = vis_gen.generate(
248 i, viz_config[i['viz-template']])
250 # p(intermediate_format)
253 SEARCH_SOURCE['index'] = intermediate_format['index_pattern']
# NOTE(review): the 'VIZ_BODY = {' opener and several literal lines of the
# saved-object body are elided between here and line 268.
255 'type': 'visualization',
262 "kibanaSavedObjectMeta": {
263 "searchSourceJSON": JSONToString(SEARCH_SOURCE)
268 VIZ_BODY['visualization']['title'] = intermediate_format['title']
269 VIZ_BODY['visualization']['visState'] = JSONToString(visState)
270 VIZ_BODY['visualization']['description'] = intermediate_format['desc']
274 ES_ID = 'visualization:{}'.format(i['id'])
275 res = es.index(index=index, doc_type='doc', id=ES_ID, body=VIZ_BODY)
276 print(json.dumps(res, indent=4))
279 # Create and push dashboard
# One dashboard saved object per top-level entry in the dashboard config.
282 for _, i in dash_config.items():
# NOTE(review): the 'DASH_BODY = {' opener and surrounding literal lines are
# elided; the entries below belong to that dashboard saved-object dict.
289 'optionsJSON': '{\"darkTheme\":false,\
290 \"hidePanelTitles\":false,\"useMargins\":true}',
292 'kibanaSavedObjectMeta': {
293 'searchSourceJSON': '{\"query\":{\"language\":\"lucene\", \
295 \"filter\":[],\"highlightAll\" \
296 :true,\"version\":true}'
301 DASH_BODY['dashboard']['title'] = i['title']
302 DASH_BODY['dashboard']['description'] = i['desc']
# panelsJSON is generated from the viz list and stringified for Kibana.
303 DASH_BODY['dashboard']['panelsJSON'] = JSONToString(
304 dash_gen.generate(i['viz']))
309 ES_ID = 'dashboard:{}'.format(i['id'])
310 res = es.index(index=index, doc_type='doc', id=ES_ID, body=DASH_BODY)
311 print(json.dumps(res, indent=4))