# csit/libraries/UtilLibrary.py (integration/test.git)
import requests
from SSHLibrary import SSHLibrary

import robot
import time
import re
import json
import warnings


__author__ = "Basheeruddin Ahmed"
__copyright__ = "Copyright(c) 2014, Cisco Systems, Inc."
__license__ = "New-style BSD"
__email__ = "syedbahm@cisco.com"


global _cache


def get(url, userId="admin", password="admin"):
    """Helps in making GET REST calls"""
    warnings.warn(
        "Use the Robot RequestsLibrary rather than this. See DatastoreCRUD.robot for examples",
        DeprecationWarning,
    )
    headers = {}
    headers["Accept"] = "application/xml"

    # Send the GET request
    session = _cache.switch("CLUSTERING_GET")
    resp = session.get(url, headers=headers, auth=(userId, password))
    # resp = session.get(url,headers=headers,auth={userId,password})
    # Read the response
    return resp


def nonprintpost(url, userId, password, data):
    """Helps in making POST REST calls without outputs"""
    warnings.warn(
        "Use the Robot RequestsLibrary rather than this. See DatastoreCRUD.robot for examples",
        DeprecationWarning,
    )

    if userId is None:
        userId = "admin"

    if password is None:
        password = "admin"

    headers = {}
    headers["Content-Type"] = "application/json"
    # headers['Accept']= 'application/xml'

    session = _cache.switch("CLUSTERING_POST")
    resp = session.post(
        url, data.encode("utf-8"), headers=headers, auth=(userId, password)
    )

    return resp


def post(url, userId, password, data):
    """Helps in making POST REST calls"""
    warnings.warn(
        "Use the Robot RequestsLibrary rather than this. See DatastoreCRUD.robot for examples",
        DeprecationWarning,
    )

    if userId is None:
        userId = "admin"

    if password is None:
        password = "admin"

    print("post request with url " + url)
    print("post request with data " + data)
    headers = {}
    headers["Content-Type"] = "application/json"
    # headers['Accept'] = 'application/xml'
    session = _cache.switch("CLUSTERING_POST")
    resp = session.post(
        url, data.encode("utf-8"), headers=headers, auth=(userId, password)
    )

    # print(resp.raise_for_status())
    print(resp.headers)
    if resp.status_code >= 500:
        print(resp.text)

    return resp


def delete(url, userId="admin", password="admin"):
    """Helps in making DELETE REST calls"""
    warnings.warn(
        "Use the Robot RequestsLibrary rather than this. See DatastoreCRUD.robot for examples",
        DeprecationWarning,
    )
    print("delete all resources belonging to url " + url)
    session = _cache.switch("CLUSTERING_DELETE")
    resp = session.delete(url, auth=(userId, password))  # noqa


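# Illustrative-only sketch (not part of the original library): shows how the
# deprecated REST helpers above could be called from Python. The controller
# address and JSON payload below are hypothetical placeholders; new suites
# should use the Robot RequestsLibrary instead.
def _example_rest_usage():
    base_url = "http://127.0.0.1:8181/restconf/config"  # hypothetical URL
    # GET with the default admin/admin credentials
    response = get(base_url)
    print(response.status_code)
    # POST a small JSON payload, then clean up with DELETE
    post(base_url, "admin", "admin", '{"example": "payload"}')
    delete(base_url)

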
def Should_Not_Be_Type_None(var):
    """Keyword to check whether the given variable is None.

    If the variable is None, raise an AssertionError so the keyword fails.
    """
    if var is None:
        raise AssertionError("the variable passed was type NoneType")
    return "PASS"


def execute_ssh_command(ip, username, password, command):
    """Execute a command over SSH.

    Uses the username and password of the controller server to open an SSH
    connection, runs the command (typically against a karaf distribution
    location such as /root/Documents/dist), and returns its output.
    """
    print("executing ssh command")
    lib = SSHLibrary()
    lib.open_connection(ip)
    lib.login(username=username, password=password)
    print("login done")
    cmd_response = lib.execute_command(command)
    print("command executed : " + command)
    lib.close_connection()
    return cmd_response


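# Illustrative-only sketch (not part of the original library): runs a single
# command over SSH using the helper above. The host, credentials, and karaf
# distribution location are hypothetical placeholders.
def _example_ssh_usage():
    output = execute_ssh_command(
        "10.0.0.1", "admin", "admin", "ls /root/Documents/dist/bin"
    )
    print(output)

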
def wait_for_controller_up(ip, port="8181"):
    url = (
        "http://"
        + ip
        + ":"
        + str(port)
        + "/restconf/config/opendaylight-inventory:nodes/node/controller-config/yang-ext:mount/config:modules"
    )

    print("Waiting for controller " + ip + " up.")
    # Try 30*10s=5 minutes for the controller to be up.
    for i in range(30):
        try:
            print("attempt %s to url %s" % (str(i), url))
            resp = get(url, "admin", "admin")
            print("attempt %s response is %s" % (str(i), str(resp)))
            print(resp.text)
            if "clustering-it-provider" in resp.text:
                print("Wait for controller " + ip + " succeeded")
                return True
        except Exception as e:
            print(e)
        time.sleep(10)

    print("Wait for controller " + ip + " failed")
    return False


def startAllControllers(username, password, karafhome, port, *ips):
    # Start all controllers
    for ip in ips:
        execute_ssh_command(ip, username, password, karafhome + "/bin/start")

    # Wait for all of them to be up
    for ip in ips:
        rc = wait_for_controller_up(ip, port)
        if rc is False:
            return False
    return True


def startcontroller(ip, username, password, karafhome, port):
    execute_ssh_command(ip, username, password, karafhome + "/bin/start")
    return wait_for_controller_up(ip, port)


def stopcontroller(ip, username, password, karafhome):
    executeStopController(ip, username, password, karafhome)

    wait_for_controller_stopped(ip, username, password, karafhome)


def executeStopController(ip, username, password, karafhome):
    execute_ssh_command(ip, username, password, karafhome + "/bin/stop")


def stopAllControllers(username, password, karafhome, *ips):
    for ip in ips:
        executeStopController(ip, username, password, karafhome)

    for ip in ips:
        wait_for_controller_stopped(ip, username, password, karafhome)


def wait_for_controller_stopped(ip, username, password, karafHome):
    lib = SSHLibrary()
    lib.open_connection(ip)
    lib.login(username=username, password=password)

    # Wait 1 minute for the controller to stop gracefully
    tries = 20
    i = 1
    while i <= tries:
        stdout = lib.execute_command("ps -axf | grep karaf | grep -v grep | wc -l")
        processCnt = stdout[0].strip("\n")
        print("processCnt: " + processCnt)
        if processCnt == "0":
            break
        i = i + 1
        time.sleep(3)

    lib.close_connection()

    if i > tries:
        print("Killing controller")
        kill_controller(ip, username, password, karafHome)


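# Illustrative-only sketch (not part of the original library): a typical
# start / stop cycle for a three-node cluster using the helpers above. The
# member addresses and karaf home are hypothetical placeholders.
def _example_cluster_lifecycle():
    members = ["10.0.0.1", "10.0.0.2", "10.0.0.3"]
    karaf_home = "/root/Documents/dist"
    if startAllControllers("admin", "admin", karaf_home, "8181", *members):
        print("all members are up")
    stopAllControllers("admin", "admin", karaf_home, *members)

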
def clean_journal(ip, username, password, karafHome):
    execute_ssh_command(ip, username, password, "rm -rf " + karafHome + "/*journal")


def kill_controller(ip, username, password, karafHome):
    execute_ssh_command(
        ip,
        username,
        password,
        "ps axf | grep karaf | grep -v grep | awk '{print \"kill -9 \" $1}' | sh",
    )


def isolate_controller(controllers, username, password, isolated):
    """Isolate one controller from the others in the cluster.

    :param controllers: A list of ip addresses or host names as strings.
    :param username: Username for the controller to be isolated.
    :param password: Password for the controller to be isolated.
    :param isolated: Number (starting at one) of the controller to be isolated.
    :return: If successful, returns "pass", otherwise returns the last failed IPTables text.
    """
    isolated_controller = controllers[isolated - 1]
    for controller in controllers:
        if controller != isolated_controller:
            base_str = "sudo iptables -I OUTPUT -p all --source "
            cmd_str = (
                base_str
                + isolated_controller
                + " --destination "
                + controller
                + " -j DROP"
            )
            execute_ssh_command(isolated_controller, username, password, cmd_str)
            cmd_str = (
                base_str
                + controller
                + " --destination "
                + isolated_controller
                + " -j DROP"
            )
            execute_ssh_command(isolated_controller, username, password, cmd_str)
    ip_tables = execute_ssh_command(
        isolated_controller, username, password, "sudo iptables -L"
    )
    print(ip_tables)
    iso_result = "pass"
    for controller in controllers:
        controller_regex_string = (
            r"[\s\S]*" + isolated_controller + " *" + controller + r"[\s\S]*"
        )
        controller_regex = re.compile(controller_regex_string)
        if not controller_regex.match(ip_tables):
            iso_result = ip_tables
        controller_regex_string = (
            r"[\s\S]*" + controller + " *" + isolated_controller + r"[\s\S]*"
        )
        controller_regex = re.compile(controller_regex_string)
        if not controller_regex.match(ip_tables):
            iso_result = ip_tables
    return iso_result


def rejoin_controller(controllers, username, password, isolated):
    """Return an isolated controller to the cluster.

    :param controllers: A list of ip addresses or host names as strings.
    :param username: Username for the isolated controller.
    :param password: Password for the isolated controller.
    :param isolated: Number (starting at one) of the controller to be rejoined.
    :return: If successful, returns "pass", otherwise returns the last failed IPTables text.
    """
    isolated_controller = controllers[isolated - 1]
    for controller in controllers:
        if controller != isolated_controller:
            base_str = "sudo iptables -D OUTPUT -p all --source "
            cmd_str = (
                base_str
                + isolated_controller
                + " --destination "
                + controller
                + " -j DROP"
            )
            execute_ssh_command(isolated_controller, username, password, cmd_str)
            cmd_str = (
                base_str
                + controller
                + " --destination "
                + isolated_controller
                + " -j DROP"
            )
            execute_ssh_command(isolated_controller, username, password, cmd_str)
    ip_tables = execute_ssh_command(
        isolated_controller, username, password, "sudo iptables -L"
    )
    print(ip_tables)
    iso_result = "pass"
    for controller in controllers:
        controller_regex_string = (
            r"[\s\S]*" + isolated_controller + " *" + controller + r"[\s\S]*"
        )
        controller_regex = re.compile(controller_regex_string)
        if controller_regex.match(ip_tables):
            iso_result = ip_tables
        controller_regex_string = (
            r"[\s\S]*" + controller + " *" + isolated_controller + r"[\s\S]*"
        )
        controller_regex = re.compile(controller_regex_string)
        if controller_regex.match(ip_tables):
            iso_result = ip_tables
    return iso_result


def flush_iptables(controllers, username, password):
    """Removes all entries from IPTables on all controllers.

    :param controllers: A list of ip addresses or host names as strings.
    :param username: Username for all controllers.
    :param password: Password for all controllers.
    :return: If successful, returns "pass", otherwise returns a failure message.
    """
    flush_result = "pass"
    for controller in controllers:
        print("Flushing ", controller)
        cmd_str = "sudo iptables -v -F"
        cmd_result = execute_ssh_command(controller, username, password, cmd_str)
        print(cmd_result)
        success_string = "Flushing chain `INPUT'" + "\n"
        success_string += "Flushing chain `FORWARD'" + "\n"
        success_string += "Flushing chain `OUTPUT'"
        if not cmd_result == success_string:
            flush_result = "Failed to flush IPTables. Check Log."
        print(".")
        print(".")
        print(".")
    return flush_result


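# Illustrative-only sketch (not part of the original library): isolates the
# second member of a hypothetical three-node cluster, rejoins it, and then
# flushes IPTables on every member. Addresses and credentials are placeholders.
def _example_isolation_cycle():
    members = ["10.0.0.1", "10.0.0.2", "10.0.0.3"]
    result = isolate_controller(members, "admin", "admin", 2)
    print("isolate result: " + result)
    result = rejoin_controller(members, "admin", "admin", 2)
    print("rejoin result: " + result)
    print("flush result: " + flush_iptables(members, "admin", "admin"))

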
def build_elastic_search_JSON_request(query_String):
    data = {
        "from": "0",
        "size": "1",
        "sort": [{"TimeStamp": {"order": "desc"}}],
        "query": {"query_string": {"query": query_String}},
    }
    return json.dumps(data)


def create_query_string_search(data_category, metric_name, node_id, rk_node_id):
    query = "TSDRDataCategory:"
    query += data_category
    query += " AND MetricName:"
    query += metric_name
    query += ' AND NodeID:"'
    query += node_id
    query += '" AND RecordKeys.KeyValue:"'
    query += rk_node_id
    query += '" AND RecordKeys.KeyName:Node AND RecordKeys.KeyValue:0 AND RecordKeys.KeyName:Table'
    return query


def create_query_string_count(data_category):
    query = "TSDRDataCategory:"
    query += data_category
    return query


def extract_metric_value_search(response):
    return str(response["hits"]["hits"][0]["_source"]["MetricValue"])


def extract_metric_value_count(response):
    return int(response["hits"]["total"])


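# Illustrative-only sketch (not part of the original library): builds a TSDR
# Elasticsearch query with the helpers above and extracts metric values from a
# decoded response. The data category, metric name, node ids, and the sample
# response dict are hypothetical placeholders.
def _example_elastic_search_usage():
    query = create_query_string_search(
        "FLOWTABLESTATS", "ActiveFlows", "openflow:1", "openflow:1"
    )
    request_body = build_elastic_search_JSON_request(query)
    print(request_body)
    # 'response' would normally be the JSON-decoded reply from Elasticsearch
    response = {"hits": {"total": 1, "hits": [{"_source": {"MetricValue": 1}}]}}
    print(extract_metric_value_search(response))
    print(extract_metric_value_count(response))

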
# Create one shared requests session per HTTP verb when this module is
# imported as a Robot library (i.e. not run directly as a script).
if __name__ != "__main__":
    _cache = robot.utils.ConnectionCache("No sessions created")
    # one session for each of the HTTP helper functions above
    _cache.register(requests.session(), alias="CLUSTERING_GET")
    _cache.register(requests.session(), alias="CLUSTERING_POST")
    _cache.register(requests.session(), alias="CLUSTERING_DELETE")