add a method to check node configs in func tests
[transportpce.git] tests/transportpce_tests/common/test_utils.py
index b94c9de012a13a3298fb685af5d89179c8bd4376..6b6958f73cf09e834b8fa04a7f9f49f533f053cb 100644
@@ -9,39 +9,58 @@
 ##############################################################################
 import json
 import os
+import sys
 import re
 import signal
 import subprocess
+import time
 
 import psutil
 import requests
 
 import simulators
 
-sims = simulators.sims
-honeynode_executable = simulators.honeynode_executable
-samples_directory = simulators.samples_directory
+SIMS = simulators.SIMS
+HONEYNODE_EXECUTABLE = simulators.HONEYNODE_EXECUTABLE
+SAMPLES_DIRECTORY = simulators.SAMPLES_DIRECTORY
 
-HONEYNODE_OK_START_MSG = re.escape("Netconf SSH endpoint started successfully at 0.0.0.0")
-KARAF_OK_START_MSG = re.escape("Blueprint container for bundle "
-                               "org.opendaylight.netconf.restconf") + ".* was successfully created"
+HONEYNODE_OK_START_MSG = "Netconf SSH endpoint started successfully at 0.0.0.0"
+KARAF_OK_START_MSG = re.escape(
+    "Blueprint container for bundle org.opendaylight.netconf.restconf")+".* was successfully created"
 
-TYPE_APPLICATION_JSON = {'content-type': 'application/json'}
 
-log_directory = os.path.dirname(os.path.realpath(__file__))
+RESTCONF_BASE_URL = "http://localhost:8181/restconf"
+ODL_LOGIN = "admin"
+ODL_PWD = "admin"
+NODES_LOGIN = "admin"
+NODES_PWD = "admin"
+URL_CONFIG_NETCONF_TOPO = "{}/config/network-topology:network-topology/topology/topology-netconf/"
 
-karaf_log = os.path.join(
+TYPE_APPLICATION_JSON = {'Content-Type': 'application/json', 'Accept': 'application/json'}
+TYPE_APPLICATION_XML = {'Content-Type': 'application/xml', 'Accept': 'application/xml'}
+
+CODE_SHOULD_BE_200 = 'HTTP status code should be 200'
+CODE_SHOULD_BE_201 = 'HTTP status code should be 201'
+
+LOG_DIRECTORY = os.path.dirname(os.path.realpath(__file__))
+
+KARAF_LOG = os.path.join(
     os.path.dirname(os.path.realpath(__file__)),
     "..", "..", "..", "karaf", "target", "assembly", "data", "log", "karaf.log")
 
 process_list = []
 
+if "USE_LIGHTY" in os.environ and os.environ['USE_LIGHTY'] == 'True':
+    TPCE_LOG = 'odl.log'
+else:
+    TPCE_LOG = KARAF_LOG
+
 
 def start_sims(sims_list):
     for sim in sims_list:
         print("starting simulator for " + sim + "...")
-        log_file = os.path.join(log_directory, sims[sim]['logfile'])
-        process = start_honeynode(log_file, sims[sim]['port'], sims[sim]['configfile'])
+        log_file = os.path.join(LOG_DIRECTORY, SIMS[sim]['logfile'])
+        process = start_honeynode(log_file, SIMS[sim]['port'], SIMS[sim]['configfile'])
         if wait_until_log_contains(log_file, HONEYNODE_OK_START_MSG, 100):
             print("simulator for " + sim + " started")
         else:
@@ -49,26 +68,26 @@ def start_sims(sims_list):
             shutdown_process(process)
             for pid in process_list:
                 shutdown_process(pid)
-            exit(3)
+            sys.exit(3)
         process_list.append(process)
     return process_list
 
 
 def start_tpce():
-    print("starting opendaylight...")
+    print("starting OpenDaylight...")
     if "USE_LIGHTY" in os.environ and os.environ['USE_LIGHTY'] == 'True':
         process = start_lighty()
         # TODO: add some sort of health check similar to Karaf below
     else:
         process = start_karaf()
-        if wait_until_log_contains(karaf_log, KARAF_OK_START_MSG, time_to_wait=60):
-            print("opendaylight started")
+        if wait_until_log_contains(KARAF_LOG, KARAF_OK_START_MSG, time_to_wait=60):
+            print("OpenDaylight started !")
         else:
-            print("opendaylight failed to start")
+            print("OpenDaylight failed to start !")
             shutdown_process(process)
             for pid in process_list:
                 shutdown_process(pid)
-            exit(1)
+            sys.exit(1)
     process_list.append(process)
     return process_list
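
Illustrative usage, not part of this change: a minimal sketch of how a functional test class might drive these fixtures. It assumes this directory is on sys.path so that "import test_utils" resolves, and that 'xpdra' is a valid key in simulators.SIMS.

    import unittest
    import test_utils  # assumption: tests/transportpce_tests/common is on sys.path

    class TransportPCENodeConfigTest(unittest.TestCase):
        processes = None

        @classmethod
        def setUpClass(cls):
            # start the controller first (karaf, or lighty when USE_LIGHTY=True) ...
            cls.processes = test_utils.start_tpce()
            # ... then the honeynode simulators, referenced by their SIMS keys
            cls.processes = test_utils.start_sims(['xpdra'])

        @classmethod
        def tearDownClass(cls):
            for process in cls.processes:
                test_utils.shutdown_process(process)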
 
@@ -104,38 +123,119 @@ def install_karaf_feature(feature_name: str):
                           universal_newlines=True)
 
 
-def post_request(url, data, username, password):
+def get_request(url):
+    return requests.request(
+        "GET", url.format(RESTCONF_BASE_URL),
+        headers=TYPE_APPLICATION_JSON,
+        auth=(ODL_LOGIN, ODL_PWD))
+
+
+def post_request(url, data):
+    if data:
+        return requests.request(
+            "POST", url.format(RESTCONF_BASE_URL),
+            data=json.dumps(data),
+            headers=TYPE_APPLICATION_JSON,
+            auth=(ODL_LOGIN, ODL_PWD))
+    else:
+        return requests.request(
+            "POST", url.format(RESTCONF_BASE_URL),
+            headers=TYPE_APPLICATION_JSON,
+            auth=(ODL_LOGIN, ODL_PWD))
+
+
+def post_xmlrequest(url, data):
+    if data:
+        return requests.request(
+            "POST", url.format(RESTCONF_BASE_URL),
+            data=data,
+            headers=TYPE_APPLICATION_XML,
+            auth=(ODL_LOGIN, ODL_PWD))
+
+
+def put_request(url, data):
+    return requests.request(
+        "PUT", url.format(RESTCONF_BASE_URL),
+        data=json.dumps(data),
+        headers=TYPE_APPLICATION_JSON,
+        auth=(ODL_LOGIN, ODL_PWD))
+
+
+def put_xmlrequest(url, data):
     return requests.request(
-        "POST", url, data=json.dumps(data),
-        headers=TYPE_APPLICATION_JSON, auth=(username, password))
+        "PUT", url.format(RESTCONF_BASE_URL),
+        data=data,
+        headers=TYPE_APPLICATION_XML,
+        auth=(ODL_LOGIN, ODL_PWD))
 
 
-def put_request(url, data, username, password):
+def rawput_request(url, data):
     return requests.request(
-        "PUT", url, data=json.dumps(data), headers=TYPE_APPLICATION_JSON,
-        auth=(username, password))
+        "PUT", url.format(RESTCONF_BASE_URL),
+        data=data,
+        headers=TYPE_APPLICATION_JSON,
+        auth=(ODL_LOGIN, ODL_PWD))
 
 
-def delete_request(url, username, password):
+def delete_request(url):
     return requests.request(
-        "DELETE", url, headers=TYPE_APPLICATION_JSON,
-        auth=(username, password))
+        "DELETE", url.format(RESTCONF_BASE_URL),
+        headers=TYPE_APPLICATION_JSON,
+        auth=(ODL_LOGIN, ODL_PWD))
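
Note on the convention shared by these wrappers, with a hedged usage sketch that is not part of the commit: the url argument is a template whose "{}" placeholder (as in URL_CONFIG_NETCONF_TOPO above) is expanded to RESTCONF_BASE_URL inside each helper via url.format().

    # GET the netconf topology from the config datastore; the "{}" in the
    # template is filled with RESTCONF_BASE_URL by get_request() itself
    response = test_utils.get_request(test_utils.URL_CONFIG_NETCONF_TOPO)
    assert response.status_code == 200, test_utils.CODE_SHOULD_BE_200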
 
 
-def generate_connect_data(node_id: str, node_port: str):
-    data = {"node": [{
+def mount_device(node_id, sim):
+    url = URL_CONFIG_NETCONF_TOPO+"node/"+node_id
+    body = {"node": [{
         "node-id": node_id,
-        "netconf-node-topology:username": "admin",
-        "netconf-node-topology:password": "admin",
+        "netconf-node-topology:username": NODES_LOGIN,
+        "netconf-node-topology:password": NODES_PWD,
         "netconf-node-topology:host": "127.0.0.1",
-        "netconf-node-topology:port": node_port,
+        "netconf-node-topology:port": SIMS[sim]['port'],
         "netconf-node-topology:tcp-only": "false",
         "netconf-node-topology:pass-through": {}}]}
-    return data
+    response = put_request(url, body)
+    if wait_until_log_contains(TPCE_LOG, re.escape("Triggering notification stream NETCONF for node "+node_id), 60):
+        print("Node "+node_id+" correctly added to tpce topology", end='... ', flush=True)
+    else:
+        print("Node "+node_id+" still not added to tpce topology", end='... ', flush=True)
+        if response.status_code == requests.codes.ok:
+            print("It was probably loaded at start-up", end='... ', flush=True)
+        # TODO: an else clause to abort the test would probably be nice here
+    return response
+
+
+def unmount_device(node_id):
+    url = URL_CONFIG_NETCONF_TOPO+"node/"+node_id
+    response = delete_request(url)
+    if wait_until_log_contains(TPCE_LOG, re.escape("onDeviceDisConnected: "+node_id), 60):
+        print("Node "+node_id+" correctly deleted from tpce topology", end='... ', flush=True)
+    else:
+        print("Node "+node_id+" still not deleted from tpce topology", end='... ', flush=True)
+    return response
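
Illustrative sketch, not part of the commit: a test typically mounts a simulator at start-up and unmounts it during teardown. The node id 'XPDR-A1' and the SIMS key 'xpdra' below are placeholders.

    # mount the 'xpdra' honeynode under the node id 'XPDR-A1'
    response = test_utils.mount_device('XPDR-A1', 'xpdra')
    # the PUT returns 201 on creation, or 200 if the node was already present
    assert response.status_code in (200, 201)

    # ... test body ...

    # remove the node from the netconf topology at the end of the test
    response = test_utils.unmount_device('XPDR-A1')
    assert response.status_code == 200, test_utils.CODE_SHOULD_BE_200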
+
 
+def connect_xpdr_to_rdm_request(xpdr_node: str, xpdr_num: str, network_num: str,
+                                rdm_node: str, srg_num: str, termination_num: str):
+    url = "{}/operations/transportpce-networkutils:init-xpdr-rdm-links"
+    data = {
+        "networkutils:input": {
+            "networkutils:links-input": {
+                "networkutils:xpdr-node": xpdr_node,
+                "networkutils:xpdr-num": xpdr_num,
+                "networkutils:network-num": network_num,
+                "networkutils:rdm-node": rdm_node,
+                "networkutils:srg-num": srg_num,
+                "networkutils:termination-point-num": termination_num
+            }
+        }
+    }
+    return post_request(url, data)
 
-def generate_link_data(xpdr_node: str, xpdr_num: str, network_num: str, rdm_node: str, srg_num: str,
-                       termination_num: str):
+
+def connect_rdm_to_xpdr_request(xpdr_node: str, xpdr_num: str, network_num: str,
+                                rdm_node: str, srg_num: str, termination_num: str):
+    url = "{}/operations/transportpce-networkutils:init-rdm-xpdr-links"
     data = {
         "networkutils:input": {
             "networkutils:links-input": {
@@ -148,7 +248,14 @@ def generate_link_data(xpdr_node: str, xpdr_num: str, network_num: str, rdm_node
             }
         }
     }
-    return data
+    return post_request(url, data)
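
Hedged usage sketch, not part of the commit: both helpers simply POST the corresponding transportpce-networkutils RPC. The argument values below are placeholders.

    # create the XPDR-to-ROADM link, then the ROADM-to-XPDR link
    response = test_utils.connect_xpdr_to_rdm_request('XPDR-A1', '1', '1', 'ROADM-A1', '1', '1')
    assert response.status_code == 200, test_utils.CODE_SHOULD_BE_200
    response = test_utils.connect_rdm_to_xpdr_request('XPDR-A1', '1', '1', 'ROADM-A1', '1', '1')
    assert response.status_code == 200, test_utils.CODE_SHOULD_BE_200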
+
+
+def check_netconf_node_request(node: str, suffix: str):
+    url = URL_CONFIG_NETCONF_TOPO + (
+        "node/" + node + "/yang-ext:mount/org-openroadm-device:org-openroadm-device/" + suffix
+    )
+    return get_request(url)
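
This is the method announced in the commit message: it reads a subtree of the mounted org-openroadm-device through yang-ext:mount so a test can check a node's configuration. A hedged sketch follows; the node id and the 'info' suffix are placeholders, and the response body is assumed to wrap the requested container under its own name.

    # read the 'info' container of the mounted device 'XPDR-A1'
    response = test_utils.check_netconf_node_request('XPDR-A1', 'info')
    assert response.status_code == 200, test_utils.CODE_SHOULD_BE_200
    info = response.json()['info']  # assumed response shape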
 
 
 def shutdown_process(process):
@@ -160,40 +267,47 @@ def shutdown_process(process):
 
 
 def start_honeynode(log_file: str, node_port: str, node_config_file_name: str):
-    if os.path.isfile(honeynode_executable):
+    if os.path.isfile(HONEYNODE_EXECUTABLE):
         with open(log_file, 'w') as outfile:
             return subprocess.Popen(
-                [honeynode_executable, node_port, os.path.join(samples_directory, node_config_file_name)],
+                [HONEYNODE_EXECUTABLE, node_port, os.path.join(SAMPLES_DIRECTORY, node_config_file_name)],
                 stdout=outfile, stderr=outfile)
 
 
-def wait_until_log_contains(log_file, searched_string, time_to_wait=20):
-    found = False
-    tail = None
+def wait_until_log_contains(log_file, regexp, time_to_wait=20):
+    stringfound = False
+    filefound = False
+    line = None
     try:
-        with timeout(seconds=time_to_wait):
-            print("Waiting for " + searched_string)
-            tail = subprocess.Popen(['tail', '-F', log_file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-            regexp = re.compile(searched_string)
+        with TimeOut(seconds=time_to_wait):
+            while not os.path.exists(log_file):
+                time.sleep(0.2)
+            filelogs = open(log_file, 'r')
+            filelogs.seek(0, 2)
+            filefound = True
+            print("Searching for pattern '"+regexp+"' in "+os.path.basename(log_file), end='... ', flush=True)
+            compiled_regexp = re.compile(regexp)
             while True:
-                line = tail.stdout.readline().decode('utf-8')
-                if regexp.search(line):
-                    print("Searched string found.")
-                    found = True
+                line = filelogs.readline()
+                if compiled_regexp.search(line):
+                    print("Pattern found!", end=' ')
+                    stringfound = True
                     break
+                if not line:
+                    time.sleep(0.1)
     except TimeoutError:
-        print("Cannot find string "+searched_string+" after waiting for "+str(time_to_wait))
+        print("Pattern not found after "+str(time_to_wait), end=" seconds! ", flush=True)
+    except PermissionError:
+        print("Permission Error when trying to access the log file", end=" ... ", flush=True)
     finally:
-        if tail is not None:
-            print("Stopping tail command")
-            tail.stderr.close()
-            tail.stdout.close()
-            tail.kill()
-            tail.wait()
-        return found
+        if filefound:
+            filelogs.close()
+        else:
+            print("log file does not exist or is not accessible... ", flush=True)
+        return stringfound
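
Design note and hedged usage sketch, not part of the commit: the helper now polls the log file itself instead of spawning 'tail -F', waits for the file to appear, and moves the read position to the end of the file before matching, so only lines written after the call can satisfy the pattern. The snippet assumes re and test_utils are imported by the caller.

    # wait up to 30 seconds for the disconnection trace of a (placeholder) node id
    found = test_utils.wait_until_log_contains(
        test_utils.TPCE_LOG, re.escape("onDeviceDisConnected: XPDR-A1"), 30)
    assert found, "expected trace not found in " + test_utils.TPCE_LOG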
 
 
-class timeout:
+class TimeOut:
     def __init__(self, seconds=1, error_message='Timeout'):
         self.seconds = seconds
         self.error_message = error_message