X-Git-Url: https://git.opendaylight.org/gerrit/gitweb?a=blobdiff_plain;f=tests%2Ftransportpce_tests%2Fcommon%2Ftest_utils.py;h=c18a4d1f5ee1288834881c70e5f9835ae3864ef9;hb=80c1e9a96ad3e992fb6504463ce1eaacd1e5a7e9;hp=2abe3ee7625506905eeaf28cba0bdb2ff03d1639;hpb=f17d29fa9331ce5bfb4fcdbbd649045710f0b75f;p=transportpce.git

diff --git a/tests/transportpce_tests/common/test_utils.py b/tests/transportpce_tests/common/test_utils.py
index 2abe3ee76..c18a4d1f5 100644
--- a/tests/transportpce_tests/common/test_utils.py
+++ b/tests/transportpce_tests/common/test_utils.py
@@ -1,4 +1,5 @@
 #!/usr/bin/env python
+
 ##############################################################################
 # Copyright (c) 2020 Orange, Inc. and others. All rights reserved.
 #
@@ -7,8 +8,12 @@
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
 ##############################################################################
+
+# pylint: disable=no-member
+
 import json
 import os
+# pylint: disable=wrong-import-order
 import sys
 import re
 import signal
@@ -18,18 +23,16 @@ import time
 import psutil
 import requests
 
+# pylint: disable=import-error
 import simulators
 
 SIMS = simulators.SIMS
-HONEYNODE_EXECUTABLE = simulators.HONEYNODE_EXECUTABLE
-SAMPLES_DIRECTORY = simulators.SAMPLES_DIRECTORY
 
 HONEYNODE_OK_START_MSG = "Netconf SSH endpoint started successfully at 0.0.0.0"
 KARAF_OK_START_MSG = re.escape(
     "Blueprint container for bundle org.opendaylight.netconf.restconf")+".* was successfully created"
+LIGHTY_OK_START_MSG = re.escape("lighty.io and RESTCONF-NETCONF started")
 
-
-RESTCONF_BASE_URL = "http://localhost:8181/restconf"
 ODL_LOGIN = "admin"
 ODL_PWD = "admin"
 NODES_LOGIN = "admin"
@@ -40,6 +43,22 @@ URL_CONFIG_OTN_TOPO = "{}/config/ietf-network:networks/network/otn-topology/"
 URL_CONFIG_CLLI_NET = "{}/config/ietf-network:networks/network/clli-network/"
 URL_CONFIG_ORDM_NET = "{}/config/ietf-network:networks/network/openroadm-network/"
 URL_PORTMAPPING = "{}/config/transportpce-portmapping:network/nodes/"
+URL_OPER_SERV_LIST = "{}/operational/org-openroadm-service:service-list/"
+URL_GET_NBINOTIFICATIONS_PROCESS_SERV = "{}/operations/nbi-notifications:get-notifications-process-service/"
+URL_GET_NBINOTIFICATIONS_ALARM_SERV = "{}/operations/nbi-notifications:get-notifications-alarm-service/"
+URL_SERV_CREATE = "{}/operations/org-openroadm-service:service-create"
+URL_SERV_DELETE = "{}/operations/org-openroadm-service:service-delete"
+URL_SERVICE_PATH = "{}/operations/transportpce-device-renderer:service-path"
+URL_OTN_SERVICE_PATH = "{}/operations/transportpce-device-renderer:otn-service-path"
+URL_TAPI_CREATE_CONNECTIVITY = "{}/operations/tapi-connectivity:create-connectivity-service"
+URL_TAPI_DELETE_CONNECTIVITY = "{}/operations/tapi-connectivity:delete-connectivity-service"
+URL_CREATE_OTS_OMS = "{}/operations/transportpce-device-renderer:create-ots-oms"
+URL_PATH_COMPUTATION_REQUEST = "{}/operations/transportpce-pce:path-computation-request"
+URL_FULL_PORTMAPPING = "{}/config/transportpce-portmapping:network"
+URL_TAPI_TOPOLOGY_DETAILS = "{}/operations/tapi-topology:get-topology-details"
+URL_TAPI_NODE_DETAILS = "{}/operations/tapi-topology:get-node-details"
+URL_TAPI_SIP_LIST = "{}/operations/tapi-common:get-service-interface-point-list"
+URL_TAPI_SERVICE_LIST = "{}/operations/tapi-connectivity:get-connectivity-service-list"
 TYPE_APPLICATION_JSON = {'Content-Type': 'application/json', 'Accept': 'application/json'}
 TYPE_APPLICATION_XML = {'Content-Type': 'application/xml', 'Accept': 'application/xml'}
 
@@ -47,29 +66,40 @@ TYPE_APPLICATION_XML = {'Content-Type': 'application/xml', 'Accept': 'applicatio
 CODE_SHOULD_BE_200 = 'Http status code should be 200'
 CODE_SHOULD_BE_201 = 'Http status code should be 201'
 
-LOG_DIRECTORY = os.path.dirname(os.path.realpath(__file__))
+SIM_LOG_DIRECTORY = os.path.join(os.path.dirname(os.path.realpath(__file__)), "log")
+
+process_list = []
+
+
+if "USE_ODL_ALT_RESTCONF_PORT" in os.environ:
+    RESTCONF_BASE_URL = "http://localhost:" + os.environ['USE_ODL_ALT_RESTCONF_PORT'] + "/restconf"
+else:
+    RESTCONF_BASE_URL = "http://localhost:8181/restconf"
+
+if "USE_ODL_ALT_KARAF_INSTALL_DIR" in os.environ:
+    KARAF_INSTALLDIR = os.environ['USE_ODL_ALT_KARAF_INSTALL_DIR']
+else:
+    KARAF_INSTALLDIR = "karaf"
 
 KARAF_LOG = os.path.join(
     os.path.dirname(os.path.realpath(__file__)),
-    "..", "..", "..", "karaf", "target", "assembly", "data", "log", "karaf.log")
-
-process_list = []
+    "..", "..", "..", KARAF_INSTALLDIR, "target", "assembly", "data", "log", "karaf.log")
 
 if "USE_LIGHTY" in os.environ and os.environ['USE_LIGHTY'] == 'True':
-    TPCE_LOG = 'odl.log'
+    TPCE_LOG = 'odl-' + str(os.getpid()) + '.log'
 else:
     TPCE_LOG = KARAF_LOG
 
 
 def start_sims(sims_list):
     for sim in sims_list:
-        print("starting simulator for " + sim + "...")
-        log_file = os.path.join(LOG_DIRECTORY, SIMS[sim]['logfile'])
-        process = start_honeynode(log_file, SIMS[sim]['port'], SIMS[sim]['configfile'])
+        print("starting simulator " + sim[0] + " in OpenROADM device version " + sim[1] + "...")
+        log_file = os.path.join(SIM_LOG_DIRECTORY, SIMS[sim]['logfile'])
+        process = start_honeynode(log_file, sim)
         if wait_until_log_contains(log_file, HONEYNODE_OK_START_MSG, 100):
-            print("simulator for " + sim + " started")
+            print("simulator for " + sim[0] + " started")
         else:
-            print("simulator for " + sim + " failed to start")
+            print("simulator for " + sim[0] + " failed to start")
             shutdown_process(process)
             for pid in process_list:
                 shutdown_process(pid)
@@ -82,17 +112,18 @@ def start_tpce():
     print("starting OpenDaylight...")
     if "USE_LIGHTY" in os.environ and os.environ['USE_LIGHTY'] == 'True':
         process = start_lighty()
-        # TODO: add some sort of health check similar to Karaf below
+        start_msg = LIGHTY_OK_START_MSG
     else:
         process = start_karaf()
-        if wait_until_log_contains(KARAF_LOG, KARAF_OK_START_MSG, time_to_wait=60):
-            print("OpenDaylight started !")
-        else:
-            print("OpenDaylight failed to start !")
-            shutdown_process(process)
-            for pid in process_list:
-                shutdown_process(pid)
-            sys.exit(1)
+        start_msg = KARAF_OK_START_MSG
+    if wait_until_log_contains(TPCE_LOG, start_msg, time_to_wait=300):
+        print("OpenDaylight started !")
+    else:
+        print("OpenDaylight failed to start !")
+        shutdown_process(process)
+        for pid in process_list:
+            shutdown_process(pid)
+        sys.exit(1)
     process_list.append(process)
     return process_list
 
@@ -101,8 +132,8 @@ def start_karaf():
     print("starting KARAF TransportPCE build...")
     executable = os.path.join(
         os.path.dirname(os.path.realpath(__file__)),
-        "..", "..", "..", "karaf", "target", "assembly", "bin", "karaf")
-    with open('odl.log', 'w') as outfile:
+        "..", "..", "..", KARAF_INSTALLDIR, "target", "assembly", "bin", "karaf")
+    with open('odl.log', 'w', encoding='utf-8') as outfile:
         return subprocess.Popen(
             ["sh", executable, "server"], stdout=outfile, stderr=outfile, stdin=None)
 
@@ -113,7 +144,7 @@ def start_lighty():
         os.path.dirname(os.path.realpath(__file__)),
         "..", "..", "..", "lighty", "target", "tpce",
         "clean-start-controller.sh")
-    with open('odl.log', 'w') as outfile:
+    with open(TPCE_LOG, 'w', encoding='utf-8') as outfile:
         return subprocess.Popen(
             ["sh", executable], stdout=outfile, stderr=outfile, stdin=None)
 
 
@@ -122,10 +153,11 @@ def install_karaf_feature(feature_name: str):
     print("installing feature " + feature_name)
     executable = os.path.join(
         os.path.dirname(os.path.realpath(__file__)),
-        "..", "..", "..", "karaf", "target", "assembly", "bin", "client")
+        "..", "..", "..", KARAF_INSTALLDIR, "target", "assembly", "bin", "client")
     return subprocess.run([executable],
-                          input='feature:install ' + feature_name + '\n feature:list | grep tapi \n logout \n',
-                          universal_newlines=True)
+                          input='feature:install ' + feature_name + '\n feature:list | grep '
+                          + feature_name + ' \n logout \n',
+                          universal_newlines=True, check=False)
 
 
 def get_request(url):
@@ -137,16 +169,17 @@
 
 def post_request(url, data):
     if data:
+        print(json.dumps(data))
         return requests.request(
             "POST", url.format(RESTCONF_BASE_URL),
             data=json.dumps(data),
             headers=TYPE_APPLICATION_JSON,
             auth=(ODL_LOGIN, ODL_PWD))
-    else:
-        return requests.request(
-            "POST", url.format(RESTCONF_BASE_URL),
-            headers=TYPE_APPLICATION_JSON,
-            auth=(ODL_LOGIN, ODL_PWD))
+
+    return requests.request(
+        "POST", url.format(RESTCONF_BASE_URL),
+        headers=TYPE_APPLICATION_JSON,
+        auth=(ODL_LOGIN, ODL_PWD))
 
 
 def post_xmlrequest(url, data):
@@ -156,6 +189,7 @@
             data=data,
             headers=TYPE_APPLICATION_XML,
             auth=(ODL_LOGIN, ODL_PWD))
+    return None
 
 
 def put_request(url, data):
@@ -174,6 +208,14 @@
         auth=(ODL_LOGIN, ODL_PWD))
 
 
+def put_jsonrequest(url, data):
+    return requests.request(
+        "PUT", url.format(RESTCONF_BASE_URL),
+        data=data,
+        headers=TYPE_APPLICATION_JSON,
+        auth=(ODL_LOGIN, ODL_PWD))
+
+
 def rawput_request(url, data):
     return requests.request(
         "PUT", url.format(RESTCONF_BASE_URL),
@@ -182,6 +224,14 @@
         auth=(ODL_LOGIN, ODL_PWD))
 
 
+def rawpost_request(url, data):
+    return requests.request(
+        "POST", url.format(RESTCONF_BASE_URL),
+        data=data,
+        headers=TYPE_APPLICATION_JSON,
+        auth=(ODL_LOGIN, ODL_PWD))
+
+
 def delete_request(url):
     return requests.request(
         "DELETE", url.format(RESTCONF_BASE_URL),
@@ -190,7 +240,7 @@
 
 
 def mount_device(node_id, sim):
-    url = URL_CONFIG_NETCONF_TOPO+"node/"+node_id
+    url = URL_CONFIG_NETCONF_TOPO + "node/" + node_id
     body = {"node": [{
         "node-id": node_id,
         "netconf-node-topology:username": NODES_LOGIN,
@@ -200,10 +250,10 @@
         "netconf-node-topology:tcp-only": "false",
         "netconf-node-topology:pass-through": {}}]}
     response = put_request(url, body)
-    if wait_until_log_contains(TPCE_LOG, re.escape("Triggering notification stream NETCONF for node "+node_id), 60):
-        print("Node "+node_id+" correctly added to tpce topology", end='... ', flush=True)
+    if wait_until_log_contains(TPCE_LOG, re.escape("Triggering notification stream NETCONF for node " + node_id), 180):
+        print("Node " + node_id + " correctly added to tpce topology", end='... ', flush=True)
     else:
-        print("Node "+node_id+" still not added to tpce topology", end='... ', flush=True)
+        print("Node " + node_id + " still not added to tpce topology", end='... ', flush=True)
         if response.status_code == requests.codes.ok:
             print("It was probably loaded at start-up", end='... ', flush=True)
         # TODO an else-clause to abort test would probably be nice here
     return response
 
 
@@ -211,12 +261,12 @@
 
 
 def unmount_device(node_id):
-    url = URL_CONFIG_NETCONF_TOPO+"node/"+node_id
+    url = URL_CONFIG_NETCONF_TOPO + "node/" + node_id
     response = delete_request(url)
-    if wait_until_log_contains(TPCE_LOG, re.escape("onDeviceDisConnected: "+node_id), 60):
-        print("Node "+node_id+" correctly deleted from tpce topology", end='... ', flush=True)
+    if wait_until_log_contains(TPCE_LOG, re.escape("onDeviceDisConnected: " + node_id), 180):
+        print("Node " + node_id + " correctly deleted from tpce topology", end='... ', flush=True)
     else:
-        print("Node "+node_id+" still not deleted from tpce topology", end='... ', flush=True)
+        print("Node " + node_id + " still not deleted from tpce topology", end='... ', flush=True)
     return response
 
 
@@ -314,6 +364,126 @@ def portmapping_request(suffix: str):
     return get_request(url)
 
 
+def get_notifications_process_service_request(attr):
+    return post_request(URL_GET_NBINOTIFICATIONS_PROCESS_SERV, attr)
+
+
+def get_notifications_alarm_service_request(attr):
+    return post_request(URL_GET_NBINOTIFICATIONS_ALARM_SERV, attr)
+
+
+def get_service_list_request(suffix: str):
+    url = URL_OPER_SERV_LIST + suffix
+    return get_request(url)
+
+
+def service_create_request(attr):
+    return post_request(URL_SERV_CREATE, attr)
+
+
+def service_delete_request(servicename: str,
+                           requestid="e3028bae-a90f-4ddd-a83f-cf224eba0e58",
+                           notificationurl="http://localhost:8585/NotificationServer/notify"):
+    attr = {"input": {
+        "sdnc-request-header": {
+            "request-id": requestid,
+            "rpc-action": "service-delete",
+            "request-system-id": "appname",
+            "notification-url": notificationurl},
+        "service-delete-req-info": {
+            "service-name": servicename,
+            "tail-retention": "no"}}}
+    return post_request(URL_SERV_DELETE, attr)
+
+
+def service_path_request(operation: str, servicename: str, wavenumber: str, nodes, centerfreq: str,
+                         slotwidth: int, minfreq: float, maxfreq: float, lowerslotnumber: int,
+                         higherslotnumber: int):
+    attr = {"renderer:input": {
+        "renderer:service-name": servicename,
+        "renderer:wave-number": wavenumber,
+        "renderer:modulation-format": "dp-qpsk",
+        "renderer:operation": operation,
+        "renderer:nodes": nodes,
+        "renderer:center-freq": centerfreq,
+        "renderer:width": slotwidth,
+        "renderer:min-freq": minfreq,
+        "renderer:max-freq": maxfreq,
+        "renderer:lower-spectral-slot-number": lowerslotnumber,
+        "renderer:higher-spectral-slot-number": higherslotnumber}}
+    return post_request(URL_SERVICE_PATH, attr)
+
+
+def otn_service_path_request(operation: str, servicename: str, servicerate: str, serviceformat: str, nodes,
+                             eth_attr=None):
+    attr = {"service-name": servicename,
+            "operation": operation,
+            "service-rate": servicerate,
+            "service-format": serviceformat,
+            "nodes": nodes}
+    if eth_attr:
+        attr.update(eth_attr)
+    return post_request(URL_OTN_SERVICE_PATH, {"renderer:input": attr})
+
+
+def create_ots_oms_request(nodeid: str, lcp: str):
+    attr = {"input": {
+        "node-id": nodeid,
+        "logical-connection-point": lcp}}
+    return post_request(URL_CREATE_OTS_OMS, attr)
+
+
+def path_computation_request(requestid: str, servicename: str, serviceaend, servicezend,
+                             hardconstraints=None, softconstraints=None, metric="hop-count", other_attr=None):
+    attr = {"service-name": servicename,
+            "resource-reserve": "true",
+            "service-handler-header": {"request-id": requestid},
+            "service-a-end": serviceaend,
+            "service-z-end": servicezend,
+            "pce-metric": metric}
+    if hardconstraints:
+        attr.update({"hard-constraints": hardconstraints})
+    if softconstraints:
+        attr.update({"soft-constraints": softconstraints})
+    if other_attr:
+        attr.update(other_attr)
+    return post_request(URL_PATH_COMPUTATION_REQUEST, {"input": attr})
+
+
+def tapi_create_connectivity_request(topologyidorname):
+    return post_request(URL_TAPI_CREATE_CONNECTIVITY, topologyidorname)
+
+
+def tapi_delete_connectivity_request(serviceidorname):
+    attr = {
+        "input": {
+            "service-id-or-name": serviceidorname}}
+    return post_request(URL_TAPI_DELETE_CONNECTIVITY, attr)
+
+
+def tapi_get_topology_details_request(topologyidorname):
+    attr = {
+        "input": {
+            "topology-id-or-name": topologyidorname}}
+    return post_request(URL_TAPI_TOPOLOGY_DETAILS, attr)
+
+
+def tapi_get_node_details_request(topologyidorname, nodeidorname):
+    attr = {
+        "input": {
+            "topology-id-or-name": topologyidorname,
+            "node-id-or-name": nodeidorname}}
+    return post_request(URL_TAPI_NODE_DETAILS, attr)
+
+
+def tapi_get_sip_details_request():
+    return post_request(URL_TAPI_SIP_LIST, "")
+
+
+def tapi_get_service_list_request():
+    return post_request(URL_TAPI_SERVICE_LIST, "")
+
+
 def shutdown_process(process):
     if process is not None:
         for child in psutil.Process(process.pid).children():
@@ -322,15 +492,21 @@
         process.send_signal(signal.SIGINT)
 
 
-def start_honeynode(log_file: str, node_port: str, node_config_file_name: str):
-    if os.path.isfile(HONEYNODE_EXECUTABLE):
-        with open(log_file, 'w') as outfile:
+def start_honeynode(log_file: str, sim):
+    executable = os.path.join(os.path.dirname(os.path.realpath(__file__)),
+                              "..", "..", "honeynode", sim[1], "honeynode-simulator", "honeycomb-tpce")
+    sample_directory = os.path.join(os.path.dirname(os.path.realpath(__file__)),
+                                    "..", "..", "sample_configs", "openroadm", sim[1])
+    if os.path.isfile(executable):
+        with open(log_file, 'w', encoding='utf-8') as outfile:
             return subprocess.Popen(
-                [HONEYNODE_EXECUTABLE, node_port, os.path.join(SAMPLES_DIRECTORY, node_config_file_name)],
+                [executable, SIMS[sim]['port'], os.path.join(sample_directory, SIMS[sim]['configfile'])],
                 stdout=outfile, stderr=outfile)
+    return None
 
 
-def wait_until_log_contains(log_file, regexp, time_to_wait=20):
+def wait_until_log_contains(log_file, regexp, time_to_wait=60):
+    # pylint: disable=lost-exception
     stringfound = False
     filefound = False
     line = None
@@ -338,10 +514,10 @@
         with TimeOut(seconds=time_to_wait):
             while not os.path.exists(log_file):
                 time.sleep(0.2)
-            filelogs = open(log_file, 'r')
+            filelogs = open(log_file, 'r', encoding='utf-8')
             filelogs.seek(0, 2)
             filefound = True
-            print("Searching for pattern '"+regexp+"' in "+os.path.basename(log_file), end='... ', flush=True)
+            print("Searching for pattern '" + regexp + "' in " + os.path.basename(log_file), end='... ', flush=True)
             compiled_regexp = re.compile(regexp)
             while True:
                 line = filelogs.readline()
@@ -352,7 +528,7 @@
                 if not line:
                     time.sleep(0.1)
     except TimeoutError:
-        print("Pattern not found after "+str(time_to_wait), end=" seconds! ", flush=True)
+        print("Pattern not found after " + str(time_to_wait), end=" seconds! ", flush=True)
     except PermissionError:
         print("Permission Error when trying to access the log file", end=" ... ", flush=True)
     finally:
@@ -376,4 +552,5 @@
         signal.alarm(self.seconds)
 
     def __exit__(self, type, value, traceback):
+        # pylint: disable=W0622
         signal.alarm(0)