#!/usr/bin/env python
+
##############################################################################
# Copyright (c) 2020 Orange, Inc. and others. All rights reserved.
#
# This program and the accompanying materials are made available under the
# terms of the Apache License, Version 2.0 which accompanies this
# distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
+
+# pylint: disable=no-member
+# pylint: disable=too-many-arguments
+
import json
import os
+# pylint: disable=wrong-import-order
import sys
import re
import signal
import subprocess
+import time
import psutil
import requests
+# pylint: disable=import-error
import simulators
SIMS = simulators.SIMS
-HONEYNODE_EXECUTABLE = simulators.HONEYNODE_EXECUTABLE
-SAMPLES_DIRECTORY = simulators.SAMPLES_DIRECTORY
# Log line printed by a honeynode simulator once its NETCONF SSH endpoint is up.
HONEYNODE_OK_START_MSG = "Netconf SSH endpoint started successfully at 0.0.0.0"
# Regex marking a successful Karaf (OpenDaylight) RESTCONF start-up.
KARAF_OK_START_MSG = re.escape(
    "Blueprint container for bundle org.opendaylight.netconf.restconf")+".* was successfully created"
# Regex marking a successful lighty.io controller start-up.
LIGHTY_OK_START_MSG = re.escape("lighty.io and RESTCONF-NETCONF started")
# Default credentials for the controller RESTCONF API and the simulated nodes.
ODL_LOGIN = "admin"
ODL_PWD = "admin"
NODES_LOGIN = "admin"
NODES_PWD = "admin"
# RESTCONF URL templates: each keeps one "{}" placeholder that the *_request
# helpers below expand with RESTCONF_BASE_URL.
URL_CONFIG_NETCONF_TOPO = "{}/config/network-topology:network-topology/topology/topology-netconf/"
URL_CONFIG_ORDM_TOPO = "{}/config/ietf-network:networks/network/openroadm-topology/"
URL_CONFIG_OTN_TOPO = "{}/config/ietf-network:networks/network/otn-topology/"
URL_CONFIG_CLLI_NET = "{}/config/ietf-network:networks/network/clli-network/"
URL_CONFIG_ORDM_NET = "{}/config/ietf-network:networks/network/openroadm-network/"
URL_PORTMAPPING = "{}/config/transportpce-portmapping:network/nodes/"
URL_OPER_SERV_LIST = "{}/operational/org-openroadm-service:service-list/"
URL_GET_NBINOTIFICATIONS_PROCESS_SERV = "{}/operations/nbi-notifications:get-notifications-process-service/"
URL_GET_NBINOTIFICATIONS_ALARM_SERV = "{}/operations/nbi-notifications:get-notifications-alarm-service/"
URL_SERV_CREATE = "{}/operations/org-openroadm-service:service-create"
URL_SERV_DELETE = "{}/operations/org-openroadm-service:service-delete"
URL_SERVICE_PATH = "{}/operations/transportpce-device-renderer:service-path"
URL_OTN_SERVICE_PATH = "{}/operations/transportpce-device-renderer:otn-service-path"
URL_TAPI_CREATE_CONNECTIVITY = "{}/operations/tapi-connectivity:create-connectivity-service"
URL_TAPI_DELETE_CONNECTIVITY = "{}/operations/tapi-connectivity:delete-connectivity-service"
URL_CREATE_OTS_OMS = "{}/operations/transportpce-device-renderer:create-ots-oms"
URL_PATH_COMPUTATION_REQUEST = "{}/operations/transportpce-pce:path-computation-request"
URL_FULL_PORTMAPPING = "{}/config/transportpce-portmapping:network"
URL_TAPI_TOPOLOGY_DETAILS = "{}/operations/tapi-topology:get-topology-details"
URL_TAPI_NODE_DETAILS = "{}/operations/tapi-topology:get-node-details"
URL_TAPI_SIP_LIST = "{}/operations/tapi-common:get-service-interface-point-list"
URL_TAPI_SERVICE_LIST = "{}/operations/tapi-connectivity:get-connectivity-service-list"
# Default HTTP headers for JSON and XML RESTCONF exchanges.
TYPE_APPLICATION_JSON = {'Content-Type': 'application/json', 'Accept': 'application/json'}
TYPE_APPLICATION_XML = {'Content-Type': 'application/xml', 'Accept': 'application/xml'}
# Shared assertion messages reused by the test suites.
CODE_SHOULD_BE_200 = 'Http status code should be 200'
CODE_SHOULD_BE_201 = 'Http status code should be 201'
# Simulator logs are collected in a "log" folder next to this module.
SIM_LOG_DIRECTORY = os.path.join(os.path.dirname(os.path.realpath(__file__)), "log")

# Child processes (controller and simulators) started by this module.
process_list = []
+
# The RESTCONF port and the Karaf install folder can be overridden through
# environment variables, e.g. to run several instances in parallel.
if "USE_ODL_ALT_RESTCONF_PORT" in os.environ:
    RESTCONF_BASE_URL = "http://localhost:" + os.environ['USE_ODL_ALT_RESTCONF_PORT'] + "/restconf"
else:
    RESTCONF_BASE_URL = "http://localhost:8181/restconf"

if "USE_ODL_ALT_KARAF_INSTALL_DIR" in os.environ:
    KARAF_INSTALLDIR = os.environ['USE_ODL_ALT_KARAF_INSTALL_DIR']
else:
    KARAF_INSTALLDIR = "karaf"
KARAF_LOG = os.path.join(
    os.path.dirname(os.path.realpath(__file__)),
    "..", "..", "..", KARAF_INSTALLDIR, "target", "assembly", "data", "log", "karaf.log")
# With lighty, each run writes to its own pid-suffixed log; with Karaf the
# controller log is karaf.log.
if "USE_LIGHTY" in os.environ and os.environ['USE_LIGHTY'] == 'True':
    TPCE_LOG = 'odl-' + str(os.getpid()) + '.log'
else:
    TPCE_LOG = KARAF_LOG
def start_sims(sims_list):
for sim in sims_list:
- print("starting simulator for " + sim + "...")
- log_file = os.path.join(LOG_DIRECTORY, SIMS[sim]['logfile'])
- process = start_honeynode(log_file, SIMS[sim]['port'], SIMS[sim]['configfile'])
+ print("starting simulator " + sim[0] + " in OpenROADM device version " + sim[1] + "...")
+ log_file = os.path.join(SIM_LOG_DIRECTORY, SIMS[sim]['logfile'])
+ process = start_honeynode(log_file, sim)
if wait_until_log_contains(log_file, HONEYNODE_OK_START_MSG, 100):
- print("simulator for " + sim + " started")
+ print("simulator for " + sim[0] + " started")
else:
- print("simulator for " + sim + " failed to start")
+ print("simulator for " + sim[0] + " failed to start")
shutdown_process(process)
for pid in process_list:
shutdown_process(pid)
print("starting OpenDaylight...")
if "USE_LIGHTY" in os.environ and os.environ['USE_LIGHTY'] == 'True':
process = start_lighty()
- # TODO: add some sort of health check similar to Karaf below
+ start_msg = LIGHTY_OK_START_MSG
else:
process = start_karaf()
- if wait_until_log_contains(KARAF_LOG, KARAF_OK_START_MSG, time_to_wait=60):
- print("OpenDaylight started !")
- else:
- print("OpenDaylight failed to start !")
- shutdown_process(process)
- for pid in process_list:
- shutdown_process(pid)
- sys.exit(1)
+ start_msg = KARAF_OK_START_MSG
+ if wait_until_log_contains(TPCE_LOG, start_msg, time_to_wait=300):
+ print("OpenDaylight started !")
+ else:
+ print("OpenDaylight failed to start !")
+ shutdown_process(process)
+ for pid in process_list:
+ shutdown_process(pid)
+ sys.exit(1)
process_list.append(process)
return process_list
print("starting KARAF TransportPCE build...")
executable = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
- "..", "..", "..", "karaf", "target", "assembly", "bin", "karaf")
- with open('odl.log', 'w') as outfile:
+ "..", "..", "..", KARAF_INSTALLDIR, "target", "assembly", "bin", "karaf")
+ with open('odl.log', 'w', encoding='utf-8') as outfile:
return subprocess.Popen(
["sh", executable, "server"], stdout=outfile, stderr=outfile, stdin=None)
os.path.dirname(os.path.realpath(__file__)),
"..", "..", "..", "lighty", "target", "tpce",
"clean-start-controller.sh")
- with open('odl.log', 'w') as outfile:
+ with open(TPCE_LOG, 'w', encoding='utf-8') as outfile:
return subprocess.Popen(
["sh", executable], stdout=outfile, stderr=outfile, stdin=None)
print("installing feature " + feature_name)
executable = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
- "..", "..", "..", "karaf", "target", "assembly", "bin", "client")
+ "..", "..", "..", KARAF_INSTALLDIR, "target", "assembly", "bin", "client")
return subprocess.run([executable],
- input='feature:install ' + feature_name + '\n feature:list | grep tapi \n logout \n',
- universal_newlines=True)
+ input='feature:install ' + feature_name + '\n feature:list | grep '
+ + feature_name + ' \n logout \n',
+ universal_newlines=True, check=False)
+
+
def get_request(url):
    """Send a GET to *url*, a template with one "{}" slot for the RESTCONF base URL."""
    target = url.format(RESTCONF_BASE_URL)
    return requests.request("GET", target,
                            headers=TYPE_APPLICATION_JSON,
                            auth=(ODL_LOGIN, ODL_PWD))
+
+
def post_request(url, data):
    """POST to the expanded RESTCONF URL; *data*, when truthy, is JSON-serialized
    as the request body (and echoed on stdout to ease test debugging)."""
    target = url.format(RESTCONF_BASE_URL)
    kwargs = {"headers": TYPE_APPLICATION_JSON,
              "auth": (ODL_LOGIN, ODL_PWD)}
    if data:
        payload = json.dumps(data)
        print(payload)
        kwargs["data"] = payload
    return requests.request("POST", target, **kwargs)
+
+
def post_xmlrequest(url, data):
    """POST raw XML *data* to the expanded RESTCONF URL; returns None when no
    payload is given (no request is sent in that case)."""
    if not data:
        return None
    return requests.request("POST", url.format(RESTCONF_BASE_URL),
                            data=data,
                            headers=TYPE_APPLICATION_XML,
                            auth=(ODL_LOGIN, ODL_PWD))
+
+
def put_request(url, data):
    """PUT *data*, JSON-serialized, to the expanded RESTCONF URL."""
    target = url.format(RESTCONF_BASE_URL)
    payload = json.dumps(data)
    return requests.request("PUT", target,
                            data=payload,
                            headers=TYPE_APPLICATION_JSON,
                            auth=(ODL_LOGIN, ODL_PWD))
def put_xmlrequest(url, data):
    """PUT raw (already serialized) XML *data* to the expanded RESTCONF URL."""
    return requests.request(
        "PUT", url.format(RESTCONF_BASE_URL),
        data=data,
        headers=TYPE_APPLICATION_XML,
        auth=(ODL_LOGIN, ODL_PWD))
def put_jsonrequest(url, data):
    """PUT raw (already serialized) JSON *data* to the expanded RESTCONF URL.

    NOTE(review): behaviourally identical to rawput_request - candidates for
    merging once all callers are known.
    """
    return requests.request(
        "PUT", url.format(RESTCONF_BASE_URL),
        data=data,
        headers=TYPE_APPLICATION_JSON,
        auth=(ODL_LOGIN, ODL_PWD))
def rawput_request(url, data):
    """PUT pre-serialized JSON *data* to the expanded RESTCONF URL.

    NOTE(review): duplicates put_jsonrequest exactly - kept for caller
    compatibility.
    """
    return requests.request(
        "PUT", url.format(RESTCONF_BASE_URL),
        data=data,
        headers=TYPE_APPLICATION_JSON,
        auth=(ODL_LOGIN, ODL_PWD))
+
+
def rawpost_request(url, data):
    """POST pre-serialized JSON *data* to the expanded RESTCONF URL."""
    target = url.format(RESTCONF_BASE_URL)
    return requests.request("POST", target,
                            data=data,
                            headers=TYPE_APPLICATION_JSON,
                            auth=(ODL_LOGIN, ODL_PWD))
+
+
def delete_request(url):
    """Send a DELETE to the expanded RESTCONF URL."""
    target = url.format(RESTCONF_BASE_URL)
    return requests.request("DELETE", target,
                            headers=TYPE_APPLICATION_JSON,
                            auth=(ODL_LOGIN, ODL_PWD))
def mount_device(node_id, sim):
    """Mount simulator *sim* (a key of SIMS) as *node_id* in the controller
    NETCONF topology, then wait (up to 180s) for the TPCE log to confirm it.

    Returns the requests.Response of the PUT.
    NOTE(review): unlike mount_tapi_device, no "netconf-node-topology:host"
    key is sent here - confirm against the full file that this is intended.
    """
    url = URL_CONFIG_NETCONF_TOPO + "node/" + node_id
    body = {"node": [{
        "node-id": node_id,
        "netconf-node-topology:username": NODES_LOGIN,
        "netconf-node-topology:password": NODES_PWD,
        "netconf-node-topology:port": SIMS[sim]['port'],
        "netconf-node-topology:tcp-only": "false",
        "netconf-node-topology:pass-through": {}}]}
    response = put_request(url, body)
    if wait_until_log_contains(TPCE_LOG, re.escape("Triggering notification stream NETCONF for node " + node_id), 180):
        print("Node " + node_id + " correctly added to tpce topology", end='... ', flush=True)
    else:
        print("Node " + node_id + " still not added to tpce topology", end='... ', flush=True)
        if response.status_code == requests.codes.ok:
            # a 200 (rather than 201) means the node already existed
            print("It was probably loaded at start-up", end='... ', flush=True)
        # TODO an else-clause to abort test would probably be nice here
    return response
+
+
def mount_tapi_device(node_id, sim):
    """Mount simulator *sim* as *node_id* and wait (up to 200s) for the TAPI
    merge confirmation in the TPCE log.

    NOTE(review): the "for or node" wording below must match the controller
    log verbatim - do not "fix" it without checking the TPCE sources.
    NOTE(review): no `return response` is visible here, unlike mount_device -
    confirm against the full file.
    """
    url = URL_CONFIG_NETCONF_TOPO + "node/" + node_id
    body = {"node": [{
        "node-id": node_id,
        "netconf-node-topology:username": NODES_LOGIN,
        "netconf-node-topology:password": NODES_PWD,
        "netconf-node-topology:host": "127.0.0.1",
        "netconf-node-topology:port": SIMS[sim]['port'],
        "netconf-node-topology:tcp-only": "false",
        "netconf-node-topology:pass-through": {}}]}
    response = put_request(url, body)
    if wait_until_log_contains(TPCE_LOG, re.escape(f"TAPI node for or node {node_id} successfully merged"), 200):
        print("Node " + node_id + " correctly added to tpce topology", end='... ', flush=True)
    else:
        print("Node " + node_id + " still not added to tpce topology", end='... ', flush=True)
        if response.status_code == requests.codes.ok:
            # a 200 (rather than 201) means the node already existed
            print("It was probably loaded at start-up", end='... ', flush=True)
        # TODO an else-clause to abort test would probably be nice here
def unmount_device(node_id):
    """Delete *node_id* from the controller NETCONF topology and wait
    (up to 180s) for the disconnection to appear in the TPCE log.

    Returns the requests.Response of the DELETE.
    """
    url = URL_CONFIG_NETCONF_TOPO + "node/" + node_id
    response = delete_request(url)
    if wait_until_log_contains(TPCE_LOG, re.escape("onDeviceDisConnected: " + node_id), 180):
        print("Node " + node_id + " correctly deleted from tpce topology", end='... ', flush=True)
    else:
        print("Node " + node_id + " still not deleted from tpce topology", end='... ', flush=True)
    return response
def connect_xpdr_to_rdm_request(xpdr_node: str, xpdr_num: str, network_num: str,
                                rdm_node: str, srg_num: str, termination_num: str):
    """RPC init-xpdr-rdm-links: create the link from a transponder network
    port towards a ROADM SRG termination point."""
    url = "{}/operations/transportpce-networkutils:init-xpdr-rdm-links"
    data = {
        "networkutils:input": {
            "networkutils:links-input": {
                "networkutils:xpdr-node": xpdr_node,
                "networkutils:xpdr-num": xpdr_num,
                "networkutils:network-num": network_num,
                "networkutils:rdm-node": rdm_node,
                "networkutils:srg-num": srg_num,
                "networkutils:termination-point-num": termination_num
            }
        }
    }
    return post_request(url, data)
+
+
+def connect_rdm_to_xpdr_request(xpdr_node: str, xpdr_num: str, network_num: str,
+ rdm_node: str, srg_num: str, termination_num: str):
+ url = "{}/operations/transportpce-networkutils:init-rdm-xpdr-links"
data = {
"networkutils:input": {
"networkutils:links-input": {
}
}
}
- return data
+ return post_request(url, data)
+
+
def connect_xpdr_to_rdm_tapi_request(xpdr_node: str, xpdr_num: str, rdm_node: str, srg_num: str):
    """RPC init-xpdr-rdm-tapi-link: create the TAPI link between a transponder
    network port and a ROADM add/drop port."""
    payload = {"input": {"xpdr-node": xpdr_node,
                         "network-tp": xpdr_num,
                         "rdm-node": rdm_node,
                         "add-drop-tp": srg_num}}
    return post_request("{}/operations/transportpce-tapinetworkutils:init-xpdr-rdm-tapi-link", payload)
+
+
def check_netconf_node_request(node: str, suffix: str):
    """GET *suffix* under the mounted org-openroadm-device tree of *node*."""
    url = URL_CONFIG_NETCONF_TOPO + (
        "node/" + node + "/yang-ext:mount/org-openroadm-device:org-openroadm-device/" + suffix
    )
    return get_request(url)
+
+
def get_netconf_oper_request(node: str):
    """GET the operational datastore entry of *node* in the NETCONF topology."""
    url = "{}/operational/network-topology:network-topology/topology/topology-netconf/node/" + node
    return get_request(url)


def get_ordm_topo_request(suffix: str):
    """GET *suffix* below the configured openroadm-topology."""
    url = URL_CONFIG_ORDM_TOPO + suffix
    return get_request(url)
+
+
def add_oms_attr_request(link: str, attr):
    """PUT the OMS span attributes *attr* on *link* in the openroadm-topology."""
    url = URL_CONFIG_ORDM_TOPO + (
        "ietf-network-topology:link/" + link + "/org-openroadm-network-topology:OMS-attributes/span"
    )
    return put_request(url, attr)


def del_oms_attr_request(link: str):
    """DELETE the OMS span attributes of *link* in the openroadm-topology."""
    url = URL_CONFIG_ORDM_TOPO + (
        "ietf-network-topology:link/" + link + "/org-openroadm-network-topology:OMS-attributes/span"
    )
    return delete_request(url)
+
+
def get_clli_net_request():
    """GET the whole configured clli-network."""
    return get_request(URL_CONFIG_CLLI_NET)


def get_ordm_net_request():
    """GET the whole configured openroadm-network."""
    return get_request(URL_CONFIG_ORDM_NET)


def get_otn_topo_request():
    """GET the whole configured otn-topology."""
    return get_request(URL_CONFIG_OTN_TOPO)
+
+
def del_link_request(link: str):
    """DELETE *link* from the configured openroadm-topology."""
    url = URL_CONFIG_ORDM_TOPO + ("ietf-network-topology:link/" + link)
    return delete_request(url)


def del_node_request(node: str):
    """DELETE *node* from the configured clli-network."""
    url = URL_CONFIG_CLLI_NET + ("node/" + node)
    return delete_request(url)


def portmapping_request(suffix: str):
    """GET *suffix* below the portmapping nodes tree."""
    url = URL_PORTMAPPING + suffix
    return get_request(url)
+
+
def get_notifications_process_service_request(attr):
    """RPC get-notifications-process-service with caller-supplied input *attr*."""
    return post_request(URL_GET_NBINOTIFICATIONS_PROCESS_SERV, attr)


def get_notifications_alarm_service_request(attr):
    """RPC get-notifications-alarm-service with caller-supplied input *attr*."""
    return post_request(URL_GET_NBINOTIFICATIONS_ALARM_SERV, attr)


def get_service_list_request(suffix: str):
    """GET *suffix* below the operational org-openroadm service-list."""
    url = URL_OPER_SERV_LIST + suffix
    return get_request(url)


def service_create_request(attr):
    """RPC org-openroadm-service:service-create with caller-supplied input *attr*."""
    return post_request(URL_SERV_CREATE, attr)
+
+
def service_delete_request(servicename: str,
                           requestid="e3028bae-a90f-4ddd-a83f-cf224eba0e58",
                           notificationurl="http://localhost:8585/NotificationServer/notify"):
    """RPC org-openroadm-service:service-delete for *servicename*.

    *requestid* and *notificationurl* default to fixed test values.
    """
    header = {"request-id": requestid,
              "rpc-action": "service-delete",
              "request-system-id": "appname",
              "notification-url": notificationurl}
    delete_info = {"service-name": servicename,
                   "tail-retention": "no"}
    payload = {"input": {"sdnc-request-header": header,
                         "service-delete-req-info": delete_info}}
    return post_request(URL_SERV_DELETE, payload)
+
+
def service_path_request(operation: str, servicename: str, wavenumber: str, nodes, centerfreq: str,
                         slotwidth: int, minfreq: float, maxfreq: float, lowerslotnumber: int,
                         higherslotnumber: int):
    """RPC service-path on the device renderer.

    *operation* selects create/delete; spectrum is described by the
    centre frequency, slot width and the spectral slot number range.
    Modulation format is fixed to dp-qpsk here.
    """
    attr = {"renderer:input": {
        "renderer:service-name": servicename,
        "renderer:wave-number": wavenumber,
        "renderer:modulation-format": "dp-qpsk",
        "renderer:operation": operation,
        "renderer:nodes": nodes,
        "renderer:center-freq": centerfreq,
        "renderer:nmc-width": slotwidth,
        "renderer:min-freq": minfreq,
        "renderer:max-freq": maxfreq,
        "renderer:lower-spectral-slot-number": lowerslotnumber,
        "renderer:higher-spectral-slot-number": higherslotnumber}}
    return post_request(URL_SERVICE_PATH, attr)
+
+
def otn_service_path_request(operation: str, servicename: str, servicerate: str, serviceformat: str, nodes,
                             eth_attr=None):
    """RPC otn-service-path on the device renderer.

    *eth_attr*, when provided, carries extra Ethernet-specific leaves that
    are merged into the RPC input.
    """
    rpc_input = {"service-name": servicename,
                 "operation": operation,
                 "service-rate": servicerate,
                 "service-format": serviceformat,
                 "nodes": nodes}
    if eth_attr:
        rpc_input.update(eth_attr)
    return post_request(URL_OTN_SERVICE_PATH, {"renderer:input": rpc_input})
+
+
def create_ots_oms_request(nodeid: str, lcp: str):
    """RPC create-ots-oms for logical connection point *lcp* of node *nodeid*."""
    payload = {"input": {"node-id": nodeid,
                         "logical-connection-point": lcp}}
    return post_request(URL_CREATE_OTS_OMS, payload)
+
+
def path_computation_request(requestid: str, servicename: str, serviceaend, servicezend,
                             hardconstraints=None, softconstraints=None, metric="hop-count", other_attr=None):
    """RPC path-computation-request towards the PCE.

    Optional hard/soft constraints and any *other_attr* mapping are merged
    into the RPC input when provided; *metric* defaults to hop-count.
    """
    attr = {"service-name": servicename,
            "resource-reserve": "true",
            "service-handler-header": {"request-id": requestid},
            "service-a-end": serviceaend,
            "service-z-end": servicezend,
            "pce-metric": metric}
    if hardconstraints:
        attr.update({"hard-constraints": hardconstraints})
    if softconstraints:
        attr.update({"soft-constraints": softconstraints})
    if other_attr:
        attr.update(other_attr)
    return post_request(URL_PATH_COMPUTATION_REQUEST, {"input": attr})
+
+
def tapi_create_connectivity_request(topologyidorname):
    """RPC tapi-connectivity:create-connectivity-service; the caller supplies
    the complete input body."""
    return post_request(URL_TAPI_CREATE_CONNECTIVITY, topologyidorname)


def tapi_delete_connectivity_request(serviceidorname):
    """RPC tapi-connectivity:delete-connectivity-service for the given
    service id or name."""
    attr = {
        "input": {
            "service-id-or-name": serviceidorname}}
    return post_request(URL_TAPI_DELETE_CONNECTIVITY, attr)


def tapi_get_topology_details_request(topologyidorname):
    """RPC tapi-topology:get-topology-details for the given topology."""
    attr = {
        "input": {
            "topology-id-or-name": topologyidorname}}
    return post_request(URL_TAPI_TOPOLOGY_DETAILS, attr)


def tapi_get_node_details_request(topologyidorname, nodeidorname):
    """RPC tapi-topology:get-node-details for a node within a topology."""
    attr = {
        "input": {
            "topology-id-or-name": topologyidorname,
            "node-id-or-name": nodeidorname}}
    return post_request(URL_TAPI_NODE_DETAILS, attr)


def tapi_get_sip_details_request():
    """RPC tapi-common:get-service-interface-point-list (empty input)."""
    return post_request(URL_TAPI_SIP_LIST, "")


def tapi_get_service_list_request():
    """RPC tapi-connectivity:get-connectivity-service-list (empty input)."""
    return post_request(URL_TAPI_SERVICE_LIST, "")
def shutdown_process(process):
process.send_signal(signal.SIGINT)
def start_honeynode(log_file: str, sim):
    """Launch the honeynode simulator for *sim* (a SIMS key whose sim[1] is
    the OpenROADM device version), redirecting stdout/stderr to *log_file*.

    Returns the subprocess.Popen handle, or None when the simulator
    executable is not present.
    """
    executable = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                              "..", "..", "honeynode", sim[1], "honeynode-simulator", "honeycomb-tpce")
    sample_directory = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                    "..", "..", "sample_configs", "openroadm", sim[1])
    if os.path.isfile(executable):
        with open(log_file, 'w', encoding='utf-8') as outfile:
            return subprocess.Popen(
                [executable, SIMS[sim]['port'], os.path.join(sample_directory, SIMS[sim]['configfile'])],
                stdout=outfile, stderr=outfile)
    return None
def wait_until_log_contains(log_file, regexp, time_to_wait=60):
    """Poll *log_file* until a line matches *regexp* or *time_to_wait* seconds
    elapse (enforced by the SIGALRM-based TimeOut context manager).

    Only lines appended after this call are considered: the file is opened
    and seeked to its end first.  Returns True when the pattern was found.
    """
    # pylint: disable=lost-exception
    # pylint: disable=consider-using-with
    stringfound = False
    filefound = False
    line = None
    try:
        with TimeOut(seconds=time_to_wait):
            # the log file may not exist yet right after process start-up
            while not os.path.exists(log_file):
                time.sleep(0.2)
            filelogs = open(log_file, 'r', encoding='utf-8')
            # jump to end of file: match freshly appended lines only
            filelogs.seek(0, 2)
            filefound = True
            print("Searching for pattern '" + regexp + "' in " + os.path.basename(log_file), end='... ', flush=True)
            compiled_regexp = re.compile(regexp)
            while True:
                line = filelogs.readline()
                if compiled_regexp.search(line):
                    print("Pattern found!", end=' ')
                    stringfound = True
                    break
                if not line:
                    # no new data yet - avoid busy-waiting
                    time.sleep(0.1)
    except TimeoutError:
        print("Pattern not found after " + str(time_to_wait), end=" seconds! ", flush=True)
    except PermissionError:
        print("Permission Error when trying to access the log file", end=" ... ", flush=True)
    finally:
        if filefound:
            filelogs.close()
        else:
            print("log file does not exist or is not accessible... ", flush=True)
        # NOTE: returning from finally swallows any in-flight exception
        # (hence the lost-exception disable above)
        return stringfound
class TimeOut:
signal.alarm(self.seconds)
def __exit__(self, type, value, traceback):
+ # pylint: disable=W0622
signal.alarm(0)