Add initial transaction-tracking scripts to help debug cluster performance issues
author Moiz Raja <moraja@cisco.com>
Wed, 10 Jun 2015 21:25:35 +0000 (14:25 -0700)
committer Gerrit Code Review <gerrit@opendaylight.org>
Fri, 12 Jun 2015 15:08:01 +0000 (15:08 +0000)
This adds an initial set of scripts for tracking how long individual
datastore transactions take to complete, based on Shard and
AbstractTransactionContext debug logging in karaf.log. This may all be
done in a better way, but I'm hoping it serves as a start.

Change-Id: Ie73a1c708efca9f463e3cb85cc77ca0e70cac15e
Signed-off-by: Moiz Raja <moraja@cisco.com>
test/tools/clustering/cluster-debugging/transaction-tracking/extract.sh [new file with mode: 0755]
test/tools/clustering/cluster-debugging/transaction-tracking/filter_transactions.py [new file with mode: 0644]
test/tools/clustering/cluster-debugging/transaction-tracking/process.py [new file with mode: 0644]
test/tools/clustering/cluster-debugging/transaction-tracking/to_csv.py [new file with mode: 0644]

diff --git a/test/tools/clustering/cluster-debugging/transaction-tracking/extract.sh b/test/tools/clustering/cluster-debugging/transaction-tracking/extract.sh
new file mode 100755 (executable)
index 0000000..81aa83b
--- /dev/null
@@ -0,0 +1,14 @@
+#!/bin/bash
+# This script is designed to be run in a directory containing the karaf.log file.
+# It greps the log file and produces three text files which can then be used to
+# find more information about transactions.
+#
+# To use this script properly, make sure karaf.log is produced with the
+# following loggers set to DEBUG (configured in etc/org.ops4j.pax.logging.cfg):
+#
+# log4j.logger.org.opendaylight.controller.cluster.datastore.Shard=DEBUG
+# log4j.logger.org.opendaylight.controller.cluster.datastore.AbstractTransactionContext=DEBUG
+#
+grep "Total modifications" karaf.log | awk '{print $1","$2"," $20 "," $23}' > txnbegin.txt
+grep "Applying local modifications for Tx" karaf.log | awk '{print $1","$2","$22}' > txnreached.txt
+grep "currentTransactionComplete" karaf.log | awk '{print $1","$2"," $18}' > txnend.txt
diff --git a/test/tools/clustering/cluster-debugging/transaction-tracking/filter_transactions.py b/test/tools/clustering/cluster-debugging/transaction-tracking/filter_transactions.py
new file mode 100644 (file)
index 0000000..0bc8ed7
--- /dev/null
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+
+import process
+import sys
+
+process.filterTransactionsByTimeToComplete(int(sys.argv[1]))
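
Assuming extract.sh has already been run in the same directory, an invocation
such as python filter_transactions.py 100 should print every transaction whose
end-to-end time exceeded 100 (the times computed by process.py come out in
milliseconds, see the note in diffInMicros below), followed by the combined
total time of those transactions.
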
diff --git a/test/tools/clustering/cluster-debugging/transaction-tracking/process.py b/test/tools/clustering/cluster-debugging/transaction-tracking/process.py
new file mode 100644 (file)
index 0000000..d69a3cc
--- /dev/null
@@ -0,0 +1,104 @@
+#!/usr/bin/env python
+
+from datetime import datetime
+import collections
+
+
+class Transaction:
+    def __init__(self, txnId, startTime, operations):
+        self.txnId = txnId
+        self.operations = operations
+        self.startTime = datetime.strptime(startTime,
+                                           '%Y-%m-%d,%H:%M:%S,%f')
+        self.reachedTime = None
+        self.completeTime = None
+
+    def setReachedTime(self, reachedTime):
+        self.reachedTime = datetime.strptime(reachedTime,
+                                             '%Y-%m-%d,%H:%M:%S,%f')
+
+    def setCompleteTime(self, completeTime):
+        self.completeTime = datetime.strptime(completeTime,
+                                              '%Y-%m-%d,%H:%M:%S,%f')
+
+    def totalTime(self):
+        return Transaction.diffInMicros(self.startTime, self.completeTime)
+
+    def transferTime(self):
+        return Transaction.diffInMicros(self.startTime, self.reachedTime)
+
+    @staticmethod
+    def diffInMicros(start, end):
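+        # Note: despite the name, the value returned here is in
+        # milliseconds, since the microsecond total is divided by 1000.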
+        if end is not None and start is not None:
+            delta = end - start
+            seconds = delta.seconds
+            microseconds = delta.microseconds
+            return (seconds * 1000000 + microseconds) / 1000
+        return -1
+
+    def __str__(self):
+        return "transactionId = " + self.txnId + ", " \
+               + "operations = " + unicode(self.operations) + ", " \
+               + "startTime = " + unicode(self.startTime) + ", " \
+               + "reachedTime = " + unicode(self.reachedTime) + ", " \
+               + "completeTime = " + unicode(self.completeTime) + ", " \
+               + "transferTime = " + unicode(self.transferTime()) + ", " \
+               + "totalTime = " + unicode(self.totalTime())
+
+    def csv(self):
+        return unicode(self.startTime) + "," \
+            + self.txnId + "," \
+            + unicode(self.operations) + "," \
+            + unicode(self.transferTime()) + "," \
+            + unicode(self.totalTime())
+
+    @staticmethod
+    def csv_header():
+        return "Start Time,Transaction Id,Operations,Transfer Time," \
+               "Complete Time"
+
+
+def processFiles():
+    txns = collections.OrderedDict()
+    txnBegin = open("txnbegin.txt", "r")
+    for line in txnBegin:
+        arr = line.split(",")
+        txns[arr[3]] = Transaction(arr[3],
+                                   arr[0] + "," + arr[1] + "," + arr[2],
+                                   int(arr[4]))
+
+    txnReached = open("txnreached.txt", "r")
+    for line in txnReached:
+        arr = line.split(",")
+        txnId = arr[3].strip()
+        if txnId in txns:
+            txn = txns[txnId]
+            txn.setReachedTime(arr[0] + "," + arr[1] + "," + arr[2])
+
+    txnComplete = open("txnend.txt", "r")
+    for line in txnComplete:
+        arr = line.split(",")
+        txnId = arr[3].strip()
+        if txnId in txns:
+            txn = txns[txnId]
+            txn.setCompleteTime(arr[0] + "," + arr[1] + "," + arr[2])
+
+    return txns
+
+
+def filterTransactionsByTimeToComplete(timeToComplete):
+    txns = processFiles()
+    totalTime = 0
+    for txn in txns:
+        if txns[txn].totalTime() > timeToComplete:
+            print txns[txn]
+            totalTime += txns[txn].totalTime()
+
+    print "Total time for these transactions = " + unicode(totalTime)
+
+
+def csv():
+    txns = processFiles()
+    print Transaction.csv_header()
+    for txn in txns:
+        print txns[txn].csv()
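
process.py can also be driven directly from a Python prompt; a minimal sketch,
assuming the three .txt files produced by extract.sh are in the working
directory:

    import process

    # Parse txnbegin.txt, txnreached.txt and txnend.txt into an
    # OrderedDict of Transaction objects keyed by transaction id.
    txns = process.processFiles()

    # Print every transaction that took longer than 100 ms end to end,
    # plus the combined total time of those transactions.
    process.filterTransactionsByTimeToComplete(100)

    # Print a CSV report, one row per transaction.
    process.csv()
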
diff --git a/test/tools/clustering/cluster-debugging/transaction-tracking/to_csv.py b/test/tools/clustering/cluster-debugging/transaction-tracking/to_csv.py
new file mode 100644 (file)
index 0000000..cf95be8
--- /dev/null
@@ -0,0 +1,5 @@
+#!/usr/bin/env python
+
+import process
+
+process.csv()
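
Redirecting the output, e.g. python to_csv.py > transactions.csv (file name
arbitrary), yields one row per transaction with its start time, transaction
id, modification count, transfer time and total time, the last two in
milliseconds.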