Adjusts for new log server 32/41232/1
author: Luis Gomez <ecelgp@gmail.com>
Sat, 2 Jul 2016 01:02:59 +0000 (18:02 -0700)
committer: Luis Gomez <ecelgp@gmail.com>
Sat, 2 Jul 2016 01:02:59 +0000 (18:02 -0700)
- Add archive macro in all integration templates.
- Remove log compression; this is now handled by the new archive macro.

Change-Id: I7029a6de57d4730c46d8fffda5f8a009dfe01d0a
Signed-off-by: Luis Gomez <ecelgp@gmail.com>
jjb/integration/include-raw-integration-deploy-controller-offline.sh
jjb/integration/include-raw-integration-deploy-controller-run-test.sh
jjb/integration/include-raw-integration-start-cluster-run-test.sh
jjb/integration/integration-distribution-jobs.yaml
jjb/integration/integration-test-jobs.yaml

index dd2ae6df9e101e7c6735b79e436ee396b14a600f..d7fc720e8871d139540033982b2f0a4299e01852 100644 (file)
@@ -60,16 +60,12 @@ sleep 10
 echo "Installing all features..."
 sshpass -p karaf ${WORKSPACE}/${BUNDLEFOLDER}/bin/client -u karaf "feature:install ${ACTUALFEATURES}" || echo $? > ${WORKSPACE}/error.txt
 
-echo "dumping first 500K bytes of karaf log..."
-head --bytes=500K "${WORKSPACE}/${BUNDLEFOLDER}/data/log/karaf.log" > "karaf.log"
-echo "dumping last 500K bytes of karaf log..."
-tail --bytes=500K "${WORKSPACE}/${BUNDLEFOLDER}/data/log/karaf.log" > "karaf.log"
-# TODO: Do we want different name for karaf.log chunk to signal it may be not complete?
 echo "killing karaf process..."
 ps axf | grep karaf | grep -v grep | awk '{print "kill -9 " $1}' | sh
 sleep 5
-xz -9ekvv "${WORKSPACE}/${BUNDLEFOLDER}/data/log/karaf.log"
-mv "${WORKSPACE}/${BUNDLEFOLDER}/data/log/karaf.log.xz" .
+
+echo "Fetching Karaf logs"
+cp ${WORKSPACE}/${BUNDLEFOLDER}/data/log/karaf.log .
 
 echo "Exit with error"
 if [ -f ${WORKSPACE}/error.txt ]; then
@@ -78,4 +74,3 @@ if [ -f ${WORKSPACE}/error.txt ]; then
 fi
 
 # vim: ts=4 sw=4 sts=4 et ft=sh :
-
index a36d6cca8611c49c853091a6906b6be63cb73677..18c009e5a5abe2c4841ab3ebbbe4741ea7045095 100644 (file)
@@ -164,12 +164,9 @@ pybot -N ${TESTPLAN} -c critical -e exclude -v BUNDLEFOLDER:${BUNDLEFOLDER} -v W
 
 echo "Killing ODL and fetching Karaf log..."
 set +e  # We do not want to create red dot just because something went wrong while fetching logs.
-ssh "${ODL_SYSTEM_IP}" tail --bytes=1M "/tmp/${BUNDLEFOLDER}/data/log/karaf.log" > "karaf.log"
 ssh "${ODL_SYSTEM_IP}" bash -c 'ps axf | grep karaf | grep -v grep | awk '"'"'{print "kill -9 " $1}'"'"' | sh'
 sleep 5
-ssh "${ODL_SYSTEM_IP}" xz -9ekvv "/tmp/${BUNDLEFOLDER}/data/log/karaf.log"
-scp "${ODL_SYSTEM_IP}:/tmp/${BUNDLEFOLDER}/data/log/karaf.log.xz" .
+scp "${ODL_SYSTEM_IP}:/tmp/${BUNDLEFOLDER}/data/log/karaf.log" .
 true  # perhaps Jenkins is testing last exit code
 
 # vim: ts=4 sw=4 sts=4 et ft=sh :
-
index 4bc956436ad5a7e2efd0ccb2377039901f989366..0debe1be4652b13c97be5daf9d16a9669538c536 100644 (file)
@@ -121,10 +121,6 @@ set +e  # We do not want to create red dot just because something went wrong whi
 for i in `seq 1 ${NUM_ODL_SYSTEM}`
 do
     CONTROLLERIP=ODL_SYSTEM_${i}_IP
-    echo "dumping first 500K bytes of karaf log..." > "odl${i}_karaf.log"
-    ssh "${!CONTROLLERIP}" head --bytes=500K "/tmp/${BUNDLEFOLDER}/data/log/karaf.log" >> "odl${i}_karaf.log"
-    echo "dumping last 500K bytes of karaf log..." >> "odl${i}_karaf.log"
-    ssh "${!CONTROLLERIP}" tail --bytes=500K "/tmp/${BUNDLEFOLDER}/data/log/karaf.log" >> "odl${i}_karaf.log"
     echo "killing karaf process..."
     ssh "${!CONTROLLERIP}" bash -c 'ps axf | grep karaf | grep -v grep | awk '"'"'{print "kill -9 " $1}'"'"' | sh'
 done
@@ -132,10 +128,8 @@ sleep 5
 for i in `seq 1 ${NUM_ODL_SYSTEM}`
 do
     CONTROLLERIP=ODL_SYSTEM_${i}_IP
-    ssh "${!CONTROLLERIP}" xz -9ekvv "/tmp/${BUNDLEFOLDER}/data/log/karaf.log"
-    scp "${!CONTROLLERIP}:/tmp/${BUNDLEFOLDER}/data/log/karaf.log.xz" "odl${i}_karaf.log.xz"
+    scp "${!CONTROLLERIP}:/tmp/${BUNDLEFOLDER}/data/log/karaf.log" "odl${i}_karaf.log"
 done
 true  # perhaps Jenkins is testing last exit code
 
 # vim: ts=4 sw=4 sts=4 et ft=sh :
-
index a21e56b8aa5413e8a0719aa06aec59d375d5a339..316431bcbb50c5536e3915297fddb37f7e7070d9 100644 (file)
         - integration-deploy-controller-verify
 
     publishers:
-        - archive:
-            artifacts: '*.log,*.log.xz'
         - email-notification:
             email-prefix: '[integration]'
+        - integration-csit-archive-build
+        - archive-build:
+            maven-version: '{mvn33}'
 
 # Template: integration-distribution-offline-{stream}
 # Goal: Verify distribution can start with no internet connection
         - integration-deploy-controller-offline
 
     publishers:
-        - archive:
-            artifacts: '*.log,*.log.xz'
         - email-notification:
             email-prefix: '[integration]'
+        - integration-csit-archive-build
+        - archive-build:
+            maven-version: '{mvn33}'
index 82d1d558dcb512db663583949ded0f4e66c5c443..f744433a6d622242b426cf35094b09a90ef139cb 100644 (file)
         - integration-robot:
             unstable-if: 0.0
             pass-if: 100.0
-        - archive:
-            artifacts: '*.log,*.log.xz'
         - email-notification:
             email-prefix: '[integration]'
+        - integration-csit-archive-build
+        - archive-build:
+            maven-version: '{mvn33}'
 
 # Template: integration-distribution-test-{stream}
 # Goal: Verify a distribution through all system test available