X-Git-Url: https://git.opendaylight.org/gerrit/gitweb?a=blobdiff_plain;f=jjb%2Fintegration%2Finclude-raw-integration-deploy-controller-run-test.sh;h=b08af74c471cdda55c5fb653f2c8559778434ce6;hb=e0bd0b45eb352a3ae8b14e477087bf9188681889;hp=0fe7c0ab1da0d353365c5d7989971a93643f79ac;hpb=afdacf93d9967d04f1698fbec3fa89242f1a31c2;p=releng%2Fbuilder.git

diff --git a/jjb/integration/include-raw-integration-deploy-controller-run-test.sh b/jjb/integration/include-raw-integration-deploy-controller-run-test.sh
index 0fe7c0ab1..b08af74c4 100644
--- a/jjb/integration/include-raw-integration-deploy-controller-run-test.sh
+++ b/jjb/integration/include-raw-integration-deploy-controller-run-test.sh
@@ -11,13 +11,6 @@ if [ "${ENABLE_HAPROXY_FOR_NEUTRON}" == "yes" ]; then
     AKKACONF=/tmp/${BUNDLEFOLDER}/configuration/initial/akka.conf
     MODULESCONF=/tmp/${BUNDLEFOLDER}/configuration/initial/modules.conf
     MODULESHARDSCONF=/tmp/${BUNDLEFOLDER}/configuration/initial/module-shards.conf
-    # Create the string for odl nodes
-    odl_node_list="${ODL_SYSTEM_1_IP}"
-    for i in `seq 2 ${NUM_ODL_SYSTEM}` ; do
-        CONTROLLERIP=ODL_SYSTEM_${i}_IP
-        odl_node_list="${odl_node_list} ${!CONTROLLERIP}"
-    done
-    echo ${odl_node_list}
 fi
 
 if [ ${CONTROLLERSCOPE} == 'all' ]; then
@@ -50,7 +43,7 @@ echo "Changing to /tmp"
 cd /tmp
 
 echo "Downloading the distribution..."
-wget --progress=dot:mega '${ACTUALBUNDLEURL}'
+wget --progress=dot:mega '${ACTUAL_BUNDLE_URL}'
 
 echo "Extracting the new controller..."
 unzip -q ${BUNDLE}
@@ -58,7 +51,7 @@ unzip -q ${BUNDLE}
 echo "Configuring the startup features..."
 FEATURESCONF=/tmp/${BUNDLEFOLDER}/etc/org.apache.karaf.features.cfg
 CUSTOMPROP=/tmp/${BUNDLEFOLDER}/etc/custom.properties
-sed -ie "s/featuresBoot=.*/featuresBoot=config,standard,region,package,kar,ssh,management,${ACTUALFEATURES}/g" \${FEATURESCONF}
+sed -ie "s/\(featuresBoot=\|featuresBoot =\)/featuresBoot = ${ACTUALFEATURES},/g" \${FEATURESCONF}
 sed -ie "s%mvn:org.opendaylight.integration/features-integration-index/${BUNDLEVERSION}/xml/features%mvn:org.opendaylight.integration/features-integration-index/${BUNDLEVERSION}/xml/features,mvn:org.opendaylight.integration/features-integration-test/${BUNDLEVERSION}/xml/features,mvn:org.apache.karaf.decanter/apache-karaf-decanter/1.0.0/xml/features%g" \${FEATURESCONF}
 cat \${FEATURESCONF}
@@ -73,6 +66,7 @@ LOGCONF=/tmp/${BUNDLEFOLDER}/etc/org.ops4j.pax.logging.cfg
 sed -ie 's/log4j.appender.out.maxBackupIndex=10/log4j.appender.out.maxBackupIndex=1/g' \${LOGCONF}
 # FIXME: Make log size limit configurable from build parameter.
 sed -ie 's/log4j.appender.out.maxFileSize=1MB/log4j.appender.out.maxFileSize=30GB/g' \${LOGCONF}
+echo "log4j.logger.org.opendaylight.yangtools.yang.parser.repo.YangTextSchemaContextResolver = WARN" >> \${LOGCONF}
 cat \${LOGCONF}
 
 echo "Configure java home and max memory..."
@@ -106,7 +100,7 @@ if [ "${ENABLE_HAPROXY_FOR_NEUTRON}" == "yes" ]; then
 fi
 
 echo "Configuring cluster"
-/tmp/${BUNDLEFOLDER}/bin/configure_cluster.sh \$1 ${odl_node_list}
+/tmp/${BUNDLEFOLDER}/bin/configure_cluster.sh \$1 \$2
 
 echo "Dump akka.conf"
 cat ${AKKACONF}
@@ -181,13 +175,26 @@ exit_on_log_file_message 'server is unhealthy'
 EOF
 
-# Copy over the config script to controller and execute it.
-for i in `seq 1 ${NUM_ODL_SYSTEM}`
+[ "$NUM_OPENSTACK_SITES" ] || NUM_OPENSTACK_SITES=1
+NUM_ODLS_PER_SITE=$((NUM_ODL_SYSTEM / NUM_OPENSTACK_SITES))
+for i in `seq 1 ${NUM_OPENSTACK_SITES}`
 do
-    CONTROLLERIP=ODL_SYSTEM_${i}_IP
-    echo "Execute the configuration script on controller ${!CONTROLLERIP}"
-    scp ${WORKSPACE}/configuration-script.sh ${!CONTROLLERIP}:/tmp
-    ssh ${!CONTROLLERIP} "bash /tmp/configuration-script.sh ${i}"
+    # Get full list of ODL nodes for this site
+    odl_node_list=
+    for j in `seq 1 ${NUM_ODLS_PER_SITE}`
+    do
+        odl_ip=ODL_SYSTEM_$(((i - 1) * NUM_ODLS_PER_SITE + j))_IP
+        odl_node_list="${odl_node_list} ${!odl_ip}"
+    done
+
+    for j in `seq 1 ${NUM_ODLS_PER_SITE}`
+    do
+        odl_ip=ODL_SYSTEM_$(((i - 1) * NUM_ODLS_PER_SITE + j))_IP
+        # Copy over the config script to controller and execute it (parameters are used only for cluster)
+        echo "Execute the configuration script on controller ${!odl_ip} for index $j with node list ${odl_node_list}"
+        scp ${WORKSPACE}/configuration-script.sh ${!odl_ip}:/tmp
+        ssh ${!odl_ip} "bash /tmp/configuration-script.sh ${j} '${odl_node_list}'"
+    done
 done
 
 echo "Locating config plan to use..."
@@ -216,32 +223,36 @@ do
     ssh ${!CONTROLLERIP} "bash /tmp/startup-script.sh"
 done
 
+seed_index=1
 for i in `seq 1 ${NUM_ODL_SYSTEM}`
 do
     CONTROLLERIP=ODL_SYSTEM_${i}_IP
     echo "Execute the post startup script on controller ${!CONTROLLERIP}"
     scp ${WORKSPACE}/post-startup-script.sh ${!CONTROLLERIP}:/tmp
-    ssh ${!CONTROLLERIP} "bash /tmp/post-startup-script.sh ${i}"
+    ssh ${!CONTROLLERIP} "bash /tmp/post-startup-script.sh $(( seed_index++ ))"
+    if [ $(( $i % (${NUM_ODL_SYSTEM} / ${NUM_OPENSTACK_SITES}) )) == 0 ]; then
+        seed_index=1
+    fi
 done
 
 echo "Cool down for ${COOLDOWN_PERIOD} seconds :)..."
 sleep ${COOLDOWN_PERIOD}
 
-if [ ${NUM_OPENSTACK_SYSTEM} -gt 0 ]; then
-    echo "Exiting without running tests to deploy openstack for testing"
-    exit
-fi
-
 echo "Generating controller variables..."
 for i in `seq 1 ${NUM_ODL_SYSTEM}`
 do
     CONTROLLERIP=ODL_SYSTEM_${i}_IP
     odl_variables=${odl_variables}" -v ${CONTROLLERIP}:${!CONTROLLERIP}"
     echo "Lets's take the karaf thread dump"
-    KARAF_PID=$(ssh ${!CONTROLLERIP} "ps aux | grep 'distribution-karaf' | grep -v grep | tr -s ' ' | cut -f2 -d' '")
+    KARAF_PID=$(ssh ${!CONTROLLERIP} "ps aux | grep ${KARAF_ARTIFACT} | grep -v grep | tr -s ' ' | cut -f2 -d' '")
    ssh ${!CONTROLLERIP} "jstack $KARAF_PID"> ${WORKSPACE}/karaf_${i}_threads_before.log || true
 done
 
+if [ ${NUM_OPENSTACK_SYSTEM} -gt 0 ]; then
+    echo "Exiting without running tests to deploy openstack for testing"
+    exit
+fi
+
 echo "Generating mininet variables..."
 for i in `seq 1 ${NUM_TOOLS_SYSTEM}`
 do
@@ -262,7 +273,7 @@ SUITES=$( egrep -v '(^[[:space:]]*#|^[[:space:]]*$)' testplan.txt | tr '\012' ' 
 
 echo "Starting Robot test suites ${SUITES} ..."
 pybot -N ${TESTPLAN} --removekeywords wuks -c critical -e exclude -v BUNDLEFOLDER:${BUNDLEFOLDER} -v WORKSPACE:/tmp \
--v JAVA_HOME:${JAVA_HOME} -v BUNDLE_URL:${ACTUALBUNDLEURL} -v NEXUSURL_PREFIX:${NEXUSURL_PREFIX} \
+-v JAVA_HOME:${JAVA_HOME} -v BUNDLE_URL:${ACTUAL_BUNDLE_URL} -v NEXUSURL_PREFIX:${NEXUSURL_PREFIX} \
 -v CONTROLLER:${ODL_SYSTEM_IP} -v ODL_SYSTEM_IP:${ODL_SYSTEM_IP} -v ODL_SYSTEM_1_IP:${ODL_SYSTEM_IP} \
 -v CONTROLLER_USER:${USER} -v ODL_SYSTEM_USER:${USER} \
 -v TOOLS_SYSTEM_IP:${TOOLS_SYSTEM_IP} -v TOOLS_SYSTEM_2_IP:${TOOLS_SYSTEM_2_IP} -v TOOLS_SYSTEM_3_IP:${TOOLS_SYSTEM_3_IP} \
@@ -273,11 +284,15 @@ pybot -N ${TESTPLAN} --removekeywords wuks -c critical -e exclude -v BUNDLEFOLDE
 -v MININET_USER:${USER} -v USER_HOME:${HOME} ${TESTOPTIONS} ${SUITES} || true
 # FIXME: Sort (at least -v) options alphabetically.
 
+echo "Examining the files in data/log and checking filesize"
+ssh ${ODL_SYSTEM_IP} "ls -altr /tmp/${BUNDLEFOLDER}/data/log/"
+ssh ${ODL_SYSTEM_IP} "du -hs /tmp/${BUNDLEFOLDER}/data/log/*"
+
 for i in `seq 1 ${NUM_ODL_SYSTEM}`
 do
     CONTROLLERIP=ODL_SYSTEM_${i}_IP
     echo "Lets's take the karaf thread dump again..."
-    KARAF_PID=$(ssh ${!CONTROLLERIP} "ps aux | grep 'distribution-karaf' | grep -v grep | tr -s ' ' | cut -f2 -d' '")
+    KARAF_PID=$(ssh ${!CONTROLLERIP} "ps aux | grep ${KARAF_ARTIFACT} | grep -v grep | tr -s ' ' | cut -f2 -d' '")
     ssh ${!CONTROLLERIP} "jstack $KARAF_PID"> ${WORKSPACE}/karaf_${i}_threads_after.log || true
     echo "Killing ODL"
     set +e # We do not want to create red dot just because something went wrong while fetching logs.
@@ -285,17 +300,26 @@ do
 done
 
 sleep 5
+# FIXME: Unify the copy process between various scripts.
+# TODO: Use rsync.
 for i in `seq 1 ${NUM_ODL_SYSTEM}`
 do
     CONTROLLERIP=ODL_SYSTEM_${i}_IP
     echo "Compressing karaf.log ${i}"
     ssh ${!CONTROLLERIP} gzip --best /tmp/${BUNDLEFOLDER}/data/log/karaf.log
     echo "Fetching compressed karaf.log ${i}"
-    scp "${!CONTROLLERIP}:/tmp/${BUNDLEFOLDER}/data/log/karaf.log.gz" "odl${i}_karaf.log.gz"
+    scp "${!CONTROLLERIP}:/tmp/${BUNDLEFOLDER}/data/log/karaf.log.gz" "odl${i}_karaf.log.gz" && ssh ${!CONTROLLERIP} rm -f "/tmp/${BUNDLEFOLDER}/data/log/karaf.log.gz"
     # TODO: Should we compress the output log file as well?
-    scp "${!CONTROLLERIP}:/tmp/${BUNDLEFOLDER}/data/log/karaf_console.log" "odl${i}_karaf_console.log"
+    scp "${!CONTROLLERIP}:/tmp/${BUNDLEFOLDER}/data/log/karaf_console.log" "odl${i}_karaf_console.log" && ssh ${!CONTROLLERIP} rm -f "/tmp/${BUNDLEFOLDER}/data/log/karaf_console.log"
+    echo "Fetch GC logs"
+    # FIXME: Put member index in filename, instead of directory name.
+    mkdir -p "gclogs-${i}"
+    scp "${!CONTROLLERIP}:/tmp/${BUNDLEFOLDER}/data/log/*.log" "gclogs-${i}/" && ssh ${!CONTROLLERIP} rm -f "/tmp/${BUNDLEFOLDER}/data/log/*.log"
 done
 
+echo "Examine copied files"
+ls -lt
+
 true # perhaps Jenkins is testing last exit code
 
 # vim: ts=4 sw=4 sts=4 et ft=sh :
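
For reference, the per-site index arithmetic and the reworked featuresBoot handling introduced above can be exercised outside Jenkins with the standalone sketch below. It is illustrative only: the ODL_SYSTEM_<n>_IP values are fabricated, ssh/scp calls are replaced by echo, the featuresBoot demo works on a throwaway /tmp/features-demo.cfg file, and NUM_ODL_SYSTEM is assumed to be an exact multiple of NUM_OPENSTACK_SITES (the same assumption the integer division in the patch makes).

#!/bin/bash
# Standalone sketch of the per-site indexing and featuresBoot handling shown
# in the diff above. Not part of the patch; IPs are fabricated and remote
# calls are replaced by echo so it can be run locally.

NUM_ODL_SYSTEM=${NUM_ODL_SYSTEM:-6}
NUM_OPENSTACK_SITES=${NUM_OPENSTACK_SITES:-2}

# Fabricated controller IPs standing in for the Jenkins-provided variables.
for n in `seq 1 ${NUM_ODL_SYSTEM}`; do
    eval "ODL_SYSTEM_${n}_IP=10.0.0.${n}"
done

NUM_ODLS_PER_SITE=$((NUM_ODL_SYSTEM / NUM_OPENSTACK_SITES))

# 1) Per-site node lists: each controller gets its 1-based index within its
#    site plus the list of all controllers belonging to that site.
for i in `seq 1 ${NUM_OPENSTACK_SITES}`; do
    odl_node_list=
    for j in `seq 1 ${NUM_ODLS_PER_SITE}`; do
        odl_ip=ODL_SYSTEM_$(((i - 1) * NUM_ODLS_PER_SITE + j))_IP
        odl_node_list="${odl_node_list} ${!odl_ip}"
    done
    for j in `seq 1 ${NUM_ODLS_PER_SITE}`; do
        odl_ip=ODL_SYSTEM_$(((i - 1) * NUM_ODLS_PER_SITE + j))_IP
        echo "site ${i}, member ${j}: configure ${!odl_ip} with node list${odl_node_list}"
    done
done

# The post-startup loop gets the same per-site numbering by resetting its
# seed index at every site boundary.
seed_index=1
for i in `seq 1 ${NUM_ODL_SYSTEM}`; do
    echo "post-startup on controller ${i} runs with member index $(( seed_index++ ))"
    if [ $(( i % NUM_ODLS_PER_SITE )) == 0 ]; then
        seed_index=1
    fi
done

# 2) The reworked featuresBoot sed prepends ${ACTUALFEATURES} to whatever the
#    distribution already boots instead of overwriting the whole line.
ACTUALFEATURES="odl-integration-all"
echo 'featuresBoot = instance,package,log,ssh,management' > /tmp/features-demo.cfg
sed -ie "s/\(featuresBoot=\|featuresBoot =\)/featuresBoot = ${ACTUALFEATURES},/g" /tmp/features-demo.cfg
cat /tmp/features-demo.cfg
# -> featuresBoot = odl-integration-all, instance,package,log,ssh,management
# (sed -ie also leaves a features-demo.cfge backup behind, matching the
#  upstream invocation.)

Resetting the member index at each site boundary keeps the first argument handed to configure_cluster.sh and post-startup-script.sh local to that site, so every OpenStack site ends up with its own independently numbered ODL cluster.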