---
# TODO: Make all bash constants more readable.
# e.g.: DISTRIBUTION_BRANCH instead of DISTROBRANCH.
+
+- parameter:
+ name: integration-csit-parameters
+ parameters:
+ - opendaylight-infra-parameters:
+ os-cloud: '{os-cloud}'
+ project: 'integration/test'
+ branch: '{branch}'
+ refspec: 'refs/heads/{branch}'
+ artifacts: '{archive-artifacts}'
+ # CSIT
+ - integration-bundle-url:
+ bundle-url: '{bundle-url}'
+ - integration-repo-url:
+ repo-url: '{repo-url}'
+ - integration-test-options:
+ test-options: '{robot-options}'
+ - integration-controller-parameters:
+ controller-debug-map: '{debug-map}'
+ controller-features: '{install-features}'
+ controller-scope: '{controller-scope}'
+ controller-use-features-boot: '{use-features-boot}'
+
+- parameter:
+ name: integration-controller-parameters
+ parameters:
+ - string:
+ name: CONTROLLERSCOPE
+ default: '{controller-scope}'
+ description: 'Feature Only or Feature All test'
+
+ - string:
+ name: CONTROLLERFEATURES
+ default: '{controller-features}'
+ description: 'Features to install in the controller separated by comma'
+
+ - string:
+ name: CONTROLLERDEBUGMAP
+ default: '{controller-debug-map}'
+ description: |
+ Key:Value map of debugs to enable on the controller separated by spaces.
+ The module should not include org.opendaylight.
+ The levels must be one of: ERROR, WARN, INFO, DEBUG, TRACE.
+ (ex. ovsdb.lib:ERROR netvirt.vpnservice:INFO)
+
+ - string:
+ name: USEFEATURESBOOT
+ default: '{controller-use-features-boot}'
+ description: 'Boolean to toggle whether featuresBoot is appended or ignored'
+
+
- parameter:
name: integration-distribution-branch
parameters:
description: 'Distribution GIT branch'
- parameter:
- name: integration-bundleurl
+ name: distribution-karaf-version
+ # Arguments:
+ # karaf-version: Provide, otherwise defaults.yaml value is used.
+ parameters:
+ - string:
+ name: KARAF_VERSION
+ default: '{karaf-version}'
+ description: 'karaf3, karaf4, odl, affects ODL deploy scripts'
+
+- parameter:
+ name: integration-bundle-url
parameters:
- string:
- name: BUNDLEURL
- default: '{bundleurl}'
+ name: BUNDLE_URL
+ default: '{bundle-url}'
description: 'URL to karaf distribution zip'
+- parameter:
+ name: integration-repo-url
+ parameters:
+ - string:
+ name: REPO_URL
+ default: '{repo-url}'
+ description: 'URL to feature repo artifact'
+
- parameter:
name: integration-jdk-version
parameters:
description: 'Parameter to indicate JAVA Version'
- parameter:
- name: integration-controller-scope
+ name: integration-distribution-stream
parameters:
- string:
- name: CONTROLLERSCOPE
- default: '{controller-scope}'
- description: 'Feature Only or Feature All test'
+ name: DISTROSTREAM
+ default: '{stream}'
+ description: 'Distribution stream string, for suites to know which behavior to expect'
- parameter:
- name: integration-controller-features
+ name: integration-script-plan
parameters:
- string:
- name: CONTROLLERFEATURES
- default: '{controller-features}'
- description: 'Features to install in the controller separated by comma'
+ name: SCRIPTPLAN
+ default: '{script-plan}'
+ description: 'Script plan we will run before downloading and un-archiving am ODL distribution'
- parameter:
- name: integration-distribution-stream
+ name: integration-config-plan
parameters:
- string:
- name: DISTROSTREAM
- default: '{stream}'
- description: 'Distribution stream string, for suites to know which behavior to expect'
+ name: CONFIGPLAN
+ default: '{config-plan}'
+ description: 'Config plan we will run after un-archiving and starting an ODL distribution'
- parameter:
name: integration-stream-test-plan
default: '{test-plan}'
description: 'General test plan we will run unless stream-specific one is found'
+- parameter:
+ name: integration-test-suites
+ parameters:
+ - string:
+ name: SUITES
+ default: '{test-suites}'
+ description: |
+ List of space-separated suites. Useful when running specific suites within a testplan.
+ Do not include ${{WORKSPACE}}/test/csit/suites/ in the values:
+ (ex. openstack/connectivity/l2.robot openstack/clustering/ha_l2.robot
+ openstack/tempest/tempest.robot netvirt/elan/elan.robot).
+
- parameter:
name: integration-test-options
parameters:
# in order to prevent Jenkins from starting only a subset and still voting Verified+1.
# Contrary to gerrit-trigger-patch-submitted, this votes +1 also on unstable runs.
# Arguments:
-# server: name of gerrit server to listen to
+# gerrit-server-name: name of gerrit server to listen to
# project: pattern to match triggering projects
# branch: triggering branch name
# files: pattern to match triggering filepaths
name: integration-trigger-patch-submitted
triggers:
- gerrit:
- server-name: '{server}'
+ server-name: '{gerrit-server-name}'
trigger-on:
- patchset-created-event:
exclude-drafts: true
exclude-no-code-change: false
- draft-published-event
- comment-added-contains-event:
- comment-contains-value: recheck
+ comment-contains-value: '^Patch Set[ ]+[0-9]+:([ ]+|[\n]+)(recheck|reverify)$'
override-votes: true
gerrit-build-unstable-verified-value: +1
gerrit-build-unstable-codereview-value: 0
- compare-type: ANT
pattern: '{files}'
+# Macro: integration-csit-trigger
+# Operation: this macro sets a trigger on reverse job list
+# Used by: csit -only- job templates
+
+- trigger:
+ name: integration-csit-trigger
+ triggers:
+ - reverse:
+ jobs: '{jobs}'
+ result: 'success'
+
# Macro: integration-openstack-controller-mininet
# Operation: this macro will spin the controller and mininet vms
# Used by: {project}-csit-* job templates
# Shell Scripts #
#################
-# Macro: integration-infra-stack
-# Operation: Sets environment and then calls opendaylight-infra-stack to spin
-# up csit lab using openstack-heat.
-# Used by: {project}-csit-* job templates
+# Macro: integration-get-apex-addresses
+# Operation: this macro gets the IP addresses of the dynamic vms from apex snapshots
+# Used by: {project}-csit-* apex related job templates
- builder:
- name: integration-infra-stack
+ name: integration-get-apex-addresses
builders:
- - integration-cleanup-workspace
- - shell: |
- #!/bin/bash
- # Setup openstack envirnoment file for use by
- # the opendaylight-infra-stack macro
- cat > $WORKSPACE/opendaylight-infra-environment.yaml << EOF
- parameters:
- vm_0_count: {vm_0_count}
- vm_0_flavor: {vm_0_flavor}
- vm_0_image: {vm_0_image}
- vm_1_count: {vm_1_count}
- vm_1_flavor: {vm_1_flavor}
- vm_1_image: {vm_1_image}
- EOF
- echo "Contents of opendaylight-infra-environment.yaml ..."
- cat $WORKSPACE/opendaylight-infra-environment.yaml
- - opendaylight-infra-stack:
- stack-template: '{stack-template}'
-
-# Macro: integration-infra-stack-3-type
-# Operation: Sets environment and then calls opendaylight-infra-stack to spin
-# up csit lab using openstack-heat.
-# Used by: {project}-csit-* job templates
+ - shell: !include-raw: integration-get-apex-addresses.sh
+
+# Macro: integration-apex-run-tests
+# Operation: this macro just runs the robot suites
+# Used by: {project}-csit-* apex related job templates
- builder:
- name: integration-infra-stack-3-type
+ name: integration-apex-run-tests
builders:
- - integration-cleanup-workspace
- - shell: |
- #!/bin/bash
- # Setup openstack envirnoment file for use by
- # the opendaylight-infra-stack macro
- cat > $WORKSPACE/opendaylight-infra-environment.yaml << EOF
- parameters:
- vm_0_count: {vm_0_count}
- vm_0_flavor: {vm_0_flavor}
- vm_0_image: {vm_0_image}
- vm_1_count: {vm_1_count}
- vm_1_flavor: {vm_1_flavor}
- vm_1_image: {vm_1_image}
- vm_2_count: {vm_2_count}
- vm_2_flavor: {vm_2_flavor}
- vm_2_image: {vm_2_image}
- EOF
- echo "Contents of opendaylight-infra-environment.yaml ..."
- cat $WORKSPACE/opendaylight-infra-environment.yaml
- - opendaylight-infra-stack:
- stack-template: 'csit-3-instance-type.yaml'
+ - shell: !include-raw: integration-apex-run-tests.sh
# Macro: integration-get-slave-addresses
# Operation: this macro gets the IP addresses of the dynamic vms
- builder:
name: integration-get-slave-addresses
builders:
- - shell: !include-raw: include-raw-integration-get-slave-addresses.sh
-
-# Macro: integration-get-bundle-vars
-# Operation: this macro gets all bundle related variables
-# Used by: {project}-csit-3node-* job templates
-
-- builder:
- name: integration-get-bundle-vars
- builders:
- - shell: !include-raw: include-raw-integration-get-bundle-vars.sh
-
-# Macro: integration-distribution-check
-# Operation: this macro deploys the controller with all fetures
-# Used by: {project}-distribution-check-{stream} job template
+ - shell: !include-raw: integration-get-slave-addresses.sh
- builder:
- name: integration-distribution-check
+ name: integration-set-variables
+ # Operation: Construct variables not requiring online detection and export using set_variables.env
+ # Required bash variables:
+ # KARAF_VERSION: 'karaf4' or 'karaf3'. Use distribution-karaf-version macro to set.
builders:
- - shell: !include-raw: include-raw-integration-distribution-check.sh
-
-# Macro: integration-deploy-controller-verify
-# Operation: this macro deploys the controller with all fetures
-# Used by: distribution-deploy-{stream} job template
+ - shell: !include-raw: integration-set-variables.sh
+ - inject:
+ properties-file: set_variables.env
- builder:
- name: integration-deploy-controller-verify
+ name: integration-detect-variables
+ # Operation: Construct variables including online detection and export using detect_variables.env
+ # This calls integration-set-variables internally.
+ # Required bash variables:
+ # KARAF_VERSION: 'karaf4' or 'karaf3'. Use distribution-karaf-version macro to set.
builders:
- - shell: !include-raw: include-raw-integration-deploy-controller-verify.sh
-
-# Macro: integration-deploy-controller-offline
-# Operation: this macro deploys the controller with no external repo configuration
-# Used by: integration-distrbution-offline-{stream} job template
-
-- builder:
- name: integration-deploy-controller-offline
- builders:
- - shell: !include-raw: include-raw-integration-deploy-controller-offline.sh
+ - integration-set-variables
+ - shell: !include-raw: integration-detect-variables.sh
+ - inject:
+ properties-file: detect_variables.env
# Macro: integration-deply-controller-run-test
# Operation: this macro deploys single contoller and runs test
- builder:
name: integration-deploy-controller-run-test
builders:
- - shell: !include-raw: include-raw-integration-deploy-controller-run-test.sh
+ - shell: !include-raw: integration-deploy-controller-run-test.sh
# Macro: integration-configure-clustering
# Operation: this macro configures the clustering
- builder:
name: integration-configure-clustering
builders:
- - shell: !include-raw: include-raw-integration-configure-clustering.sh
+ - shell: !include-raw: integration-configure-clustering.sh
# Macro: integration-start-cluster-run-test
# Operation: this macro starts the 3-node cluster and runs test
- builder:
name: integration-start-cluster-run-test
builders:
- - shell: !include-raw: include-raw-integration-start-cluster-run-test.sh
+ - shell: !include-raw: integration-start-cluster-run-test.sh
# Macro: integration-install-robotframework
# Operation: Installs robotframework using pip to a virtualenv
- builder:
name: integration-install-robotframework
builders:
- - shell: !include-raw: include-raw-integration-install-robotframework.sh
+ - shell: !include-raw: integration-install-robotframework.sh
# Macro: integration-cleanup-tmp
# Operation: Cleans up temporary files created by build
- builder:
name: integration-cleanup-tmp
builders:
- - shell: !include-raw: include-raw-integration-cleanup-tmp.sh
+ - shell: !include-raw: integration-cleanup-tmp.sh
# Macro: integration-multipatch-builder
# Operation: checks out multiple patches and builds custom distribution
- builder:
name: integration-multipatch-builder
builders:
- - shell: !include-raw: include-raw-integration-multipatch-distribution-test.sh
+ - shell: !include-raw: multipatch-distribution.sh
# Macro: integration-cleanup-workspace
# Operation: Cleans up files possibly left there by the previous build
- builder:
name: integration-cleanup-workspace
builders:
- - shell: !include-raw: include-raw-integration-cleanup-workspace.sh
+ - shell: !include-raw: integration-cleanup-workspace.sh
# Macro: integration-rebase-patch
# Operation: For cloned project, rebase checked-out Gerrit patch onto $DISTROBRANCH.
- builder:
name: integration-rebase-gerrit-patch
builders:
- - shell: !include-raw: include-raw-integration-rebase-gerrit-patch.sh
+ - shell: !include-raw: integration-rebase-gerrit-patch.sh
+
+# Macro: integration-install-common-functions
+# Operation: Copy the common-functions.sh script to csit nodes
+# Used by: {project}-csit-* job templates
+- builder:
+ name: integration-install-common-functions
+ builders:
+ - shell: !include-raw:
+ - copy-common-functions.sh
+ - common-functions.sh
- builder:
name: integration-deploy-openstack-run-test
builders:
- - shell: !include-raw: include-raw-integration-deploy-openstack-run-test.sh
-
-# Macro: integration-list-jobs
-# Operation: queries Jenkins jobs from jenkins.opendaylight.org and
-# writes a list of the jobs corresponding to the search terms to a file.
-# Used by: builder-merge job template
-#
-# This script requires the following JJB variables to be passed in:
-# {search_string} Job type to act as a primary filter (e.g. csit)
-# {blacklist} List of sub-projects to filter out (e.g. longevity)
-# {stream} Development release name (e.g. boron)
-# {jobs-filename} Filename to write list into (e.g. carbon-jobs.lst)
-# NOTE: Requires xmlstarlet installed on node.
+ - shell: !include-raw:
+ - integration-deploy-openstack-run-test.sh
- builder:
- name: integration-list-jobs
+ name: integration-apex-image-manager
builders:
- - shell: !include-raw: include-raw-integration-list-jobs.sh
-
-# Macro: integration-replace-block-text
-# Operation: using starting and ending regex to
-# replace a block of text in a file with the text in
-# another file. Regex should be basic and work with sed
-# Used by: builder-merge job template
-#
-# This script requires the following JJB variables to be passed in:
-# {starting-regex}
-# {ending-regex}
-# {file-with-changes-to-insert}
-# {file-to-change}
+ - shell: !include-raw:
+ - ../opendaylight-infra-apex-image-management.sh
- builder:
- name: integration-replace-block-text
+ name: integration-compare-distributions
+  # Operation: will compare an already created distribution (probably last
+ # published to nexus) to the current distribution being created.
+ # The comparison is done to find the list of patches that are new to the
+ # current distribution.
+ # Required bash variables:
+ # KARAF_ARTIFACT: 'karaf' or 'distribution-karaf'. Use integration-set-variables.
builders:
- - shell: !include-raw: include-raw-integration-replace-block-text.sh
-
-# Macro: integration-compare-distributions
-# Operation: will compare an already created distribtion (probably last
-# published to nexus) to the current distribution being created.
-# The comparison is done to find the list of patches that are new to the
-# current distribtion.
-# Used by: builder-merge job template
-#
-# This script requires the bundle vars that are created by the
-# get-bundle-vars macro script
-
+ - shell: !include-raw: integration-compare-distributions.sh
- builder:
- name: integration-compare-distributions
+ name: integration-upload-distribution
+ # Upload distribution karaf artifact to Nexus SNAPSHOT repo for testing.
+ # Arguments:
+ # {dist-pom} path to pom file containing the version
+ # Required bash variables:
+ # KARAF_VERSION: 'karaf4' or 'karaf3'.
+ # Use distribution-karaf-version macro to customize.
builders:
- - shell: !include-raw: include-raw-integration-compare-distributions.sh
+ - config-file-provider:
+ files:
+ - file-id: global-settings
+ variable: GLOBAL_SETTINGS_FILE
+ - file-id: integration-settings
+ variable: SETTINGS_FILE
+ - integration-set-variables
+ - inject:
+ properties-content: BUNDLE_POM={dist-pom}
+ - shell: !include-raw-escape: integration-upload-distribution.sh
+ - inject:
+ properties-file: integration-upload-distribution.env
##############
# Publishers #
##############
-# Macro: integration-robot
-# Operation: this macro publishes robot results
-# Used by: {project}-csit-* job templates
-
- publisher:
- name: integration-robot
+ name: integration-csit-publish
publishers:
- robot:
output-path: ''
- other-files: ''
- unstable-threshold: '{unstable-if}'
- pass-threshold: '{pass-if}'
+ other-files: '{obj:robot_other_files}'
+ unstable-threshold: '{robot-unstable-threshold}'
+ pass-threshold: '{robot-pass-threshold}'
only-critical: false
+ - integration-csit-archive-build
+ - integration-csit-run-postscript
+ - releng-openstack-stack-delete
- publisher:
- name: integration-robot-tempest
+ name: integration-csit-archive-build
publishers:
- - robot:
- output-path: ''
- other-files:
- - tempest_results.html
- unstable-threshold: '{unstable-if}'
- pass-threshold: '{pass-if}'
- only-critical: false
+ - postbuildscript:
+ builders:
+ - role: BOTH
+ build-on:
+ - ABORTED
+ - FAILURE
+ - NOT_BUILT
+ - SUCCESS
+ - UNSTABLE
+ build-steps:
+ - shell: |
+ #!/bin/bash
+ echo "Archiving csit artifacts"
+ cd $WORKSPACE
+ mkdir -p ./archives
+ for i in `seq 1 ${NUM_ODL_SYSTEM}`; do
+ NODE_FOLDER="./archives/odl_${i}"
+ mkdir -p ${NODE_FOLDER}
+ mv odl${i}_* ${NODE_FOLDER} || true
+ mv karaf_${i}_*_threads* ${NODE_FOLDER} || true
+ mv *_${i}.png ${NODE_FOLDER} || true
+ mv /tmp/odl${i}_* ${NODE_FOLDER} || true
+ done
+ curl --output robot-plugin.zip "$BUILD_URL/robot/report/*zip*/robot-plugin.zip"
+ unzip -d ./archives robot-plugin.zip
+ mv *.log *.log.gz *.csv *.png ./archives || true # Don't fail if file missing
+ # TODO: Can the following line ever fail?
+ find . -type f -name *.hprof -print0 \
+ | tar -cvf - --null -T - | xz --threads=0 > ./archives/hprof.tar.xz
+ # TODO: Tweak the compression level if better ratio (or speed) is needed.
+ mark-unstable-if-failed: true
+
- publisher:
- name: integration-csit-archive-build
+ name: integration-csit-run-postscript
+ publishers:
+ - postbuildscript:
+ builders:
+ - role: BOTH
+ build-on:
+ - ABORTED
+ - FAILURE
+ - NOT_BUILT
+ - SUCCESS
+ - UNSTABLE
+ build-steps:
+ - config-file-provider:
+ files:
+ - file-id: 'odl-elastic-cloud'
+ target: '${HOME}/.netrc'
+ - shell: |
+ #!/bin/bash
+ #The following script runs any configurable postplan stored in test/csit/postplans.
+ if [ -f "${WORKSPACE}/test/csit/postplans/${TESTPLAN}" ]; then
+ echo "postplan exists!!!"
+ echo "Changing the postplan path..."
+ script_name=${WORKSPACE}/test/csit/postplans/${TESTPLAN}
+ cat ${script_name} | sed "s:integration:${WORKSPACE}:" > postplan.txt
+ cat postplan.txt
+ egrep -v '(^[[:space:]]*#|^[[:space:]]*$)' postplan.txt | while read -r line ; do
+ echo "Executing ${line}..."
+ ${line}
+ done
+ fi
+ rm ${HOME}/.netrc
+ mark-unstable-if-failed: true
+
+- publisher:
+ name: integration-csit-collect-netvirt-logs
publishers:
- postbuildscript:
builders:
- - shell: |
- #!/bin/bash
- mkdir -p $WORKSPACE/archives
- curl --output robot-plugin.zip "$BUILD_URL/robot/report/*zip*/robot-plugin.zip"
- unzip -d $WORKSPACE/archives robot-plugin.zip
- mv *log* *.log *.log.gz *.csv *.png $WORKSPACE/archives || true # Don't fail if file missing
- script-only-if-succeeded: 'False'
- script-only-if-failed: 'False'
- mark-unstable-if-failed: 'True'
+ - role: BOTH
+ build-on:
+ - ABORTED
+ - FAILURE
+ - NOT_BUILT
+ - SUCCESS
+ - UNSTABLE
+ build-steps:
+ - inject:
+ properties-file: 'slave_addresses.txt'
+ - shell: |
+ #!/bin/bash
+ source /tmp/common-functions.sh ${BUNDLEFOLDER}
+ collect_netvirt_logs
+ mark-unstable-if-failed: true