--- /dev/null
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# User-friendly check for sphinx-build: abort at parse time with a clear
+# message if the executable is missing.
+# NOTE(review): `which` is not POSIX; `command -v` is the portable spelling —
+# kept as-is here to stay close to the sphinx-quickstart template.
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " applehelp to make an Apple Help Book"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " epub3 to make an epub3"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " xml to make Docutils-native XML files"
+ @echo " pseudoxml to make pseudoxml-XML files for display purposes"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+ @echo " coverage to run coverage check of the documentation (if enabled)"
+
+.PHONY: clean
+# Remove everything under $(BUILDDIR): cached doctrees and all rendered output.
+clean:
+	rm -rf $(BUILDDIR)/*
+
+.PHONY: html
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+.PHONY: dirhtml
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+.PHONY: singlehtml
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+.PHONY: pickle
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+.PHONY: json
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+.PHONY: htmlhelp
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+.PHONY: qthelp
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/OpenDaylightRelEngBuilder.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/OpenDaylightRelEngBuilder.qhc"
+
+.PHONY: applehelp
+applehelp:
+ $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
+ @echo
+ @echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
+ @echo "N.B. You won't be able to view it unless you put it in" \
+ "~/Library/Documentation/Help or install it in your application" \
+ "bundle."
+
+.PHONY: devhelp
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/OpenDaylightRelEngBuilder"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/OpenDaylightRelEngBuilder"
+ @echo "# devhelp"
+
+.PHONY: epub
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+.PHONY: epub3
+epub3:
+ $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
+ @echo
+ @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
+
+.PHONY: latex
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+.PHONY: latexpdf
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+.PHONY: latexpdfja
+# Build LaTeX sources, then produce PDFs via platex/dvipdfmx (Japanese docs).
+latexpdfja:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through platex and dvipdfmx..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+	@echo "platex/dvipdfmx finished; the PDF files are in $(BUILDDIR)/latex."
+
+.PHONY: text
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+.PHONY: man
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+.PHONY: texinfo
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+.PHONY: info
+# Build Texinfo sources, then run makeinfo on them.  Use $(MAKE) rather than
+# a bare `make` so -j/-n flags and the jobserver propagate to the sub-make.
+info:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo "Running Texinfo files through makeinfo..."
+	$(MAKE) -C $(BUILDDIR)/texinfo info
+	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+.PHONY: gettext
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+.PHONY: changes
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+.PHONY: linkcheck
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+.PHONY: doctest
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
+
+.PHONY: coverage
+coverage:
+ $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
+ @echo "Testing of coverage in the sources finished, look at the " \
+ "results in $(BUILDDIR)/coverage/python.txt."
+
+.PHONY: xml
+xml:
+ $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+ @echo
+ @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+.PHONY: pseudoxml
+pseudoxml:
+ $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+ @echo
+ @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
--- /dev/null
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# OpenDaylight RelEng/Builder documentation build configuration file, created by
+# sphinx-quickstart on Thu May 5 10:17:10 2016.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.insert(0, os.path.abspath('.'))
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = []
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+# source_suffix = ['.rst', '.md']
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = 'OpenDaylight RelEng/Builder'
+copyright = '2016, Andrew Grimberg, Thanh Ha, Daniel Farell'
+author = 'Andrew Grimberg, Thanh Ha, Daniel Farell'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '1.0.0'
+# The full version, including alpha/beta/rc tags.
+release = '1.0.0'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This patterns also effect to html_static_path and html_extra_path
+exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+#keep_warnings = False
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = False
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'alabaster'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents.
+# "<project> v<release> documentation" by default.
+#html_title = 'OpenDaylight RelEng/Builder v1.0.0'
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (relative to this directory) to use as a favicon of
+# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+#html_extra_path = []
+
+# If not None, a 'Last updated on:' timestamp is inserted at every page
+# bottom, using the given strftime format.
+# The empty string is equivalent to '%b %d, %Y'.
+#html_last_updated_fmt = None
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages:
+# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
+# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
+#html_search_language = 'en'
+
+# A dictionary with options for the search language support, empty by default.
+# 'ja' uses this config value.
+# 'zh' user can custom change `jieba` dictionary path.
+#html_search_options = {'type': 'default'}
+
+# The name of a javascript file (relative to the configuration directory) that
+# implements a search results scorer. If empty, the default will be used.
+#html_search_scorer = 'scorer.js'
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'OpenDaylightRelEngBuilderdoc'
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+
+# Latex figure (float) alignment
+#'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ (master_doc, 'OpenDaylightRelEngBuilder.tex', 'OpenDaylight RelEng/Builder Documentation',
+ 'Andrew Grimberg, Thanh Ha, Daniel Farell', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ (master_doc, 'opendaylightrelengbuilder', 'OpenDaylight RelEng/Builder Documentation',
+ [author], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ (master_doc, 'OpenDaylightRelEngBuilder', 'OpenDaylight RelEng/Builder Documentation',
+ author, 'OpenDaylightRelEngBuilder', 'One line description of project.',
+ 'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+#texinfo_no_detailmenu = False
--- /dev/null
+.. OpenDaylight RelEng/Builder documentation master file, created by
+ sphinx-quickstart on Thu May 5 10:17:10 2016.
+ You can adapt this file completely to your liking, but it should at least
+ contain the root `toctree` directive.
+
+Infrastructure Guide
+====================
+
+This guide provides details into OpenDaylight Infrastructure and services.
+
+Contents:
+
+.. toctree::
+ :maxdepth: 2
+
+ jenkins
+ release-workflow
--- /dev/null
+Jenkins
+=======
+
+The `Release Engineering Project <releng-wiki_>`_ consolidates the Jenkins jobs from
+project-specific VMs to a single Jenkins server. Each OpenDaylight project
+has a tab for their jobs on the `jenkins-master`_. The system utilizes
+`Jenkins Job Builder <jjb-docs_>`_ for the creation and management of the
+Jenkins jobs.
+
+Sections:
+
+.. contents::
+ :depth: 3
+ :local:
+
+Jenkins Master
+--------------
+
+The `jenkins-master`_ is the home for all project's Jenkins jobs. All
+maintenance and configuration of these jobs must be done via JJB through the
+`releng-builder-repo`_. Project contributors can no longer edit the Jenkins jobs
+directly on the server.
+
+Build Slaves
+------------
+
+The Jenkins jobs are run on build slaves (executors) which are created on an
+as-needed basis. If no idle build slaves are available a new VM is brought
+up. This process can take up to 2 minutes. Once the build slave has finished a
+job, it will remain online for 45 minutes before shutting down. Subsequent
+jobs will use an idle build slave if available.
+
+Our Jenkins master supports many types of dynamic build slaves. If you are
+creating custom jobs then you will need to have an idea of what type of slaves
+are available. The following are the current slave types and descriptions.
+Slave Template Names are needed for jobs that take advantage of multiple
+slaves as they must be specifically called out by template name instead of
+label.
+
+Adding New Components to the Slaves
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If your project needs something added to one of the slaves used during build
+and test you can help us get things added faster by doing one of the following:
+
+* Submit a patch to RelEng/Builder for the `spinup-scripts`_ that
+ configures your new piece of software.
+* Submit a patch to RelEng/Builder for the Vagrant template's bootstrap.sh in
+ the `vagrant-definitions`_ directory that configures your new piece of
+ software.
+
+Going the first route will be faster in the short term as we can inspect the
+changes and make test modifications in the sandbox to verify that it works.
+
+The second route, however, is better for the community as a whole as it will
+allow others that utilize our Vagrant setups to replicate our systems more
+closely. It is, however, more time consuming as an image snapshot needs to be
+created based on the updated Vagrant definition before it can be attached to
+the sandbox for validation testing.
+
+In either case, the changes must be validated in the sandbox with tests to
+make sure that we don't break current jobs and that the new software features
+are operating as intended. Once this is done the changes will be merged and
+the updates applied to the RelEng Jenkins production silo.
+
+Please note that the combination of a Vagrant slave snapshot and a Jenkins
+spinup script is what defines a given slave. For instance, a slave may be
+defined by the `vagrant-basic-java-node`_ Vagrant definition
+and the `spinup-scripts-controller.sh`_ Jenkins spinup script
+(as the dynamic\_controller slave is). The pair provides the full definition of
+the realized slave. Jenkins starts a slave using the last-spun Vagrant snapshot
+for the specified definition. Once the base Vagrant instance is online Jenkins
+checks out the RelEng/Builder repo on it and executes two scripts. The first is
+`spinup-scripts-basic_settings.sh`_, which is a baseline for all of the slaves.
+The second is
+the specialized spinup script, which handles any system updates, new software
+installs or extra environment tweaks that don't make sense in a snapshot. After
+all of these scripts have executed Jenkins will finally attach the slave as an
+actual slave and start handling jobs on it.
+
+Pool: Rackspace - Docker
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. raw:: html
+
+ <table border="1">
+ <tr>
+ <td><b>Jenkins Label</b><br/> dynamic_docker</td>
+ <td><b>Slave Template name</b><br/> rk-f20-docker</td>
+ <td><b>Vagrant Definition</b><br/> releng/builder/vagrant/ovsdb-docker</td>
+ <td><b>Spinup Script</b><br/> releng/builder/jenkins-scripts/docker.sh</td>
+ </tr>
+ <tr>
+ <td colspan="4">
+ A Fedora 20 system that is configured with OpenJDK 1.7 (aka Java7) and
+ Docker. This system was originally custom built for the test needs of
+ the OVSDB project but other projects have expressed interest in using
+ it.
+ </td>
+ </tr>
+ </table>
+
+Pool: Rackspace DFW
+^^^^^^^^^^^^^^^^^^^
+
+.. raw:: html
+
+ <table border="1">
+ <tr>
+ <td><b>Jenkins Label</b><br/> dynamic_verify</td>
+ <td><b>Slave Template name</b><br/> rk-c-el65-build</td>
+ <td><b>Vagrant Definition</b><br/> releng/builder/vagrant/basic-builder</td>
+ <td><b>Spinup Script</b><br/> releng/builder/jenkins-scripts/builder.sh</td>
+ </tr>
+ <tr>
+ <td colspan="4">
+ A CentOS 6 build slave. This system has OpenJDK 1.7 (Java7) and OpenJDK
+ 1.8 (Java8) installed on it along with all the other components and
+ libraries needed for building any current OpenDaylight project. This is
+ the label that is used for all basic -verify and -daily- builds for
+ projects.
+ </td>
+ </tr>
+ </table>
+
+ <table border="1">
+ <tr>
+ <td><b>Jenkins Label</b><br/> dynamic_merge</td>
+ <td><b>Slave Template name</b><br/> rk-c-el65-build</td>
+ <td><b>Vagrant Definition</b><br/> releng/builder/vagrant/basic-builder</td>
+ <td><b>Spinup Script</b><br/> releng/builder/jenkins-scripts/builder.sh</td>
+ </tr>
+ <tr>
+ <td colspan="4">
+ See dynamic_verify (same image on the back side). This is the label that
+ is used for all basic -merge and -integration- builds for projects.
+ </td>
+ </tr>
+ </table>
+
+Pool: Rackspace DFW - Devstack
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. raw:: html
+
+ <table border="1">
+ <tr>
+ <td><b>Jenkins Label</b><br/> dynamic_devstack</td>
+ <td><b>Slave Template name</b><br/> rk-c7-devstack</td>
+ <td><b>Vagrant Definition</b><br/> releng/builder/vagrant/ovsdb-devstack</td>
+ <td><b>Spinup Script</b><br/> releng/builder/jenkins-scripts/devstack.sh</td>
+ </tr>
+ <tr>
+ <td colspan="4">
+ A CentOS 7 system purpose built for doing OpenStack testing using
+ DevStack. This slave is primarily targeted at the needs of the OVSDB
+ project. It has OpenJDK 1.7 (aka Java7) and other basic DevStack related
+ bits installed.
+ </td>
+ </tr>
+ </table>
+
+Pool: Rackspace DFW - Integration
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. raw:: html
+
+ <table border="1">
+ <tr>
+ <td><b>Jenkins Label</b><br/> dynamic_robot</td>
+ <td><b>Slave Template name</b><br/> rk-c-el6-robot</td>
+ <td><b>Vagrant Definition</b><br/> releng/builder/vagrant/integration-robotframework</td>
+ <td><b>Spinup Script</b><br/> releng/builder/jenkins-scripts/robot.sh</td>
+ </tr>
+ <tr>
+ <td colspan="4">
+ A CentOS 6 slave that is configured with OpenJDK 1.7 (Java7) and all the
+ current packages used by the integration project for doing robot driven
+ jobs. If you are executing robot framework jobs then your job should be
+ using this as the slave that you are tied to. This image does not
+ contain the needed libraries for building components of OpenDaylight,
+ only for executing robot tests.
+ </td>
+ </tr>
+ </table>
+
+Pool: Rackspace DFW - Integration Dynamic Lab
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. raw:: html
+
+ <table border="1">
+ <tr>
+ <td><b>Jenkins Label</b><br/> dynamic_controller</td>
+ <td><b>Slave Template name</b><br/> rk-c-el6-java</td>
+ <td><b>Vagrant Definition</b><br/> releng/builder/vagrant/basic-java-node</td>
+ <td><b>Spinup Script</b><br/> releng/builder/jenkins-scripts/controller.sh</td>
+ </tr>
+ <tr>
+ <td colspan="4">
+ A CentOS 6 slave that has the basic OpenJDK 1.7 (Java7) installed and is
+ capable of running the controller, not building.
+ </td>
+ </tr>
+ </table>
+
+ <table border="1">
+ <tr>
+ <td><b>Jenkins Label</b><br/> dynamic_java</td>
+ <td><b>Slave Template name</b><br/> rk-c-el6-java</td>
+ <td><b>Vagrant Definition</b><br/> releng/builder/vagrant/basic-java-node</td>
+ <td><b>Spinup Script</b><br/> releng/builder/jenkins-scripts/controller.sh</td>
+ </tr>
+ <tr>
+ <td colspan="4">
+ See dynamic_controller as it is currently the same image.
+ </td>
+ </tr>
+ </table>
+
+ <table border="1">
+ <tr>
+ <td><b>Jenkins Label</b><br/> dynamic_mininet</td>
+ <td><b>Slave Template name</b><br/> rk-c-el6-mininet</td>
+ <td><b>Vagrant Definition</b><br/> releng/builder/vagrant/basic-mininet-node</td>
+ <td><b>Spinup Script</b><br/> releng/builder/jenkins-scripts/mininet.sh</td>
+ </tr>
+ <tr>
+ <td colspan="4">
+ A CentOS 6 image that has mininet, openvswitch v2.0.x, netopeer and
+ PostgreSQL 9.3 installed. This system is targeted at playing the role of
+ a mininet system for integration tests. Netopeer is installed as it is
+ needed for various tests by Integration. PostgreSQL 9.3 is installed as
+ the system is also capable of being used as a VTN project controller and
+ VTN requires PostgreSQL 9.3.
+ </td>
+ </tr>
+ </table>
+
+ <table border="1">
+ <tr>
+ <td><b>Jenkins Label</b><br/> dynamic_mininet_fedora</td>
+ <td><b>Slave Template name</b><br/> rk-f21-mininet</td>
+ <td><b>Vagrant Definition</b><br/> releng/builder/vagrant/basic-mininet-fedora-node</td>
+ <td><b>Spinup Script</b><br/> releng/builder/jenkins-scripts/mininet-fedora.sh</td>
+ </tr>
+ <tr>
+ <td colspan="4">
+ Basic Fedora 21 system with ovs v2.3.x and mininet 2.2.1
+ </td>
+ </tr>
+ </table>
+
+ <table border="1">
+ <tr>
+ <td><b>Jenkins Label</b><br/> ubuntu_mininet</td>
+ <td><b>Slave Template name</b><br/> ubuntu-trusty-mininet</td>
+ <td><b>Vagrant Definition</b><br/> releng/builder/vagrant/ubuntu-mininet</td>
+ <td><b>Spinup Script</b><br/> releng/builder/jenkins-scripts/mininet-ubuntu.sh</td>
+ </tr>
+ <tr>
+ <td colspan="4">
+ Basic Ubuntu system with ovs 2.0.2 and mininet 2.1.0
+ </td>
+ </tr>
+ </table>
+
+ <table border="1">
+ <tr>
+ <td><b>Jenkins Label</b><br/> ubuntu_mininet_ovs_23</td>
+ <td><b>Slave Template name</b><br/> ubuntu-trusty-mininet-ovs-23</td>
+ <td><b>Vagrant Definition</b><br/> releng/builder/vagrant/ubuntu-mininet-ovs-23</td>
+ <td><b>Spinup Script</b><br/> releng/builder/jenkins-scripts/mininet-ubuntu.sh</td>
+ </tr>
+ <tr>
+ <td colspan="4">
+ Basic Ubuntu system with ovs 2.3 and mininet 2.2.1
+ </td>
+ </tr>
+ </table>
+
+Pool: Rackspace DFW - Matrix
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. raw:: html
+
+ <table border="1">
+ <tr>
+ <td><b>Jenkins Label</b><br/> matrix_master</td>
+ <td><b>Slave Template name</b><br/> rk-c-el6-matrix</td>
+ <td><b>Vagrant Definition</b><br/> releng/builder/vagrant/basic-java-node</td>
+ <td><b>Spinup Script</b><br/> releng/builder/jenkins-scripts/matrix.sh</td>
+ </tr>
+ <tr>
+ <td colspan="4">
+ This is a very minimal system that is designed to spin up with 2 build
+ instances on it. The purpose is to have a location that is not the
+ Jenkins master itself for jobs that are executing matrix operations
+ since they need a director location. This image should not be used for
+ anything but tying matrix jobs before the matrix defined label ties.
+ </td>
+ </tr>
+ </table>
+
+Creating Jenkins Jobs
+---------------------
+
+Jenkins Job Builder takes simple descriptions of Jenkins jobs in YAML format
+and uses them to configure Jenkins.
+
+* `Jenkins Job Builder (JJB) documentation <jjb-docs_>`_
+* `RelEng/Builder Gerrit <releng-builder-gerrit_>`_
+* `RelEng/Builder Git repository <releng-builder-repo_>`_
+
+Getting Jenkins Job Builder
+---------------------------
+
+OpenDaylight uses Jenkins Job Builder to translate our in-repo YAML job
+configuration into job descriptions suitable for consumption by Jenkins.
+When testing new Jenkins Jobs in the `Jenkins Sandbox`_, you'll
+need to use the `jenkins-jobs` executable to translate a set of jobs into
+their XML descriptions and upload them to the sandbox Jenkins server.
+
+We document `installing <Installing Jenkins Job Builder_>`_ `jenkins-jobs`
+below. We also provide
+a `pre-built Docker image <jjb-docker_>`_ with `jenkins-jobs` already installed.
+
+Installing Jenkins Job Builder
+------------------------------
+
+For users who aren't already experienced with Docker or otherwise don't want
+to use our `pre-built JJB Docker image <jjb-docker_>`_, installing JJB into a
+virtual environment is an equally good option.
+
+We recommend using `pip <Installing JJB using pip_>`_ to assist with JJB
+installs, but we
+also document `installing from a git repository manually
+<Installing JJB Manually_>`_.
+For both, we recommend using Python `Virtual Environments`_
+to isolate JJB and its dependencies.
+
+The `builder/jjb/requirements.txt <odl-jjb-requirements.txt_>`_ file contains the currently
+recommended JJB version. Because JJB is fairly unstable, it may be necessary
+to debug things by installing different versions. This is documented for both
+`pip-assisted <Installing JJB using pip_>`_ and `manual
+<Installing JJB Manually_>`_ installs.
+
+Virtual Environments
+--------------------
+
+For both `pip-assisted <Installing JJB using pip_>`_ and `manual
+<Installing JJB Manually_>`_ JJB
+installs, we recommend using `Python Virtual Environments <python-virtualenv_>`_
+to manage JJB and its
+Python dependencies. The `python-virtualenvwrapper`_ tool can help you do so.
+
+There are good docs for installing `python-virtualenvwrapper`_. On Linux systems
+with pip (typical), they amount to:
+
+.. code-block:: bash
+
+ sudo pip install virtualenvwrapper
+
+A virtual environment is simply a directory that you install Python programs
+into and then append to the front of your path, causing those copies to be
+found before any system-wide versions.
+
+Create a new virtual environment for JJB.
+
+.. code-block:: bash
+
+ # Virtualenvwrapper uses this dir for virtual environments
+ $ echo $WORKON_HOME
+ /home/daniel/.virtualenvs
+ # Make a new virtual environment
+ $ mkvirtualenv jjb
+ # A new venv dir was created
+ (jjb)$ ls -rc $WORKON_HOME | tail -n 1
+ jjb
+ # The new venv was added to the front of this shell's path
+ (jjb)$ echo $PATH
+ /home/daniel/.virtualenvs/jjb/bin:<my normal path>
+ # Software installed to venv, like pip, is found before system-wide copies
+ (jjb)$ command -v pip
+ /home/daniel/.virtualenvs/jjb/bin/pip
+
+With your virtual environment active, you should install JJB. Your install will
+be isolated to that virtual environment's directory and only visible when the
+virtual environment is active.
+
+You can easily leave and return to your venv. Make sure you activate it before
+each use of JJB.
+
+.. code-block:: bash
+
+ (jjb)$ deactivate
+ $ command -v jenkins-jobs
+ # No jenkins-jobs executable found
+ $ workon jjb
+ (jjb)$ command -v jenkins-jobs
+ $WORKON_HOME/jjb/bin/jenkins-jobs
+
+Installing JJB using pip
+------------------------
+
+The recommended way to install JJB is via pip.
+
+First, clone the latest version of the `releng-builder-repo`_.
+
+.. code-block:: bash
+
+ $ git clone https://git.opendaylight.org/gerrit/p/releng/builder.git
+
+Before actually installing JJB and its dependencies, make sure you've `created
+and activated <Virtual Environments_>`_ a virtual environment for JJB.
+
+.. code-block:: bash
+
+ $ mkvirtualenv jjb
+
+The recommended version of JJB to install is the version specified in the
+`builder/jjb/requirements.txt <odl-jjb-requirements.txt_>`_ file.
+
+.. code-block:: bash
+
+ # From the root of the releng/builder repo
+ (jjb)$ pip install -r jjb/requirements.txt
+
+To validate that JJB was successfully installed you can run this command:
+
+.. code-block:: bash
+
+ (jjb)$ jenkins-jobs --version
+
+To change the version of JJB specified by `builder/jjb/requirements.txt
+<odl-jjb-requirements.txt_>`_
+to install from the latest commit to the master branch of JJB's git repository:
+
+.. code-block:: bash
+
+ $ cat jjb/requirements.txt
+ -e git+https://git.openstack.org/openstack-infra/jenkins-job-builder#egg=jenkins-job-builder
+
+To install from a tag, like 1.4.0:
+
+.. code-block:: bash
+
+ $ cat jjb/requirements.txt
+ -e git+https://git.openstack.org/openstack-infra/jenkins-job-builder@1.4.0#egg=jenkins-job-builder
+
+Installing JJB Manually
+-----------------------
+
+This section documents installing JJB from its manually cloned repository.
+
+Note that `installing via pip <Installing JJB using pip_>`_ is typically simpler.
+
+Checkout the version of JJB's source you'd like to build.
+
+For example, using master:
+
+.. code-block:: bash
+
+ $ git clone https://git.openstack.org/openstack-infra/jenkins-job-builder
+
+Using a tag, like 1.4.0:
+
+.. code-block:: bash
+
+ $ git clone https://git.openstack.org/openstack-infra/jenkins-job-builder
+ $ cd jenkins-job-builder
+ $ git checkout tags/1.4.0
+
+Before actually installing JJB and its dependencies, make sure you've `created
+and activated <Virtual Environments_>`_ a virtual environment for JJB.
+
+.. code-block:: bash
+
+ $ mkvirtualenv jjb
+
+You can then use JJB's `requirements.txt <jjb-requirements.txt_>`_ file to
+install its
+dependencies. Note that we're not using `sudo` to install as root, since we want
+to make use of the venv we've configured for our current user.
+
+.. code-block:: bash
+
+ # In the cloned JJB repo, with the desired version of the code checked out
+ (jjb)$ pip install -r requirements.txt
+
+Then install JJB from the repo with:
+
+.. code-block:: bash
+
+ (jjb)$ pip install .
+
+To validate that JJB was successfully installed you can run this command:
+
+.. code-block:: bash
+
+ (jjb)$ jenkins-jobs --version
+
+JJB Docker Image
+----------------
+
+`Docker <docker-docs_>`_ is an open platform used to create virtualized Linux containers
+for shipping self-contained applications. Docker leverages LinuX Containers
+\(LXC\) running on the same operating system as the host machine, whereas a
+traditional VM runs an operating system over the host.
+
+.. code-block:: bash
+
+ docker pull zxiiro/jjb-docker
+ docker run --rm -v ${PWD}:/jjb jjb-docker
+
+This `Dockerfile <jjb-dockerfile_>`_ created the
+`zxiiro/jjb-docker image <jjb-docker_>`_.
+By default it will run:
+
+.. code-block:: bash
+
+ jenkins-jobs test .
+
+You'll need to use the `-v/--volume=[]` parameter to mount a directory
+containing your YAML files, as well as a configured `jenkins.ini` file if you
+wish to upload your jobs to the `Jenkins Sandbox`_.
+
+Jenkins Job Templates
+---------------------
+
+The OpenDaylight `RelEng/Builder <releng-builder-wiki_>`_ project provides
+`jjb-templates`_ that can be used to define basic jobs.
+
+Verify Job Template
+^^^^^^^^^^^^^^^^^^^
+
+Trigger: **recheck**
+
+The Verify job template creates a Gerrit Trigger job that will trigger when a
+new patch is submitted to Gerrit.
+
+Verify jobs can be retriggered in Gerrit by leaving a comment that says
+**recheck**.
+
+Merge Job Template
+^^^^^^^^^^^^^^^^^^
+
+Trigger: **remerge**
+
+The Merge job template is similar to the Verify Job Template except it will
+trigger once a Gerrit patch is merged into the repo. It also automatically
+runs the Maven goals **source:jar** and **javadoc:jar**.
+
+This job will upload artifacts to `OpenDaylight's Nexus <odl-nexus_>`_ on completion.
+
+Merge jobs can be retriggered in Gerrit by leaving a comment that says
+**remerge**.
+
+Daily Job Template
+^^^^^^^^^^^^^^^^^^
+
+The Daily (or Nightly) Job Template creates a job which will run a build on
+a daily basis as a sanity check to ensure the build is still working day to
+day.
+
+Sonar Job Template
+^^^^^^^^^^^^^^^^^^
+
+Trigger: **run-sonar**
+
+This job runs Sonar analysis and reports the results to `OpenDaylight's Sonar
+dashboard <odl-sonar_>`_.
+
+The Sonar Job Template creates a job which will run against the master branch,
+or if BRANCHES are specified in the CFG file it will create a job for the
+**First** branch listed.
+
+.. note:: Running the "run-sonar" trigger will cause Jenkins to remove its
+ existing vote if it's already -1'd or +1'd a comment. You will need to
+ re-run your verify job (recheck) after running this to get Jenkins to
+ re-vote.
+
+Integration Job Template
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+The Integration Job Template creates a job which runs when a project that your
+project depends on is successfully built. This job type is basically the same
+as a verify job except that it triggers from other Jenkins jobs instead of via
+Gerrit review updates. The dependencies that trigger integration jobs are listed
+in your project.cfg file under the **DEPENDENCIES** variable.
+
+If no dependencies are listed then this job type is disabled by default.
+
+Distribution Test Job
+^^^^^^^^^^^^^^^^^^^^^
+
+Trigger: **test-distribution**
+
+This job builds a distribution against your patch, passes distribution sanity test
+and reports back the results to Gerrit. Leave a comment with trigger keyword above
+to activate it for a particular patch.
+
+This job is maintained by the Integration/Test (`integration-test-wiki`_) project.
+
+.. note:: Running the "test-distribution" trigger will cause Jenkins to remove
+ it's existing vote if it's already -1 or +1'd a comment. You will need
+ to re-run your verify job (recheck) after running this to get Jenkins
+ to put back the correct vote.
+
+Patch Test Job
+^^^^^^^^^^^^^^
+
+Trigger: **test-integration**
+
+This job runs a full integration test suite against your patch and reports
+back the results to Gerrit. Leave a comment with trigger keyword above to activate it
+for a particular patch.
+
+This job is maintained by the Integration/Test (`integration-test-wiki`_) project.
+
+.. note:: Running the "test-integration" trigger will cause Jenkins to remove
+ its existing vote if it's already -1'd or +1'd a comment. You will need
+ to re-run your verify job (recheck) after running this to get Jenkins
+ to put back the correct vote.
+
+Some considerations when using this job:
+
+* The patch test verification takes some time (~2 hours) + consumes a lot of
+ resources so it is not meant to be used for every patch.
+* The system tests for master patches will fail most of the times because both
+ code and test are unstable during the release cycle (should be good by the
+ end of the cycle).
+* Because of the above, patch test results typically have to be interpreted by
+ system test experts. The Integration/Test (`integration-test-wiki`_) project
+ can help with that.
+
+
+Autorelease Validate Job
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+Trigger: **revalidate**
+
+This job runs the PROJECT-validate-autorelease-BRANCH job which is used as a
+quick sanity test to ensure that a patch does not depend on features that do
+not exist in the current release.
+
+The **revalidate** trigger is useful in cases where a project's verify job
+passed however validate failed due to infra problems or intermittent issues.
+It will retrigger just the validate-autorelease job.
+
+Basic Job Configuration
+-----------------------
+
+To create jobs based on existing `templates <Jenkins Job Templates_>`_, use the
+`jjb-init-project.py`_ helper script. When run from the root of
+`RelEng/Builder's repo <releng-builder-repo_>`_, it will produce a file in
+`jjb/<project>/<project>.yaml` containing your project's base template.
+
+.. code-block:: bash
+
+ $ python scripts/jjb-init-project.py --help
+ usage: jjb-init-project.py [-h] [-c CONF] [-d DEPENDENCIES] [-t TEMPLATES]
+ [-s STREAMS] [-p POM] [-g MVN_GOALS] [-o MVN_OPTS]
+ [-a ARCHIVE_ARTIFACTS]
+ project
+
+ positional arguments:
+ project project
+
+ optional arguments:
+ -h, --help show this help message and exit
+ -c CONF, --conf CONF Config file
+ -d DEPENDENCIES, --dependencies DEPENDENCIES
+ Project dependencies A comma-seperated (no spaces)
+ list of projects your project depends on. This is used
+ to create an integration job that will trigger when a
+ dependent project-merge job is built successfully.
+ Example: aaa,controller,yangtools
+ -t TEMPLATES, --templates TEMPLATES
+ Job templates to use
+ -s STREAMS, --streams STREAMS
+ Release streams to fill with default options
+ -p POM, --pom POM Path to pom.xml to use in Maven build (Default:
+ pom.xml
+ -g MVN_GOALS, --mvn-goals MVN_GOALS
+ Maven Goals
+ -o MVN_OPTS, --mvn-opts MVN_OPTS
+ Maven Options
+ -a ARCHIVE_ARTIFACTS, --archive-artifacts ARCHIVE_ARTIFACTS
+ Comma-seperated list of patterns of artifacts to
+ archive on build completion. See:
+ http://ant.apache.org/manual/Types/fileset.html
+
+If all your project requires is the basic verify, merge, and daily jobs then
+using the job template should be all you need to configure for your jobs.
+
+Auto-Update Job Templates
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The first line of the job YAML file produced by the `jjb-init-project.py`_ script will
+contain the words `# REMOVE THIS LINE IF...`. Leaving this line will allow the
+RelEng/Builder `jjb-autoupdate-project.py`_ script to maintain this file for your project,
+should the base templates ever change. It is a good idea to leave this line if
+you do not plan to create any complex jobs outside of the provided template.
+
+However, if your project needs more control over your jobs or if you have any
+additional configuration outside of the standard configuration provided by the
+template, then this line should be removed.
+
+Tuning Templates
+""""""""""""""""
+
+Allowing the auto-updater to manage your templates doesn't prevent you from
+doing some configuration changes. Parameters can be passed to templates via
+a `<project>.cfg` in your `builder/jjb/<project>` directory. An example is
+provided below, others can be found in the repos of other projects. Tune as
+necessary. Unnecessary parameters can be removed or commented out with a "#"
+sign.
+
+.. code-block:: yaml
+
+ JOB_TEMPLATES: verify,merge,sonar
+ STREAMS:
+ - beryllium:
+ branch: master
+ jdks: openjdk7,openjdk8
+ autorelease: true
+ - stable-lithium:
+ branch: stable/lithium
+ jdks: openjdk7
+ POM: dfapp/pom.xml
+ MVN_GOALS: clean install javadoc:aggregate -DrepoBuild -Dmaven.repo.local=$WORKSPACE/.m2repo -Dorg.ops4j.pax.url.mvn.localRepository=$WORKSPACE/.m2repo
+ MVN_OPTS: -Xmx1024m -XX:MaxPermSize=256m
+ DEPENDENCIES: aaa,controller,yangtools
+ ARCHIVE_ARTIFACTS: *.logs, *.patches
+
+.. note:: `STREAMS <streams-design-background_>`_ is a list of branches you want
+ JJB to generate jobs for.
+ The first branch will be the branch that reports Sonar analysis. Each
+ branch must define a "jdks:" section listing the JDKs the verify jobs
+ should run tests against for the branch. The first JDK listed will be
+ used as the default JDK for non-verify type jobs.
+
+.. note:: Projects that are participating in the simultaneous release should set
+ "autorelease: true" under the streams they are participating in
+ autorelease for. This enables a new job type validate-autorelease
+ which is used to help identify if Gerrit patches might break
+ autorelease or not.
+
+Advanced
+""""""""
+
+It is also possible to take advantage of both the auto-updater and creating
+your own jobs. To do this, create a YAML file in your project's sub-directory
+with any name other than \<project\>.yaml. The auto-update script will only
+search for files with the name \<project\>.yaml. The normal \<project\>.yaml
+file can then be left intact with the "# REMOVE THIS LINE IF..." comment so
+it will be automatically updated.
+
+Maven Properties
+----------------
+
+We provide properties that your job can take advantage of if you want to do
+something different depending on the job type that is run. If you create a
+profile that activates on a property listed below, the JJB templated jobs will
+be able to activate the profile during the build to run any custom code you
+wish to run in your project.
+
+.. code-block:: bash
+
+ -Dmerge : This flag is passed in our Merge job and is equivalent to the
+ Maven property
+ <merge>true</merge>.
+ -Dsonar : This flag is passed in our Sonar job and is equivalent to the
+ Maven property
+ <sonar>true</sonar>.
+
+Jenkins Sandbox
+---------------
+
+The `jenkins-sandbox`_ instance's purpose is to allow projects to test their JJB
+setups before merging their code over to the RelEng master silo. It is
+configured similarly to the master instance, although it cannot publish
+artifacts or vote in Gerrit.
+
+If your project requires access to the sandbox please open an OpenDaylight
+Helpdesk ticket (<helpdesk@opendaylight.org>) and provide your ODL ID.
+
+Notes Regarding the Sandbox
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+* Jobs are automatically deleted every Saturday at 08:00 UTC
+* Committers can login and configure Jenkins jobs in the sandbox directly
+ (unlike with the master silo)
+* Sandbox configuration mirrors the master silo when possible
+* Sandbox jobs can NOT upload artifacts to Nexus
+* Sandbox jobs can NOT vote on Gerrit
+
+Configuration
+^^^^^^^^^^^^^
+
+Make sure you have Jenkins Job Builder `properly installed
+<Installing Jenkins Job Builder_>`_.
+
+If you do not already have access, open an OpenDaylight Helpdesk ticket
+(<helpdesk@opendaylight.org>) to request access to ODL's sandbox instance.
+Integration/Test (`integration-test-wiki`_) committers have access by default.
+
+JJB reads user-specific configuration from a `jenkins.ini`_. An
+example is provided by releng/builder at `example-jenkins.ini`_.
+
+.. code-block:: bash
+
+ # If you don't have RelEng/Builder's repo, clone it
+ $ git clone https://git.opendaylight.org/gerrit/p/releng/builder.git
+ # Make a copy of the example JJB config file (in the builder/ directory)
+ $ cp jenkins.ini.example jenkins.ini
+ # Edit jenkins.ini with your username, API token and ODL's sandbox URL
+ $ cat jenkins.ini
+ <snip>
+ [jenkins]
+ user=<your ODL username>
+ password=<your ODL Jenkins sandbox API token>
+ url=https://jenkins.opendaylight.org/sandbox
+ <snip>
+
+To get your API token, `login to the Jenkins **sandbox** instance
+<jenkins-sandbox-login_>`_ (*not
+the main master Jenkins instance, different tokens*), go to your user page (by
+clicking on your username, for example), click "Configure" and then "Show API
+Token".
+
+Manual Method
+^^^^^^^^^^^^^
+
+If you `installed JJB locally into a virtual environment
+<Installing Jenkins Job Builder_>`_,
+you should now activate that virtual environment to access the `jenkins-jobs`
+executable.
+
+.. code-block:: bash
+
+ $ workon jjb
+ (jjb)$
+
+You'll want to work from the root of the RelEng/Builder repo, and you should
+have your `jenkins.ini` file `properly configured <Configuration_>`_.
+
+Testing Jobs
+^^^^^^^^^^^^
+
+It's good practice to use the `test` command to validate your JJB files before
+pushing them.
+
+.. code-block:: bash
+
+ jenkins-jobs --conf jenkins.ini test jjb/ <job-name>
+
+If the job you'd like to test is a template with variables in its name, it
+must be manually expanded before use. For example, the commonly used template
+`{project}-csit-verify-1node-{functionality}` might expand to
+`ovsdb-csit-verify-1node-netvirt`.
+
+.. code-block:: bash
+
+ jenkins-jobs --conf jenkins.ini test jjb/ ovsdb-csit-verify-1node-netvirt
+
+Successful tests output the XML description of the Jenkins job described by
+the specified JJB job name.
+
+Pushing Jobs
+^^^^^^^^^^^^
+
+Once you've `configured your \`jenkins.ini\` <Configuration_>`_ and `verified your
+JJB jobs <Testing Jobs_>`_ produce valid XML descriptions of Jenkins jobs you
+can push them to the Jenkins sandbox.
+
+.. important::
+
+ When pushing with `jenkins-jobs`, a log message with the number
+ of jobs you're pushing will be issued, typically to stdout.
+ **If the number is greater than 1** (or the number of jobs you
+ passed to the command to push) then you are pushing too many
+ jobs and should **`ctrl+c` to cancel the upload**. Else you will
+ flood the system with jobs.
+
+ .. code-block:: bash
+
+ INFO:jenkins_jobs.builder:Number of jobs generated: 1
+
+ **Failing to provide the final `<job-name>` param will push all
+ jobs!**
+
+ .. code-block:: bash
+
+ # Don't push all jobs by omitting the final param! (ctrl+c to abort)
+ jenkins-jobs --conf jenkins.ini update jjb/ <job-name>
+
+Running Jobs
+^^^^^^^^^^^^
+
+Once you have your Jenkins job configuration `pushed to the
+Sandbox <Pushing Jobs_>`_ you can trigger it to run.
+
+Find your newly-pushed job on the `Sandbox's web UI <jenkins-sandbox_>`_. Click
+on its name to see the job's details.
+
+Make sure you're `logged in <jenkins-sandbox-login_>`_ to the Sandbox.
+
+Click "Build with Parameters" and then "Build".
+
+Wait for your job to be scheduled and run. Click on the job number to see
+details, including console output.
+
+Make changes to your JJB configuration, re-test, re-push and re-run until
+your job is ready.
+
+Docker Method
+^^^^^^^^^^^^^
+
+If `using Docker <JJB Docker image_>`_:
+
+.. code-block:: bash
+
+ # To test
+ docker run --rm -v ${PWD}:/jjb zxiiro/jjb-docker
+
+.. important::
+
+ When pushing with `jenkins-jobs`, a log message with
+ the number of jobs you're pushing will be issued, typically to stdout.
+ **If the number is greater than 1** (or the number of jobs you passed to
+ the command to push) then you are pushing too many jobs and should **`ctrl+c`
+ to cancel the upload**. Else you will flood the system with jobs.
+
+ .. code-block:: bash
+
+ INFO:jenkins_jobs.builder:Number of jobs generated: 1
+
+ **Failing to provide the final `<job-name>` param will push all jobs!**
+
+ .. code-block:: bash
+
+ # To upload jobs to the sandbox
+ # Please ensure that you include a configured jenkins.ini in your volume mount
+ # Making sure not to push more jobs than expected, ctrl+c to abort
+ docker run --rm -v ${PWD}:/jjb zxiiro/jjb-docker jenkins-jobs --conf jenkins.ini update . openflowplugin-csit-periodic-1node-cds-longevity-only-master
+
+.. _docker-docs: https://www.docker.com/whatisdocker/
+.. _example-jenkins.ini: https://git.opendaylight.org/gerrit/gitweb?p=releng/builder.git;a=blob;f=jenkins.ini.example
+.. _integration-test-wiki: https://wiki.opendaylight.org/view/Integration/Test
+.. _jenkins-master: https://jenkins.opendaylight.org/releng
+.. _jenkins-sandbox: https://jenkins.opendaylight.org/sandbox
+.. _jenkins-sandbox-login: https://jenkins.opendaylight.org/sandbox/login
+.. _jenkins.ini: http://docs.openstack.org/infra/jenkins-job-builder/execution.html#configuration-file
+.. _jjb-autoupdate-project.py: https://git.opendaylight.org/gerrit/gitweb?p=releng/builder.git;a=blob;f=scripts/jjb-autoupdate-project.py
+.. _jjb-docker: https://hub.docker.com/r/zxiiro/jjb-docker/
+.. _jjb-dockerfile: https://github.com/zxiiro/jjb-docker/blob/master/Dockerfile
+.. _jjb-docs: http://ci.openstack.org/jenkins-job-builder/
+.. _jjb-init-project.py: https://git.opendaylight.org/gerrit/gitweb?p=releng/builder.git;a=blob;f=scripts/jjb-init-project.py
+.. _jjb-repo: https://github.com/openstack-infra/jenkins-job-builder
+.. _jjb-requirements.txt: https://github.com/openstack-infra/jenkins-job-builder/blob/master/requirements.txt
+.. _jjb-templates: https://git.opendaylight.org/gerrit/gitweb?p=releng/builder.git;a=tree;f=jjb
+.. _odl-jjb-requirements.txt: https://git.opendaylight.org/gerrit/gitweb?p=releng/builder.git;a=blob;f=jjb/requirements.txt
+.. _odl-nexus: https://nexus.opendaylight.org
+.. _odl-sonar: https://sonar.opendaylight.org
+.. _python-virtualenv: https://virtualenv.readthedocs.org/en/latest/
+.. _python-virtualenvwrapper: https://virtualenvwrapper.readthedocs.org/en/latest/
+.. _releng-wiki: https://wiki.opendaylight.org/view/RelEng:Main
+.. _releng-builder-gerrit: https://git.opendaylight.org/gerrit/#/admin/projects/releng/builder
+.. _releng-builder-repo: https://git.opendaylight.org/gerrit/gitweb?p=releng%2Fbuilder.git;a=summary
+.. _releng-builder-wiki: https://wiki.opendaylight.org/view/RelEng/Builder
+.. _streams-design-background: https://lists.opendaylight.org/pipermail/release/2015-July/003139.html
+.. _spinup-scripts: https://git.opendaylight.org/gerrit/gitweb?p=releng/builder.git;a=tree;f=jenkins-scripts
+.. _spinup-scripts-basic_settings.sh: https://git.opendaylight.org/gerrit/gitweb?p=releng/builder.git;a=blob;f=jenkins-scripts/basic_settings.sh
+.. _spinup-scripts-controller.sh: https://git.opendaylight.org/gerrit/gitweb?p=releng/builder.git;a=blob;f=jenkins-scripts/controller.sh
+.. _vagrant-basic-java-node: https://git.opendaylight.org/gerrit/gitweb?p=releng/builder.git;a=tree;f=vagrant/basic-java-node
+.. _vagrant-definitions: https://git.opendaylight.org/gerrit/gitweb?p=releng/builder.git;a=tree;f=vagrant
--- /dev/null
+@ECHO OFF\r
+\r
+REM Command file for Sphinx documentation\r
+\r
+if "%SPHINXBUILD%" == "" (\r
+ set SPHINXBUILD=sphinx-build\r
+)\r
+set BUILDDIR=_build\r
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .\r
+set I18NSPHINXOPTS=%SPHINXOPTS% .\r
+if NOT "%PAPER%" == "" (\r
+ set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%\r
+ set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%\r
+)\r
+\r
+if "%1" == "" goto help\r
+\r
+if "%1" == "help" (\r
+ :help\r
+ echo.Please use `make ^<target^>` where ^<target^> is one of\r
+ echo. html to make standalone HTML files\r
+ echo. dirhtml to make HTML files named index.html in directories\r
+ echo. singlehtml to make a single large HTML file\r
+ echo. pickle to make pickle files\r
+ echo. json to make JSON files\r
+ echo. htmlhelp to make HTML files and a HTML help project\r
+ echo. qthelp to make HTML files and a qthelp project\r
+ echo. devhelp to make HTML files and a Devhelp project\r
+ echo. epub to make an epub\r
+ echo. epub3 to make an epub3\r
+ echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter\r
+ echo. text to make text files\r
+ echo. man to make manual pages\r
+ echo. texinfo to make Texinfo files\r
+ echo. gettext to make PO message catalogs\r
+ echo. changes to make an overview over all changed/added/deprecated items\r
+ echo. xml to make Docutils-native XML files\r
+ echo. pseudoxml to make pseudoxml-XML files for display purposes\r
+ echo. linkcheck to check all external links for integrity\r
+ echo. doctest to run all doctests embedded in the documentation if enabled\r
+ echo. coverage to run coverage check of the documentation if enabled\r
+ goto end\r
+)\r
+\r
+if "%1" == "clean" (\r
+ for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i\r
+ del /q /s %BUILDDIR%\*\r
+ goto end\r
+)\r
+\r
+\r
+REM Check if sphinx-build is available and fallback to Python version if any\r
+%SPHINXBUILD% 1>NUL 2>NUL\r
+if errorlevel 9009 goto sphinx_python\r
+goto sphinx_ok\r
+\r
+:sphinx_python\r
+\r
+set SPHINXBUILD=python -m sphinx.__init__\r
+%SPHINXBUILD% 2> nul\r
+if errorlevel 9009 (\r
+ echo.\r
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx\r
+ echo.installed, then set the SPHINXBUILD environment variable to point\r
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you\r
+ echo.may add the Sphinx directory to PATH.\r
+ echo.\r
+ echo.If you don't have Sphinx installed, grab it from\r
+ echo.http://sphinx-doc.org/\r
+ exit /b 1\r
+)\r
+\r
+:sphinx_ok\r
+\r
+\r
+if "%1" == "html" (\r
+ %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Build finished. The HTML pages are in %BUILDDIR%/html.\r
+ goto end\r
+)\r
+\r
+if "%1" == "dirhtml" (\r
+ %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.\r
+ goto end\r
+)\r
+\r
+if "%1" == "singlehtml" (\r
+ %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.\r
+ goto end\r
+)\r
+\r
+if "%1" == "pickle" (\r
+ %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Build finished; now you can process the pickle files.\r
+ goto end\r
+)\r
+\r
+if "%1" == "json" (\r
+ %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Build finished; now you can process the JSON files.\r
+ goto end\r
+)\r
+\r
+if "%1" == "htmlhelp" (\r
+ %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Build finished; now you can run HTML Help Workshop with the ^\r
+.hhp project file in %BUILDDIR%/htmlhelp.\r
+ goto end\r
+)\r
+\r
+if "%1" == "qthelp" (\r
+ %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Build finished; now you can run "qcollectiongenerator" with the ^\r
+.qhcp project file in %BUILDDIR%/qthelp, like this:\r
+ echo.^> qcollectiongenerator %BUILDDIR%\qthelp\OpenDaylightRelEngBuilder.qhcp\r
+ echo.To view the help file:\r
+ echo.^> assistant -collectionFile %BUILDDIR%\qthelp\OpenDaylightRelEngBuilder.ghc\r
+ goto end\r
+)\r
+\r
+if "%1" == "devhelp" (\r
+ %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Build finished.\r
+ goto end\r
+)\r
+\r
+if "%1" == "epub" (\r
+ %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Build finished. The epub file is in %BUILDDIR%/epub.\r
+ goto end\r
+)\r
+\r
+if "%1" == "epub3" (\r
+ %SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Build finished. The epub3 file is in %BUILDDIR%/epub3.\r
+ goto end\r
+)\r
+\r
+if "%1" == "latex" (\r
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.\r
+ goto end\r
+)\r
+\r
+if "%1" == "latexpdf" (\r
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex\r
+ cd %BUILDDIR%/latex\r
+ make all-pdf\r
+ cd %~dp0\r
+ echo.\r
+ echo.Build finished; the PDF files are in %BUILDDIR%/latex.\r
+ goto end\r
+)\r
+\r
+if "%1" == "latexpdfja" (\r
+ %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex\r
+ cd %BUILDDIR%/latex\r
+ make all-pdf-ja\r
+ cd %~dp0\r
+ echo.\r
+ echo.Build finished; the PDF files are in %BUILDDIR%/latex.\r
+ goto end\r
+)\r
+\r
+if "%1" == "text" (\r
+ %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Build finished. The text files are in %BUILDDIR%/text.\r
+ goto end\r
+)\r
+\r
+if "%1" == "man" (\r
+ %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Build finished. The manual pages are in %BUILDDIR%/man.\r
+ goto end\r
+)\r
+\r
+if "%1" == "texinfo" (\r
+ %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.\r
+ goto end\r
+)\r
+\r
+if "%1" == "gettext" (\r
+ %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Build finished. The message catalogs are in %BUILDDIR%/locale.\r
+ goto end\r
+)\r
+\r
+if "%1" == "changes" (\r
+ %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.The overview file is in %BUILDDIR%/changes.\r
+ goto end\r
+)\r
+\r
+if "%1" == "linkcheck" (\r
+ %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Link check complete; look for any errors in the above output ^\r
+or in %BUILDDIR%/linkcheck/output.txt.\r
+ goto end\r
+)\r
+\r
+if "%1" == "doctest" (\r
+ %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Testing of doctests in the sources finished, look at the ^\r
+results in %BUILDDIR%/doctest/output.txt.\r
+ goto end\r
+)\r
+\r
+if "%1" == "coverage" (\r
+ %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Testing of coverage in the sources finished, look at the ^\r
+results in %BUILDDIR%/coverage/python.txt.\r
+ goto end\r
+)\r
+\r
+if "%1" == "xml" (\r
+ %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Build finished. The XML files are in %BUILDDIR%/xml.\r
+ goto end\r
+)\r
+\r
+if "%1" == "pseudoxml" (\r
+ %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml\r
+ if errorlevel 1 exit /b 1\r
+ echo.\r
+ echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.\r
+ goto end\r
+)\r
+\r
+:end\r
--- /dev/null
+Release Workflow
+================
+
+This page documents the workflow for releasing for projects that are not built
+and released via the Autorelease project.
+
+Sections:
+
+.. contents::
+ :depth: 3
+ :local:
+
+Workflow
+--------
+
+OpenDaylight uses Nexus as its artifact repository for releasing artifacts to
+the world. The workflow involves using Nexus to produce a staging repository
+which can be tested and reviewed before being approved to copy to the final
+destination opendaylight.release repo. The workflow in general is as follows:
+
+1. Project create release tag and push to Gerrit
+2. Project will contact helpdesk@opendaylight.org with project name and build
+ tag to produce a release candidate / staging repo
+3. Helpdesk will run a build and notify project of staging repo location
+4. Project tests staging repo and notifies Helpdesk with go ahead to release
+5. Helpdesk clicks Release repo button in Nexus
+6. (optional) Helpdesk runs Jenkins job to push update-site.zip to p2repos
+ sites repo
+
+Step 6 is only necessary for Eclipse projects that need to additionally deploy
+an update site to a webserver.
+
+Release Job
+-----------
+
+There is a JJB template release job which should be used for a project if the
+project needs to produce a staging repo for release. The supported Job types
+are listed below, use the one relevant to your project.
+
+**Maven|Java** {name}-release-java -- this job type will produce a staging repo
+in Nexus for Maven projects.
+
+**P2 Publisher** {name}-publish-p2repo -- this job type is useful for projects
+that produce a p2 repo that needs to be published to a special URL.
mkdir /tmp/Hbase
cd /tmp/Hbase
-wget --no-verbose https://archive.apache.org/dist/hbase/hbase-0.94.15/hbase-0.94.15.tar.gz
+wget --no-verbose http://apache.osuosl.org/hbase/hbase-0.94.27/hbase-0.94.27.tar.gz
echo "Installing the Hbase Server..."
-tar -xvf hbase*.tar.gz
+tar -xvf hbase-0.94.27.tar.gz
#Installation of Cassandra
mkdir /tmp/cassandra
cd /tmp/cassandra
-wget --no-verbose http://archive.apache.org/dist/cassandra/2.1.12/apache-cassandra-2.1.12-bin.tar.gz
+wget --no-verbose http://apache.osuosl.org/cassandra/2.1.14/apache-cassandra-2.1.14-bin.tar.gz
echo "Installing the Cassandra Server..."
-tar -xvf apache*.tar.gz
+tar -xvf apache-cassandra-2.1.14-bin.tar.gz
cat <<EOF >/etc/sudoers.d/89-jenkins-user-defaults
Defaults:jenkins !requiretty
# disable the firewall
/bin/bash ./disable_firewall.sh
+
+# Install vlan for vlan based tests in VTN suites
+apt-get install -y --force-yes -qq vlan
\ No newline at end of file
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
logrotate:
daysToKeep: '{build-days-to-keep}'
- numToKeep: '{build-num-to-keep}'
artifactDaysToKeep: '14'
- artifactNumToKeep: '-1'
parameters:
- project-parameter:
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
mininet-vms: 0
# Features to install
- install-features: 'odl-restconf,odl-clustering-test-app,odl-jolokia'
+ install-features: 'odl-restconf,odl-clustering-test-app'
# Robot custom options
robot-options: ''
mininet-vms: 0
# Features to install
- install-features: 'odl-restconf,odl-clustering-test-app,odl-jolokia'
+ install-features: 'odl-restconf,odl-clustering-test-app'
# Robot custom options
robot-options: ''
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
--- /dev/null
+- project:
+ name: docs-rtd
+ jobs:
+ - '{name}-merge-rtd-{stream}'
+
+ stream:
+ - boron:
+ branch: 'master'
+
+ project: 'docs'
+ rtdproject: 'opendaylight'
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
--- /dev/null
+#!/bin/bash
+# @License EPL-1.0 <http://spdx.org/licenses/EPL-1.0>
+##############################################################################
+# Copyright (c) 2016 The Linux Foundation and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Eclipse Public License v1.0
+# which accompanies this distribution, and is available at
+# http://www.eclipse.org/legal/epl-v10.html
+##############################################################################
+
+FILE_NAME=`echo $P2ZIP_URL | awk -F'/' '{ print $NF }'`
+VERSION=`echo $P2ZIP_URL | awk -F'/' '{ print $(NF-1) }'`
+
+wget $P2ZIP_URL -O $FILE_NAME
+
+cat > ${WORKSPACE}/pom.xml <<EOF
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.opendaylight.$PROJECT</groupId>
+ <artifactId>p2repo</artifactId>
+ <version>1.0.0-SNAPSHOT</version>
+ <packaging>pom</packaging>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-deploy-plugin</artifactId>
+ <version>2.8.2</version>
+ <configuration>
+ <skip>true</skip>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.sonatype.plugins</groupId>
+ <artifactId>maven-upload-plugin</artifactId>
+ <version>0.0.1</version>
+ <executions>
+ <execution>
+ <id>publish-site</id>
+ <phase>deploy</phase>
+ <goals>
+ <goal>upload-file</goal>
+ </goals>
+ <configuration>
+ <serverId>opendaylight-p2</serverId>
+ <repositoryUrl>https://nexus.opendaylight.org/service/local/repositories/p2repos/content-compressed</repositoryUrl>
+ <file>$FILE_NAME</file>
+ <repositoryPath>org.opendaylight.$PROJECT/$VERSION</repositoryPath>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+</project>
+EOF
--- /dev/null
+#!/bin/bash
+# @License EPL-1.0 <http://spdx.org/licenses/EPL-1.0>
+##############################################################################
+# Copyright (c) 2016 The Linux Foundation and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Eclipse Public License v1.0
+# which accompanies this distribution, and is available at
+# http://www.eclipse.org/legal/epl-v10.html
+##############################################################################
+
+# Assuming that mvn deploy created the hide/from/pom/files/stage directory.
+cd hide/from/pom/files
+mkdir -p m2repo/org/opendaylight/
+
+rsync -avz --exclude 'maven-metadata*' \
+ --exclude '_remote.repositories' \
+ --exclude 'resolver-status.properties' \
+ "stage/org/opendaylight/$m" m2repo/org/opendaylight/
+
+mvn org.sonatype.plugins:nexus-staging-maven-plugin:1.6.2:deploy-staged-repository \
+ -DrepositoryDirectory="`pwd`/m2repo" \
+ -DnexusUrl=https://nexus.opendaylight.org/ \
+ -DstagingProfileId="$STAGING_PROFILE_ID" \
+ -DserverId="opendaylight-staging" \
+ -s $SETTINGS_FILE \
+ -gs $GLOBAL_SETTINGS_FILE | tee $WORKSPACE/deploy-staged-repository.log
--- /dev/null
+#!/bin/bash
+# @License EPL-1.0 <http://spdx.org/licenses/EPL-1.0>
+##############################################################################
+# Copyright (c) 2016 The Linux Foundation and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Eclipse Public License v1.0
+# which accompanies this distribution, and is available at
+# http://www.eclipse.org/legal/epl-v10.html
+##############################################################################
+
+EPOCH_DATE=`date +%s%3N`
+MVN_METADATA=`echo $P2ZIP_URL | sed 's,/*[^/]\+/*$,,' | sed 's,/*[^/]\+/*$,,'`/maven-metadata.xml
+P2_COMPOSITE_ARTIFACTS=compositeArtifacts.xml
+P2_COMPOSITE_CONTENT=compositeContent.xml
+
+wget $MVN_METADATA -O maven-metadata.xml
+
+VERSIONS=`xmlstarlet sel -t -m "/metadata/versioning/versions" -v "version" maven-metadata.xml`
+NUM_VERSIONS=`echo $VERSIONS | wc -w`
+
+
+##
+## Create compositeArtifacts.xml and compositeContent.xml files
+##
+
+cat > $P2_COMPOSITE_ARTIFACTS <<EOF
+<?xml version='1.0' encoding='UTF-8'?>
+<?compositeArtifactRepository version='1.0.0'?>
+<repository name='OpenDaylight $PROJECT'
+ type='org.eclipse.equinox.internal.p2.artifact.repository.CompositeArtifactRepository' version='1.0.0'>
+ <properties size='1'>
+ <property name='p2.timestamp' value='$EPOCH_DATE'/>
+ </properties>
+ <children size='$NUM_VERSIONS'>
+EOF
+
+cat > $P2_COMPOSITE_CONTENT <<EOF
+<?xml version='1.0' encoding='UTF-8'?>
+<?compositeMetadataRepository version='1.0.0'?>
+<repository name='OpenDaylight $PROJECT'
+ type='org.eclipse.equinox.internal.p2.metadata.repository.CompositeMetadataRepository' version='1.0.0'>
+ <properties size='1'>
+ <property name='p2.timestamp' value='$EPOCH_DATE'/>
+ </properties>
+ <children size='$NUM_VERSIONS'>
+EOF
+
+##
+## Loop versions
+##
+
+for ver in $VERSIONS
+do
+ echo " <child location='$ver'/>" >> $P2_COMPOSITE_ARTIFACTS
+ echo " <child location='$ver'/>" >> $P2_COMPOSITE_CONTENT
+done
+
+##
+## Close files
+##
+
+cat >> $P2_COMPOSITE_ARTIFACTS <<EOF
+ </children>
+</repository>
+EOF
+
+cat >> $P2_COMPOSITE_CONTENT <<EOF
+ </children>
+</repository>
+EOF
+
+##
+## Create poms for uploading
+##
+
+zip composite-repo.zip $P2_COMPOSITE_ARTIFACTS $P2_COMPOSITE_CONTENT
+
+cat > deploy-composite-repo.xml <<EOF
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.opendaylight.$PROJECT</groupId>
+ <artifactId>p2repo</artifactId>
+ <version>1.0.0-SNAPSHOT</version>
+ <packaging>pom</packaging>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-deploy-plugin</artifactId>
+ <version>2.8.2</version>
+ <configuration>
+ <skip>true</skip>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.sonatype.plugins</groupId>
+ <artifactId>maven-upload-plugin</artifactId>
+ <version>0.0.1</version>
+ <executions>
+ <execution>
+ <id>publish-site</id>
+ <phase>deploy</phase>
+ <goals>
+ <goal>upload-file</goal>
+ </goals>
+ <configuration>
+ <serverId>opendaylight-p2</serverId>
+ <repositoryUrl>https://nexus.opendaylight.org/service/local/repositories/p2repos/content-compressed</repositoryUrl>
+ <file>composite-repo.zip</file>
+ <repositoryPath>org.opendaylight.$PROJECT</repositoryPath>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+</project>
+EOF
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
AKKACONF=/tmp/${BUNDLEFOLDER}/configuration/initial/akka.conf
MODULESCONF=/tmp/${BUNDLEFOLDER}/configuration/initial/modules.conf
MODULESHARDSCONF=/tmp/${BUNDLEFOLDER}/configuration/initial/module-shards.conf
-JOLOKIACONF=/tmp/${BUNDLEFOLDER}/deploy/jolokia.xml
+CONTROLLERMEM="2048m"
+
+if [ ${CONTROLLERSCOPE} == 'all' ]; then
+ ACTUALFEATURES="odl-integration-compatible-with-all,${CONTROLLERFEATURES}"
+ CONTROLLERMEM="3072m"
+else
+ ACTUALFEATURES="${CONTROLLERFEATURES}"
+fi
+# Some versions of jenkins job builder result in feature list containing spaces
+# and ending in newline. Remove all that.
+ACTUALFEATURES=`echo "${ACTUALFEATURES}" | tr -d '\n \r'`
# Utility function for joining strings.
function join {
- delim=',\n\t\t'
+ delim=' '
final=$1; shift
for str in $* ; do
echo ${final}
}
-# Create the strings for data and rpc seed nodes
-# First create various arrays with one element per controller.
-# Then merge each array using the join utility defined above.
-
+# Create the string for nodes
for i in `seq 1 ${NUM_ODL_SYSTEM}` ; do
CONTROLLERIP=ODL_SYSTEM_${i}_IP
- data_seed_nodes[$i]=\\\"akka.tcp:\\/\\/opendaylight-cluster-data@${!CONTROLLERIP}:2550\\\"
- rpc_seed_nodes[$i]=\\\"akka.tcp:\\/\\/odl-cluster-rpc@${!CONTROLLERIP}:2551\\\"
- member_names[$i]=\\\"member-${i}\\\"
+ nodes[$i]=${!CONTROLLERIP}
done
-data_seed_list=$(join ${data_seed_nodes[@]})
-rpc_seed_list=$(join ${rpc_seed_nodes[@]})
-member_name_list=$(join ${member_names[@]})
+nodes_list=$(join ${nodes[@]})
+
+echo ${nodes_list}
-# echo ${data_seed_list}
-# echo ${rpc_seed_list}
-# echo ${member_name_list}
+# Run script plan in case it exists
+if [ -f ${WORKSPACE}/test/csit/scriptplans/${TESTPLAN} ]; then
+ echo "scriptplan exists!!!"
+ echo "Changing the scriptplan path..."
+ cat ${WORKSPACE}/test/csit/scriptplans/${TESTPLAN} | sed "s:integration:${WORKSPACE}:" > scriptplan.txt
+ cat scriptplan.txt
+ for line in $( egrep -v '(^[[:space:]]*#|^[[:space:]]*$)' scriptplan.txt ); do
+ echo "Executing ${line}..."
+ source ${line}
+ done
+fi
# Create the configuration script to be run on controllers.
cat > ${WORKSPACE}/configuration-script.sh <<EOF
-CONTROLLERID="member-\$1"
-CONTROLLERIP=\$2
-
-echo "Configuring hostname in akka.conf"
-sed -i -e "s:{{HOST}}:\${CONTROLLERIP}:" ${AKKACONF}
-
-echo "Configuring data seed nodes in akka.conf"
-sed -i -e "s/{{{DS_SEED_NODES}}}/[${data_seed_list}]/g" ${AKKACONF}
+echo "Changing to /tmp"
+cd /tmp
+
+echo "Downloading the distribution from ${ACTUALBUNDLEURL}"
+wget --progress=dot:mega '${ACTUALBUNDLEURL}'
+
+echo "Extracting the new controller..."
+unzip -q ${BUNDLE}
+
+echo "Configuring the startup features..."
+FEATURESCONF=/tmp/${BUNDLEFOLDER}/etc/org.apache.karaf.features.cfg
+CUSTOMPROP=/tmp/${BUNDLEFOLDER}/etc/custom.properties
+sed -ie "s/featuresBoot=.*/featuresBoot=config,standard,region,package,kar,ssh,management,${ACTUALFEATURES}/g" \${FEATURESCONF}
+sed -ie "s%mvn:org.opendaylight.integration/features-integration-index/${BUNDLEVERSION}/xml/features%mvn:org.opendaylight.integration/features-integration-index/${BUNDLEVERSION}/xml/features,mvn:org.opendaylight.integration/features-integration-test/${BUNDLEVERSION}/xml/features%g" \${FEATURESCONF}
+cat \${FEATURESCONF}
+
+echo "Configuring the log..."
+LOGCONF=/tmp/${BUNDLEFOLDER}/etc/org.ops4j.pax.logging.cfg
+sed -ie 's/log4j.appender.out.maxBackupIndex=10/log4j.appender.out.maxBackupIndex=1/g' \${LOGCONF}
+# FIXME: Make log size limit configurable from build parameter.
+sed -ie 's/log4j.appender.out.maxFileSize=1MB/log4j.appender.out.maxFileSize=100GB/g' \${LOGCONF}
+cat \${LOGCONF}
+
+if [ "${ODL_ENABLE_L3_FWD}" == "yes" ]; then
+ echo "Enable the l3.fwd in custom.properties.."
+ echo "ovsdb.l3.fwd.enabled=yes" >> \${CUSTOMPROP}
+ cat \${CUSTOMPROP}
+fi
+
+echo "Configure java home and max memory..."
+MEMCONF=/tmp/${BUNDLEFOLDER}/bin/setenv
+sed -ie 's%^# export JAVA_HOME%export JAVA_HOME="\${JAVA_HOME:-${JAVA_HOME}}"%g' \${MEMCONF}
+sed -ie 's/JAVA_MAX_MEM="2048m"/JAVA_MAX_MEM="${CONTROLLERMEM}"/g' \${MEMCONF}
+cat \${MEMCONF}
+
+echo "Set Java version"
+sudo /usr/sbin/alternatives --install /usr/bin/java java ${JAVA_HOME}/bin/java 1
+sudo /usr/sbin/alternatives --set java ${JAVA_HOME}/bin/java
+echo "JDK default version ..."
+java -version
-echo "Configuring rpc seed nodes in akka.conf"
-sed -i -e "s/{{{RPC_SEED_NODES}}}/[${rpc_seed_list}]/g" ${AKKACONF}
+echo "Set JAVA_HOME"
+export JAVA_HOME="${JAVA_HOME}"
+# Did you know that in HERE documents, single quote is an ordinary character, but backticks are still executing?
+JAVA_RESOLVED=\`readlink -e "\${JAVA_HOME}/bin/java"\`
+echo "Java binary pointed at by JAVA_HOME: \${JAVA_RESOLVED}"
-echo "Define unique name in akka.conf"
-sed -i -e "s/{{MEMBER_NAME}}/\${CONTROLLERID}/g" ${AKKACONF}
+# Copy shard file if exists
+if [ -f /tmp/custom_shard_config.txt ]; then
+ echo "Custom shard config exists!!!"
+ echo "Copying the shard config..."
+ cp /tmp/custom_shard_config.txt /tmp/${BUNDLEFOLDER}/bin/
+fi
-echo "Define replication type in module-shards.conf"
-sed -i -e "s/{{{REPLICAS_1}}}/[${member_name_list}]/g" ${MODULESHARDSCONF}
-sed -i -e "s/{{{REPLICAS_2}}}/[${member_name_list}]/g" ${MODULESHARDSCONF}
-sed -i -e "s/{{{REPLICAS_3}}}/[${member_name_list}]/g" ${MODULESHARDSCONF}
-sed -i -e "s/{{{REPLICAS_4}}}/[${member_name_list}]/g" ${MODULESHARDSCONF}
+echo "Configuring cluster"
+/tmp/${BUNDLEFOLDER}/bin/configure_cluster.sh \$1 ${nodes_list}
echo "Dump akka.conf"
cat ${AKKACONF}
echo "Dump module-shards.conf"
cat ${MODULESHARDSCONF}
-echo "Set JAVA_HOME"
-export JAVA_HOME="${JAVA_HOME}"
-# Did you know that in HERE documents, single quote is an ordinary character, but backticks are still executing?
-JAVA_RESOLVED=\`readlink -e "\${JAVA_HOME}/bin/java"\`
-echo "Java binary pointed at by JAVA_HOME: \${JAVA_RESOLVED}"
-echo "JDK default version ..."
-java -version
+EOF
+
+# Create the startup script to be run on controllers.
+cat > ${WORKSPACE}/startup-script.sh <<EOF
echo "Starting controller..."
/tmp/${BUNDLEFOLDER}/bin/start
do
CONTROLLERIP=ODL_SYSTEM_${i}_IP
echo "Configuring member-${i} with IP address ${!CONTROLLERIP}"
- ssh ${!CONTROLLERIP} "mkdir /tmp/${BUNDLEFOLDER}/configuration/initial"
- scp ${WORKSPACE}/test/tools/clustering/cluster-deployer/templates/multi-node-test/akka.conf.template ${!CONTROLLERIP}:${AKKACONF}
- scp ${WORKSPACE}/test/tools/clustering/cluster-deployer/templates/multi-node-test/modules.conf.template ${!CONTROLLERIP}:${MODULESCONF}
- scp ${WORKSPACE}/test/tools/clustering/cluster-deployer/templates/multi-node-test/module-shards.conf.template ${!CONTROLLERIP}:${MODULESHARDSCONF}
- scp ${WORKSPACE}/test/tools/clustering/cluster-deployer/templates/multi-node-test/jolokia.xml.template ${!CONTROLLERIP}:${JOLOKIACONF}
- scp ${WORKSPACE}/configuration-script.sh ${!CONTROLLERIP}:/tmp/
- ssh ${!CONTROLLERIP} "bash /tmp/configuration-script.sh ${i} ${!CONTROLLERIP}"
+ scp ${WORKSPACE}/configuration-script.sh ${!CONTROLLERIP}:/tmp/
+ ssh ${!CONTROLLERIP} "bash /tmp/configuration-script.sh ${i}"
+done
+
+# Copy over the startup script to each controller and execute it.
+for i in `seq 1 ${NUM_ODL_SYSTEM}`
+do
+ CONTROLLERIP=ODL_SYSTEM_${i}_IP
+ echo "Starting member-${i} with IP address ${!CONTROLLERIP}"
+ scp ${WORKSPACE}/startup-script.sh ${!CONTROLLERIP}:/tmp/
+ ssh ${!CONTROLLERIP} "bash /tmp/startup-script.sh"
done
# vim: ts=4 sw=4 sts=4 et ft=sh :
rm -rf *
echo "Downloading the distribution..."
-wget --no-verbose ${ACTUALBUNDLEURL}
+wget --progress=dot:mega ${ACTUALBUNDLEURL}
echo "Extracting the new controller..."
unzip -q ${BUNDLE}
cd /tmp
echo "Downloading the distribution..."
-wget --no-verbose '${ACTUALBUNDLEURL}'
+wget --progress=dot:mega '${ACTUALBUNDLEURL}'
echo "Extracting the new controller..."
unzip -q ${BUNDLE}
echo "Configuring the startup features..."
FEATURESCONF=/tmp/${BUNDLEFOLDER}/etc/org.apache.karaf.features.cfg
+CUSTOMPROP=/tmp/${BUNDLEFOLDER}/etc/custom.properties
sed -ie "s/featuresBoot=.*/featuresBoot=config,standard,region,package,kar,ssh,management,${ACTUALFEATURES}/g" \${FEATURESCONF}
sed -ie "s%mvn:org.opendaylight.integration/features-integration-index/${BUNDLEVERSION}/xml/features%mvn:org.opendaylight.integration/features-integration-index/${BUNDLEVERSION}/xml/features,mvn:org.opendaylight.integration/features-integration-test/${BUNDLEVERSION}/xml/features%g" \${FEATURESCONF}
cat \${FEATURESCONF}
+if [ "${ODL_ENABLE_L3_FWD}" == "yes" ]; then
+ echo "ovsdb.l3.fwd.enabled=yes" >> \${CUSTOMPROP}
+fi
+cat \${CUSTOMPROP}
+
echo "Configuring the log..."
LOGCONF=/tmp/${BUNDLEFOLDER}/etc/org.ops4j.pax.logging.cfg
sed -ie 's/log4j.appender.out.maxBackupIndex=10/log4j.appender.out.maxBackupIndex=1/g' \${LOGCONF}
sed -ie 's/log4j.appender.out.maxFileSize=1MB/log4j.appender.out.maxFileSize=100GB/g' \${LOGCONF}
cat \${LOGCONF}
-echo "Configure max memory..."
+echo "Configure java home and max memory..."
MEMCONF=/tmp/${BUNDLEFOLDER}/bin/setenv
+sed -ie 's%^# export JAVA_HOME%export JAVA_HOME="\${JAVA_HOME:-${JAVA_HOME}}"%g' \${MEMCONF}
sed -ie 's/JAVA_MAX_MEM="2048m"/JAVA_MAX_MEM="${CONTROLLERMEM}"/g' \${MEMCONF}
cat \${MEMCONF}
echo "Listing all open ports on controller system..."
netstat -natu
+echo "Set Java version"
+sudo /usr/sbin/alternatives --install /usr/bin/java java ${JAVA_HOME}/bin/java 1
+sudo /usr/sbin/alternatives --set java ${JAVA_HOME}/bin/java
+echo "JDK default version ..."
+java -version
+
echo "Set JAVA_HOME"
export JAVA_HOME="$JAVA_HOME"
# Did you know that in HERE documents, single quote is an ordinary character, but backticks are still executing?
JAVA_RESOLVED=\`readlink -e "\${JAVA_HOME}/bin/java"\`
echo "Java binary pointed at by JAVA_HOME: \${JAVA_RESOLVED}"
-echo "JDK default version ..."
-java -version
echo "Starting controller..."
/tmp/${BUNDLEFOLDER}/bin/start
scp ${WORKSPACE}/controller-script.sh ${ODL_SYSTEM_IP}:/tmp
ssh ${ODL_SYSTEM_IP} 'bash /tmp/controller-script.sh'
+if [ ${NUM_OPENSTACK_SYSTEM} -gt 0 ]; then
+ echo "Exiting without running tests to deploy openstack for testing"
+ exit
+fi
+
echo "Locating test plan to use..."
testplan_filepath="${WORKSPACE}/test/csit/testplans/${STREAMTESTPLAN}"
if [ ! -f "${testplan_filepath}" ]; then
echo "Changing the testplan path..."
cat "${testplan_filepath}" | sed "s:integration:${WORKSPACE}:" > testplan.txt
cat testplan.txt
-
SUITES=$( egrep -v '(^[[:space:]]*#|^[[:space:]]*$)' testplan.txt | tr '\012' ' ' )
echo "Starting Robot test suites ${SUITES} ..."
rm -rf *
echo "Downloading the distribution..."
-wget --no-verbose ${ACTUALBUNDLEURL}
+wget --progress=dot:mega ${ACTUALBUNDLEURL}
echo "Extracting the new controller..."
unzip -q ${BUNDLE}
+++ /dev/null
-echo "#################################################"
-echo "## Install Distribution in Cluster ##"
-echo "#################################################"
-
-CONTROLLERMEM="2048m"
-
-if [ ${CONTROLLERSCOPE} == 'all' ]; then
- ACTUALFEATURES="odl-integration-compatible-with-all,${CONTROLLERFEATURES}"
- CONTROLLERMEM="3072m"
-else
- ACTUALFEATURES="${CONTROLLERFEATURES}"
-fi
-# Some versions of jenkins job builder result in feature list containing spaces
-# and ending in newline. Remove all that.
-ACTUALFEATURES=`echo "${ACTUALFEATURES}" | tr -d '\n \r'`
-
-cat > ${WORKSPACE}/deploy-controller-script.sh <<EOF
-
-echo "Changing to /tmp"
-cd /tmp
-
-echo "Downloading the distribution from ${ACTUALBUNDLEURL}"
-wget --no-verbose '${ACTUALBUNDLEURL}'
-
-echo "Extracting the new controller..."
-unzip -q ${BUNDLE}
-
-echo "Configuring the startup features..."
-FEATURESCONF=/tmp/${BUNDLEFOLDER}/etc/org.apache.karaf.features.cfg
-sed -ie "s/featuresBoot=.*/featuresBoot=config,standard,region,package,kar,ssh,management,${ACTUALFEATURES}/g" \${FEATURESCONF}
-sed -ie "s%mvn:org.opendaylight.integration/features-integration-index/${BUNDLEVERSION}/xml/features%mvn:org.opendaylight.integration/features-integration-index/${BUNDLEVERSION}/xml/features,mvn:org.opendaylight.integration/features-integration-test/${BUNDLEVERSION}/xml/features%g" \${FEATURESCONF}
-cat \${FEATURESCONF}
-
-echo "Configuring the log..."
-LOGCONF=/tmp/${BUNDLEFOLDER}/etc/org.ops4j.pax.logging.cfg
-sed -ie 's/log4j.appender.out.maxBackupIndex=10/log4j.appender.out.maxBackupIndex=1/g' \${LOGCONF}
-# FIXME: Make log size limit configurable from build parameter.
-sed -ie 's/log4j.appender.out.maxFileSize=1MB/log4j.appender.out.maxFileSize=100GB/g' \${LOGCONF}
-cat \${LOGCONF}
-
-echo "Configure max memory..."
-MEMCONF=/tmp/${BUNDLEFOLDER}/bin/setenv
-sed -ie 's/JAVA_MAX_MEM="2048m"/JAVA_MAX_MEM="${CONTROLLERMEM}"/g' \${MEMCONF}
-cat \${MEMCONF}
-
-EOF
-
-for i in `seq 1 ${NUM_ODL_SYSTEM}`
-do
- CONTROLLERIP=ODL_SYSTEM_${i}_IP
- echo "Installing distribution in member-${i} with IP address ${!CONTROLLERIP}"
- scp ${WORKSPACE}/deploy-controller-script.sh ${!CONTROLLERIP}:/tmp
- ssh ${!CONTROLLERIP} 'bash /tmp/deploy-controller-script.sh'
-done
-
-# vim: ts=4 sw=4 sts=4 et ft=sh :
echo "Starting Robot test suites ${SUITES} ..."
pybot -e exclude \
--v WORKSPACE:/tmp -v USER_HOME:$HOME -L TRACE \
+-v WORKSPACE:$WORKSPACE -v USER_HOME:$HOME -L TRACE \
-v DEVSTACK_SYSTEM_USER:$USER \
--v DEVSTACK_SYSTEM_IP:$ODL_SYSTEM_IP \
+-v DEVSTACK_SYSTEM_IP:$OPENSTACK_CONTROL_NODE_IP \
-v DEFAULT_LINUX_PROMPT:\]\> \
-v OPENSTACK_BRANCH:$OPENSTACK_BRANCH \
-v ODL_VERSION:$ODL_VERSION \
-v TEMPEST_REGEX:$TEMPEST_REGEX ${SUITES} || true
-scp $ODL_SYSTEM_IP:/opt/stack/logs/devstacklog.txt $WORKSPACE/
-scp -r $ODL_SYSTEM_IP:/opt/stack/logs/*karaf* $WORKSPACE/
+scp $OPENSTACK_CONTROL_NODE_IP:/opt/stack/logs/devstacklog.txt $WORKSPACE/
+scp -r $OPENSTACK_CONTROL_NODE_IP:/opt/stack/logs/*karaf* $WORKSPACE/
# vim: ts=4 sw=4 sts=4 et ft=sh :
--- /dev/null
+#@IgnoreInspection BashAddShebang
+# Activate robotframework virtualenv
+# ${ROBOT_VENV} comes from the include-raw-integration-install-robotframework.sh
+# script.
+source ${ROBOT_VENV}/bin/activate
+
+echo "#################################################"
+echo "## Deploy Openstack 3-node ##"
+echo "#################################################"
+
+function create_control_node_local_conf {
+local_conf_file_name=${WORKSPACE}/local.conf_control
+cat > ${local_conf_file_name} << EOF
+[[local|localrc]]
+LOGFILE=stack.sh.log
+SCREEN_LOGDIR=/opt/stack/data/log
+LOG_COLOR=False
+RECLONE=yes
+
+disable_service swift
+disable_service cinder
+disable_service n-net
+disable_service q-vpn
+enable_service q-svc
+enable_service q-dhcp
+enable_service q-meta
+enable_service tempest
+enable_service n-novnc
+enable_service n-cauth
+
+HOST_IP=$OPENSTACK_CONTROL_NODE_IP
+SERVICE_HOST=\$HOST_IP
+
+NEUTRON_CREATE_INITIAL_NETWORKS=False
+Q_PLUGIN=ml2
+Q_ML2_TENANT_NETWORK_TYPE=vxlan
+
+ENABLE_TENANT_TUNNELS=True
+
+
+MYSQL_HOST=\$SERVICE_HOST
+RABBIT_HOST=\$SERVICE_HOST
+GLANCE_HOSTPORT=\$SERVICE_HOST:9292
+KEYSTONE_AUTH_HOST=\$SERVICE_HOST
+KEYSTONE_SERVICE_HOST=\$SERVICE_HOST
+
+MYSQL_PASSWORD=mysql
+RABBIT_PASSWORD=rabbit
+SERVICE_TOKEN=service
+SERVICE_PASSWORD=admin
+ADMIN_PASSWORD=admin
+
+enable_plugin networking-odl ${ODL_ML2_DRIVER_REPO} ${ODL_ML2_BRANCH}
+
+ODL_PORT=8080
+ODL_MODE=externalodl
+
+EOF
+
+if [ "${NUM_ODL_SYSTEM}" -gt 1 ]; then
+odl_list=${ODL_SYSTEM_1_IP}
+for i in `seq 2 ${NUM_ODL_SYSTEM}`
+do
+odlip=ODL_SYSTEM_${i}_IP
+odl_list=${odl_list},${!odlip}
+done
+if [ "${ENABLE_HAPROXY_FOR_NEUTRON}" == "yes" ]; then
+HA_PROXY_INDEX=${NUM_OPENSTACK_SYSTEM}
+odlmgrip=OPENSTACK_COMPUTE_NODE_${HA_PROXY_INDEX}_IP
+odl_mgr_ip=${!odlmgrip}
+else
+odl_mgr_ip=${ODL_SYSTEM_1_IP}
+fi
+cat >> ${local_conf_file_name} << EOF
+ODL_OVS_MANAGERS=${odl_list}
+ODL_MGR_IP=${odl_mgr_ip}
+EOF
+else
+cat >> ${local_conf_file_name} << EOF
+ODL_MGR_IP=${ODL_SYSTEM_1_IP}
+EOF
+fi
+
+if [ "${ODL_ENABLE_L3_FWD}" == "yes" ]; then
+cat >> ${local_conf_file_name} << EOF
+
+ODL_PROVIDER_MAPPINGS=br-ex:br100
+
+disable_service q-l3
+Q_L3_ENABLED=True
+ODL_L3=True
+PUBLIC_INTERFACE=br100
+[[post-config|\$NEUTRON_CONF]]
+[DEFAULT]
+service_plugins = networking_odl.l3.l3_odl.OpenDaylightL3RouterPlugin
+
+EOF
+fi
+cat >> ${local_conf_file_name} << EOF
+[[post-config|/etc/neutron/plugins/ml2/ml2_conf.ini]]
+[agent]
+minimize_polling=True
+
+[[post-config|/etc/neutron/dhcp_agent.ini]]
+[DEFAULT]
+force_metadata = True
+enable_isolated_metadata = True
+
+[[post-config|/etc/nova/nova.conf]]
+[DEFAULT]
+force_config_drive = False
+
+EOF
+
+echo "local.conf Created...."
+cat ${local_conf_file_name}
+}
+
+function create_compute_node_local_conf {
+HOSTIP=$1
+local_conf_file_name=${WORKSPACE}/local.conf_compute_${HOSTIP}
+cat > ${local_conf_file_name} << EOF
+[[local|localrc]]
+LOGFILE=stack.sh.log
+LOG_COLOR=False
+SCREEN_LOGDIR=/opt/stack/data/log
+RECLONE=yes
+
+NOVA_VNC_ENABLED=True
+MULTI_HOST=1
+ENABLED_SERVICES=n-cpu
+
+HOST_IP=${HOSTIP}
+SERVICE_HOST=${OPENSTACK_CONTROL_NODE_IP}
+
+Q_PLUGIN=ml2
+ENABLE_TENANT_TUNNELS=True
+Q_ML2_TENANT_NETWORK_TYPE=vxlan
+
+Q_HOST=\$SERVICE_HOST
+MYSQL_HOST=\$SERVICE_HOST
+RABBIT_HOST=\$SERVICE_HOST
+GLANCE_HOSTPORT=\$SERVICE_HOST:9292
+KEYSTONE_AUTH_HOST=\$SERVICE_HOST
+KEYSTONE_SERVICE_HOST=\$SERVICE_HOST
+
+MYSQL_PASSWORD=mysql
+RABBIT_PASSWORD=rabbit
+SERVICE_TOKEN=service
+SERVICE_PASSWORD=admin
+ADMIN_PASSWORD=admin
+
+enable_plugin networking-odl ${ODL_ML2_DRIVER_REPO} ${ODL_ML2_BRANCH}
+ODL_MODE=compute
+
+EOF
+
+if [ "${NUM_ODL_SYSTEM}" -gt 1 ]; then
+odl_list=${ODL_SYSTEM_1_IP}
+for i in `seq 2 ${NUM_ODL_SYSTEM}`
+do
+odlip=ODL_SYSTEM_${i}_IP
+odl_list=${odl_list},${!odlip}
+done
+if [ "${ENABLE_HAPROXY_FOR_NEUTRON}" == "yes" ]; then
+HA_PROXY_INDEX=${NUM_OPENSTACK_SYSTEM}
+odlmgrip=OPENSTACK_COMPUTE_NODE_${HA_PROXY_INDEX}_IP
+odl_mgr_ip=${!odlmgrip}
+else
+odl_mgr_ip=${ODL_SYSTEM_1_IP}
+fi
+cat >> ${local_conf_file_name} << EOF
+ODL_OVS_MANAGERS=${odl_list}
+ODL_MGR_IP=${odl_mgr_ip}
+EOF
+else
+cat >> ${local_conf_file_name} << EOF
+ODL_MGR_IP=${ODL_SYSTEM_1_IP}
+EOF
+fi
+
+if [ "${ODL_ENABLE_L3_FWD}" == "yes" ]; then
+cat >> ${local_conf_file_name} << EOF
+# Uncomment lines below if odl-compute is to be used for l3 forwarding
+Q_L3_ENABLED=True
+ODL_L3=True
+PUBLIC_INTERFACE=br100
+EOF
+fi
+echo "local.conf Created...."
+cat ${local_conf_file_name}
+}
+
+function configure_haproxy_for_neutron_requests () {
+HA_PROXY_INDEX=${NUM_OPENSTACK_SYSTEM}
+odlmgrip=OPENSTACK_COMPUTE_NODE_${HA_PROXY_INDEX}_IP
+ha_proxy_ip=${!odlmgrip}
+
+cat > ${WORKSPACE}/install_ha_proxy.sh<< EOF
+sudo systemctl stop firewalld
+sudo yum -y install policycoreutils-python haproxy
+EOF
+
+cat > ${WORKSPACE}/haproxy.cfg << EOF
+global
+ daemon
+ group haproxy
+ log /dev/log local0
+ maxconn 20480
+ pidfile /tmp/haproxy.pid
+ user haproxy
+
+defaults
+ log global
+ maxconn 4096
+ mode tcp
+ retries 3
+ timeout http-request 10s
+ timeout queue 1m
+ timeout connect 10s
+ timeout client 1m
+ timeout server 1m
+ timeout check 10s
+
+listen opendaylight
+ bind ${ha_proxy_ip}:8080
+ balance source
+EOF
+
+for i in `seq 1 ${NUM_ODL_SYSTEM}`
+do
+odlip=ODL_SYSTEM_${i}_IP
+cat >> ${WORKSPACE}/haproxy.cfg << EOF
+ server controller-$i ${!odlip}:8080 check fall 5 inter 2000 rise 2
+EOF
+done
+
+cat > ${WORKSPACE}/deploy_ha_proxy.sh<< EOF
+sudo chown haproxy:haproxy /tmp/haproxy.cfg
+sudo sed -i 's/\\/etc\\/haproxy\\/haproxy.cfg/\\/tmp\\/haproxy.cfg/g' /usr/lib/systemd/system/haproxy.service
+sudo /usr/sbin/semanage permissive -a haproxy_t
+sudo systemctl restart haproxy
+sleep 3
+sudo netstat -tunpl
+sudo systemctl status haproxy
+true
+EOF
+scp ${WORKSPACE}/install_ha_proxy.sh ${ha_proxy_ip}:/tmp
+ssh ${ha_proxy_ip} "sudo bash /tmp/install_ha_proxy.sh"
+scp ${WORKSPACE}/haproxy.cfg ${ha_proxy_ip}:/tmp
+scp ${WORKSPACE}/deploy_ha_proxy.sh ${ha_proxy_ip}:/tmp
+ssh ${ha_proxy_ip} "sudo bash /tmp/deploy_ha_proxy.sh"
+}
+
+function collect_logs_and_exit (){
+set +e # We do not want to create red dot just because something went wrong while fetching logs.
+for i in `seq 1 ${NUM_ODL_SYSTEM}`
+do
+ CONTROLLERIP=ODL_SYSTEM_${i}_IP
+ echo "dumping first 500K bytes of karaf log..." > "odl${i}_karaf.log"
+ ssh "${!CONTROLLERIP}" head --bytes=500K "/tmp/${BUNDLEFOLDER}/data/log/karaf.log" >> "odl${i}_karaf.log"
+ echo "dumping last 500K bytes of karaf log..." >> "odl${i}_karaf.log"
+ ssh "${!CONTROLLERIP}" tail --bytes=500K "/tmp/${BUNDLEFOLDER}/data/log/karaf.log" >> "odl${i}_karaf.log"
+ echo "killing karaf process..."
+ ssh "${!CONTROLLERIP}" bash -c 'ps axf | grep karaf | grep -v grep | awk '"'"'{print "kill -9 " $1}'"'"' | sh'
+done
+sleep 5
+for i in `seq 1 ${NUM_ODL_SYSTEM}`
+do
+ CONTROLLERIP=ODL_SYSTEM_${i}_IP
+ ssh "${!CONTROLLERIP}" xz -9ekvv "/tmp/${BUNDLEFOLDER}/data/log/karaf.log"
+ scp "${!CONTROLLERIP}:/tmp/${BUNDLEFOLDER}/data/log/karaf.log.xz" "odl${i}_karaf.log.xz"
+done
+
+ssh ${OPENSTACK_CONTROL_NODE_IP} "xz -9ekvv /opt/stack/devstack/nohup.out"
+scp ${OPENSTACK_CONTROL_NODE_IP}:/opt/stack/devstack/nohup.out.xz "openstack_control_stack.log.xz"
+for i in `seq 1 $((NUM_OPENSTACK_SYSTEM - 1))`
+do
+ OSIP=OPENSTACK_COMPUTE_NODE_${i}_IP
+ scp "${!OSIP}:/opt/stack/devstack/nohup.out" "openstack_compute_stack_${i}.log"
+done
+}
+
+cat > ${WORKSPACE}/get_devstack.sh << EOF
+sudo systemctl stop firewalld
+sudo yum install bridge-utils -y
+#Workaround for mysql failure
+echo "127.0.0.1 localhost \${HOSTNAME}" > /tmp/hosts
+echo "::1 localhost \${HOSTNAME}" >> /tmp/hosts
+sudo mv /tmp/hosts /etc/hosts
+sudo /usr/sbin/brctl addbr br100
+sudo mkdir /opt/stack
+sudo chmod 777 /opt/stack
+cd /opt/stack
+git clone https://git.openstack.org/openstack-dev/devstack
+cd devstack
+git checkout $OPENSTACK_BRANCH
+EOF
+
+echo "Create HAProxy if needed"
+if [ "${ENABLE_HAPROXY_FOR_NEUTRON}" == "yes" ]; then
+ echo "Need to configure HAProxy"
+ configure_haproxy_for_neutron_requests
+fi
+
+os_node_list=()
+echo "Stack the Control Node"
+scp ${WORKSPACE}/get_devstack.sh ${OPENSTACK_CONTROL_NODE_IP}:/tmp
+ssh ${OPENSTACK_CONTROL_NODE_IP} "bash /tmp/get_devstack.sh"
+create_control_node_local_conf
+scp ${WORKSPACE}/local.conf_control ${OPENSTACK_CONTROL_NODE_IP}:/opt/stack/devstack/local.conf
+ssh ${OPENSTACK_CONTROL_NODE_IP} "cd /opt/stack/devstack; nohup ./stack.sh > /opt/stack/devstack/nohup.out 2>&1 &"
+ssh ${OPENSTACK_CONTROL_NODE_IP} "ps -ef | grep stack.sh"
+ssh ${OPENSTACK_CONTROL_NODE_IP} "ls -lrt /opt/stack/devstack/nohup.out"
+os_node_list+=(${OPENSTACK_CONTROL_NODE_IP})
+
+
+for i in `seq 1 $((NUM_OPENSTACK_SYSTEM - 1))`
+do
+ COMPUTEIP=OPENSTACK_COMPUTE_NODE_${i}_IP
+ scp ${WORKSPACE}/get_devstack.sh ${!COMPUTEIP}:/tmp
+ ssh ${!COMPUTEIP} "bash /tmp/get_devstack.sh"
+ create_compute_node_local_conf ${!COMPUTEIP}
+ scp ${WORKSPACE}/local.conf_compute_${!COMPUTEIP} ${!COMPUTEIP}:/opt/stack/devstack/local.conf
+ ssh ${!COMPUTEIP} "cd /opt/stack/devstack; nohup ./stack.sh > /opt/stack/devstack/nohup.out 2>&1 &"
+ ssh ${!COMPUTEIP} "ps -ef | grep stack.sh"
+ os_node_list+=(${!COMPUTEIP})
+done
+
+cat > ${WORKSPACE}/check_stacking.sh << EOF
+> /tmp/stack_progress
+ps -ef | grep "stack.sh" | grep -v grep
+ret=\$?
+if [ \${ret} -eq 1 ]; then
+ grep "This is your host IP address:" /opt/stack/devstack/nohup.out
+ if [ \$? -eq 0 ]; then
+ echo "Stacking Complete" > /tmp/stack_progress
+ else
+ echo "Stacking Failed" > /tmp/stack_progress
+ fi
+elif [ \${ret} -eq 0 ]; then
+ echo "Still Stacking" > /tmp/stack_progress
+fi
+EOF
+
+#the checking is repeated for an hour
+iteration=0
+in_progress=1
+while [ ${in_progress} -eq 1 ]; do
+iteration=$(($iteration + 1))
+for index in ${!os_node_list[@]}
+do
+echo "Check the status of stacking in ${os_node_list[index]}"
+scp ${WORKSPACE}/check_stacking.sh ${os_node_list[index]}:/tmp
+ssh ${os_node_list[index]} "bash /tmp/check_stacking.sh"
+scp ${os_node_list[index]}:/tmp/stack_progress .
+#debug
+cat stack_progress
+stacking_status=`cat stack_progress`
+if [ "$stacking_status" == "Still Stacking" ]; then
+ continue
+elif [ "$stacking_status" == "Stacking Failed" ]; then
+ collect_logs_and_exit
+ exit 1
+elif [ "$stacking_status" == "Stacking Complete" ]; then
+ unset os_node_list[index]
+ if [ ${#os_node_list[@]} -eq 0 ]; then
+ in_progress=0
+ fi
+fi
+done
+ echo "sleep for a minute before the next check"
+ sleep 60
+ if [ ${iteration} -eq 60 ]; then
+ collect_logs_and_exit
+ exit 1
+ fi
+done
+
+#Need to disable firewalld and iptables in control node
+echo "Stop Firewall in Control Node for compute nodes to be able to reach the ports and add to hypervisor-list"
+ssh ${OPENSTACK_CONTROL_NODE_IP} "sudo systemctl stop firewalld; sudo systemctl stop iptables"
+echo "sleep for a minute and print hypervisor-list"
+sleep 60
+ssh ${OPENSTACK_CONTROL_NODE_IP} "cd /opt/stack/devstack; source openrc admin admin; nova hypervisor-list"
+
+echo "Locating test plan to use..."
+testplan_filepath="${WORKSPACE}/test/csit/testplans/${STREAMTESTPLAN}"
+if [ ! -f "${testplan_filepath}" ]; then
+ testplan_filepath="${WORKSPACE}/test/csit/testplans/${TESTPLAN}"
+fi
+
+echo "Changing the testplan path..."
+cat "${testplan_filepath}" | sed "s:integration:${WORKSPACE}:" > testplan.txt
+cat testplan.txt
+
+SUITES=`egrep -v '(^[[:space:]]*#|^[[:space:]]*$)' testplan.txt | tr '\012' ' '`
+
+echo "Starting Robot test suites ${SUITES} ..."
+pybot -N ${TESTPLAN} -c critical -e exclude -v BUNDLEFOLDER:${BUNDLEFOLDER} -v WORKSPACE:/tmp -v BUNDLE_URL:${ACTUALBUNDLEURL} \
+-v NEXUSURL_PREFIX:${NEXUSURL_PREFIX} -v JDKVERSION:${JDKVERSION} -v ODL_STREAM:${DISTROSTREAM} \
+-v ODL_SYSTEM_IP:${ODL_SYSTEM_IP} -v ODL_SYSTEM_1_IP:${ODL_SYSTEM_1_IP} -v ODL_SYSTEM_2_IP:${ODL_SYSTEM_2_IP} \
+-v ODL_SYSTEM_3_IP:${ODL_SYSTEM_3_IP} -v NUM_ODL_SYSTEM:${NUM_ODL_SYSTEM} -v CONTROLLER_USER:${USER} -v OS_USER:${USER} \
+-v NUM_OS_SYSTEM:${NUM_OPENSTACK_SYSTEM} -v OS_CONTROL_NODE_IP:${OPENSTACK_CONTROL_NODE_IP} \
+-v DEVSTACK_DEPLOY_PATH:/opt/stack/devstack -v USER_HOME:${HOME} ${TESTOPTIONS} ${SUITES} || true
+
+echo "Tests Executed"
+collect_logs_and_exit
+
+true # perhaps Jenkins is testing last exit code
+# vim: ts=4 sw=4 sts=4 et ft=sh :
ODL_SYSTEM=()
TOOLS_SYSTEM=()
+OPENSTACK_SYSTEM=()
+
+echo "JCLOUDS IPS are ${JCLOUDS_IPS}"
IFS=',' read -ra ADDR <<< "${JCLOUDS_IPS}"
for i in "${ADDR[@]}"
do
REMHOST=`ssh ${i} hostname`
- if [ `echo ${REMHOST} | grep 'java\|devstack'` ]; then
- ODL_SYSTEM=( "${ODL_SYSTEM[@]}" "${i}" )
- else
- TOOLS_SYSTEM=( "${TOOLS_SYSTEM[@]}" "${i}" )
- fi
+ case ${REMHOST} in
+ *java*)
+ ODL_SYSTEM=( "${ODL_SYSTEM[@]}" "${i}" )
+ ;;
+ *devstack*)
+ OPENSTACK_SYSTEM=( "${OPENSTACK_SYSTEM[@]}" "${i}" )
+ ;;
+ *)
+ TOOLS_SYSTEM=( "${TOOLS_SYSTEM[@]}" "${i}" )
+ ;;
+ esac
done
echo "NUM_ODL_SYSTEM=${#ODL_SYSTEM[@]}" >> slave_addresses.txt
echo "NUM_TOOLS_SYSTEM=${#TOOLS_SYSTEM[@]}" >> slave_addresses.txt
+#if HA Proxy is requested the last devstack node will be configured as haproxy
+if [ "${ENABLE_HAPROXY_FOR_NEUTRON}" == "yes" ]; then
+ echo "NUM_OPENSTACK_SYSTEM=$(( ${#OPENSTACK_SYSTEM[@]} - 1 ))" >> slave_addresses.txt
+else
+ echo "NUM_OPENSTACK_SYSTEM=${#OPENSTACK_SYSTEM[@]}" >> slave_addresses.txt
+fi
# Add alias for ODL_SYSTEM_1_IP as ODL_SYSTEM_IP
echo "ODL_SYSTEM_IP=${ODL_SYSTEM[0]}" >> slave_addresses.txt
echo "TOOLS_SYSTEM_$((i+1))_IP=${TOOLS_SYSTEM[${i}]}" >> slave_addresses.txt
done
+echo "OPENSTACK_CONTROL_NODE_IP=${OPENSTACK_SYSTEM[0]}" >> slave_addresses.txt
+for i in `seq 1 $(( ${#OPENSTACK_SYSTEM[@]} - 1 ))`
+do
+ echo "OPENSTACK_COMPUTE_NODE_$((i))_IP=${OPENSTACK_SYSTEM[${i}]}" >> slave_addresses.txt
+done
# vim: sw=4 ts=4 sts=4 et ft=sh :
+
virtualenv ${ROBOT_VENV}
source ${ROBOT_VENV}/bin/activate
+
+set -exu
+
pip install -q --upgrade pip
-pip --version
+
+# The most recent version of paramiko currently fails to install.
+pip install -q --upgrade paramiko==1.16.0
+
pip install -q docker-py importlib requests scapy netifaces netaddr ipaddr
pip install -q robotframework{,-{httplibrary,requests,sshlibrary,selenium2library}}
ssh ${!CONTROLLERIP} "bash /tmp/verify-cluster-is-up.sh ${i} ${!CONTROLLERIP}"
done
+if [ ${NUM_OPENSTACK_SYSTEM} -gt 0 ]; then
+ echo "Exiting without running tests to deploy openstack for testing"
+ exit
+fi
+
if [ ${CONTROLLERSCOPE} == 'all' ]; then
COOLDOWN_PERIOD="180"
else
parameters:
- project-parameter:
project: 'integration/distribution'
+ - gerrit-parameter:
+ branch: '{branch}'
scm:
- gerrit-trigger-scm:
count: '{mininet-vms}'
stop-on-terminate: False
+# Macro: integration-jclouds-controller-devstack
+# Operation: this macro will spin the controller and devstack vms
+# Used by: {project}-csit-openstack job templates
+- wrapper:
+ name: integration-jclouds-controller-devstack
+ wrappers:
+ - jclouds:
+ instances:
+ - rk-c7-java:
+ cloud-name: 'Rackspace DFW - Integration Dynamic Lab'
+ count: '{controller-vms}'
+ stop-on-terminate: False
+ - rk-c7-devstack:
+ cloud-name: 'Rackspace DFW - Devstack'
+ count: '{openstack-vms}'
+ stop-on-terminate: False
+
#################
# Shell Scripts #
#################
!include-raw:
- include-raw-integration-deploy-devstack-testing.sh
-# Macro: integration-deploy-controller
-# Operation: this macro prepares 3-node cluster controller
-# Used by: {project}-csit-3node-* job templates
-
-- builder:
- name: integration-deploy-controller
- builders:
- - shell:
- !include-raw:
- - include-raw-integration-deploy-controller.sh
-
# Macro: integration-configure-clustering
# Operation: this macro configures the clustering
# Used by: {project}-csit-3node-* job templates
!include-raw:
- include-raw-integration-rebase-gerrit-patch.sh
+- builder:
+ name: integration-deploy-openstack-run-test
+ builders:
+ - shell:
+ !include-raw:
+ - include-raw-integration-deploy-openstack-run-test.sh
##############
# Publishers #
##############
- integration-controller-scope:
controller-scope: 'only'
- integration-controller-features:
- controller-features: '{install-features}'
+ controller-features: 'odl-jolokia,{install-features}'
- integration-test-options:
test-options: '{robot-options}'
- integration-stream-test-plan:
- integration-get-bundle-vars
- inject:
properties-file: 'bundle_vars.txt'
- - integration-deploy-controller
- integration-configure-clustering
- integration-start-cluster-run-test
- integration-cleanup-tmp
- integration-controller-scope:
controller-scope: '{scope}'
- integration-controller-features:
- controller-features: '{install-features}'
+ controller-features: 'odl-jolokia,{install-features}'
- integration-test-options:
test-options: '{robot-options}'
- integration-stream-test-plan:
- integration-get-bundle-vars
- inject:
properties-file: 'bundle_vars.txt'
- - integration-deploy-controller
- integration-configure-clustering
- integration-start-cluster-run-test
- integration-cleanup-tmp
- integration-controller-scope:
controller-scope: '{scope}'
- integration-controller-features:
- controller-features: '{install-features}'
+ controller-features: 'odl-jolokia,{install-features}'
- integration-test-options:
test-options: '{robot-options}'
- integration-stream-test-plan:
- integration-get-bundle-vars
- inject:
properties-file: 'bundle_vars.txt'
- - integration-deploy-controller
- integration-configure-clustering
- integration-start-cluster-run-test
- integration-cleanup-tmp
unstable-if: 0.0
pass-if: 100.0
- archive:
- artifacts: 'devstacklog.txt,*karaf*'
+ artifacts: '*log*'
+ - email-notification:
+ email-prefix: '[{project}]'
+
+- job-template:
+ name: '{project}-csit-1node-openstack-{openstack}-{functionality}-{stream}'
+ disabled: false
+
+ project-type: freestyle
+ node: dynamic_robot
+
+ logrotate:
+ daysToKeep: '{build-days-to-keep}'
+ numToKeep: '{build-num-to-keep}'
+ artifactDaysToKeep: '{build-artifact-days-to-keep}'
+ artifactNumToKeep: '{build-num-to-keep}'
+
+ parameters:
+ - project-parameter:
+ project: 'integration/test'
+ - integration-branch:
+ branch: '{branch}'
+ - integration-distribution-stream:
+ stream: '{stream}'
+ - integration-bundleurl:
+ bundleurl: 'last'
+ - integration-controller-features:
+ controller-features: '{install-features}'
+ - integration-test-plan:
+ test-plan: '{project}-1node-openstack.txt'
+ - integration-patch-refspec:
+ branch: 'master'
+ - integration-jdk-version:
+ jdkversion: '{jre}'
+ - string:
+ name: OPENSTACK_BRANCH
+ default: '{openstack-branch}'
+ description: 'Openstack version to use with devstack'
+ - string:
+ name: ODL_ML2_DRIVER_REPO
+ default: 'https://github.com/openstack/networking-odl'
+ description: 'URL to fetch networking-odl driver'
+ - string:
+ name: ODL_ML2_VERSION
+ default: '{odl-ml2-version}'
+ description: 'Version of networking-odl to checkout from the repo'
+ - string:
+ name: ODL_ENABLE_L3_FWD
+ default: '{odl-enable-l3}'
+        description: 'Enable L3 FWD in ODL for creating br-ex'
+ scm:
+ - integration-gerrit-scm:
+ credentials-id: '{ssh-credentials}'
+ basedir: 'test'
+ refspec: '$PATCHREFSPEC'
+ branch: 'master'
+
+ wrappers:
+ - build-timeout
+ - integration-jclouds-controller-devstack:
+ controller-vms: 1
+ openstack-vms: '{openstack-vms}'
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - integration-csit-all
+
+ builders:
+ - integration-cleanup-workspace
+ - integration-install-robotframework
+ - inject:
+ properties-file: 'env.properties'
+ - integration-get-slave-addresses
+ - inject:
+ properties-file: 'slave_addresses.txt'
+ - integration-get-bundle-vars
+ - inject:
+ properties-file: 'bundle_vars.txt'
+ - integration-deploy-controller-run-test
+ - integration-deploy-openstack-run-test
+ - integration-cleanup-tmp
+
+ publishers:
+ - integration-robot:
+ unstable-if: 0.0
+ pass-if: 100.0
+ - archive:
+ artifacts: '*log*'
- email-notification:
- email-prefix: '[project]'
+ email-prefix: '[{project}]'
+
+- job-template:
+ name: '{project}-csit-3node-openstack-{openstack}-{functionality}-{stream}'
+ disabled: false
+
+ project-type: freestyle
+ node: dynamic_robot
+
+ logrotate:
+ daysToKeep: '{build-days-to-keep}'
+ numToKeep: '{build-num-to-keep}'
+ artifactDaysToKeep: '{build-artifact-days-to-keep}'
+ artifactNumToKeep: '{build-num-to-keep}'
+
+ parameters:
+ - project-parameter:
+ project: 'integration/test'
+ - integration-branch:
+ branch: '{branch}'
+ - integration-distribution-stream:
+ stream: '{stream}'
+ - integration-bundleurl:
+ bundleurl: 'last'
+ - integration-controller-features:
+ controller-features: 'odl-jolokia,{install-features}'
+ - integration-test-plan:
+ test-plan: '{project}-3node-openstack.txt'
+ - integration-patch-refspec:
+ branch: 'master'
+ - integration-jdk-version:
+ jdkversion: '{jre}'
+ - string:
+ name: OPENSTACK_BRANCH
+ default: '{openstack-branch}'
+ description: 'Openstack version to use with devstack'
+ - string:
+ name: ODL_ML2_DRIVER_REPO
+ default: 'https://github.com/openstack/networking-odl'
+ description: 'URL to fetch networking-odl driver'
+ - string:
+ name: ODL_ML2_VERSION
+ default: '{odl-ml2-version}'
+ description: 'Version of networking-odl to checkout from the repo'
+ - string:
+ name: ODL_ENABLE_L3_FWD
+ default: '{odl-enable-l3}'
+        description: 'Enable L3 FWD in ODL for creating br-ex'
+ - string:
+ name: ENABLE_HAPROXY_FOR_NEUTRON
+ default: '{enable-haproxy}'
+ description: 'Enable HAProxy for using neutron interface as HA'
+ scm:
+ - integration-gerrit-scm:
+ credentials-id: '{ssh-credentials}'
+ basedir: 'test'
+ refspec: '$PATCHREFSPEC'
+ branch: 'master'
+
+ wrappers:
+ - build-timeout
+ - integration-jclouds-controller-devstack:
+ controller-vms: 3
+ openstack-vms: '{openstack-vms}'
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - integration-csit-all
+
+ builders:
+ - integration-cleanup-workspace
+ - integration-install-robotframework
+ - inject:
+ properties-file: 'env.properties'
+ - integration-get-slave-addresses
+ - inject:
+ properties-file: 'slave_addresses.txt'
+ - integration-get-bundle-vars
+ - inject:
+ properties-file: 'bundle_vars.txt'
+ - integration-configure-clustering
+ - integration-start-cluster-run-test
+ - integration-deploy-openstack-run-test
+ - integration-cleanup-tmp
+
+ publishers:
+ - integration-robot:
+ unstable-if: 0.0
+ pass-if: 100.0
+ - archive:
+ artifacts: '*log*'
+ - email-notification:
+ email-prefix: '[{project}]'
sxp-csit-1node-topology-all-{stream},
topoprocessing-csit-1node-topology-operations-all-{stream},
topoprocessing-csit-1node-topology-operations-only-{stream},
+ tsdr-csit-1node-cassandra-datastore-only-{stream},
tsdr-csit-1node-hbase-datastore-only-{stream},
+ tsdr-csit-1node-hsqldb-datastore-only-{stream},
usc-csit-1node-channel-only-{stream},
usc-csit-1node-channel-all-{stream},
vpnservice-csit-1node-vpnservice-only-{stream},
sxp-csit-1node-topology-all-{stream},
topoprocessing-csit-1node-topology-operations-all-{stream},
topoprocessing-csit-1node-topology-operations-only-{stream},
+ tsdr-csit-1node-cassandra-datastore-only-{stream},
tsdr-csit-1node-hbase-datastore-only-{stream},
+ tsdr-csit-1node-hsqldb-datastore-only-{stream},
usc-csit-1node-channel-only-{stream},
usc-csit-1node-channel-all-{stream},
vpnservice-csit-1node-vpnservice-only-{stream},
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
--- /dev/null
+STREAMS:
+- boron:
+ branch: master
+ jdks: openjdk8
+ autorelease: true
+DEPENDENCIES: controller,yangtools,odlparent,openflowplugin
--- /dev/null
+# REMOVE THIS LINE IF YOU WANT TO CUSTOMIZE ANYTHING BELOW
+# Otherwise this file will be automatically overwritten by the template
+# autobuilder.
+
+# ODL Releng build templates
+- project:
+ name: natapp
+ jobs:
+ - 'natapp-verify-{stream}'
+ - 'natapp-merge-{stream}'
+ - 'natapp-periodic-{stream}'
+ - 'natapp-distribution-{stream}'
+ - 'natapp-distribution-check-{stream}'
+ - 'natapp-integration-{stream}'
+ - 'natapp-sonar'
+ - 'natapp-clm-{stream}'
+ - 'natapp-validate-autorelease-{stream}'
+
+
+ # stream: release stream (eg. stable-lithium or beryllium)
+ # branch: git branch (eg. stable/lithium or master)
+ stream:
+ - boron:
+ branch: 'master'
+ jdk: openjdk8
+ jdks:
+ - openjdk8
+ disable_autorelease: False
+ disable_distribution_check: False
+
+ project: 'natapp'
+
+# For the Job templates below replace instances of:
+# PROJECT_SHORTNAME with your project name (eg. circuitsw)
+# PROJECT_PATH with your project name (eg. ofextensions/circuitsw)
+# MAVEN_GOALS with your maven goals to build
+# MAVEN_OPTS with your maven options to build
+
+- job-template:
+ name: 'natapp-verify-{stream}'
+
+ # Job template for ODL verify jobs
+ #
+ # The purpose of this job template is to setup a ODL verify job
+ #
+ # Required Variables:
+ # stream: release stream (eg. stable-lithium or beryllium)
+ # branch: git branch (eg. stable/lithium or master)
+
+ project-type: matrix
+ node: matrix_master
+ concurrent: true
+
+ axes:
+ - axis:
+ type: slave
+ name: nodes
+ values:
+ - dynamic_verify
+ - axis:
+ type: jdk
+ values: '{obj:jdks}'
+
+ logrotate:
+ daysToKeep: '{build-days-to-keep}'
+ numToKeep: '{build-num-to-keep}'
+ artifactDaysToKeep: '{build-artifact-days-to-keep}'
+ artifactNumToKeep: '{build-artifact-num-to-keep}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - gerrit-parameter:
+ branch: '{branch}'
+ - gerrit-refspec-parameter:
+ refspec: 'refs/heads/{branch}'
+
+ scm:
+ - gerrit-trigger-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: '$GERRIT_REFSPEC'
+ choosing-strategy: 'gerrit'
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - gerrit-trigger-patch-submitted:
+ name: 'natapp'
+ branch: '{branch}'
+
+ builders:
+ - wipe-org-opendaylight-repo
+ - jacoco-nojava-workaround
+ - provide-maven-settings:
+ global-settings-file: '{odl-global-settings}'
+ settings-file: '{natapp-settings}'
+ - maven-target:
+ maven-version: '{mvn33}'
+ pom: 'pom.xml'
+ goals: 'clean install -V -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r -Djenkins -Dstream={stream}'
+ java-opts:
+ - '-Xmx1024m -XX:MaxPermSize=256m'
+ settings: '{natapp-settings}'
+ global-settings: '{odl-global-settings}'
+
+ publishers:
+ - archive-artifacts:
+ artifacts: '**/target/surefire-reports/*-output.txt, '
+ - findbugs
+ - email-notification:
+ email-prefix: '[natapp]'
+ - jacoco-report
+
+- job-template:
+ name: 'natapp-merge-{stream}'
+
+ # Job template for ODL merge jobs
+ #
+ # The purpose of this job template is to setup a ODL merge job
+ # and deploy artifacts to Nexus.
+ #
+ # Required Variables:
+ # stream: release stream (eg. stable-lithium or beryllium)
+ # branch: git branch (eg. stable/lithium or master)
+
+ project-type: maven
+ node: dynamic_merge
+ jdk: '{jdk}'
+
+ logrotate:
+ daysToKeep: '14'
+ numToKeep: '10'
+ artifactDaysToKeep: '{build-artifact-days-to-keep}'
+ artifactNumToKeep: '{build-artifact-num-to-keep}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - gerrit-parameter:
+ branch: '{branch}'
+ - gerrit-refspec-parameter:
+ refspec: 'refs/heads/{branch}'
+
+ scm:
+ - gerrit-trigger-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ choosing-strategy: 'default'
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - timed: 'H H * * 0'
+ - gerrit-trigger-patch-merged:
+ name: 'natapp'
+ branch: '{branch}'
+
+ prebuilders:
+ - wipe-org-opendaylight-repo
+ - jacoco-nojava-workaround
+ - provide-maven-settings:
+ global-settings-file: '{odl-global-settings}'
+ settings-file: '{natapp-settings}'
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'pom.xml'
+ goals: 'clean install -V -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r -Djenkins -Dmerge -Dstream={stream}'
+ maven-opts: '-Xmx1024m -XX:MaxPermSize=256m'
+ settings: '{natapp-settings}'
+ global-settings: '{odl-global-settings}'
+ post-step-run-condition: UNSTABLE
+
+ postbuilders:
+ - conditional-step:
+ condition-kind: file-exists
+ condition-filename: deploy-site.xml
+ condition-basedir: workspace
+
+ # The strategy here is intentional to run Maven site:deploy twice
+ # once using regular pom.xml to produce a staged-site which is
+ # then used by deploy-site.xml to push to Nexus. This is a
+ # workaround to Maven Site's default linking code which creates
+ # incorrect URLs for sites due to auto-detection assuming your
+ # project is configured in a certain way which ODL is not.
+ steps:
+ - maven-target:
+ maven-version: '{mvn33}'
+ pom: pom.xml
+ goals: 'site:deploy -Dstream={stream}'
+ java-opts:
+ - '-Xmx2g'
+ settings: '{natapp-settings}'
+ global-settings: '{odl-global-settings}'
+ - maven-target:
+ maven-version: '{mvn33}'
+ pom: deploy-site.xml
+ goals: 'site:deploy -Dstream={stream}'
+ java-opts:
+ - '-Xmx2g'
+ settings: '{natapp-settings}'
+ global-settings: '{odl-global-settings}'
+
+ reporters:
+ - findbugs
+
+ publishers:
+ - archive-artifacts:
+ artifacts: '**/target/surefire-reports/*-output.txt, '
+ - email-notification:
+ email-prefix: '[natapp]'
+ - maven-deploy:
+ id: ''
+ unique-version: true
+ deploy-unstable: false
+ - jacoco-report
+
+- job-template:
+ name: 'natapp-periodic-{stream}'
+ disabled: false
+
+ # Job template for periodic builders
+ #
+ # The purpose of this job template is to setup a periodic
+ # builder.
+ #
+ # Required Variables:
+ # stream: release stream (eg. stable-lithium or beryllium)
+ # branch: git branch (eg. stable/lithium or master)
+
+ project-type: maven
+ node: dynamic_verify
+ jdk: '{jdk}'
+
+ logrotate:
+ daysToKeep: '14'
+ numToKeep: '10'
+ artifactDaysToKeep: '{build-artifact-days-to-keep}'
+ artifactNumToKeep: '{build-artifact-num-to-keep}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+
+ scm:
+ - git-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ branch: '{branch}'
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - timed: '@daily'
+
+ prebuilders:
+ - wipe-org-opendaylight-repo
+ - jacoco-nojava-workaround
+ - provide-maven-settings:
+ global-settings-file: '{odl-global-settings}'
+ settings-file: '{natapp-settings}'
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'pom.xml'
+ goals: 'clean install -V -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r -Djenkins'
+ maven-opts: '-Xmx1024m -XX:MaxPermSize=256m'
+ settings: '{natapp-settings}'
+ global-settings: '{odl-global-settings}'
+
+ reporters:
+ - findbugs
+
+ publishers:
+ - email-notification:
+ email-prefix: '[natapp]'
+ - jacoco-report
+
+- job-template:
+ name: 'natapp-distribution-{stream}'
+
+ # Required variables:
+ # stream: release stream (eg. stable-lithium or beryllium)
+ # branch: git branch (eg. stable/lithium or master)
+
+ project-type: maven
+ node: dynamic_merge
+ jdk: '{jdk}'
+
+ logrotate:
+ daysToKeep: '{build-days-to-keep}'
+ numToKeep: '{build-num-to-keep}'
+ artifactDaysToKeep: '{build-artifact-days-to-keep}'
+ artifactNumToKeep: '{build-artifact-num-to-keep}'
+
+ parameters:
+ - project-parameter:
+ project: 'integration/distribution'
+
+ scm:
+ - git-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ branch: '{branch}'
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - reverse:
+ jobs: '{project}-merge-{stream}'
+ result: 'success'
+
+ prebuilders:
+ - wipe-org-opendaylight-repo
+ - provide-maven-settings:
+ global-settings-file: '{odl-global-settings}'
+ settings-file: '{natapp-settings}'
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'pom.xml'
+ goals: 'clean install -Djenkins -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r'
+ maven-opts: '-Xmx1024m -XX:MaxPermSize=256m -Dmaven.compile.fork=true'
+ settings: '{integration-settings}'
+ global-settings: '{odl-global-settings}'
+
+ publishers:
+ - email-notification:
+ email-prefix: '[{project}]'
+ - maven-deploy:
+ id: ''
+ unique-version: true
+ deploy-unstable: false
+
+# Template: natapp-distribution-check-{stream}
+# Goal: Build a patch and make sure the distribution can deploy with this change
+# Operation: This job template builds a patch, creates a distribution containing
+# the patch, and triggers the distribution deploy test
+
+- job-template:
+ name: 'natapp-distribution-check-{stream}'
+ disabled: '{obj:disable_distribution_check}'
+
+ project-type: maven
+ node: dynamic_verify
+ concurrent: true
+ jdk: '{jdk}'
+
+ logrotate:
+ daysToKeep: '14'
+ numToKeep: '10'
+ artifactDaysToKeep: '{build-artifact-days-to-keep}'
+ artifactNumToKeep: '{build-artifact-num-to-keep}'
+
+ parameters:
+ - gerrit-project-parameter:
+ project: '{project}'
+ - gerrit-refspec-parameter:
+ refspec: '{branch}'
+ - project-parameter:
+ project: '{project}'
+ - integration-distribution-git-url
+
+ scm:
+ - integration-gerrit-scm:
+ credentials-id: '{ssh-credentials}'
+ basedir: '$GERRIT_PROJECT'
+ refspec: '$GERRIT_REFSPEC'
+ branch: '{branch}'
+ - integration-distribution-scm:
+ credentials-id: '{ssh-credentials}'
+ branch: '{branch}'
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - gerrit:
+ server-name: 'OpenDaylight'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'true'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'true'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'test-distribution'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{name}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ skip-vote:
+ successful: false
+ failed: false
+ unstable: false
+ notbuilt: false
+
+ prebuilders:
+ - wipe-org-opendaylight-repo
+ - maven-target:
+ maven-version: '{mvn33}'
+ pom: '$GERRIT_PROJECT/pom.xml'
+ goals: 'clean install -Djenkins -DskipTests -Dcheckstyle.skip=true -Dmaven.javadoc.skip=true -Dmaven.site.skip=true -DgenerateReports=false -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r -Dstream={stream}'
+ java-opts:
+ - '-Xmx4096m -XX:MaxPermSize=1024m -Dmaven.compile.fork=true'
+ settings: '{natapp-settings}'
+ global-settings: '{odl-global-settings}'
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'distribution/pom.xml'
+ goals: 'clean install -Djenkins -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r'
+ maven-opts: '-Xmx1024m -XX:MaxPermSize=256m -Dmaven.compile.fork=true'
+ settings: '{natapp-settings}'
+ global-settings: '{odl-global-settings}'
+ ignore-upstream-changes: true
+ post-step-run-condition: 'SUCCESS'
+
+ postbuilders:
+ - integration-distribution-check
+
+ publishers:
+ - archive:
+ artifacts: 'karaf.log'
+ - archive:
+ artifacts: 'karaf_console.log'
+ - email-notification:
+ email-prefix: '[natapp]'
+
+- job-template:
+ name: 'natapp-integration-{stream}'
+ disabled: false
+
+ # Job template for ODL integration verify jobs
+ #
+ # This is similar to a normal verify job, but it runs
+ # when a project that's a dependency of your project
+ # is successfully built.
+ #
+ # Required Variables:
+ # stream: release stream (eg. stable-lithium or beryllium)
+ # branch: git branch (eg. stable/lithium or master)
+
+ project-type: maven
+ node: dynamic_merge
+ jdk: '{jdk}'
+
+ logrotate:
+ daysToKeep: '{build-days-to-keep}'
+ numToKeep: '{build-num-to-keep}'
+ artifactDaysToKeep: '{build-artifact-days-to-keep}'
+ artifactNumToKeep: '{build-artifact-num-to-keep}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+
+ scm:
+ - git-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ branch: '{branch}'
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - reverse:
+ jobs: 'controller-merge-{stream},yangtools-merge-{stream},odlparent-merge-{stream},openflowplugin-merge-{stream}'
+ result: 'success'
+
+ prebuilders:
+ - wipe-org-opendaylight-repo
+ - jacoco-nojava-workaround
+ - provide-maven-settings:
+ global-settings-file: '{odl-global-settings}'
+ settings-file: '{natapp-settings}'
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'pom.xml'
+ goals: 'clean install -V -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r -Djenkins'
+ maven-opts: '-Xmx1024m -XX:MaxPermSize=256m'
+ settings: '{natapp-settings}'
+ global-settings: '{odl-global-settings}'
+
+ reporters:
+ - findbugs
+
+ publishers:
+ - email-notification:
+ email-prefix: '[natapp] [controller] [yangtools] [odlparent] [openflowplugin]'
+ - jacoco-report
+
+- job-template:
+ name: 'natapp-sonar'
+ disabled: false
+
+ project-type: maven
+ node: dynamic_verify
+ jdk: 'openjdk8'
+
+ logrotate:
+ daysToKeep: '7'
+ numToKeep: '10'
+ artifactDaysToKeep: '1'
+ artifactNumToKeep: '1'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+
+ scm:
+ - git-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ branch: 'master'
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - timed: 'H H * * 6'
+ - gerrit-trigger-patch-sonar:
+ name: 'natapp'
+
+ prebuilders:
+ - wipe-org-opendaylight-repo
+ - jacoco-nojava-workaround
+ - provide-maven-settings:
+ global-settings-file: '{odl-global-settings}'
+ settings-file: '{natapp-settings}'
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'pom.xml'
+ goals: 'clean install -V -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r -Djenkins -Dsonar'
+ maven-opts: '-Xmx1024m -XX:MaxPermSize=256m'
+ settings: '{natapp-settings}'
+ global-settings: '{odl-global-settings}'
+
+ reporters:
+ - findbugs
+
+ publishers:
+ - sonar:
+ language: 'java'
+ maven-opts: '-Xmx6144m -XX:MaxPermSize=1024m'
+ - email-notification:
+ email-prefix: '[natapp]'
+ - jacoco-report
+
+- job-template:
+ name: 'natapp-clm-{stream}'
+ disabled: false
+
+ project-type: maven
+ node: dynamic_verify
+ jdk: '{jdk}'
+
+ logrotate:
+ daysToKeep: '{build-days-to-keep}'
+ numToKeep: '{build-num-to-keep}'
+ artifactDaysToKeep: '{build-artifact-days-to-keep}'
+ artifactNumToKeep: '{build-artifact-num-to-keep}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+
+ scm:
+ - git-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ branch: '{branch}'
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - timed: 'H H * * 6'
+
+ prebuilders:
+ - wipe-org-opendaylight-repo
+ - provide-maven-settings:
+ global-settings-file: '{odl-global-settings}'
+ settings-file: '{natapp-settings}'
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'pom.xml'
+ goals: 'clean install com.sonatype.clm:clm-maven-plugin:index -Djenkins -DskipTests=true -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r'
+ maven-opts: '-Xmx4096m -XX:MaxPermSize=512m'
+ settings: '{natapp-settings}'
+ global-settings: '{odl-global-settings}'
+
+ postbuilders:
+ - shell: 'find . -regex ".*karaf/target" | xargs rm -rf'
+ - check-clm:
+ application-name: natapp
+
+ publishers:
+ - email-notification:
+ email-prefix: '[natapp]'
+
+- job-template:
+ name: 'natapp-validate-autorelease-{stream}'
+ disabled: '{obj:disable_autorelease}'
+
+ project-type: maven
+ node: dynamic_verify
+ concurrent: true
+ jdk: '{jdk}'
+
+ logrotate:
+ daysToKeep: '{build-days-to-keep}'
+ numToKeep: '{build-num-to-keep}'
+ artifactDaysToKeep: '{build-artifact-days-to-keep}'
+ artifactNumToKeep: '{build-artifact-num-to-keep}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - gerrit-parameter:
+ branch: '{branch}'
+ - autorelease-release-tag:
+ release-tag: 'validate'
+ - autorelease-release-branch:
+ release-branch: '{branch}'
+ - autorelease-release-datestamp:
+ datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
+
+ scm:
+ - git:
+ url: '$CLONE_URL'
+ refspec: ''
+ branches:
+ - 'origin/{branch}'
+ skip-tag: true
+ shallow-clone: true
+ submodule:
+ recursive: true
+ timeout: 15
+
+ wrappers:
+ - autorelease-build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - gerrit:
+ server-name: 'OpenDaylight'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ - comment-added-contains-event:
+ comment-contains-value: 'revalidate'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'natapp'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: ANT
+ pattern: '**/*.xml'
+
+ prebuilders:
+ - wipe-org-opendaylight-repo
+ - jacoco-nojava-workaround
+ - autorelease-checkout-gerrit-patch
+ - autorelease-generate-release-patches
+ - maven-target:
+ maven-version: '{mvn33}'
+ pom: validate-pom.xml
+ goals: 'clean install -T1.5C -Djenkins -DskipTests -Dcheckstyle.skip=true -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r'
+ java-opts:
+ - '-Xmx8g'
+ settings: '{autorelease-settings}'
+ global-settings: '{odl-global-settings}'
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'pom.xml'
+ goals: 'clean validate -Djenkins -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r'
+ maven-opts: '-Xmx8g -XX:MaxPermSize=1024m -Dmaven.compile.fork=true'
+ settings: '{autorelease-settings}'
+ global-settings: '{odl-global-settings}'
+ automatic-archiving: false
+ post-step-run-condition: UNSTABLE
+
+ postbuilders:
+ - autorelease-sys-stats
+
+ publishers:
+ - email-notification:
+ email-prefix: '[autorelease] [natapp]'
+
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
--- /dev/null
+- project:
+ name: netconf-csit-clustering-scale
+ jobs:
+ - '{project}-csit-3node-periodic-{functionality}-{install}-{stream}'
+
+ # The project name
+ project: 'netconf'
+
+ # The functionality under test
+ functionality: 'clustering-scale'
+
+ # Project branches
+ stream:
+ - boron:
+ branch: 'master'
+ jre: 'openjdk8'
+ - beryllium:
+ branch: 'stable/beryllium'
+ jre: 'openjdk7'
+
+ install:
+ - only:
+ scope: 'only'
+ - all:
+ scope: 'all'
+
+ # Features to install
+ install-features: 'odl-restconf,odl-netconf-clustered-topology'
+
+ # Robot custom options
+ robot-options: '-v USE_NETCONF_CONNECTOR:False'
+
+ # This scaling test can take several hours to finish,
+ # so the job is defined to run once a day.
+ # Trigger jobs (daily)
+ schedule: 'H H * * *'
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
--- /dev/null
+- project:
+ name: netvirt-csit-openstack-integration
+ jobs:
+ - '{project}-csit-1node-openstack-{openstack}-{functionality}-{stream}'
+
+ # The project name
+ project: 'netvirt'
+
+ # The functionality under test
+ functionality: 'openstack'
+
+ stream:
+ - boron:
+ branch: 'master'
+ jre: 'openjdk8'
+ - beryllium:
+ branch: 'stable/beryllium'
+ jre: 'openjdk7'
+ - stable-lithium:
+ branch: 'stable/lithium'
+ jre: 'openjdk7'
+
+ openstack:
+ - mitaka:
+ openstack-branch: 'stable/mitaka'
+ odl-ml2-version: 'master'
+ - liberty:
+ openstack-branch: 'stable/liberty'
+ odl-ml2-version: 'stable/liberty'
+
+ openstack-vms: 3
+
+ install-features: 'odl-ovsdb-openstack'
+
+ schedule: 'H H * * *'
+
+ odl-enable-l3: 'yes'
--- /dev/null
+- project:
+ name: netvirt-3node-csit-openstack-integration
+ jobs:
+ - '{project}-csit-3node-openstack-{openstack}-{functionality}-{stream}'
+
+ # The project name
+ project: 'netvirt'
+
+ # The functionality under test
+ functionality: 'openstack'
+
+ stream:
+ - boron:
+ branch: 'master'
+ jre: 'openjdk8'
+ - beryllium:
+ branch: 'stable/beryllium'
+ jre: 'openjdk7'
+ - stable-lithium:
+ branch: 'stable/lithium'
+ jre: 'openjdk7'
+
+ openstack:
+ - mitaka:
+ openstack-branch: 'stable/mitaka'
+ odl-ml2-version: 'master'
+ - liberty:
+ openstack-branch: 'stable/liberty'
+ odl-ml2-version: 'stable/liberty'
+
+ openstack-vms: 4
+
+ install-features: 'odl-ovsdb-openstack,odl-jolokia'
+
+ schedule: 'H H * * *'
+
+ odl-enable-l3: 'yes'
+
+ enable-haproxy: 'yes'
+# REMOVE THIS LINE IF YOU WANT TO CUSTOMIZE ANYTHING BELOW
+# Otherwise this file will be automatically overwritten by the template
+# autobuilder.
+
+# ODL Releng build templates
- project:
- name: ovsdb-verify-custom
+ name: netvirt-verify-custom
jobs:
- - 'ovsdb-verify-{stream}'
+ - 'netvirt-verify-{stream}'
# stream: release stream (eg. stable-lithium or beryllium)
# branch: git branch (eg. stable/lithium or master)
jdk: openjdk8
jdks:
- openjdk8
- - clustering-netvirt-master:
- branch: 'topic/master/net-virt-clustering'
- jdk: openjdk7
- jdks:
- - openjdk7
- project: 'ovsdb'
+ project: 'netvirt'
--- /dev/null
+- project:
+ name: netvirt-sonar
+ jobs:
+ - 'netvirt-sonar'
+
+ project: 'netvirt'
+ jdk: 'openjdk8'
+
+- job-template:
+ name: 'netvirt-sonar'
+
+ project-type: maven
+ node: dynamic_docker
+ jdk: '{jdk}'
+
+ logrotate:
+ daysToKeep: '7'
+ numToKeep: '10'
+ artifactDaysToKeep: '1'
+ artifactNumToKeep: '1'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+
+ scm:
+ - git-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ branch: 'master'
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - timed: 'H H * * 6'
+ - gerrit-trigger-patch-sonar:
+ name: 'netvirt'
+
+ prebuilders:
+ - shell:
+ !include-raw-escape:
+ - include-raw-setup-docker.sh
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'pom.xml'
+ goals: 'clean install -V -Dsonar -Dskip.karaf.featureTest=true -Dmaven.compile.fork=true -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r'
+ maven-opts: '-Xmx1024m -XX:MaxPermSize=256m'
+ settings: '{netvirt-settings}'
+ global-settings: '{odl-global-settings}'
+
+ postbuilders:
+ - inject:
+ properties-file: env.properties
+ - maven-target:
+ maven-version: '{mvn33}'
+ pom: 'pom.xml'
+ goals: 'verify -V -Pintegrationtest -Dsonar -Dskip.karaf.featureTest=true -Dovsdb.controller.address=${{CONTROLLER_IP}} -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r '
+ properties:
+ - 'ovsdbserver.ipaddress=127.0.0.1'
+ - 'ovsdbserver.port=6641'
+ - 'ovsdb.userspace.enabled=yes'
+ settings: '{netvirt-settings}'
+ global-settings: '{odl-global-settings}'
+ - shell:
+ !include-raw-escape:
+ - include-raw-cleanup-docker.sh
+ - jacoco-nojava-workaround
+
+ reporters:
+ - findbugs
+
+ publishers:
+ - sonar:
+ language: 'java'
+ maven-opts: '-Xmx6144m -XX:MaxPermSize=1024m'
+ - email-notification:
+ email-prefix: '[netvirt]'
+ - jacoco-report
jdks: openjdk7
distribution-check: false
DEPENDENCIES: controller,neutron,odlparent,openflowjava,openflowplugin,ovsdb,sfc,yangtools
+JOB_TEMPLATES: verify,merge,periodic,distribution,distribution-check,integration
- 'netvirt-distribution-{stream}'
- 'netvirt-distribution-check-{stream}'
- 'netvirt-integration-{stream}'
- - 'netvirt-sonar'
- 'netvirt-clm-{stream}'
- 'netvirt-validate-autorelease-{stream}'
email-prefix: '[netvirt] [controller] [neutron] [odlparent] [openflowjava] [openflowplugin] [ovsdb] [sfc] [yangtools]'
- jacoco-report
-- job-template:
- name: 'netvirt-sonar'
- disabled: false
-
- project-type: maven
- node: dynamic_verify
- jdk: 'openjdk8'
-
- logrotate:
- daysToKeep: '7'
- numToKeep: '10'
- artifactDaysToKeep: '1'
- artifactNumToKeep: '1'
-
- parameters:
- - project-parameter:
- project: '{project}'
-
- scm:
- - git-scm:
- credentials-id: '{ssh-credentials}'
- refspec: ''
- branch: 'master'
-
- wrappers:
- - build-timeout
- - ssh-agent-credentials:
- users:
- - '{ssh-credentials}'
-
- triggers:
- - timed: 'H H * * 6'
- - gerrit-trigger-patch-sonar:
- name: 'netvirt'
-
- prebuilders:
- - wipe-org-opendaylight-repo
- - jacoco-nojava-workaround
- - provide-maven-settings:
- global-settings-file: '{odl-global-settings}'
- settings-file: '{netvirt-settings}'
-
- maven:
- maven-name: '{mvn33}'
- root-pom: 'pom.xml'
- goals: 'clean install -V -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r -Djenkins -Dsonar'
- maven-opts: '-Xmx1024m -XX:MaxPermSize=256m'
- settings: '{netvirt-settings}'
- global-settings: '{odl-global-settings}'
-
- reporters:
- - findbugs
-
- publishers:
- - sonar:
- language: 'java'
- maven-opts: '-Xmx6144m -XX:MaxPermSize=1024m'
- - email-notification:
- email-prefix: '[netvirt]'
- - jacoco-report
-
- job-template:
name: 'netvirt-clm-{stream}'
disabled: false
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
--- /dev/null
+STREAMS:
+- boron:
+ branch: master
+ jdks: openjdk8
+ autorelease: true
+DEPENDENCIES: yangtools,controller
--- /dev/null
+# REMOVE THIS LINE IF YOU WANT TO CUSTOMIZE ANYTHING BELOW
+# Otherwise this file will be automatically overwritten by the template
+# autobuilder.
+
+# ODL Releng build templates
+- project:
+ name: ocpplugin
+ jobs:
+ - 'ocpplugin-verify-{stream}'
+ - 'ocpplugin-merge-{stream}'
+ - 'ocpplugin-periodic-{stream}'
+ - 'ocpplugin-distribution-{stream}'
+ - 'ocpplugin-distribution-check-{stream}'
+ - 'ocpplugin-integration-{stream}'
+ - 'ocpplugin-sonar'
+ - 'ocpplugin-clm-{stream}'
+ - 'ocpplugin-validate-autorelease-{stream}'
+
+
+ # stream: release stream (eg. stable-lithium or beryllium)
+ # branch: git branch (eg. stable/lithium or master)
+ stream:
+ - boron:
+ branch: 'master'
+ jdk: openjdk8
+ jdks:
+ - openjdk8
+ disable_autorelease: False
+ disable_distribution_check: False
+
+ project: 'ocpplugin'
+
+# For the Job templates below replace instances of:
+# PROJECT_SHORTNAME with your project name (eg. circuitsw)
+# PROJECT_PATH with your project name (eg. ofextensions/circuitsw)
+# MAVEN_GOALS with your maven goals to build
+# MAVEN_OPTS with your maven options to build
+
+- job-template:
+ name: 'ocpplugin-verify-{stream}'
+
+ # Job template for ODL verify jobs
+ #
+ # The purpose of this job template is to setup a ODL verify job
+ #
+ # Required Variables:
+ # stream: release stream (eg. stable-lithium or beryllium)
+ # branch: git branch (eg. stable/lithium or master)
+
+ project-type: matrix
+ node: matrix_master
+ concurrent: true
+
+ axes:
+ - axis:
+ type: slave
+ name: nodes
+ values:
+ - dynamic_verify
+ - axis:
+ type: jdk
+ values: '{obj:jdks}'
+
+ logrotate:
+ daysToKeep: '{build-days-to-keep}'
+ numToKeep: '{build-num-to-keep}'
+ artifactDaysToKeep: '{build-artifact-days-to-keep}'
+ artifactNumToKeep: '{build-artifact-num-to-keep}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - gerrit-parameter:
+ branch: '{branch}'
+ - gerrit-refspec-parameter:
+ refspec: 'refs/heads/{branch}'
+
+ scm:
+ - gerrit-trigger-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: '$GERRIT_REFSPEC'
+ choosing-strategy: 'gerrit'
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - gerrit-trigger-patch-submitted:
+ name: 'ocpplugin'
+ branch: '{branch}'
+
+ builders:
+ - wipe-org-opendaylight-repo
+ - jacoco-nojava-workaround
+ - provide-maven-settings:
+ global-settings-file: '{odl-global-settings}'
+ settings-file: '{ocpplugin-settings}'
+ - maven-target:
+ maven-version: '{mvn33}'
+ pom: 'pom.xml'
+ goals: 'clean install -V -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r -Djenkins -Dstream={stream}'
+ java-opts:
+ - '-Xmx1024m -XX:MaxPermSize=256m'
+ settings: '{ocpplugin-settings}'
+ global-settings: '{odl-global-settings}'
+
+ publishers:
+ - archive-artifacts:
+ artifacts: '**/target/surefire-reports/*-output.txt, '
+ - findbugs
+ - email-notification:
+ email-prefix: '[ocpplugin]'
+ - jacoco-report
+
+- job-template:
+ name: 'ocpplugin-merge-{stream}'
+
+ # Job template for ODL merge jobs
+ #
+ # The purpose of this job template is to setup a ODL merge job
+ # and deploy artifacts to Nexus.
+ #
+ # Required Variables:
+ # stream: release stream (eg. stable-lithium or beryllium)
+ # branch: git branch (eg. stable/lithium or master)
+
+ project-type: maven
+ node: dynamic_merge
+ jdk: '{jdk}'
+
+ logrotate:
+ daysToKeep: '14'
+ numToKeep: '10'
+ artifactDaysToKeep: '{build-artifact-days-to-keep}'
+ artifactNumToKeep: '{build-artifact-num-to-keep}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - gerrit-parameter:
+ branch: '{branch}'
+ - gerrit-refspec-parameter:
+ refspec: 'refs/heads/{branch}'
+
+ scm:
+ - gerrit-trigger-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ choosing-strategy: 'default'
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - timed: 'H H * * 0'
+ - gerrit-trigger-patch-merged:
+ name: 'ocpplugin'
+ branch: '{branch}'
+
+ prebuilders:
+ - wipe-org-opendaylight-repo
+ - jacoco-nojava-workaround
+ - provide-maven-settings:
+ global-settings-file: '{odl-global-settings}'
+ settings-file: '{ocpplugin-settings}'
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'pom.xml'
+ goals: 'clean install -V -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r -Djenkins -Dmerge -Dstream={stream}'
+ maven-opts: '-Xmx1024m -XX:MaxPermSize=256m'
+ settings: '{ocpplugin-settings}'
+ global-settings: '{odl-global-settings}'
+ post-step-run-condition: UNSTABLE
+
+ postbuilders:
+ - conditional-step:
+ condition-kind: file-exists
+ condition-filename: deploy-site.xml
+ condition-basedir: workspace
+
+ # The strategy here is intentional to run Maven site:deploy twice
+ # once using regular pom.xml to produce a staged-site which is
+ # then used by deploy-site.xml to push to Nexus. This is a
+ # workaround to Maven Site's default linking code which creates
+ # incorrect URLs for sites due to auto-detection assuming your
+ # project is configured in a certain way which ODL is not.
+ steps:
+ - maven-target:
+ maven-version: '{mvn33}'
+ pom: pom.xml
+ goals: 'site:deploy -Dstream={stream}'
+ java-opts:
+ - '-Xmx2g'
+ settings: '{ocpplugin-settings}'
+ global-settings: '{odl-global-settings}'
+ - maven-target:
+ maven-version: '{mvn33}'
+ pom: deploy-site.xml
+ goals: 'site:deploy -Dstream={stream}'
+ java-opts:
+ - '-Xmx2g'
+ settings: '{ocpplugin-settings}'
+ global-settings: '{odl-global-settings}'
+
+ reporters:
+ - findbugs
+
+ publishers:
+ - archive-artifacts:
+ artifacts: '**/target/surefire-reports/*-output.txt'
+ - email-notification:
+ email-prefix: '[ocpplugin]'
+ - maven-deploy:
+ id: ''
+ unique-version: true
+ deploy-unstable: false
+ - jacoco-report
+
+- job-template:
+ name: 'ocpplugin-periodic-{stream}'
+ disabled: false
+
+ # Job template for periodic builders
+ #
+ # The purpose of this job template is to setup a periodic
+ # builder.
+ #
+ # Required Variables:
+ # stream: release stream (eg. stable-lithium or beryllium)
+ # branch: git branch (eg. stable/lithium or master)
+
+ project-type: maven
+ node: dynamic_verify
+ jdk: '{jdk}'
+
+ logrotate:
+ daysToKeep: '14'
+ numToKeep: '10'
+ artifactDaysToKeep: '{build-artifact-days-to-keep}'
+ artifactNumToKeep: '{build-artifact-num-to-keep}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+
+ scm:
+ - git-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ branch: '{branch}'
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - timed: '@daily'
+
+ prebuilders:
+ - wipe-org-opendaylight-repo
+ - jacoco-nojava-workaround
+ - provide-maven-settings:
+ global-settings-file: '{odl-global-settings}'
+ settings-file: '{ocpplugin-settings}'
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'pom.xml'
+ goals: 'clean install -V -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r -Djenkins'
+ maven-opts: '-Xmx1024m -XX:MaxPermSize=256m'
+ settings: '{ocpplugin-settings}'
+ global-settings: '{odl-global-settings}'
+
+ reporters:
+ - findbugs
+
+ publishers:
+ - email-notification:
+ email-prefix: '[ocpplugin]'
+ - jacoco-report
+
+- job-template:
+ name: 'ocpplugin-distribution-{stream}'
+
+ # Required variables:
+ # stream: release stream (eg. stable-lithium or beryllium)
+ # branch: git branch (eg. stable/lithium or master)
+
+ project-type: maven
+ node: dynamic_merge
+ jdk: '{jdk}'
+
+ logrotate:
+ daysToKeep: '{build-days-to-keep}'
+ numToKeep: '{build-num-to-keep}'
+ artifactDaysToKeep: '{build-artifact-days-to-keep}'
+ artifactNumToKeep: '{build-artifact-num-to-keep}'
+
+ parameters:
+ - project-parameter:
+ project: 'integration/distribution'
+
+ scm:
+ - git-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ branch: '{branch}'
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - reverse:
+ jobs: '{project}-merge-{stream}'
+ result: 'success'
+
+ prebuilders:
+ - wipe-org-opendaylight-repo
+ - provide-maven-settings:
+ global-settings-file: '{odl-global-settings}'
+ settings-file: '{ocpplugin-settings}'
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'pom.xml'
+ goals: 'clean install -Djenkins -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r'
+ maven-opts: '-Xmx1024m -XX:MaxPermSize=256m -Dmaven.compile.fork=true'
+ settings: '{integration-settings}'
+ global-settings: '{odl-global-settings}'
+
+ publishers:
+ - email-notification:
+ email-prefix: '[{project}]'
+ - maven-deploy:
+ id: ''
+ unique-version: true
+ deploy-unstable: false
+
+# Template: integration-patch-distribution-{stream}
+# Goal: Build a patch and make sure the distribution can deploy with this change
+# Operation: This job template builds a patch, creates a distribution containing
+# the patch, and triggers the distribution deploy test
+
+- job-template:
+ name: 'ocpplugin-distribution-check-{stream}'
+ disabled: '{obj:disable_distribution_check}'
+
+ project-type: maven
+ node: dynamic_verify
+ concurrent: true
+ jdk: '{jdk}'
+
+ logrotate:
+ daysToKeep: '14'
+ numToKeep: '10'
+ artifactDaysToKeep: '{build-artifact-days-to-keep}'
+ artifactNumToKeep: '{build-artifact-num-to-keep}'
+
+ parameters:
+ - gerrit-project-parameter:
+ project: '{project}'
+ - gerrit-refspec-parameter:
+ refspec: '{branch}'
+ - project-parameter:
+ project: '{project}'
+ - integration-distribution-git-url
+
+ scm:
+ - integration-gerrit-scm:
+ credentials-id: '{ssh-credentials}'
+ basedir: '$GERRIT_PROJECT'
+ refspec: '$GERRIT_REFSPEC'
+ branch: '{branch}'
+ - integration-distribution-scm:
+ credentials-id: '{ssh-credentials}'
+ branch: '{branch}'
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - gerrit:
+ server-name: 'OpenDaylight'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'true'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'true'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'test-distribution'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{name}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ skip-vote:
+ successful: false
+ failed: false
+ unstable: false
+ notbuilt: false
+
+ prebuilders:
+ - wipe-org-opendaylight-repo
+ - maven-target:
+ maven-version: '{mvn33}'
+ pom: '$GERRIT_PROJECT/pom.xml'
+ goals: 'clean install -Djenkins -DskipTests -Dcheckstyle.skip=true -Dmaven.javadoc.skip=true -Dmaven.site.skip=true -DgenerateReports=false -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r -Dstream={stream}'
+ java-opts:
+ - '-Xmx4096m -XX:MaxPermSize=1024m -Dmaven.compile.fork=true'
+ settings: '{ocpplugin-settings}'
+ global-settings: '{odl-global-settings}'
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'distribution/pom.xml'
+ goals: 'clean install -Djenkins -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r'
+ maven-opts: '-Xmx1024m -XX:MaxPermSize=256m -Dmaven.compile.fork=true'
+ settings: '{ocpplugin-settings}'
+ global-settings: '{odl-global-settings}'
+ ignore-upstream-changes: true
+ post-step-run-condition: 'SUCCESS'
+
+ postbuilders:
+ - integration-distribution-check
+
+ publishers:
+ - archive:
+ artifacts: 'karaf.log'
+ - archive:
+ artifacts: 'karaf_console.log'
+ - email-notification:
+ email-prefix: '[ocpplugin]'
+
+- job-template:
+ name: 'ocpplugin-integration-{stream}'
+ disabled: false
+
+ # Job template for ODL integration verify jobs
+ #
+ # This is similar to a normal verify job, but it runs
+ # when a project that's a dependency of your project
+ # is successfully built.
+ #
+ # Required Variables:
+ # stream: release stream (eg. stable-lithium or beryllium)
+ # branch: git branch (eg. stable/lithium or master)
+
+ project-type: maven
+ node: dynamic_merge
+ jdk: '{jdk}'
+
+ logrotate:
+ daysToKeep: '{build-days-to-keep}'
+ numToKeep: '{build-num-to-keep}'
+ artifactDaysToKeep: '{build-artifact-days-to-keep}'
+ artifactNumToKeep: '{build-artifact-num-to-keep}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+
+ scm:
+ - git-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ branch: '{branch}'
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - reverse:
+ jobs: 'odlparent-merge-{stream},yangtools-merge-{stream},controller-merge-{stream}'
+ result: 'success'
+
+ prebuilders:
+ - wipe-org-opendaylight-repo
+ - jacoco-nojava-workaround
+ - provide-maven-settings:
+ global-settings-file: '{odl-global-settings}'
+ settings-file: '{ocpplugin-settings}'
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'pom.xml'
+ goals: 'clean install -V -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r -Djenkins'
+ maven-opts: '-Xmx1024m -XX:MaxPermSize=256m'
+ settings: '{ocpplugin-settings}'
+ global-settings: '{odl-global-settings}'
+
+ reporters:
+ - findbugs
+
+ publishers:
+ - email-notification:
+ email-prefix: '[ocpplugin] [odlparent] [yangtools] [controller]'
+ - jacoco-report
+
+- job-template:
+ name: 'ocpplugin-sonar'
+ disabled: false
+
+ project-type: maven
+ node: dynamic_verify
+ jdk: 'openjdk8'
+
+ logrotate:
+ daysToKeep: '7'
+ numToKeep: '10'
+ artifactDaysToKeep: '1'
+ artifactNumToKeep: '1'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+
+ scm:
+ - git-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ branch: 'master'
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - timed: 'H H * * 6'
+ - gerrit-trigger-patch-sonar:
+ name: 'ocpplugin'
+
+ prebuilders:
+ - wipe-org-opendaylight-repo
+ - jacoco-nojava-workaround
+ - provide-maven-settings:
+ global-settings-file: '{odl-global-settings}'
+ settings-file: '{ocpplugin-settings}'
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'pom.xml'
+ goals: 'clean install -V -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r -Djenkins -Dsonar'
+ maven-opts: '-Xmx1024m -XX:MaxPermSize=256m'
+ settings: '{ocpplugin-settings}'
+ global-settings: '{odl-global-settings}'
+
+ reporters:
+ - findbugs
+
+ publishers:
+ - sonar:
+ language: 'java'
+ maven-opts: '-Xmx6144m -XX:MaxPermSize=1024m'
+ - email-notification:
+ email-prefix: '[ocpplugin]'
+ - jacoco-report
+
+- job-template:
+ name: 'ocpplugin-clm-{stream}'
+ disabled: false
+
+ project-type: maven
+ node: dynamic_verify
+ jdk: '{jdk}'
+
+ logrotate:
+ daysToKeep: '{build-days-to-keep}'
+ numToKeep: '{build-num-to-keep}'
+ artifactDaysToKeep: '{build-artifact-days-to-keep}'
+ artifactNumToKeep: '{build-artifact-num-to-keep}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+
+ scm:
+ - git-scm:
+ credentials-id: '{ssh-credentials}'
+ refspec: ''
+ branch: '{branch}'
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - timed: 'H H * * 6'
+
+ prebuilders:
+ - wipe-org-opendaylight-repo
+ - provide-maven-settings:
+ global-settings-file: '{odl-global-settings}'
+ settings-file: '{ocpplugin-settings}'
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'pom.xml'
+ goals: 'clean install com.sonatype.clm:clm-maven-plugin:index -Djenkins -DskipTests=true -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r'
+ maven-opts: '-Xmx4096m -XX:MaxPermSize=512m'
+ settings: '{ocpplugin-settings}'
+ global-settings: '{odl-global-settings}'
+
+ postbuilders:
+ - shell: 'find . -regex ".*karaf/target" | xargs rm -rf'
+ - check-clm:
+ application-name: ocpplugin
+
+ publishers:
+ - email-notification:
+ email-prefix: '[ocpplugin]'
+
+- job-template:
+ name: 'ocpplugin-validate-autorelease-{stream}'
+ disabled: '{obj:disable_autorelease}'
+
+ project-type: maven
+ node: dynamic_verify
+ concurrent: true
+ jdk: '{jdk}'
+
+ logrotate:
+ daysToKeep: '{build-days-to-keep}'
+ numToKeep: '{build-num-to-keep}'
+ artifactDaysToKeep: '{build-artifact-days-to-keep}'
+ artifactNumToKeep: '{build-artifact-num-to-keep}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - gerrit-parameter:
+ branch: '{branch}'
+ - autorelease-release-tag:
+ release-tag: 'validate'
+ - autorelease-release-branch:
+ release-branch: '{branch}'
+ - autorelease-release-datestamp:
+ datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
+
+ scm:
+ - git:
+ url: '$CLONE_URL'
+ refspec: ''
+ branches:
+ - 'origin/{branch}'
+ skip-tag: true
+ shallow-clone: true
+ submodule:
+ recursive: true
+ timeout: 15
+
+ wrappers:
+ - autorelease-build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - gerrit:
+ server-name: 'OpenDaylight'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'false'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'false'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ - comment-added-contains-event:
+ comment-contains-value: 'revalidate'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: 'ocpplugin'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ file-paths:
+ - compare-type: ANT
+ pattern: '**/*.xml'
+
+ prebuilders:
+ - wipe-org-opendaylight-repo
+ - jacoco-nojava-workaround
+ - autorelease-checkout-gerrit-patch
+ - autorelease-generate-release-patches
+ - maven-target:
+ maven-version: '{mvn33}'
+ pom: validate-pom.xml
+ goals: 'clean install -T1.5C -Djenkins -DskipTests -Dcheckstyle.skip=true -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r'
+ java-opts:
+ - '-Xmx8g'
+ settings: '{autorelease-settings}'
+ global-settings: '{odl-global-settings}'
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'pom.xml'
+ goals: 'clean validate -Djenkins -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r'
+ maven-opts: '-Xmx8g -XX:MaxPermSize=1024m -Dmaven.compile.fork=true'
+ settings: '{autorelease-settings}'
+ global-settings: '{odl-global-settings}'
+ automatic-archiving: false
+ post-step-run-condition: UNSTABLE
+
+ postbuilders:
+ - autorelease-sys-stats
+
+ publishers:
+ - email-notification:
+ email-prefix: '[autorelease] [ocpplugin]'
+
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
- project:
- name: ovsdb-custom
+ name: ovsdb-sonar
jobs:
- 'ovsdb-sonar'
- '{ssh-credentials}'
triggers:
- - timed: '@weekly'
+ - timed: 'H H * * 6'
- gerrit-trigger-patch-sonar:
name: 'ovsdb'
branch: stable/lithium
jdks: openjdk7
distribution-check: false
-DEPENDENCIES: odlparent,controller,yangtools,openflowplugin,neutron
-JOB_TEMPLATES: verify,merge,periodic,distribution,integration
+DEPENDENCIES: odlparent,controller,yangtools
+JOB_TEMPLATES: verify,merge,periodic,distribution,distribution-check,integration
- 'ovsdb-merge-{stream}'
- 'ovsdb-periodic-{stream}'
- 'ovsdb-distribution-{stream}'
+ - 'ovsdb-distribution-check-{stream}'
- 'ovsdb-integration-{stream}'
- 'ovsdb-clm-{stream}'
- 'ovsdb-validate-autorelease-{stream}'
unique-version: true
deploy-unstable: false
+# Template: integration-patch-distribution-{stream}
+# Goal: Build a patch and make sure the distribution can deploy with this change
+# Operation: This job template builds a patch, creates a distribution containing
+# the patch, and triggers the distribution deploy test
+
+- job-template:
+ name: 'ovsdb-distribution-check-{stream}'
+ disabled: '{obj:disable_distribution_check}'
+
+ project-type: maven
+ node: dynamic_verify
+ concurrent: true
+ jdk: '{jdk}'
+
+ logrotate:
+ daysToKeep: '14'
+ numToKeep: '10'
+ artifactDaysToKeep: '{build-artifact-days-to-keep}'
+ artifactNumToKeep: '{build-artifact-num-to-keep}'
+
+ parameters:
+ - gerrit-project-parameter:
+ project: '{project}'
+ - gerrit-refspec-parameter:
+ refspec: '{branch}'
+ - project-parameter:
+ project: '{project}'
+ - integration-distribution-git-url
+
+ scm:
+ - integration-gerrit-scm:
+ credentials-id: '{ssh-credentials}'
+ basedir: '$GERRIT_PROJECT'
+ refspec: '$GERRIT_REFSPEC'
+ branch: '{branch}'
+ - integration-distribution-scm:
+ credentials-id: '{ssh-credentials}'
+ branch: '{branch}'
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - gerrit:
+ server-name: 'OpenDaylight'
+ trigger-on:
+ - patchset-created-event:
+ exclude-drafts: 'true'
+ exclude-trivial-rebase: 'false'
+ exclude-no-code-change: 'true'
+ - draft-published-event
+ - comment-added-contains-event:
+ comment-contains-value: 'test-distribution'
+ - comment-added-contains-event:
+ comment-contains-value: 'reverify'
+ - comment-added-contains-event:
+ comment-contains-value: 'recheck'
+ projects:
+ - project-compare-type: 'ANT'
+ project-pattern: '{name}'
+ branches:
+ - branch-compare-type: 'ANT'
+ branch-pattern: '**/{branch}'
+ skip-vote:
+ successful: false
+ failed: false
+ unstable: false
+ notbuilt: false
+
+ prebuilders:
+ - wipe-org-opendaylight-repo
+ - maven-target:
+ maven-version: '{mvn33}'
+ pom: '$GERRIT_PROJECT/pom.xml'
+ goals: 'clean install -Djenkins -DskipTests -Dcheckstyle.skip=true -Dmaven.javadoc.skip=true -Dmaven.site.skip=true -DgenerateReports=false -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r -Dstream={stream}'
+ java-opts:
+ - '-Xmx4096m -XX:MaxPermSize=1024m -Dmaven.compile.fork=true'
+ settings: '{ovsdb-settings}'
+ global-settings: '{odl-global-settings}'
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'distribution/pom.xml'
+ goals: 'clean install -Djenkins -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r'
+ maven-opts: '-Xmx1024m -XX:MaxPermSize=256m -Dmaven.compile.fork=true'
+ settings: '{ovsdb-settings}'
+ global-settings: '{odl-global-settings}'
+ ignore-upstream-changes: true
+ post-step-run-condition: 'SUCCESS'
+
+ postbuilders:
+ - integration-distribution-check
+
+ publishers:
+ - archive:
+ artifacts: 'karaf.log'
+ - archive:
+ artifacts: 'karaf_console.log'
+ - email-notification:
+ email-prefix: '[ovsdb]'
+
- job-template:
name: 'ovsdb-integration-{stream}'
disabled: false
triggers:
- reverse:
- jobs: 'odlparent-merge-{stream},controller-merge-{stream},yangtools-merge-{stream},openflowplugin-merge-{stream},neutron-merge-{stream}'
+ jobs: 'odlparent-merge-{stream},controller-merge-{stream},yangtools-merge-{stream}'
result: 'success'
prebuilders:
publishers:
- email-notification:
- email-prefix: '[ovsdb] [odlparent] [controller] [yangtools] [openflowplugin] [neutron]'
+ email-prefix: '[ovsdb] [odlparent] [controller] [yangtools]'
- jacoco-report
- job-template:
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
integration-settings: 'org.jenkinsci.plugins.configfiles.maven.MavenSettingsConfig1414693976622'
integration-distribution-settings: 'org.jenkinsci.plugins.configfiles.maven.MavenSettingsConfig1414693976622'
iotdm-settings: 'org.jenkinsci.plugins.configfiles.maven.MavenSettingsConfig1418228763195'
+ jsonrpc-settings: 'org.jenkinsci.plugins.configfiles.maven.MavenSettingsConfig1461352069244'
kafkaproducer-settings: 'org.jenkinsci.plugins.configfiles.maven.MavenSettingsConfig1452526644682'
l2switch-settings: 'org.jenkinsci.plugins.configfiles.maven.MavenSettingsConfig1414694505032'
lacp-settings: 'org.jenkinsci.plugins.configfiles.maven.MavenSettingsConfig1418417438677'
default: '{refspec}'
description: "GERRIT_REFSPEC parameter if not given by trigger"
+- parameter:
+ name: build-tag
+ parameters:
+ - string:
+ name: BUILD_TAG
+ default: ''
+ description: 'Tag in Git to checkout'
+
- parameter:
name: controller-version-parameter
parameters:
default: 'master'
description: 'distribution repo branch to build with'
+- parameter:
+ name: p2zip-parameter
+ parameters:
+ - string:
+ name: P2ZIP_URL
+ default: ''
+ description: 'URL to the zipped p2 repository to publish'
+
+- parameter:
+ name: stage-id-parameter
+ parameters:
+ - string:
+ name: STAGING_PROFILE_ID
+ default: '{stage-id}'
+ description: 'Nexus staging profile id'
+
- scm:
name: git-scm
scm:
variable: 'GLOBAL_SETTINGS_FILE'
- file-id: '{settings-file}'
variable: 'SETTINGS_FILE'
+
+- builder:
+ name: releng-generate-p2pom
+ builders:
+ - shell: !include-raw: include-raw-generate-p2pom.sh
+
+- builder:
+ name: releng-update-p2composite-metadata
+ builders:
+ - shell: !include-raw: include-raw-update-p2composite-metadata.sh
+ - maven-target:
+ maven-version: '{maven-version}'
+ pom: 'deploy-composite-repo.xml'
+ goals: 'clean deploy -V -Dmaven.repo.local=/tmp/r'
+ settings: '{settings}'
+ global-settings: '{global-settings}'
+
+- builder:
+ name: releng-stage-release
+ builders:
+ - shell: !include-raw: include-raw-stage-release.sh
+- job-template:
+ name: '{name}-release-java'
+
+ # Job template for producing a release candidate by creating a staging repo
+ # in Nexus.
+
+ project-type: maven
+ node: dynamic_merge
+ jdk: '{jdk}'
+
+ logrotate:
+ daysToKeep: '30'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - build-tag
+ - stage-id-parameter:
+ stage-id: '{stage-id}'
+
+ scm:
+ - git:
+ credentials-id: '{ssh-credentials}'
+ url: '$GIT_BASE'
+ branches:
+ - '$BUILD_TAG'
+ skip-tag: true
+ shallow-clone: true
+ wipe-workspace: true
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ prebuilders:
+ - wipe-org-opendaylight-repo
+ - jacoco-nojava-workaround
+ - provide-maven-settings:
+ global-settings-file: '{odl-global-settings}'
+ settings-file: '{autorelease-settings}'
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'pom.xml'
+ goals: 'clean deploy -V -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r -Djenkins -DaltDeploymentRepository=staging::default::file:hide/from/pom/files/stage'
+ maven-opts: '-Xmx1024m -XX:MaxPermSize=256m'
+ settings: '{autorelease-settings}'
+ global-settings: '{odl-global-settings}'
+ post-step-run-condition: UNSTABLE
+
+ postbuilders:
+ - releng-stage-release
+
+ publishers:
+ - archive-artifacts:
+ artifacts: '*.log'
+ - email-notification:
+ email-prefix: '[releng]'
+
+
+- job-template:
+ name: '{name}-publish-p2repo'
+
+ # Job template for publishing a p2 repo given a URL to a zipped p2repo.
+
+ project-type: maven
+ node: dynamic_merge
+ jdk: '{jdk}'
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - p2zip-parameter
+
+ prebuilders:
+ - wipe-org-opendaylight-repo
+ - jacoco-nojava-workaround
+ - releng-generate-p2pom
+
+ maven:
+ maven-name: '{mvn33}'
+ root-pom: 'pom.xml'
+ goals: 'clean deploy -V -Dmaven.repo.local=/tmp/r'
+ settings: '{project-settings}'
+ global-settings: '{odl-global-settings}'
+ post-step-run-condition: UNSTABLE
+
+ postbuilders:
+ - releng-update-p2composite-metadata:
+ maven-version: '{mvn33}'
+ settings: '{project-settings}'
+ global-settings: '{odl-global-settings}'
+
+ publishers:
+ - email-notification:
+ email-prefix: '[releng]'
+
- job-template:
name: '{name}-verify-python-{stream}'
publishers:
- email-notification:
email-prefix: '[{project}]'
+
+- job-template:
+ name: '{name}-merge-rtd-{stream}'
+ # Template for ReadTheDocs triggering
+
+ project-type: freestyle
+ node: dynamic_verify
+ concurrent: false
+
+ parameters:
+ - project-parameter:
+ project: '{project}'
+ - gerrit-parameter:
+ branch: '{branch}'
+ - gerrit-refspec-parameter:
+ refspec: 'refs/heads/{branch}'
+
+ wrappers:
+ - build-timeout
+ - ssh-agent-credentials:
+ users:
+ - '{ssh-credentials}'
+
+ triggers:
+ - gerrit-trigger-patch-merged:
+ name: '{project}'
+ branch: '{branch}'
+
+ builders:
+ - shell: |
+ curl -X POST https://readthedocs.org/build/{rtdproject}
+
+ publishers:
+ - email-notification:
+ email-prefix: '[{project}]'
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
- project:
name: spectrometer
jobs:
- - 'spectrometer-merge-{stream}'
+ - '{name}-merge-rtd-{stream}'
- '{name}-verify-python-{stream}'
stream:
branch: 'master'
project: 'spectrometer'
+ rtdproject: 'opendaylight-spectrometer'
toxdir: server
-
-- job-template:
- name: 'spectrometer-merge-{stream}'
-
- project-type: freestyle
- node: dynamic_verify
- concurrent: true
-
- parameters:
- - project-parameter:
- project: '{project}'
- - gerrit-parameter:
- branch: '{branch}'
- - gerrit-refspec-parameter:
- refspec: 'refs/heads/{branch}'
-
- scm:
- - gerrit-trigger-scm:
- credentials-id: '{ssh-credentials}'
- refspec: '$GERRIT_REFSPEC'
- choosing-strategy: 'gerrit'
-
- wrappers:
- - build-timeout
- - ssh-agent-credentials:
- users:
- - '{ssh-credentials}'
-
- triggers:
- - gerrit-trigger-patch-merged:
- name: 'spectrometer'
- branch: '{branch}'
-
- builders:
- - shell: |
- wget --post-data=test -qO- https://readthedocs.org/build/opendaylight-spectrometer &> /dev/null
-
- publishers:
- - email-notification:
- email-prefix: '[spectrometer]'
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
--- /dev/null
+- project:
+ name: yangide-releng
+ jobs:
+ - '{name}-publish-p2repo'
+ - '{name}-release-java'
+
+ project: yangide
+ jdk: 'openjdk8'
+ stage-id: 'fba306956f98'
+ project-settings: 'org.jenkinsci.plugins.configfiles.maven.MavenSettingsConfig1448465159080'
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
release-branch: '{branch}'
- autorelease-release-datestamp:
datestamp: true
+ - string:
+ name: CLONE_URL
+ default: 'ssh://jenkins-$SILO@git.opendaylight.org:29418/releng/autorelease'
+ description: "Autorelease clone URL"
scm:
- git:
- url: 'https://git.opendaylight.org/gerrit/releng/autorelease'
+ url: '$CLONE_URL'
refspec: ''
branches:
- 'origin/{branch}'
skip-tag: true
+ shallow-clone: true
submodule:
recursive: true
+ timeout: 15
wrappers:
- autorelease-build-timeout
--- /dev/null
+# Releasing Workflow
+
+This page documents the release workflow for projects that are not built
+and released via the Autorelease project.
+
+## Workflow
+
+OpenDaylight uses Nexus as its artifact repository for releasing artifacts to
+the world. The workflow involves using Nexus to produce a staging repository
+which can be tested and reviewed before being approved to copy to the final
+destination opendaylight.release repo. The workflow in general is as follows:
+
+1. Project creates a release tag and pushes it to Gerrit
+2. Project will contact helpdesk@opendaylight.org with project name and build
+ tag to produce a release candidate / staging repo
+3. Helpdesk will run a build and notify project of staging repo location
+4. Project tests staging repo and notifies Helpdesk with go ahead to release
+5. Helpdesk clicks Release repo button in Nexus
+6. (optional) Helpdesk runs Jenkins job to push update-site.zip to p2repos
+ sites repo
+
+Step 6 is only necessary for Eclipse projects that need to additionally deploy
+an update site to a webserver.
+
+## Release Job
+
+There is a JJB template release job which should be used for a project if the
+project needs to produce a staging repo for release. The supported Job types
+are listed below, use the one relevant to your project.
+
+**Maven|Java** {name}-release-java -- this job type will produce a staging repo
+in Nexus for Maven projects.
+
+**P2 Publisher** {name}-publish-p2repo -- this job type is useful for projects
+that produce a p2 repo that needs to be published to a special URL.
-basline can be used to prepare systems in the Rackspace (or potentially
-other environments) for following vagrant layers.
+baseline can be used for preparing basic test images. It's suitable for
+use only as a verification that our baseline library script is working
+as expected or for a very vanilla image.
-While the base image that is looked for is
-'Fedora 20 (Heisenbug) (PVHVM)' which is no longer even offered, the
-variable is being left in place so to prompt selection of a proper base
-image to spin up against.
-
-This is controlled by the RSIMAGE environment variable
-
-ex:
-
-$ RSIMAGE='CentOS 7 (PVHVM)' vagrant up --provider=rackspace
-
-This vagrant will just set the instance up at the most basic to be
-Vagrant capable and also SELinux enforcing. It will then "reseal" itself
-and state the the system is ready for imaging. Any further RackSpace
-specific Vagrant definitions will expect a base system of the form
-"$DISTRO - Vagrant ready" for the base image name
+This is controlled by the IMAGE environment variable
ex:
-Fedora 20 (Heisenbug) - Vagrant ready
-
-or
-
-CentOS 7 - Vagrant ready
-
-NOTE: The reseal operation _destroys_ the SSH keys that were used to
-bring the Vagrant system up effectively making the system unable to
-perform SSH based logins again. This is intentional.
+$ export RESEAL=true
+$ IMAGE='CentOS 7' vagrant up --provider=openstack
-If you are bringing up an Ubuntu system you _must_ also set
-RSPTY='default' or the bring up will hang indefinitely during the OS
-upgrade phase.
+If $RESEAL is not set then the system will not be cleaned up in
+preparation for snapshotting. This is mostly useful for troubleshooting
+a vagrant definition before you do your final creation and snapshot.
# getting the ssh key for some reason, root does
# so use that
config.ssh.username = 'root'
-
- # Fedora and EL systems default to requiring a tty for sudo
- if (ENV['RSPTY'] == 'default')
- config.ssh.pty = false
- else
- config.ssh.pty = true
- end
-
- # The rackspace provider by default tries to rsync
- # the local folder / vagrant box to /vagrant
- # unfortunately, even with config.ssh.pty = true
- # this fails because it doesn't recognize the pty requirement
- # when doing the sudo based rsync (not that it needs to sudo
- # when doing things as root). To avoid this, disable the
- # default sync, we don't need it anyway.
- config.vm.synced_folder '.', '/vagrant', :disabled => true
+ # DEPRECATED
+ # ==========
+ #
+ # NOTE: The Rackspace provider section is deprecated as we are moving into a
+ # private OpenStack cloud. It may be revived after we've migrated and have a
+ # chance to do work to reconfigure the Rackspace public cloud to work for
+ # burst access
+ #
# make sure to set the following in your
# ~/.vagrant.d/boxes/dummy/0/rackspace/Vagrantfile
# rs.username
# Default to the Fedora 20 image unless overridden by a RSIMAGE
# environment variable
- if ENV['RSIMAGE']
- rs.image = ENV['RSIMAGE']
+ if ENV['IMAGE']
+ rs.image = ENV['IMAGE']
else
rs.image = 'Fedora 20 (Heisenbug) (PVHVM)'
end
end
+ # /DEPRECATED
- # Do a full system update and force enforcing on (it's in permissive
- # by default in the rackspace base images)
- config.vm.provision 'shell', path: 'bootstrap.sh'
+ # Configuration used by ODL Private cloud
+ # Should be mostly usable by any OpenStack cloud that can
+ # utilize upstream cloud images
+ config.vm.provider :openstack do |os, override|
+ if ENV['BOX']
+ override.vm.box = ENV['BOX']
+ else
+ override.vm.box = 'dummy'
+ end
+ config.ssh.username = 'centos'
+ os.flavor = 'm1.small'
+
+ # require an IMAGE to be passed in
+ # IMAGE must be a human name and not an image ID!
+ if ENV['IMAGE']
+ os.image = ENV['IMAGE']
+ else
+ os.image = 'BAD IMAGE'
+ override.ssh.username = 'baduser'
+ end
+
+ case ENV['IMAGE']
+ when /.*ubuntu.*/i
+ override.ssh.username = 'ubuntu'
+
+ when /.*fedora.*/i
+ override.ssh.username = 'fedora'
+
+ # take care of the tty requirement by fedora for sudo
+ os.user_data = "#!/bin/bash
+/bin/sed -i 's/ requiretty/ !requiretty/' /etc/sudoers;"
+
+ when /.*centos.*/i
+ override.ssh.username = 'centos'
+
+ # take care of the tty requirement by centos for sudo
+ os.user_data = "#!/bin/bash
+/bin/sed -i 's/ requiretty/ !requiretty/' /etc/sudoers;"
+ end
+
+ end
- # disable the default requiretty for sudo that Fedora and CentOS have
- config.vm.provision 'shell', path: 'remove_requiretty.sh'
+ # Do a full system update and enable enforcing if needed
+ config.vm.provision 'shell', path: '../lib/baseline.sh'
# Execute a system clean-up in prep for imaging so that this base
# image can be used for other Rackspace Vagrant configurations
- config.vm.provision 'shell', path: 'system_reseal.sh'
+ config.vm.provision 'shell', path: '../lib/system_reseal.sh'
end
+++ /dev/null
-#!/bin/bash
-
-# Make sure we have the leading space so multiple runs
-# are idempotent
-/bin/sed -i 's/ requiretty/ !requiretty/' /etc/sudoers;
+++ /dev/null
-#!/bin/bash
-
-# vim: sw=2 ts=2 sts=2 et :
-
-if [ -f /.autorelabel ]; then
- echo "**********************************************"
- echo "* SYSTEM REQUIRES RELABELING SKIPPING RESEAL *"
- echo "* PLEASE RESTART SYSTEM AND RERUN *"
- echo "* PROVISIONING SCRIPTS *"
- echo "**********************************************"
- exit 1;
-fi
-
-# clean-up from any prior cloud-init networking
-rm -rf /etc/sysconfig/network-scripts/{ifcfg,route}-eth*
-
-rm -rf /etc/Pegasus/*.cnf /etc/Pegasus/*.crt /etc/Pegasus/*.csr /etc/Pegasus/*.pem /etc/Pegasus/*.srl /root/anaconda-ks.cfg /root/anaconda-post.log /root/initial-setup-ks.cfg /root/install.log /root/install.log.syslog /var/cache/fontconfig/* /var/cache/gdm/* /var/cache/man/* /var/lib/AccountService/users/* /var/lib/fprint/* /var/lib/logrotate.status /var/log/*.log* /var/log/BackupPC/LOG /var/log/ConsoleKit/* /var/log/anaconda.syslog /var/log/anaconda/* /var/log/apache2/*_log /var/log/apache2/*_log-* /var/log/apt/* /var/log/aptitude* /var/log/audit/* /var/log/btmp* /var/log/ceph/*.log /var/log/chrony/*.log /var/log/cron* /var/log/cups/*_log /var/log/debug* /var/log/dmesg* /var/log/exim4/* /var/log/faillog* /var/log/gdm/* /var/log/glusterfs/*glusterd.vol.log /var/log/glusterfs/glusterfs.log /var/log/httpd/*log /var/log/installer/* /var/log/jetty/jetty-console.log /var/log/journal/* /var/log/lastlog* /var/log/libvirt/libvirtd.log /var/log/libvirt/lxc/*.log /var/log/libvirt/qemu/*.log /var/log/libvirt/uml/*.log /var/log/lightdm/* /var/log/mail/* /var/log/maillog* /var/log/messages* /var/log/ntp /var/log/ntpstats/* /var/log/ppp/connect-errors /var/log/rhsm/* /var/log/sa/* /var/log/secure* /var/log/setroubleshoot/*.log /var/log/spooler* /var/log/squid/*.log /var/log/syslog* /var/log/tallylog* /var/log/tuned/tuned.log /var/log/wtmp* /var/named/data/named.run
-
-rm -rf ~/.viminfo /etc/ssh/ssh*key*
-
-# kill any cloud-init related bits
-rm -rf /var/lib/cloud/*
-
-if [ -e /usr/bin/facter ]
-then
- if [ `/usr/bin/facter operatingsystem` = 'Ubuntu' ]
- then
- rm -rf /etc/hostname* /etc/hosts /etc/network/interfaces /etc/network/interfaces.*.bak~
- cat <<EOINT >> /etc/network/interfaces
-# Used by ifup(8) and ifdown(8). See the interfaces(5) manpage or
-# /usr/share/doc/ifupdown/examples for more information.
-# The loopback network interface
-auto lo
-iface lo inet loopback
-EOINT
- fi
-fi
-
-echo "********************************************"
-echo "* PLEASE SNAPSHOT IMAGE AT THIS TIME *"
-echo "********************************************"
# config.ssh.private_key_path -- set this outside the openstack block
# in your base box
config.vm.provider :openstack do |os, override|
- # Default the CentOS 6.5 - Vagrant ready image unless overriden by a RSIMAGE
- # environment variable
+ if ENV['BOX']
+ override.vm.box = ENV['BOX']
+ else
+ override.vm.box = 'dummy'
+ end
+ config.ssh.username = 'centos'
+ os.flavor = 'm1.small'
+
+ # require an IMAGE to be passed in
+ # IMAGE must be a human name and not an image ID!
if ENV['IMAGE']
os.image = ENV['IMAGE']
else
- os.image = 'CentOS 7 - baseline - 20151215'
+ os.image = 'BAD IMAGE'
+ override.ssh.username = 'baduser'
end
case ENV['IMAGE']
+ when /.*ubuntu.*/i
+ override.ssh.username = 'ubuntu'
+
+ when /.*fedora.*/i
+ override.ssh.username = 'fedora'
+
+ # take care of the tty requirement by fedora for sudo
+ os.user_data = "#!/bin/bash
+/bin/sed -i 's/ requiretty/ !requiretty/' /etc/sudoers;"
+
when /.*centos.*/i
override.ssh.username = 'centos'
+
+ # take care of the tty requirement by centos for sudo
+ os.user_data = "#!/bin/bash
+/bin/sed -i 's/ requiretty/ !requiretty/' /etc/sudoers;"
end
end
config.vm.synced_folder ".", "/vagrant"
config.vm.synced_folder "../lib/", "/vagrant/lib"
- # run our bootstrapping for the ovsdb-devstack system
+ # Do a full system update and enable enforcing if needed
+ config.vm.provision 'shell', path: '../lib/baseline.sh'
+
+ # run our bootstrapping
config.vm.provision 'shell', path: 'bootstrap.sh'
#################
# vim: sw=4 ts=4 sts=4 et tw=72 :
-yum clean all
-
-# Make sure the system is fully up to date
-yum update -q -y
-
# The following packages are not needed by all projects, but they are
# needed by enough to make them useful everywhere
yum install -q -y @development perl-{Digest-SHA,ExtUtils-MakeMaker} \
VAGRANTFILE_API_VERSION = "2"
Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
- # root off of the rackspace provider dummy box
+ # root off of the openstack provider dummy box
config.vm.box = "dummy"
-
- # rackspace systems, even with cloud-init
- # don't seem to have the cloud int user ${osname} (or similar)
- # getting the ssh key for some reason, root does for sure
- # so use that
config.ssh.username = 'root'
- # Fedora and EL systems default to requiring tty for sudo
- # This should have been disabled with the Vagrant ready
- # base box conversion (see rackspace-convert-base vagrant)
- # but just to be safe
- config.ssh.pty = false
-
# make sure to set the following in your
- # ~/.vagrant.d/boxes/dummy/0/rackspace/Vagrantfile
- # rs.username
- # rs.api_key
- # rs.rackspace_region
+ # ~/.vagrant.d/boxes/dummy/0/openstack/Vagrantfile
+ #
+ # os.openstack_auth_url
+ # os.endpoint_type
+ # os.flavor
+ # os.tenant_name
+ # os.username
+ # os.password
+ # os.networks
#
# If you are not using an SSH token / smartcard also set this
- # rs.key_name
- # config.ssh.private_key_path -- set this outside the rackspace block
+ # os.key_name
+ # config.ssh.private_key_path -- set this outside the openstack block
# in your base box
- config.vm.provider :rackspace do |rs|
- # create these base builds always on the smallest system possible
- rs.flavor = 'general1-1'
-
- # allow for switching to ORD cloud but default to DFW
- if (ENV['RSREGION'] == 'ord')
- rs.rackspace_region = :ord
+ config.vm.provider :openstack do |os, override|
+ if ENV['BOX']
+ override.vm.box = ENV['BOX']
else
- rs.rackspace_region = :dfw
+ override.vm.box = 'dummy'
end
+ config.ssh.username = 'centos'
+ os.flavor = 'm1.small'
- # Default the Fedora 20 (Heisenbug) - Vagrant ready image unless overriden by a RSIMAGE
- # environment variable
- if ENV['RSIMAGE']
- rs.image = ENV['RSIMAGE']
+ # require an IMAGE to be passed in
+ # IMAGE must be a human name and not an image ID!
+ if ENV['IMAGE']
+ os.image = ENV['IMAGE']
else
- rs.image = 'Fedora 20 (Heisenbug) - Vagrant ready'
+ os.image = 'BAD IMAGE'
+ override.ssh.username = 'baduser'
end
- end
- # Explicitlly set default shared folder and load lib folder
- config.vm.synced_folder ".", "/vagrant"
- config.vm.synced_folder "../lib/", "/vagrant/lib"
+ case ENV['IMAGE']
+ when /.*ubuntu.*/i
+ override.ssh.username = 'ubuntu'
- # run our bootstrapping for the ovsdb-devstack system
- config.vm.provision 'shell', path: 'bootstrap.sh'
+ when /.*fedora.*/i
+ override.ssh.username = 'fedora'
+ # take care of the tty requirement by fedora for sudo
+ os.user_data = "#!/bin/bash
+/bin/sed -i 's/ requiretty/ !requiretty/' /etc/sudoers;"
- #################
- # LF NETWORKING #
- #################
+ when /.*centos.*/i
+ override.ssh.username = 'centos'
- if ENV['LFNETWORK']
- # reconfigure the network setup to support our special private setup
- config.vm.provision 'shell', path: '../lib/lf-networking/configure_lf_infra.sh',
- args: ENV['RSSUBDOMAIN']
+ # take care of the tty requirement by centos for sudo
+ os.user_data = "#!/bin/bash
+/bin/sed -i 's/ requiretty/ !requiretty/' /etc/sudoers;"
+ end
end
+ # Explicitly set default shared folder and load lib folder
+ config.vm.synced_folder ".", "/vagrant"
+ config.vm.synced_folder "../lib/", "/vagrant/lib"
+
+ # Do a full system update and enable enforcing if needed
+ config.vm.provision 'shell', path: '../lib/baseline.sh'
+
+ # run our bootstrapping
+ config.vm.provision 'shell', path: 'bootstrap.sh'
#################
# FINAL CLEANUP #
#################
- # set RSRESEAL to... anything if you want to snap an image of this box
+ # set RESEAL to... anything if you want to snap an image of this box
# not setting the environment variable will cause the system to come
# up fully and not be in a resealable state
- if ENV['RSRESEAL']
+ if ENV['RESEAL']
config.vm.provision 'shell', path: '../lib/system_reseal.sh'
end
end
# vim: sw=4 ts=4 sts=4 et :
rh_changes() {
- # make sure we're fully updated
- echo "---> Updating OS"
- yum clean all
- yum update -y -q
-
+ echo "---> RH changes"
# install docker and enable it
echo "---> Installing docker"
yum install -y docker supervisor bridge-utils
}
ubuntu_changes() {
- # make sure we're fully updated
- echo "---> Updating OS"
- apt-get update
- apt-get upgrade -y -qq
+ echo "---> Ubuntu changes"
}
-OS=`/usr/bin/facter operatingsystem`
+OS=$(/usr/bin/facter operatingsystem)
case "$OS" in
CentOS|Fedora|RedHat)
rh_changes
VAGRANTFILE_API_VERSION = "2"
Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
- # root off of the rackspace provider dummy box
+ # root off of the openstack provider dummy box
config.vm.box = "dummy"
-
- # rackspace systems, even with cloud-init
- # don't seem to have the cloud int user ${osname} (or similar)
- # getting the ssh key for some reason, root does for sure
- # so use that
config.ssh.username = 'root'
- # Fedora and EL systems default to requiring tty for sudo
- # This should have been disabled with the Vagrant ready
- # base box conversion (see rackspace-convert-base vagrant)
- # but just to be safe
- config.ssh.pty = true
-
# make sure to set the following in your
- # ~/.vagrant.d/boxes/dummy/0/rackspace/Vagrantfile
- # rs.username
- # rs.api_key
- # rs.rackspace_region
+ # ~/.vagrant.d/boxes/dummy/0/openstack/Vagrantfile
+ #
+ # os.openstack_auth_url
+ # os.endpoint_type
+ # os.flavor
+ # os.tenant_name
+ # os.username
+ # os.password
+ # os.networks
#
# If you are not using an SSH token / smartcard also set this
- # rs.key_name
- # config.ssh.private_key_path -- set this outside the rackspace block
+ # os.key_name
+ # config.ssh.private_key_path -- set this outside the openstack block
# in your base box
- config.vm.provider :rackspace do |rs|
- # create these base builds always on the smallest system possible
- rs.flavor = 'general1-1'
-
- # allow for switching to ORD cloud but default to DFW
- if (ENV['RSREGION'] == 'ord')
- rs.rackspace_region = :ord
+ config.vm.provider :openstack do |os, override|
+ if ENV['BOX']
+ override.vm.box = ENV['BOX']
else
- rs.rackspace_region = :dfw
+ override.vm.box = 'dummy'
end
+ config.ssh.username = 'centos'
+ os.flavor = 'm1.small'
- # Default the CentOS 6.5 - Vagrant ready image unless overriden by a RSIMAGE
- # environment variable
- if ENV['RSIMAGE']
- rs.image = ENV['RSIMAGE']
+ # require an IMAGE to be passed in
+ # IMAGE must be a human name and not an image ID!
+ if ENV['IMAGE']
+ os.image = ENV['IMAGE']
else
- rs.image = 'CentOS 6.5 - Vagrant ready'
+ os.image = 'BAD IMAGE'
+ override.ssh.username = 'baduser'
end
- end
- # Explicitlly set default shared folder and load lib folder
- config.vm.synced_folder ".", "/vagrant"
- config.vm.synced_folder "../lib/", "/vagrant/lib"
+ case ENV['IMAGE']
+ when /.*ubuntu.*/i
+ override.ssh.username = 'ubuntu'
- # run our bootstrapping for the robotframework system
- config.vm.provision 'shell', path: 'bootstrap.sh'
+ when /.*fedora.*/i
+ override.ssh.username = 'fedora'
+ # take care of the tty requirement by fedora for sudo
+ os.user_data = "#!/bin/bash
+/bin/sed -i 's/ requiretty/ !requiretty/' /etc/sudoers;"
- #################
- # LF NETWORKING #
- #################
+ when /.*centos.*/i
+ override.ssh.username = 'centos'
- if ENV['LFNETWORK']
- # reconfigure the network setup to support our special private setup
- config.vm.provision 'shell', path: '../lib/lf-networking/configure_lf_infra.sh',
- args: ENV['RSSUBDOMAIN']
+ # take care of the tty requirement by centos for sudo
+ os.user_data = "#!/bin/bash
+/bin/sed -i 's/ requiretty/ !requiretty/' /etc/sudoers;"
+ end
end
+ # Explicitly set default shared folder and load lib folder
+ config.vm.synced_folder ".", "/vagrant"
+ config.vm.synced_folder "../lib/", "/vagrant/lib"
+
+ # Do a full system update and enable enforcing if needed
+ config.vm.provision 'shell', path: '../lib/baseline.sh'
+
+ # run our bootstrapping
+ config.vm.provision 'shell', path: 'bootstrap.sh'
#################
# FINAL CLEANUP #
#################
- # set RSRESEAL to... anything if you want to snap an image of this box
+ # set RESEAL to... anything if you want to snap an image of this box
# not setting the environment variable will cause the system to come
# up fully and not be in a resealable state
- if ENV['RSRESEAL']
+ if ENV['RESEAL']
config.vm.provision 'shell', path: '../lib/system_reseal.sh'
end
end
# vim: sw=4 ts=4 sts=4 et tw=72 :
-yum clean all
-yum update -q -y
-
# Install minimal python requirements to get virtualenv going
# Additional python dependencies should be installed via JJB configuration
# inside project jobs using a virtualenv setup.
# Chrome need a other library named chromedriver so let start with
# one already supported with selenium.
yum install -y -q firefox xorg-x11-server-Xvfb
-
-# To handle the prompt style that is expected all over the environment
-# with how use use robotframework we need to make sure that it is
-# consistent for any of the users that are created during dynamic spin
-# ups
-echo 'PS1="[\u@\h \W]> "' >> /etc/skel/.bashrc
rh_systems() {
# Handle the occurance where SELINUX is actually disabled
- if [ `grep SELINUX=permissive /etc/selinux/config` ]; then
- # make sure that the filesystem is properly labelled.
- # it could be not fully labeled correctly if it was just switched
- # from disabled, the autorelabel misses some things
- # skip relabelling on /dev as it will generally throw errors
- restorecon -R -e /dev /
-
- # enable enforcing mode from the very start
- setenforce enforcing
-
- # configure system for enforcing mode on next boot
- sed -i 's/SELINUX=permissive/SELINUX=enforcing/' /etc/selinux/config
- else
- sed -i 's/SELINUX=disabled/SELINUX=permissive/' /etc/selinux/config
- touch /.autorelabel
-
- echo "*******************************************"
- echo "** SYSTEM REQUIRES A RESTART FOR SELINUX **"
- echo "*******************************************"
- fi
+ SELINUX=$(grep -E '^SELINUX=(disabled|permissive|enforcing)$' /etc/selinux/config)
+ MODE=$(echo "$SELINUX" | cut -f 2 -d '=')
+ case "$MODE" in
+ permissive)
+ echo "************************************"
+ echo "** SYSTEM ENTERING ENFORCING MODE **"
+ echo "************************************"
+ # make sure that the filesystem is properly labelled.
+ # it could be not fully labeled correctly if it was just switched
+ # from disabled, the autorelabel misses some things
+ # skip relabelling on /dev as it will generally throw errors
+ restorecon -R -e /dev /
+
+ # enable enforcing mode from the very start
+ setenforce enforcing
+
+ # configure system for enforcing mode on next boot
+ sed -i 's/SELINUX=permissive/SELINUX=enforcing/' /etc/selinux/config
+ ;;
+ disabled)
+ sed -i 's/SELINUX=disabled/SELINUX=permissive/' /etc/selinux/config
+ touch /.autorelabel
+
+ echo "*******************************************"
+ echo "** SYSTEM REQUIRES A RESTART FOR SELINUX **"
+ echo "*******************************************"
+ ;;
+ enforcing)
+ echo "*********************************"
+ echo "** SYSTEM IS IN ENFORCING MODE **"
+ echo "*********************************"
+ ;;
+ esac
# Allow jenkins access to alternatives command to switch java version
cat <<EOF >/etc/sudoers.d/89-jenkins-user-defaults
# add in components we need or want on systems
echo "---> Installing base packages"
- yum install -y -q @base
+ yum install -y -q @base https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
# separate group installs from package installs since a non-existing
# group with dnf based systems (F21+) will fail the install if such
# a group does not exist
echo "---> Configuring OpenJDK"
yum install -y -q 'java-*-openjdk-devel'
- FACTER_OS=`/usr/bin/facter operatingsystem`
- FACTER_OSVER=`/usr/bin/facter operatingsystemrelease`
+ FACTER_OS=$(/usr/bin/facter operatingsystem)
+ FACTER_OSVER=$(/usr/bin/facter operatingsystemrelease)
case "$FACTER_OS" in
Fedora)
if [ "$FACTER_OSVER" -ge "21" ]
cat <<EOF >/etc/sudoers.d/89-jenkins-user-defaults
Defaults:jenkins !requiretty
jenkins ALL = NOPASSWD: /usr/bin/update-alternatives
+EOF
+
+ export DEBIAN_FRONTEND=noninteractive
+ cat <<EOF >> /etc/apt/apt.conf
+APT {
+ Get {
+ Assume-Yes "true";
+ allow-change-held-packages "true";
+ allow-downgrades "true";
+ allow-remove-essential "true";
+ };
+};
+
+Dpkg::Options {
+ "--force-confdef";
+ "--force-confold";
+};
+
EOF
echo "---> Updating operating system"
- apt-get update -qq
- apt-get upgrade -y --force-yes -qq
+ apt-get update
+ apt-get upgrade
# add in stuff we know we need
echo "---> Installing base packages"
- apt-get install -y --force-yes -qq unzip xz-utils puppet git libxml-xpath-perl
+ apt-get install unzip xz-utils puppet git libxml-xpath-perl
# install Java 7
echo "---> Configuring OpenJDK"
- apt-get install -y --force-yes -qq openjdk-7-jdk
+ apt-get install openjdk-7-jdk
# make jdk8 available
add-apt-repository -y ppa:openjdk-r/ppa
- apt-get update -qq
+ apt-get update
# We need to force openjdk-8-jdk to install
- apt-get install -y -qq openjdk-8-jdk
+ apt-get install openjdk-8-jdk
# make sure that we still default to openjdk 7
update-alternatives --set java /usr/lib/jvm/java-7-openjdk-amd64/jre/bin/java
# Do any Distro specific installations here
echo "Checking distribution"
- FACTER_OS=`/usr/bin/facter operatingsystem`
+ FACTER_OS=$(/usr/bin/facter operatingsystem)
case "$FACTER_OS" in
RedHat|CentOS)
- if [ `/usr/bin/facter operatingsystemrelease | /bin/cut -d '.' -f1` = "7" ]; then
+ if [ "$(/usr/bin/facter operatingsystemrelease | /bin/cut -d '.' -f1)" = "7" ]; then
echo
echo "---> CentOS 7"
echo "No extra steps currently for CentOS 7"
}
echo "---> Attempting to detect OS"
-# OS selector
-if [ -f /usr/bin/yum ]
-then
- OS='RH'
-else
- OS='UBUNTU'
-fi
-
-case "$OS" in
- RH)
+# upstream cloud images use the distro name as the initial user
+ORIGIN=$(logname)
+
+case "${ORIGIN}" in
+ fedora|centos)
echo "---> RH type system detected"
rh_systems
;;
- UBUNTU)
+ ubuntu)
echo "---> Ubuntu system detected"
ubuntu_systems
;;
exit 1;
fi
-# clean-up from any prior cloud-init networking
-rm -rf /etc/sysconfig/network-scripts/ifcfg-eth*
-
rm -rf /etc/Pegasus/*.cnf /etc/Pegasus/*.crt /etc/Pegasus/*.csr /etc/Pegasus/*.pem /etc/Pegasus/*.srl /root/anaconda-ks.cfg /root/anaconda-post.log /root/initial-setup-ks.cfg /root/install.log /root/install.log.syslog /var/cache/fontconfig/* /var/cache/gdm/* /var/cache/man/* /var/lib/AccountService/users/* /var/lib/fprint/* /var/lib/logrotate.status /var/log/*.log* /var/log/BackupPC/LOG /var/log/ConsoleKit/* /var/log/anaconda.syslog /var/log/anaconda/* /var/log/apache2/*_log /var/log/apache2/*_log-* /var/log/apt/* /var/log/aptitude* /var/log/audit/* /var/log/btmp* /var/log/ceph/*.log /var/log/chrony/*.log /var/log/cron* /var/log/cups/*_log /var/log/debug* /var/log/dmesg* /var/log/exim4/* /var/log/faillog* /var/log/gdm/* /var/log/glusterfs/*glusterd.vol.log /var/log/glusterfs/glusterfs.log /var/log/httpd/*log /var/log/installer/* /var/log/jetty/jetty-console.log /var/log/journal/* /var/log/lastlog* /var/log/libvirt/libvirtd.log /var/log/libvirt/lxc/*.log /var/log/libvirt/qemu/*.log /var/log/libvirt/uml/*.log /var/log/lightdm/* /var/log/mail/* /var/log/maillog* /var/log/messages* /var/log/ntp /var/log/ntpstats/* /var/log/ppp/connect-errors /var/log/rhsm/* /var/log/sa/* /var/log/secure* /var/log/setroubleshoot/*.log /var/log/spooler* /var/log/squid/*.log /var/log/syslog* /var/log/tallylog* /var/log/tuned/tuned.log /var/log/wtmp* /var/named/data/named.run
-rm -rf ~/.viminfo /etc/ssh/ssh*key*
+rm -rf ~/.viminfo /etc/ssh/ssh*key* ~/.ssh/* /root/.ssh/* /home/$(logname)/.ssh/*
# kill any cloud-init related bits
rm -rf /var/lib/cloud/*
-if [ -e /usr/bin/facter ]
-then
- if [ `/usr/bin/facter operatingsystem` = 'Ubuntu' ]
- then
- rm -rf /etc/hostname* /etc/hosts /etc/network/interfaces /etc/network/interfaces.*.bak~
- cat <<EOINT >> /etc/network/interfaces
-# Used by ifup(8) and ifdown(8). See the interfaces(5) manpage or
-# /usr/share/doc/ifupdown/examples for more information.
-# The loopback network interface
-auto lo
-iface lo inet loopback
-EOINT
- fi
-fi
-
# cleanup /vagrant
rm -rf /vagrant
+# Force a system sync and sleep to get around any SSD issues
+echo "Forcing sync and sleep for 10sec"
+sync
+sleep 10
+
echo "********************************************"
echo "* PLEASE SNAPSHOT IMAGE AT THIS TIME *"
echo "********************************************"
VAGRANTFILE_API_VERSION = "2"
Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
- # root off of the rackspace provider dummy box
+ # root off of the openstack provider dummy box
config.vm.box = "dummy"
-
- # rackspace systems, even with cloud-init
- # don't seem to have the cloud int user ${osname} (or similar)
- # getting the ssh key for some reason, root does for sure
- # so use that
config.ssh.username = 'root'
- # Fedora and EL systems default to requiring tty for sudo
- # This should have been disabled with the Vagrant ready
- # base box conversion (see rackspace-convert-base vagrant)
- # but just to be safe
- config.ssh.pty = true
-
# make sure to set the following in your
- # ~/.vagrant.d/boxes/dummy/0/rackspace/Vagrantfile
- # rs.username
- # rs.api_key
- # rs.rackspace_region
+ # ~/.vagrant.d/boxes/dummy/0/openstack/Vagrantfile
+ #
+ # os.openstack_auth_url
+ # os.endpoint_type
+ # os.flavor
+ # os.tenant_name
+ # os.username
+ # os.password
+ # os.networks
#
# If you are not using an SSH token / smartcard also set this
- # rs.key_name
- # config.ssh.private_key_path -- set this outside the rackspace block
+ # os.key_name
+ # config.ssh.private_key_path -- set this outside the openstack block
# in your base box
- config.vm.provider :rackspace do |rs|
- # create these base builds always on the smallest system possible
- rs.flavor = 'general1-1'
-
- # allow for switching to ORD cloud but default to DFW
- if (ENV['RSREGION'] == 'ord')
- rs.rackspace_region = :ord
+ config.vm.provider :openstack do |os, override|
+ if ENV['BOX']
+ override.vm.box = ENV['BOX']
else
- rs.rackspace_region = :dfw
+ override.vm.box = 'dummy'
end
+ config.ssh.username = 'centos'
+ os.flavor = 'm1.small'
- # Default the Fedora 20 Vagrant ready image unless overriden by a RSIMAGE
- # environment variable
- if ENV['RSIMAGE']
- rs.image = ENV['RSIMAGE']
+ # require an IMAGE to be passed in
+ # IMAGE must be a human name and not an image ID!
+ if ENV['IMAGE']
+ os.image = ENV['IMAGE']
else
- rs.image = 'Fedora 20 - Vagrant ready'
+ os.image = 'BAD IMAGE'
+ override.ssh.username = 'baduser'
end
- end
- # Explicitlly set default shared folder and load lib folder
- config.vm.synced_folder ".", "/vagrant"
- config.vm.synced_folder "../lib/", "/vagrant/lib"
+ case ENV['IMAGE']
+ when /.*ubuntu.*/i
+ override.ssh.username = 'ubuntu'
- # run our bootstrapping for the ovsdb-devstack system
- config.vm.provision 'shell', path: 'bootstrap.sh'
+ when /.*fedora.*/i
+ override.ssh.username = 'fedora'
+ # take care of the tty requirement by fedora for sudo
+ os.user_data = "#!/bin/bash
+/bin/sed -i 's/ requiretty/ !requiretty/' /etc/sudoers;"
- #################
- # LF NETWORKING #
- #################
+ when /.*centos.*/i
+ override.ssh.username = 'centos'
- if ENV['LFNETWORK']
- # reconfigure the network setup to support our special private setup
- config.vm.provision 'shell', path: '../lib/lf-networking/configure_lf_infra.sh',
- args: ENV['RSSUBDOMAIN']
+ # take care of the tty requirement by centos for sudo
+ os.user_data = "#!/bin/bash
+/bin/sed -i 's/ requiretty/ !requiretty/' /etc/sudoers;"
+ end
end
+ # Explicitly set default shared folder and load lib folder
+ config.vm.synced_folder ".", "/vagrant"
+ config.vm.synced_folder "../lib/", "/vagrant/lib"
+
+ # Do a full system update and enable enforcing if needed
+ config.vm.provision 'shell', path: '../lib/baseline.sh'
+
+ # run our bootstrapping
+ config.vm.provision 'shell', path: 'bootstrap.sh'
#################
# FINAL CLEANUP #
#################
- # set RSRESEAL to... anything if you want to snap an image of this box
+ # set RESEAL to... anything if you want to snap an image of this box
# not setting the environment variable will cause the system to come
# up fully and not be in a resealable state
- if ENV['RSRESEAL']
+ if ENV['RESEAL']
config.vm.provision 'shell', path: '../lib/system_reseal.sh'
end
end
#!/bin/bash
-# enable enforcing mode from the very start
-setenforce enforcing
-
-# configure system for enforcing mode on next boot
-sed -i 's/SELINUX=permissive/SELINUX=enforcing/' /etc/selinux/config
-
-yum clean all
-yum update -y
+echo '---> Installing non-baseline requirements'
yum install -q -y deltarpm python{,-{crypto,devel,lxml,setuptools}} \
- @development {lib{xml2,xslt,ffi},openssl}-devel \
- java git sudo
-
-# figure out what the latest kernel installed is and switch to it
-# NOTE: This is done like this becase the Rackspace F20 images are using
-# extlinux / syslinux and don't switch to the newest kernel on update
-NEWKERNEL=`rpm -qa | grep kernel-3 | sort -r | head -1 | cut -c 8-`
-if [ -f /boot/extlinux.conf ]; then
- BOOTLABEL=`grep ${NEWKERNEL} /boot/extlinux.conf | grep LABEL | cut -c 7-`
- sed -i "s/ONTIMEOUT linux/ONTIMEOUT ${BOOTLABEL}/" /boot/extlinux.conf
-fi
+ @development {lib{xml2,xslt,ffi},openssl}-devel
+echo '---> Updating net link setup'
if [ ! -f /etc/udev/rules.d/80-net-setup-link.rules ]; then
ln -s /dev/null /etc/udev/rules.d/80-net-setup-link.rules
fi
VAGRANTFILE_API_VERSION = "2"
Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
- # root off of the rackspace provider dummy box
+ # root off of the openstack provider dummy box
config.vm.box = "dummy"
-
- # rackspace systems, even with cloud-init
- # don't seem to have the cloud int user ${osname} (or similar)
- # getting the ssh key for some reason, root does for sure
- # so use that
config.ssh.username = 'root'
- # Only baseline image should have config.ssh.pty = true
- # Ensure we disable it.
- config.ssh.pty = false
-
# make sure to set the following in your
- # ~/.vagrant.d/boxes/dummy/0/rackspace/Vagrantfile
- # rs.username
- # rs.api_key
- # rs.rackspace_region
+ # ~/.vagrant.d/boxes/dummy/0/openstack/Vagrantfile
+ #
+ # os.openstack_auth_url
+ # os.endpoint_type
+ # os.flavor
+ # os.tenant_name
+ # os.username
+ # os.password
+ # os.networks
#
# If you are not using an SSH token / smartcard also set this
- # rs.key_name
- # config.ssh.private_key_path -- set this outside the rackspace block
+ # os.key_name
+ # config.ssh.private_key_path -- set this outside the openstack block
# in your base box
- config.vm.provider :rackspace do |rs|
- # create these base builds always on the smallest system possible
- rs.flavor = 'general1-1'
-
- # allow for switching to ORD cloud but default to DFW
- if (ENV['RSREGION'] == 'ord')
- rs.rackspace_region = :ord
+ config.vm.provider :openstack do |os, override|
+ if ENV['BOX']
+ override.vm.box = ENV['BOX']
else
- rs.rackspace_region = :dfw
+ override.vm.box = 'dummy'
end
+ config.ssh.username = 'centos'
+ os.flavor = 'm1.small'
- # Default the Ubuntu 14.04 - Vagrant ready image unless overriden by a RSIMAGE
- # environment variable
- if ENV['RSIMAGE']
- rs.image = ENV['RSIMAGE']
+ # require an IMAGE to be passed in
+ # IMAGE must be a human name and not an image ID!
+ if ENV['IMAGE']
+ os.image = ENV['IMAGE']
else
- rs.image = 'Ubuntu 14.04 - Vagrant ready'
+ os.image = 'BAD IMAGE'
+ override.ssh.username = 'baduser'
+ end
+
+ case ENV['IMAGE']
+ when /.*ubuntu.*/i
+ override.ssh.username = 'ubuntu'
+
+ when /.*fedora.*/i
+ override.ssh.username = 'fedora'
+
+ # take care of the tty requirement by fedora for sudo
+ os.user_data = "#!/bin/bash
+/bin/sed -i 's/ requiretty/ !requiretty/' /etc/sudoers;"
+
+ when /.*centos.*/i
+ override.ssh.username = 'centos'
+
+ # take care of the tty requirement by centos for sudo
+ os.user_data = "#!/bin/bash
+/bin/sed -i 's/ requiretty/ !requiretty/' /etc/sudoers;"
end
end
config.vm.synced_folder ".", "/vagrant"
config.vm.synced_folder "../lib/", "/vagrant/lib"
- # run our bootstrapping for the system
- config.vm.provision 'shell', path: 'bootstrap.sh'
-
-
- #################
- # LF NETWORKING #
- #################
+ # Do a full system update and enable enforcing if needed
+ config.vm.provision 'shell', path: '../lib/baseline.sh'
- if ENV['LFNETWORK']
- # reconfigure the network setup to support our special private setup
- config.vm.provision 'shell', path: '../lib/lf-networking/configure_lf_infra.sh',
- args: ENV['RSSUBDOMAIN']
- end
+ # run our bootstrapping
+ config.vm.provision 'shell', path: 'bootstrap.sh'
#################
# vim: sw=4 ts=4 sts=4 et tw=72 :
-echo "---> Updating operating system"
-apt-get update -qq
-DEBIAN_FRONTEND=noninteractive apt-get upgrade -y --force-yes -qq \
- -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold"
+# Ensure that necessary variables are set to enable noninteractive mode in
+# commands.
+export DEBIAN_FRONTEND=noninteractive
# To handle the prompt style that is expected all over the environment
# with how use use robotframework we need to make sure that it is
# ups
echo 'PS1="[\u@\h \W]> "' >> /etc/skel/.bashrc
-# Install OpenVSwitch 2.3.1
+echo '---> Install OpenVSwitch 2.3.1'
add-apt-repository -y ppa:vshn/openvswitch
-apt-get update -qq
apt-get install -y --force-yes -qq openvswitch-switch
-# Install CPqD
+echo '---> Installing CPqD and dependencies'
apt-get install -y --force-yes -qq build-essential cmake flex
apt-get install -y --force-yes -qq libpcre++-dev libxerces-c-dev libpcap-dev libboost-all-dev
make install
cd ..
-# Install mininet 2.2.1
+echo '---> Installing mininet 2.2.1'
git clone git://github.com/mininet/mininet
cd mininet
git checkout -b 2.2.1 2.2.1
cd ..
mininet/util/install.sh -nf
-# cbench installation for running openflow performance tests
-
+echo '---> Installing cbench for openflow performance tests'
OF_DIR=$HOME/openflow # Directory that contains OpenFlow code
OFLOPS_DIR=$HOME/oflops # Directory that contains oflops repo
make
make install
-# Installing exabgp
+echo '---> Installing exabgp'
apt-get install -y --force-yes -qq exabgp
-
VAGRANTFILE_API_VERSION = "2"
Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
- # root off of the rackspace provider dummy box
+ # root off of the openstack provider dummy box
config.vm.box = "dummy"
-
- # rackspace systems, even with cloud-init
- # don't seem to have the cloud int user ${osname} (or similar)
- # getting the ssh key for some reason, root does for sure
- # so use that
config.ssh.username = 'root'
- # Only baseline image should have config.ssh.pty = true
- # Ensure we disable it.
- config.ssh.pty = false
-
# make sure to set the following in your
- # ~/.vagrant.d/boxes/dummy/0/rackspace/Vagrantfile
- # rs.username
- # rs.api_key
- # rs.rackspace_region
+ # ~/.vagrant.d/boxes/dummy/0/openstack/Vagrantfile
+ #
+ # os.openstack_auth_url
+ # os.endpoint_type
+ # os.flavor
+ # os.tenant_name
+ # os.username
+ # os.password
+ # os.networks
#
# If you are not using an SSH token / smartcard also set this
- # rs.key_name
- # config.ssh.private_key_path -- set this outside the rackspace block
+ # os.key_name
+ # config.ssh.private_key_path -- set this outside the openstack block
# in your base box
- config.vm.provider :rackspace do |rs|
- # create these base builds always on the smallest system possible
- rs.flavor = 'general1-1'
-
- # allow for switching to ORD cloud but default to DFW
- if (ENV['RSREGION'] == 'ord')
- rs.rackspace_region = :ord
+ config.vm.provider :openstack do |os, override|
+ if ENV['BOX']
+ override.vm.box = ENV['BOX']
else
- rs.rackspace_region = :dfw
+ override.vm.box = 'dummy'
end
+ config.ssh.username = 'centos'
+ os.flavor = 'm1.small'
- # Default the Ubuntu 14.04 - Vagrant ready image unless overriden by a RSIMAGE
- # environment variable
- if ENV['RSIMAGE']
- rs.image = ENV['RSIMAGE']
+ # require an IMAGE to be passed in
+ # IMAGE must be a human name and not an image ID!
+ if ENV['IMAGE']
+ os.image = ENV['IMAGE']
else
- rs.image = 'Ubuntu 14.04 - Vagrant ready'
+ os.image = 'BAD IMAGE'
+ override.ssh.username = 'baduser'
+ end
+
+ case ENV['IMAGE']
+ when /.*ubuntu.*/i
+ override.ssh.username = 'ubuntu'
+
+ when /.*fedora.*/i
+ override.ssh.username = 'fedora'
+
+ # take care of the tty requirement by fedora for sudo
+ os.user_data = "#!/bin/bash
+/bin/sed -i 's/ requiretty/ !requiretty/' /etc/sudoers;"
+
+ when /.*centos.*/i
+ override.ssh.username = 'centos'
+
+ # take care of the tty requirement by centos for sudo
+ os.user_data = "#!/bin/bash
+/bin/sed -i 's/ requiretty/ !requiretty/' /etc/sudoers;"
end
end
config.vm.synced_folder ".", "/vagrant"
config.vm.synced_folder "../lib/", "/vagrant/lib"
- # run our bootstrapping for the system
- config.vm.provision 'shell', path: 'bootstrap.sh'
-
-
- #################
- # LF NETWORKING #
- #################
+ # Do a full system update and enable enforcing if needed
+ config.vm.provision 'shell', path: '../lib/baseline.sh'
- if ENV['LFNETWORK']
- # reconfigure the network setup to support our special private setup
- config.vm.provision 'shell', path: '../lib/lf-networking/configure_lf_infra.sh',
- args: ENV['RSSUBDOMAIN']
- end
+ # run our bootstrapping
+ config.vm.provision 'shell', path: 'bootstrap.sh'
#################
# vim: sw=4 ts=4 sts=4 et tw=72 :
-echo "---> Updating operating system"
-apt-get update -qq
-DEBIAN_FRONTEND=noninteractive apt-get upgrade -y --force-yes -qq \
- -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold"
+# Ensure that necessary variables are set to enable noninteractive mode in
+# commands.
+export DEBIAN_FRONTEND=noninteractive
# To handle the prompt style that is expected all over the environment
# with how use use robotframework we need to make sure that it is
# ups
echo 'PS1="[\u@\h \W]> "' >> /etc/skel/.bashrc
-# Install mininet
-# apt-get install -y --force-yes -qq mininet
-
-# Install mininet with OF13 patch
+echo '---> Install mininet with OF13 patch'
cd /tmp
cat > newOptions.patch <<EOF
--- mininet/node.py 2014-09-12 13:48:03.165628683 +0100
cd ./util
./install.sh -nfv
-# Install CPqD
+echo '---> Install CPqD and dependencies'
apt-get install -y --force-yes -qq build-essential cmake flex
apt-get install -y --force-yes -qq libpcre++-dev libxerces-c-dev libpcap-dev libboost-all-dev
make install
cd ..
-# cbench installation for running openflow performance tests
-
+echo '---> Installing cbench for running openflow performance tests'
OF_DIR=$HOME/openflow # Directory that contains OpenFlow code
OFLOPS_DIR=$HOME/oflops # Directory that contains oflops repo
make
make install
-# Install vlan for vlan based tests in VTN suites
+echo '---> Installing vlan for vlan based tests in VTN suites'
apt-get install -y --force-yes -qq vlan