##############################################################################
import datetime
+import glob
import os
import re
+from string import Template
import subprocess
import sys
+import tarfile
+import urllib
from urllib2 import urlopen
try:
sys.stderr.write("Else, do `pip install -r requirements.txt` in a venv.\n")
raise
+# Path to directory for cache artifacts, alongside this script
+cache_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "cache")
+
+# Templates that can be specialized into common artifact names per-build
+# NB: Templates can't be concatenated with other Templates or strings, or
+# cast to strings for concatenation. If they could, we would do elegant
+# refactoring like concatenating paths to templates here and only calling
+# Template.substitute in the build_rpm function.
+# Base name of an ODL distribution archive for a given build
+distro_template = Template("opendaylight-$version_major.$version_minor."
+                           "$version_patch-$pkg_version")
+# Name of ODL's systemd unit file, keyed by packaging-repo commit, and the
+# Gerrit gitweb URL the unit file is fetched from
+unitfile_template = Template("opendaylight-$sysd_commit.service")
+unitfile_url_template = Template("https://git.opendaylight.org/gerrit/"
+                                 "gitweb?p=integration/packaging.git;a="
+                                 "blob_plain;f=packages/unitfiles/"
+                                 "opendaylight.service;hb=$sysd_commit")
+
def extract_version(url):
"""Determine ODL version information from the ODL tarball build URL
# distribution-karaf-0.3.4-Lithium-SR4.tar.gz
# major_version = 3
# minor_version = 4
- re_out = re.search(r'\d\.(\d)\.(\d)', url)
+ re_out = re.search(r'\d\.(\d+)\.(\d)', url)
version["version_major"] = re_out.group(1)
version["version_minor"] = re_out.group(2)
return version
-def get_snap_url(version_major, version_minor=None):
-    """Fetches tarball url for snapshot releases using version information
+def get_snap_url(version_major):
+    """Get the most recent snapshot build of the given ODL major version
+
+    :arg str version_major: ODL major version to get latest snapshot of
+    :return str snapshot_url: URL to latest snapshot tarball of ODL version
-    :arg str version_major: Major version for snapshot build
-    :arg str version_minor: Minor version for snapshot build(optional)
-    :return arg snapshot_url: URL of the snapshot release
     """
-    parent_dir = "https://nexus.opendaylight.org/content/repositories/" \
-                 "opendaylight.snapshot/org/opendaylight/integration/{}/" \
-                 .format(get_distro_name_prefix(version_major))
-
-    # If the minor verison is given, get the sub-directory directly
-    # else, find the latest sub-directory
-    sub_dir = ''
-    snapshot_dir = ''
-    if version_minor:
-        sub_dir = '0.' + version_major + '.' + version_minor + '-SNAPSHOT/'
-        snapshot_dir = parent_dir + sub_dir
-    else:
-        subdir_url = urlopen(parent_dir)
-        content = subdir_url.read().decode('utf-8')
-        all_dirs = BeautifulSoup(content, 'html.parser')
-
-        # Loops through all the sub-directories present and stores the
-        # latest sub directory as sub-directories are already sorted
-        # in early to late order.
-        for tag in all_dirs.find_all('a', href=True):
-            # Checks if the sub-directory name is of the form
-            # '0.<major_version>.<minor_version>-SNAPSHOT'.
-            dir = re.search(r'\/(\d)\.(\d)\.(\d).(.*)\/', tag['href'])
-            # If the major version matches the argument provided
-            # store the minor version, else ignore.
-            if dir:
-                if dir.group(2) == version_major:
-                    snapshot_dir = tag['href']
-                    version_minor = dir.group(3)
-
-    try:
-        req = requests.get(snapshot_dir)
-        req.raise_for_status()
-    except HTTPError:
-        print "Could not find the snapshot directory"
-    else:
-        urlpath = urlopen(snapshot_dir)
-        content = urlpath.read().decode('utf-8')
-        html_content = BeautifulSoup(content, 'html.parser')
-        # Loops through all the files present in `snapshot_dir`
-        # and stores the url of latest tarball because files are
-        # already sorted in early to late order.
-        for tag in html_content.find_all('a', href=True):
-            if tag['href'].endswith('tar.gz'):
-                snapshot_url = tag['href']
-    return snapshot_url
+    # Dir that contains all snapshot build dirs, varies based on Karaf 3/4
+    parent_dir_url = "https://nexus.opendaylight.org/content/repositories/" \
+                     "opendaylight.snapshot/org/opendaylight/integration/{}/" \
+                     .format(get_distro_name_prefix(version_major))
+
+    # Get HTML of dir that contains all snapshot dirs
+    parent_dir_html = urlopen(parent_dir_url).read().decode('utf-8')
+
+    # Get most recent minor version of the given major version
+    # NB: Allow multi-digit minor versions and compare them numerically;
+    # a lexicographic max would sort "10" before "9"
+    version_minor = max(re.findall(
+        r'>\d\.{}\.(\d+)-SNAPSHOT\/'.format(version_major),
+        parent_dir_html), key=int)
+
+    # Dir that contains snapshot builds for the given major version
+    snapshot_dir_url = parent_dir_url + "0.{}.{}-SNAPSHOT/".format(
+        version_major,
+        version_minor)
+
+    # Get HTML of dir that contains snapshot builds for given major version
+    snapshot_dir_html = urlopen(snapshot_dir_url).read().decode('utf-8')
+
+    # Find most recent URL to tarball, ie most recent snapshot build
+    # NB: Nexus dir listings are sorted early-to-late, so the last match wins
+    return re.findall(r'href="(.*\.tar\.gz)"', snapshot_dir_html)[-1]
def get_sysd_commit():
raise ValueError("Unknown package type: {}".format(pkg_type))
-def get_distro_name_prefix(version_major):
-    """Return Karaf 3 or 4-style distro name prefix based on ODL major version
+def get_distro_name_prefix(version_major, download_url=""):
+    """Return distro name prefix based on ODL major version or distro URL.
-    :arg str major_version: OpenDaylight major version umber
-    :return str distro_name_style: Karaf 3 or 4-style distro name prefix
+    :arg str version_major: OpenDaylight major version number
+    :arg str download_url: URL to ODL distribution
+    :return str distro_prefix: MR, Karaf 3 or 4-style distro name prefix
     """
+    # Candidate prefixes: managed release, Karaf 3-style, Karaf 4-style
+    mrel_prefix = "opendaylight"
+    k3_prefix = "distribution-karaf"
+    k4_prefix = "karaf"
+    # Nexus URL bases that identify which prefix a download URL uses
+    mrel_url_base = "https://nexus.opendaylight.org/content/repositories/public/org/opendaylight/integration/opendaylight/"
+    k3_url_base = "https://nexus.opendaylight.org/content/repositories/public/org/opendaylight/integration/distribution-karaf/"
+    k4_url_base = "https://nexus.opendaylight.org/content/repositories/public/org/opendaylight/integration/karaf/"
+
+    # When a download URL is given, detect the prefix from the URL itself;
+    # this takes precedence over the version-based fallback below
+    if mrel_url_base in download_url:
+        return mrel_prefix
+    elif k3_url_base in download_url:
+        return k3_prefix
+    elif k4_url_base in download_url:
+        return k4_prefix
+
+    # No URL match (or no URL given): infer prefix from major version alone
     if int(version_major) < 7:
         # ODL versions before Nitrogen use Karaf 3, distribution-karaf- names
-        return "distribution-karaf"
+        return k3_prefix
     else:
         # ODL versions Nitrogen and after use Karaf 4, karaf- names
-        return "karaf"
+        return k4_prefix
+
+
+def cache_distro(build):
+    """Cache the OpenDaylight distribution to package as RPM/Deb.
+
+    Downloads the distro tarball if the download URL points at a .tar.gz,
+    else downloads the zip, extracts it and repackages it as a tarball.
+
+    :param build: Description of an RPM build
+    :type build: dict
+    :return str distro_tar_path: Path to cached distribution tarball
+
+    """
+    # Specialize templates for the given build
+    distro = distro_template.substitute(build)
+
+    # Append file extensions to get ODL distro zip/tarball names
+    distro_tar = distro + ".tar.gz"
+    distro_zip = distro + ".zip"
+
+    # Prepend cache dir path to get full paths to cached zip/tarball
+    distro_tar_path = os.path.join(cache_dir, distro_tar)
+    distro_zip_path = os.path.join(cache_dir, distro_zip)
+
+    # Cache OpenDaylight tarball to be packaged
+    if not os.path.isfile(distro_tar_path):
+        if build["download_url"].endswith(".tar.gz"):
+            print("Downloading: {}".format(build["download_url"]))
+            urllib.urlretrieve(build["download_url"], distro_tar_path)
+            print("Cached: {}".format(distro_tar))
+        # If download_url points at a zip, repackage as a tarball
+        elif build["download_url"].endswith(".zip"):
+            # NB: Check the full cached path, not the bare file name, else
+            # the cached zip is only found when cwd happens to be cache_dir
+            if not os.path.isfile(distro_zip_path):
+                print("URL is to a zip, will download and convert to tar.gz")
+                print("Downloading: {}".format(build["download_url"]))
+                urllib.urlretrieve(build["download_url"], distro_zip_path)
+                print("Downloaded {}".format(distro_zip_path))
+            else:
+                print("Already cached: {}".format(distro_zip_path))
+            # Extract zip archive
+            # NB: zipfile.ZipFile.extractall doesn't preserve permissions
+            # https://bugs.python.org/issue15795
+            subprocess.call(["unzip", "-oq", distro_zip_path, "-d", cache_dir])
+            # Get files in cache dir
+            cache_dir_ls_all = glob.glob(os.path.join(cache_dir, "*"))
+            # Remove pyc files that may be newer than just-extracted zip
+            cache_dir_ls = filter(lambda f: '.pyc' not in f, cache_dir_ls_all)
+            # Get the most recent file in cache dir, hopefully unzipped archive
+            unzipped_distro_path = max(cache_dir_ls, key=os.path.getctime)
+            print("Extracted: {}".format(unzipped_distro_path))
+            # Remove path from 'unzipped_distro_path', as will cd to dir below
+            unzipped_distro = os.path.basename(unzipped_distro_path)
+            # Using the full paths here creates those paths in the tarball,
+            # which breaks the build. There's a way to change the working dir
+            # during a single tar command using the system tar binary, but I
+            # don't see a way to do that with Python.
+            # TODO: Can this be done without changing directories?
+            # TODO: Try https://goo.gl/XMx5gb
+            cwd = os.getcwd()
+            os.chdir(cache_dir)
+            with tarfile.open(distro_tar, "w:gz") as tb:
+                tb.add(unzipped_distro)
+                print("Taring {} into {}".format(unzipped_distro, distro_tar))
+            os.chdir(cwd)
+            print("Cached: {}".format(distro_tar))
+    else:
+        print("Already cached: {}".format(distro_tar))
+
+    return distro_tar_path
+
+
+def cache_sysd(build):
+    """Cache the artifacts required for the given RPM build.
+
+    :param build: Description of an RPM build
+    :type build: dict
+    :return dict unitfile_path: Paths to cached unit file and unit file tarball
+
+    """
+    # Specialize templates for the given build
+    unitfile = unitfile_template.substitute(build)
+    unitfile_url = unitfile_url_template.substitute(build)
+
+    # Append file extension to get the unit file tarball name
+    unitfile_tar = unitfile + ".tar.gz"
+
+    # Prepend cache dir path to get full paths to cached unit file/tarball
+    unitfile_path = os.path.join(cache_dir, unitfile)
+    unitfile_tar_path = os.path.join(cache_dir, unitfile_tar)
+
+    # Download ODL's systemd unit file
+    if not os.path.isfile(unitfile_path):
+        urllib.urlretrieve(unitfile_url, unitfile_path)
+        print("Cached: {}".format(unitfile))
+    else:
+        print("Already cached: {}".format(unitfile_path))
+
+    # Cache ODL's systemd unit file as a tarball
+    if not os.path.isfile(unitfile_tar_path):
+        # Using the full paths here creates those paths in the tarball, which
+        # breaks the build. There's a way to change the working dir during a
+        # single tar command using the system tar binary, but I don't see a
+        # way to do that with Python.
+        # TODO: Is there a good way to do this without changing directories?
+        # TODO: Try https://goo.gl/XMx5gb
+        cwd = os.getcwd()
+        os.chdir(cache_dir)
+        # Create a .tar.gz archive containing ODL's systemd unitfile
+        with tarfile.open(unitfile_tar, "w:gz") as tb:
+            tb.add(unitfile)
+        os.chdir(cwd)
+
+        print("Cached: {}".format(unitfile_tar))
+    else:
+        print("Already cached: {}".format(unitfile_tar_path))
+
+    return {"unitfile_tar_path": unitfile_tar_path,
+            "unitfile_path": unitfile_path}