if not args.sysd_commit:
args.sysd_commit = vars.get_sysd_commit()
+ # If the flag `--build-latest-snap` is true, extract information
+ # from the snapshot URL using major version and minor version (optional)
+ if args.build_latest_snap:
+ if args.major:
+ build.update({'version_major': args.major})
+ if args.minor:
+ build.update({'version_minor': args.minor})
+ args.download_url = vars.get_snap_url(args.major, args.minor)
+
# If download_url is given, update version info
if args.download_url:
build.update({"download_url": args.download_url})
build.update(version)
# Common parameters for all new and snapshot builds
- build.update({"sysd_commit": args.sysd_commit,
+ build.update({"download_url": args.download_url,
+ "sysd_commit": args.sysd_commit,
"changelog_name": args.changelog_name,
"changelog_email": args.changelog_email,
"changelog_date": args.changelog_date,
})
- # If the flag `--build-latest-snap` is true, extract information
- # from the snapshot URL using major version and minor version(optional)
- # info, else proceed directly to build the RPM
- if args.build_latest_snap:
- if args.major:
- build.update({'version_major': args.major})
- if args.minor:
- build.update({'version_minor': args.minor})
- build_rpm.build_snapshot_rpm(build)
- else:
- build_rpm.build_rpm(build)
+ build_rpm.build_rpm(build)
import shutil
from string import Template
import subprocess
-import sys
-from urllib2 import urlopen
import cache.cache as cache
import specs.build_specs as build_specs
-try:
- from bs4 import BeautifulSoup
- import requests
- from requests.exceptions import HTTPError
-except ImportError:
- sys.stderr.write("We recommend using our included Vagrant env.\n")
- sys.stderr.write("Else, do `pip install -r requirements.txt` in a venv.\n")
- raise
-
-
# Common paths used in this script
# This file is assumed to be in the root of the RPM build logic's dir structure
project_root = os.path.dirname(os.path.abspath(__file__))
# Copy the RPMs/SRPMs from their output dir to the cache dir
shutil.copy(rpm_out_path, cache_dir)
shutil.copy(srpm_out_path, cache_dir)
-
-
-def build_snapshot_rpm(build):
- """Build latest snapshot RPMs fetching information from URL.
-
- :param build: Description of an RPM build, from parent_dir URL
- :type build: dict
-
- """
- parent_dir = "https://nexus.opendaylight.org/content/repositories/" \
- "opendaylight.snapshot/org/opendaylight/integration/"\
- "distribution-karaf/"
-
- # If the minor verison is given, get the sub-directory directly
- # else, find the latest sub-directory
- sub_dir = ''
- snapshot_dir = ''
- try:
- sub_dir = '0.' + build['version_major'] + '.' + \
- build['version_minor'] + '-SNAPSHOT/'
- snapshot_dir = parent_dir + sub_dir
- except KeyError:
- subdir_url = urlopen(parent_dir)
- content = subdir_url.read().decode('utf-8')
- all_dirs = BeautifulSoup(content, 'html.parser')
-
- # Loops through all the sub-directories present and stores the
- # latest sub directory as sub-directories are already sorted
- # in early to late order.
- for tag in all_dirs.find_all('a', href=True):
- # Checks if the sub-directory name is of the form
- # '0.<major_version>.<minor_version>-SNAPSHOT'.
- dir = re.search(r'\/(\d)\.(\d)\.(\d).(.*)\/', tag['href'])
- # If the major version matches the argument provided
- # store the minor version, else ignore.
- if dir:
- if dir.group(2) == build['version_major']:
- snapshot_dir = tag['href']
- build['version_minor'] = dir.group(3)
-
- try:
- req = requests.get(snapshot_dir)
- req.raise_for_status()
- except HTTPError:
- print "Could not find the snapshot directory"
- else:
- urlpath = urlopen(snapshot_dir)
- content = urlpath.read().decode('utf-8')
- html_content = BeautifulSoup(content, 'html.parser')
- # Loops through all the files present in `snapshot_dir`
- # and stores the url of latest tarball because files are
- # already sorted in early to late order.
- for tag in html_content.find_all('a', href=True):
- if tag['href'].endswith('tar.gz'):
- snapshot_url = tag['href']
-
- # Get download_url
- build['download_url'] = snapshot_url
-
- # Call `extract_version` function to get version information
- # except Major and Minor version which are already present
- version = extract_version(build['download_url'])
- build['version_patch'] = version['version_patch']
- build['rpm_release'] = version['rpm_release']
- build['codename'] = version['codename']
- urlpath.close()
-
- build_rpm(build)
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2016 Daniel Farrell and Others. All rights reserved.
+#
+# This program and the accompanying materials are made available under the
+# terms of the Eclipse Public License v1.0 which accompanies this distribution,
+# and is available at http://www.eclipse.org/legal/epl-v10.html
+##############################################################################
+
import datetime
import re
import subprocess
+import sys
+from urllib2 import urlopen
+
+try:
+ from bs4 import BeautifulSoup
+ import requests
+ from requests.exceptions import HTTPError
+except ImportError:
+ sys.stderr.write("We recommend using our included Vagrant env.\n")
+ sys.stderr.write("Else, do `pip install -r requirements.txt` in a venv.\n")
+ raise
def extract_version(url):
return version
+def get_snap_url(version_major, version_minor):
+ """Fetch the tarball URL for snapshot releases using version information.
+
+ :arg str version_major: Major version for snapshot build
+ :arg str version_minor: Minor version for snapshot build (optional)
+ :return: URL of the snapshot release
+ """
+ parent_dir = "https://nexus.opendaylight.org/content/repositories/" \
+ "opendaylight.snapshot/org/opendaylight/integration/"\
+ "distribution-karaf/"
+
+ # If the minor version is given, get the sub-directory directly
+ # else, find the latest sub-directory
+ sub_dir = ''
+ snapshot_dir = ''
+ if version_minor:
+ sub_dir = '0.' + version_major + '.' + version_minor + '-SNAPSHOT/'
+ snapshot_dir = parent_dir + sub_dir
+ else:
+ subdir_url = urlopen(parent_dir)
+ content = subdir_url.read().decode('utf-8')
+ all_dirs = BeautifulSoup(content, 'html.parser')
+
+ # Loops through all the sub-directories present and stores the
+ # latest sub directory as sub-directories are already sorted
+ # in early to late order.
+ for tag in all_dirs.find_all('a', href=True):
+ # Checks if the sub-directory name is of the form
+ # '0.<major_version>.<minor_version>-SNAPSHOT'.
+ dir = re.search(r'\/(\d)\.(\d)\.(\d).(.*)\/', tag['href'])
+ # If the major version matches the argument provided
+ # store the minor version, else ignore.
+ if dir:
+ if dir.group(2) == version_major:
+ snapshot_dir = tag['href']
+ version_minor = dir.group(3)
+
+ try:
+ req = requests.get(snapshot_dir)
+ req.raise_for_status()
+ except HTTPError:
+ print "Could not find the snapshot directory"
+ else:
+ urlpath = urlopen(snapshot_dir)
+ content = urlpath.read().decode('utf-8')
+ html_content = BeautifulSoup(content, 'html.parser')
+ # Loops through all the files present in `snapshot_dir`
+ # and stores the url of latest tarball because files are
+ # already sorted in early to late order.
+ for tag in html_content.find_all('a', href=True):
+ if tag['href'].endswith('tar.gz'):
+ snapshot_url = tag['href']
+ return snapshot_url
+
+
def get_sysd_commit():
"""Get latest Int/Pack repo commit hash"""