if [ "$GERRIT_BRANCH" = "master" ]; then
RTD_BUILD_VERSION=latest
else
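# slashes in the branch name are replaced with dashes for the RTD version (e.g. stable/foo -> stable-foo)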
- RTD_BUILD_VERSION=$(echo $GERRIT_BRANCH | sed 's@/@-@g')
+ RTD_BUILD_VERSION=$(echo "$GERRIT_BRANCH" | sed 's@/@-@g')
fi
# shellcheck disable=SC1083
# Create the string for nodes
for i in $(seq 1 "${NUM_ODL_SYSTEM}") ; do
CONTROLLERIP=ODL_SYSTEM_${i}_IP
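# ${!CONTROLLERIP} below uses bash indirect expansion: it reads the value of the variable named by CONTROLLERIP (e.g. ODL_SYSTEM_1_IP)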
- nodes[$i]=${!CONTROLLERIP}
+ nodes[i]=${!CONTROLLERIP}
done
nodes_list=$(join "${nodes[@]}")
ctrl_index=$((ctrl_index -1))
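# swap OPENSTACK_SYSTEM[0] with the entry at ctrl_index so that node moves to the front of the array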
tmp_addr=${OPENSTACK_SYSTEM[0]}
OPENSTACK_SYSTEM[0]=${OPENSTACK_SYSTEM[$ctrl_index]}
- OPENSTACK_SYSTEM[$ctrl_index]=$tmp_addr
+ OPENSTACK_SYSTEM[ctrl_index]=$tmp_addr
fi
# Add alias for ODL_SYSTEM_1_IP as ODL_SYSTEM_IP
done
fi
mark-unstable-if-failed: true
-
name: lf-odl-releasedockerhub-macro
builders:
- shell: !include-raw-escape:
- - ./run_releasedockerhub.sh
+ - ./run_releasedockerhub.sh
##- ../shell/run_releasedockerhub.sh "{org} {summary} {verbose} {copy}"
-
- job:
name: lf-odl-release-docker-hub
build-timeout: 90
project-type: freestyle
node: ubuntu1804-docker-4c-4g
disable-job: false
- disabled: '{disable-job}'
+ disabled: "{disable-job}"
# Runs once a day
triggers:
- - timed: '@daily'
+ - timed: "@daily"
-# Define needed parameters for lf-infra-docker-login
+ # Define needed parameters for lf-infra-docker-login
parameters:
- string:
name: DOCKER_REGISTRY
- default: 'nexus3.opendaylight.org'
+ default: "nexus3.opendaylight.org"
- string:
name: REGISTRY_PORTS
- default: '10002'
+ default: "10002"
- string:
name: DOCKERHUB_REGISTRY
- default: 'docker.io'
+ default: "docker.io"
-# Define needed parameters for lf-odl-releasedockerhub-macro
+ # Define needed parameters for lf-odl-releasedockerhub-macro
- string:
name: RELEASEDOCKERHUB_ORG
- default: 'opendaylight'
- description: ''
+ default: "opendaylight"
+ description: ""
- string:
name: RELEASEDOCKERHUB_REPO
- default: ''
- description: 'Only match repos that contains this string'
+ default: ""
+ description: "Only match repos that contains this string"
- string:
name: RELEASEDOCKERHUB_EXACT
- default: ''
- description: 'Repo has to exactly match RELEASEDOCKERHUB_REPO '
+ default: ""
+ description: "Repo has to exactly match RELEASEDOCKERHUB_REPO "
- string:
name: RELEASEDOCKERHUB_SUMMARY
- default: 'Yes'
- description: 'Provides a brief statistic summary of what it found, and what it should do. Activate with Yes'
+ default: "Yes"
+ description: "Provides a brief statistic summary of what it found, and what it should do. Activate with Yes"
- string:
name: RELEASEDOCKERHUB_VERBOSE
- default: ''
- description: 'Provides a verbose output of the operations. Activate with Yes'
+ default: ""
+ description: "Provides a verbose output of the operations. Activate with Yes"
- string:
name: RELEASEDOCKERHUB_COPY
- default: 'Yes'
- description: 'If a copy operation should be done from Nexus3 to docker hub. Activate with Yes'
-
+ default: "Yes"
+ description: "If a copy operation should be done from Nexus3 to docker hub. Activate with Yes"
builders:
- - lf-infra-pre-build ## shell/python-tools-install.sh
+ - lf-infra-pre-build ## shell/python-tools-install.sh
- lf-infra-docker-login:
- global-settings-file: 'global-settings'
- settings-file: 'odl-releasedockerhub-authz-settings'
+ global-settings-file: "global-settings"
+ settings-file: "odl-releasedockerhub-authz-settings"
- lf-odl-releasedockerhub-macro:
-
##############################################################################
# Ensure we fail the job if any steps fail.
+# shellcheck disable=SC2039,SC3040
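+# (SC2039/SC3040: "set -o pipefail" is not POSIX sh; the job shell is assumed to be bash)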
set -eu -o pipefail
# shellcheck disable=SC1090
# $STAGING_PROFILE_ID : Provided by a job parameter.
# Ensure we fail the job if any steps fail.
+# shellcheck disable=SC2039,SC3040
set -xeu -o pipefail
TMP_FILE="$(mktemp)"
+---
####################
# COMMON FUNCTIONS #
####################
echo "cmd_str = >>$cmd_str<<"
# Run the releasedockerhub command in lftools
+# shellcheck disable=SC2086
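+# (SC2086: $cmd_str is intentionally left unquoted so it expands into separate command-line arguments)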
lftools nexus docker releasedockerhub $cmd_str
# This program and the accompanying materials are made available under the
# terms of the Eclipse Public License v1.0 which accompanies this distribution,
# and is available at http://www.eclipse.org/legal/epl-v10.html
+"""Main function for branch cutting a new stable release."""
import re
import requests
import python_lib
+
# pylint: disable=wrong-import-order
from pathlib import Path
from bs4 import BeautifulSoup
-from lxml import etree
+
+# from lxml import etree
+from defusedxml import lxml as etree
+
+"""Get the version from the groupId and artifactId."""
def get_version_for_artifact(group_id, artifact_id):
+ """Retrive version number from the groupId and artifactId."""
versions_list = []
- url = f'https://repo1.maven.org/maven2/org/opendaylight/{group_id}/{artifact_id}/'
+ url = f"https://repo1.maven.org/maven2/org/opendaylight/{group_id}/{artifact_id}/"
response = requests.get(url).content
- soup = BeautifulSoup(response, 'html.parser')
+ soup = BeautifulSoup(response, "html.parser")
try:
- html_lines = str(soup.find_all('pre')[0]).splitlines()
+ html_lines = str(soup.find_all("pre")[0]).splitlines()
except IndexError:
return "NOT FOUND"
for line in html_lines:
# Use a regular expression to find version
- pattern = re.compile(r'\d+\.\d+\.\d+')
+ pattern = re.compile(r"\d+\.\d+\.\d+")
title = pattern.search(line)
try:
versions_list.append(title.group())
# get all xml files
-for path in Path(python_lib.bumping_dir).rglob('*.xml'):
+for path in Path(python_lib.bumping_dir).rglob("*.xml"):
if "test/resources" in str(path):
continue
- tree = etree.parse(path)
+ parser = etree.XMLParser(resolve_entities=False, no_network=True)
+ tree = etree.parse(path, parser)
root = tree.getroot()
# update major and minor artifacts versions
if "pom.xml" in str(path):
prefix = "{" + root.nsmap[None] + "}"
- # line above can trigger a 'KeyError: None' outside pom.xml and feature.xml files.
- all_elements = tree.findall(
- f'.//{prefix}parent') + tree.findall(f'.//{prefix}dependency')
+ # line above can trigger a 'KeyError: None' outside pom.xml and
+ # feature.xml files.
+ all_elements = tree.findall(f".//{prefix}parent") + tree.findall(
+ f".//{prefix}dependency"
+ )
for element in all_elements:
- group_id_elem = (element.find(f'{prefix}groupId'))
- artifact_id_elem = (element.find(f'{prefix}artifactId'))
- version = (element.find(f'{prefix}version'))
+ group_id_elem = element.find(f"{prefix}groupId")
+ artifact_id_elem = element.find(f"{prefix}artifactId")
+ version = element.find(f"{prefix}version")
try:
if "org.opendaylight" in group_id_elem.text and version is not None:
# skip artifacts containing items in skipped list
skipped = ["${project.version}", "SNAPSHOT", "@project.version@"]
if not any(x in version.text for x in skipped):
new_version = get_version_for_artifact(
- group_id_elem.text.split(".")[2], artifact_id_elem.text)
+ group_id_elem.text.split(".")[2], artifact_id_elem.text
+ )
if python_lib.check_minor_version(version, new_version):
- print(python_lib.log_artifact(
- path, group_id_elem, artifact_id_elem, version.text, new_version))
+ print(
+ python_lib.log_artifact(
+ path,
+ group_id_elem,
+ artifact_id_elem,
+ version.text,
+ new_version,
+ )
+ )
version.text = new_version
- tree.write(path, encoding="UTF-8", pretty_print=True,
- doctype='<?xml version="1.0" encoding="UTF-8"?>')
+ tree.write(
+ path,
+ encoding="UTF-8",
+ pretty_print=True,
+ doctype='<?xml version="1.0" encoding="UTF-8"?>',
+ )
except AttributeError:
pass
# update feature versions
if "feature.xml" in str(path):
prefix = "{" + root.nsmap[None] + "}"
- # line above can trigger a 'KeyError: None' outside pom.xml and feature.xml files.
- all_features = tree.findall(f'.//{prefix}feature')
+ # line above can trigger a 'KeyError: None' outside pom.xml and
+ # feature.xml files.
+ all_features = tree.findall(f".//{prefix}feature")
# feature versions add +1
for feature in all_features:
try:
- if feature.attrib["version"] and feature.attrib["version"] != "${project.version}":
+ if (
+ feature.attrib["version"]
+ and feature.attrib["version"] != "${project.version}"
+ ):
current_version = feature.attrib["version"]
# workaround for float feature versions
- nums = current_version[1:-1].split(',')
+ nums = current_version[1:-1].split(",")
if "." in nums[0]:
nums[0] = str(round((float(nums[0]) + 0.01), 2))
else:
nums[0] = str(int(nums[0]) + 1)
nums[1] = str(int(nums[1]) + 1)
- result = '[' + ','.join(nums) + ')'
+ result = "[" + ",".join(nums) + ")"
feature.attrib["version"] = result
- print(python_lib.log_artifact(
- path=path, version=current_version, new_version=result))
- standalone = ''
+ print(
+ python_lib.log_artifact(
+ path=path, version=current_version, new_version=result
+ )
+ )
+ standalone = ""
if tree.docinfo.standalone:
standalone = ' standalone="yes"'
- tree.write(path, encoding="UTF-8", pretty_print=True,
- doctype=f'<?xml version="1.0" encoding="UTF-8"{standalone}?>')
+ tree.write(
+ path,
+ encoding="UTF-8",
+ pretty_print=True,
+ doctype=f'<?xml version="1.0" encoding="UTF-8"{standalone}?>',
+ )
except KeyError:
pass
# terms of the Eclipse Public License v1.0 which accompanies this distribution,
# and is available at http://www.eclipse.org/legal/epl-v10.html
+"""Library calls for branch cutting a new stable release."""
+
# modify this dir to change where projects are picked up from
bumping_dir = "repos"
def find_highest_revision(revisions):
+ """Return bigger versions in the tuple."""
# convert list of strings to list of tuples
- converted_items = [tuple(map(int, item.split('.'))) for item in revisions]
+ converted_items = [tuple(map(int, item.split("."))) for item in revisions]
biggest_item = max(converted_items, key=lambda x: x)
- biggest_version = '.'.join(str(x) for x in biggest_item)
+ biggest_version = ".".join(str(x) for x in biggest_item)
return biggest_version
def log_artifact(path, group_id=None, artifact_id=None, version=None, new_version=None):
+ """Add filename and path, artifactId, versions to log."""
log = ""
log += "XML FILE: " + str(path) + "\n"
# if none, printing feature update
if group_id is None:
- log_line = ("path:", path, "VERSION:", version,
- "NEW VERSION:", new_version)
+ log_line = ("path:", path, "VERSION:", version, "NEW VERSION:", new_version)
# else printing artifact update
else:
- log_line = ("groupId:", group_id.text, "ARTIFACT ID:",
- artifact_id.text, "VERSION:", version, "NEW VERSION:", new_version)
+ log_line = (
+ "groupId:",
+ group_id.text,
+ "ARTIFACT ID:",
+ artifact_id.text,
+ "VERSION:",
+ version,
+ "NEW VERSION:",
+ new_version,
+ )
log += str(log_line) + "\n"
log += str(100 * "*" + "\n")
return log
def check_minor_version(version, new_version):
+ """Take two version string and returns True if its same are the new version."""
# compares the corresponding elements of the two version strings
- if any(int(elem_a) != int(elem_b) for elem_a, elem_b in zip(version.text.split("."), new_version.split("."))):
+ if any(
+ int(elem_a) != int(elem_b)
+ for elem_a, elem_b in zip(version.text.split("."), new_version.split("."))
+ ):
return True
return False
+<!--
# Copyright (c) 2023 PANTHEON.tech s.r.o. All rights reserved.
-#
-# This program and the accompanying materials are made available under the
-# terms of the Eclipse Public License v1.0 which accompanies this distribution,
-# and is available at http://www.eclipse.org/legal/epl-v10.html
-
+# This program and the accompanying materials are available under the
+# terms of the Eclipse Public License v1.0 which accompanies this
+# distribution, and is available at
+# http://www.eclipse.org/legal/epl-v10.html
+-->
# Bumping MRI versions tool
-This program is making versions changes in pom.xml files. For example 10.0.1 to 10.0.2
-The change will aply only if groupId.text contain "org.opendaylight".
-
-This program is also making changes in feature.xml files. For example [0.16,1) to [0.17,1)
+This program makes version changes in pom.xml files, for example 10.0.1
+to 10.0.2. The change is applied only if groupId.text contains
+"org.opendaylight".
+This program also updates version ranges in feature.xml files, for example
+[0.16,1) to [0.17,2).
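+
+A minimal sketch of the feature-range bump described above (illustrative only;
+the real logic lives in this tool's bumping script):
+
+```python
+# Illustrative sketch, not the tool itself: bump a Karaf-style version range.
+current = "[0.16,1)"
+low, high = current[1:-1].split(",")
+low = str(round(float(low) + 0.01, 2)) if "." in low else str(int(low) + 1)
+high = str(int(high) + 1)
+print("[" + low + "," + high + ")")  # -> [0.17,2)
+```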
## Installing
-*Prerequisite:* The followings are required for building test:
+_Prerequisite:_
- Python 3.8+
GET THE CODE:
USING HTTPS:
- git clone "https://git.opendaylight.org/gerrit/releng/builder"
+git clone "https://git.opendaylight.org/gerrit/releng/builder"
USING SSH:
- git clone "ssh://{USERNAME}@git.opendaylight.org:29418/releng/builder"
+git clone "ssh://{USERNAME}@git.opendaylight.org:29418/releng/builder"
NAVIGATE TO:
- cd ~/builder/scripts/bump_mri_versions
+cd ~/builder/scripts/bump_mri_versions
INSTALL VIRTUAL ENVIRONMENT PACKAGE:
- sudo apt install python3-virtualenv
+sudo apt install python3-virtualenv
CREATE NEW VIRTUAL ENVIRONMENT:
- virtualenv venv
+virtualenv venv
ACTIVATE VIRTUAL ENVIRONMENT:
- . venv/bin/activate
+. venv/bin/activate
INSTALL LIBRARIES:
- pip install requests bs4 lxml
+pip install requests bs4 lxml
SET FOLDER FOR TESTING:
- clone repo for version updating in ~/builder/scripts/bump_mri_versions/repos or
- update "bumping_dir" variable in python_lib.py file
-
+clone the repo whose versions should be updated into
+~/builder/scripts/bump_mri_versions/repos, or update the "bumping_dir"
+variable in the python_lib.py file
## Running
## Logs
PRINT:
- Every change will be printed out to the console.
+All changes will be output to the console.
examples here:
XML FILE: repos/ovsdb/southbound/southbound-features/odl-ovsdb-southbound-impl/src/main/feature/feature.xml
('path:', PosixPath('repos/ovsdb/southbound/southbound-features/odl-ovsdb-southbound-impl/src/main/feature/feature.xml'), 'VERSION:', '[4,5)', 'NEW VERSION:', '[5,6)')
- ****************************************************************************************************
\ No newline at end of file
+ ****************************************************************************************************
search_string=$1
-echo -n "Enter system (sandbox|releng): "
+printf "Enter system (sandbox|releng): "
read -r system
-echo -n "Enter username: "
+printf "Enter username: "
read -r username
-echo -n "Enter api_token: "
+printf "Enter api_token: "
read -r password
echo "$username:$password"
search_string=$1
replace_string=$2
-echo -n "Enter system (sandbox|releng): "
+printf "Enter system (sandbox|releng): "
read -r system
-echo -n "Enter username: "
+printf "Enter username: "
read -r username
-echo -n "Enter api_token: "
+printf "Enter api_token: "
read -r password
echo "$username:$password"
grep "$search_string")
for job in $(echo "$jobs" | tr "\n" " "); do
- new_job=$(echo $job | sed "s@$search_string@$replace_string@g")
+ new_job=$(echo "$job" | sed "s@$search_string@$replace_string@g")
echo "Renaming $job to $new_job"
#curl --data "newName=${new_job}" "https://$username:
[email protected]/$system/job/${job}/doRename"
done
[tox]
minversion = 1.6
envlist = docs,docs-linkcheck,jjb-version,prefix,robot
-# pre-commit
skipsdist = true
[testenv:docs]