Merge "Purge ODL Sodium jobs from builder"
author Andrew Grimberg <agrimberg@linuxfoundation.org>
Fri, 18 Sep 2020 14:18:08 +0000 (14:18 +0000)
committer Gerrit Code Review <gerrit@opendaylight.org>
Fri, 18 Sep 2020 14:18:08 +0000 (14:18 +0000)
jjb/integration/integration-deploy-openstack-run-test.sh
jjb/lf-infra/lf-infra-jobs.yaml
jjb/netconf/netconf-scale.yaml
jjb/netvirt/netvirt-csit-multi-openstack.yaml
packer/provision/local-builder.yaml
packer/templates/builder.json [changed from symlink to file mode: 0644]
scripts/branch_cut/branch_cut.awk [deleted file]
scripts/branch_cut/branch_cutter.sh [deleted file]
scripts/cut-branch-jobs.py [new file with mode: 0755]
tox.ini

index d44b4b3f9f7bf2c0cf11a284fd960949247f539b..1dbb1ed7b061e1941f4deab9599bfd0573548081 100644 (file)
@@ -136,8 +136,8 @@ function install_rdo_release() {
           ${SSH} "${ip}" "sudo yum install -y https://repos.fedorapeople.org/repos/openstack/openstack-queens/rdo-release-queens-1.noarch.rpm"
           ;;
 
-       master)
-          ${SSH} "${ip}" "sudo yum install -y https://repos.fedorapeople.org/repos/openstack/openstack-rocky/rdo-release-rocky-1.noarch.rpm"
+       *stein*)
+          ${SSH} "${ip}" "sudo yum install -y https://repos.fedorapeople.org/repos/openstack/openstack-stein/rdo-release-stein-3.noarch.rpm"
           ;;
     esac
 }
@@ -353,6 +353,8 @@ minimize_polling=True
 # MTU(1400) + VXLAN(50) + VLAN(4) = 1454 < MTU eth0/br-physnet1(1458)
 physical_network_mtus = ${PUBLIC_PHYSICAL_NETWORK}:1400
 path_mtu = 1458
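+# VLAN ID range available for VLAN tenant/provider networks on ${PUBLIC_PHYSICAL_NETWORK}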
+[ml2_type_vlan]
+network_vlan_ranges=${PUBLIC_PHYSICAL_NETWORK}:1:4094
 EOF
     if [ "${ENABLE_GRE_TYPE_DRIVERS}" == "yes" ]; then
         cat >> "${local_conf_file_name}" << EOF
@@ -778,6 +780,10 @@ echo
 echo "workaround: do not upgrade openvswitch"
 sudo yum install -y yum-plugin-versionlock
 sudo yum versionlock add openvswitch
+
+echo "workaround: upgrade pip and setuptools"
+sudo pip install --upgrade pip
+sudo pip install --upgrade setuptools
 EOF
 
 cat > "${WORKSPACE}/setup_host_cell_mapping.sh" << EOF
index f534b8f9fd23a4a8b59efefdad6444ed2b422f8a..ee1f8461ed3f7c5ab122091e82ae27da036c4de3 100644 (file)
     rtd-token: 47a135ceba9053c6f88394a7171ebf5602a9fc95
     stream: any
     branch: "*"
+    parallel: true
 
     git-url: https://gerrit.linuxfoundation.org/infra/$GERRIT_PROJECT
     gerrit-server-name: lf-releng
index 75ae8c65641ed775ac1bca5d736a3c29b29a35c6..c50fe7821b6955090850dedb816bcf3b81e73c99 100644 (file)
@@ -56,8 +56,8 @@
           branch: "master"
           jre: "openjdk11"
           robot-options:
-            "-v USE_NETCONF_CONNECTOR:False -v SCHEMA_MODEL:juniper -v INIT_DEVICE_COUNT:250
-            -v MAX_DEVICE_COUNT:1000 -v DEVICE_INCREMENT:100 -v TESTTOOL_BOOT_TIMEOUT:600s
+            "-v USE_NETCONF_CONNECTOR:False -v SCHEMA_MODEL:juniper -v INIT_DEVICE_COUNT:400
+            -v MAX_DEVICE_COUNT:3000 -v DEVICE_INCREMENT:200 -v TESTTOOL_BOOT_TIMEOUT:600s
             -v MAX_HEAP:4G --flattenkeywords name:*repeat*keyword*"
       - magnesium:
           branch: "stable/magnesium"
@@ -89,3 +89,5 @@
     01-plot-yaxis: "Devices"
     01-plot-group: "Netconf Devices"
     01-plot-data-file: "devices.csv"
+
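+    # Job timeout in minutes; allow extra time for the larger device counts above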
+    build-timeout: "720"
index 5d7124ddb8942bfdb01f26f5d9e260cda30a40e9..af147a715ab61fc0e2f7b9916d50ed50080c2625 100644 (file)
@@ -15,7 +15,7 @@
     security-group-mode: "stateful"
     odl_system_flavor: odl-highcpu-2
     openstack_system_flavor: odl-highcpu-8
-    openstack_system2_flavor: odl-highcpu-4
+    openstack_system2_flavor: odl-standard-4
     tools_system_count: 0
     os-cmb-cnt: 0
     os-ctl-cnt: 1
@@ -86,7 +86,8 @@
     name: netvirt-csit-openstack-integration-silicon-stein
     jobs:
       - inttest-csit-openstack-custom
-
+    #temporarily disabling Stein jobs
+    disable-job: true
     project: "netvirt"
     stream: "silicon"
     branch: "master"
     security-group-mode: "stateful"
     odl_system_flavor: odl-highcpu-2
     openstack_system_flavor: odl-highcpu-8
-    openstack_system2_flavor: odl-highcpu-4
+    openstack_system2_flavor: odl-standard-4
     tools_system_count: 0
     os-cmb-cnt: 0
     os-ctl-cnt: 1
     jobs:
       - inttest-csit-openstack-custom
 
+    #temporarily disabling Stein jobs
+    disable-job: true
     project: "netvirt"
     stream: "aluminium"
     branch: "stable/aluminium"
     jobs:
       - inttest-csit-openstack-custom
 
+    #temporarily disabling Stein jobs
+    disable-job: true
     project: "netvirt"
     stream: "magnesium"
     branch: "stable/magnesium"
     jobs:
       - inttest-csit-openstack-custom
 
+    #temporarily disabling Stein jobs
+    disable-job: true
     project: "netvirt"
     testplan: "{project}-{topology}-openstack.txt"
     odl-ml2-driver-version: "v2"
     jobs:
       - inttest-csit-openstack-custom
 
+    #temporarily disabling Stein jobs
+    disable-job: true
     project: "netvirt"
     testplan: "{project}-{topology}-openstack.txt"
     odl-ml2-driver-version: "v2"
index cb56f89c443fe7fcfbe06f7337b8366038d1a817..ff9d444c3f0208b4ba2223d15ee592efb195a2ac 100644 (file)
@@ -1,5 +1,6 @@
 ---
 - import_playbook: ../common-packer/provision/baseline.yaml
+- import_playbook: ../common-packer/provision/docker.yaml
 
 - hosts: all
   become_user: root
deleted file mode 120000 (symlink)
index bd227be506197b62f37dcd7037a31763d97606f7..0000000000000000000000000000000000000000
+++ /dev/null
@@ -1 +0,0 @@
-../common-packer/templates/builder.json
\ No newline at end of file
new file mode 100644 (file)
index 0000000000000000000000000000000000000000..fa19226a85adb5f9813cc9734ba4c22c69b2b211
--- /dev/null
@@ -0,0 +1,62 @@
+{
+  "variables": {
+    "ansible_roles_path": ".galaxy",
+    "arch": "x86_64",
+    "base_image": null,
+    "cloud_network": null,
+    "cloud_user_data": null,
+    "cloud_region": "ca-ymq-1",
+    "cloud_availability_zone": "ca-ymq-2",
+    "vm_use_block_storage": "true",
+    "vm_volume_size": "20",
+    "vm_image_disk_format": "",
+    "distro": null,
+    "flavor": "v2-highcpu-4",
+    "ssh_user": null,
+    "ssh_proxy_host": ""
+  },
+  "builders": [
+    {
+      "name": "vexxhost",
+      "image_name": "ZZCI - {{user `distro`}} - builder - {{user `arch`}} - {{isotime \"20060102-150405.000\"}}",
+      "instance_name": "{{user `distro`}}-builder-{{uuid}}",
+      "source_image_name": "{{user `base_image`}}",
+      "type": "openstack",
+      "region": "{{user `cloud_region`}}",
+      "availability_zone": "{{user `cloud_availability_zone`}}",
+      "networks": ["{{user `cloud_network`}}"],
+      "user_data_file": "{{user `cloud_user_data`}}",
+      "ssh_username": "{{user `ssh_user`}}",
+      "ssh_proxy_host": "{{user `ssh_proxy_host`}}",
+      "flavor": "{{user `flavor`}}",
+      "metadata": {
+        "ci_managed": "yes"
+      },
+      "use_blockstorage_volume": "{{user `vm_use_block_storage`}}",
+      "volume_size": "{{user `vm_volume_size`}}",
+      "image_disk_format": "{{user `vm_image_disk_format`}}"
+    }
+  ],
+  "provisioners": [
+    {
+      "type": "shell",
+      "scripts": ["common-packer/provision/install-python.sh"],
+      "execute_command": "chmod +x {{ .Path }}; if [ \"$UID\" == \"0\" ]; then {{ .Vars }} '{{ .Path }}'; else {{ .Vars }} sudo -E '{{ .Path }}'; fi"
+    },
+    {
+      "type": "shell-local",
+      "command": "./common-packer/ansible-galaxy.sh {{user `ansible_roles_path`}}"
+    },
+    {
+      "type": "ansible",
+      "playbook_file": "provision/local-builder.yaml",
+      "ansible_env_vars": [
+        "ANSIBLE_NOCOWS=1",
+        "ANSIBLE_PIPELINING=True",
+        "ANSIBLE_ROLES_PATH={{user `ansible_roles_path`}}",
+        "ANSIBLE_CALLBACK_WHITELIST=profile_tasks",
+        "ANSIBLE_STDOUT_CALLBACK=debug"
+      ]
+    }
+  ]
+}
diff --git a/scripts/branch_cut/branch_cut.awk b/scripts/branch_cut/branch_cut.awk
deleted file mode 100755 (executable)
index a317662..0000000
+++ /dev/null
@@ -1,241 +0,0 @@
-#!/usr/bin/awk -f
-# SPDX-License-Identifier: EPL-1.0
-##############################################################################
-# Copyright (c) 2017 The Linux Foundation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Eclipse Public License v1.0
-# which accompanies this distribution, and is available at
-# http://www.eclipse.org/legal/epl-v10.html
-#
-##############################################################################
-
-BEGIN {
-    new_tag                     = new_reltag       # new release tag
-    curr_tag                    = curr_reltag      # current release tag
-    prev_tag                    = prev_reltag      # previous release tag
-
-    new_release                 = tolower(new_tag)
-    curr_release                = tolower(curr_tag)
-    prev_release                = tolower(prev_tag)
-
-    ws = "[\\t ]*"                                 # white-spaces
-    startpat = "^" ws "- project:"                 # start pattern
-    endpat = startpat                              # end pattern
-    op = "^" ws "---" ws "$"                       # match files starts with "---"
-
-    next_release_tag            = "^" ws "next-release-tag: '{stream}'"
-    master                      = "'master'"
-    new_branch                  = "'stable/" new_release "'"
-    curr_branch                 = "'stable/" curr_release "'"
-    prev_branch                 = "'stable/" prev_release "'"
-
-    # replace block to add new release
-    new_rel_yaml_tag            = "- " new_release ":";
-    br_master_yaml_tag          = "    branch: 'master'";
-    jre_yaml_tag                = "    jre: 'openjdk11'";
-    java_version_yaml_tag       = "    java-version: 'openjdk11'";
-    curr_rel_yaml_tag           = "- " curr_release ":";
-    br_stable_curr_yaml_tag     = "    branch: 'stable/" curr_release "'";
-
-    # replace block for autorelease-projects
-    #new_rel_yaml_tag           = "- " new_release ":";
-    next_rel_tag_new_yaml_tag   = "    next-release-tag: '{stream}'";
-    #br_master_yaml_tag         = "    branch: 'master'";
-    jdk_yaml_tag                = "    jdk: 'openjdk8'";
-    intg_test_yaml_tag          = "    integration-test: " new_release;
-    extra_mvn_opts_tag          = "    extra-mvn-opts: -Dsft.heap.max=4g"
-    #curr_rel_yaml_tag          = "- " curr_release ":";
-    next_rel_tag_curr_yaml_tag  = "    next-release-tag: '{stream}'";
-    #br_stable_curr_yaml_tag    = "    branch: 'stable/" curr_release "'";
-
-    # search patterns
-    smaster = "^" ws "- master:"
-    sstream = "^" ws "stream:"
-    srelease = "^" ws "- " curr_release ":"
-    snext_release_tag = "^" ws "next-release-tag:"
-    #if (l ~ next_release_tag) { next_release_tag = 1; continue; }
-    sbranch = "^" ws "branch: " master
-    sfunctionality = "^" ws "functionality:"
-
-    debug = 0                                   # set to 1 to print debug info
-    file_format = 2                             # project stream format
-
-    release_found = 0
-    stream_found = 0
-    nrt_found = 0
-    func_found = 0
-}
-
-{
-    # exit if release info is not available
-    if ((length(new_release) == 0 || length(curr_release) == 0)) {
-        exit;
-    }
-
-    # read all lines of the file into an array
-    file[NR] = $0
-}
-
-END {
-    n = NR                                      # total number of lines
-    find_blks()                                 # gets number of blocks between start/end pattern
-    process_blk(arr_bs[1], arr_be[1], 1)        # pass start and end of each block and process first block
-    update_file(arr_be[1])                      # write processed content
-
-    if (debug) {
-        print "number of blocks="nb;
-        print "total records in file[]="length(f);
-        print "size of firstblk[]="length(firstblk);
-        print "size of newblk[]="length(newblk);
-        print "size of oldmaster[]="length(oldmaster);
-        print "size of newblk[]="length(newblk);
-    }
-}
-
-function find_blks(   i, l, bs, be) {
-    for (i = 1; i <= n; i++) {
-        l = file[i]
-        if (l ~ startpat) project = 1                        # start pattern
-        if (bs > be && l ~ endpat) arr_be[++be] = i - 1      # block end
-        if (           l ~ startpat) arr_bs[++bs] = i - 1    # block start
-    }
-    nb = be
-
-    # to handle files with single blocks
-    if (nb == 0 && length(file) > 1 && project == 1) {
-        nb = 1
-        arr_bs[1] = 1                               # start after line '---'
-        arr_be[1] = length(file)                    # set length of the file
-    }
-
-    if (debug) {
-        for (i = 1; i < nb; i++)
-            print "find_blks: nb=" nb " arr_bs[" i "]="arr_bs[i]" arr_be[" i "]="arr_be[i];
-    }
-}
-
-function process_blk(bs, be, bn,   i, l) {
-    if (debug) {
-        print "process_blk: bn=" bn ", bs=" bs " ,be=" be
-    }
-
-    # get the first block
-    for (i = bs + 1; i <= be ; i++) {
-        l = file[i]
-        # determine file format
-        if (l ~ /stream:/) {
-            x=index(l,":")
-            s = substr(l, x+2, length(l) - x)
-            if (s == curr_release || s == new_release) {
-                file_format = 1
-            } else if (length(s) == 0 ) {
-                file_format = 0
-            }
-        }
-        firstblk[++nex] = l
-    }
-
-    if (debug) {
-        print "process_blk: stream='" s "' length(s)=" length(s)" file_format='" file_format "'"
-    }
-
-    # Handle single stream format
-    if (file_format == 1) {
-        # create new block to be inserted
-        for (i = 1; i <= length(firstblk); i++) {
-            l = firstblk[i]
-            if (l ~ /name:|stream:/) sub(curr_release, new_release, l)
-            newblk[++nex1] = l
-        }
-        # re-create old block and change master to stable/branch
-        for (i = 1; i <= length(firstblk)-1; i++) {
-            l = firstblk[i]
-            if (l ~ /branch:/) sub(master, curr_branch, l)
-            oldmaster[++nex2] = l
-        }
-    } else if (file_format == 0) {
-        # Handle multi-stream format
-        for (i = 1; i <= length(firstblk)-1; i++) {
-            l = firstblk[i]
-            if (l ~ sstream) { stream_found = 1; }
-            if (l ~ srelease) { release_found = 1; indent = substr(l, 1, index(l, "-")-1); continue; }
-            if (l ~ sfunctionality) { func_found = 1; }
-            if (l ~ snext_release_tag) { nrt_found = 1; }
-            if (l ~ sbranch) {
-                # append lines
-                if (stream_found && release_found && !nrt_found) {
-                    newblk[++nex3] = indent new_rel_yaml_tag;
-                    newblk[++nex3] = indent br_master_yaml_tag;
-                    newblk[++nex3] = indent java_version_yaml_tag;
-                    newblk[++nex3] = indent curr_rel_yaml_tag;
-                    newblk[++nex3] = indent br_stable_curr_yaml_tag;
-                    stream_found = 0;
-                    release_found = 0;
-                    func_found = 0;
-                    continue;
-                }
-                if (stream_found && release_found && nrt_found) {
-                    newblk[++nex3] = indent new_rel_yaml_tag;
-                    newblk[++nex3] = indent next_rel_tag_new_yaml_tag;
-                    newblk[++nex3] = indent br_master_yaml_tag;
-                    newblk[++nex3] = indent intg_test_yaml_tag;
-                    newblk[++nex3] = indent extra_mvn_opts_tag;
-                    newblk[++nex3] = indent curr_rel_yaml_tag;
-                    newblk[++nex3] = indent next_rel_tag_curr_yaml_tag;
-                    newblk[++nex3] = indent br_stable_curr_yaml_tag;
-                    stream_found = 0; release_found = 0; nrt_found=0;
-                    continue;
-                }
-            }
-            newblk[++nex3] = l
-
-            if (debug) {
-                print "process_blk: append(newblk[]) : stream="stream" release_found="release_found
-            }
-        }
-    } else {
-        # exit on unknown file format
-        exit;
-    }
-}
-
-function update_file(be,   i, j, l) {
-    i = 1
-    # handle lines before "---"
-    while (i <= n) {
-        print l = file[i++]
-        if (l ~ op) break
-    }
-
-    if (debug) {
-        print "writing master block"
-    }
-
-    # Handle single stream format
-    if (file_format == 1) {
-        for (j = 1; j <= nex1; j++)                   # write new branch block
-            print newblk[j]
-
-        if (debug) {
-            print "writing stable block"
-        }
-
-        for (j = 1; j <= nex2; j++)                   # write updated branch block
-            print oldmaster[j]
-
-    # Handle multi-stream format
-    } else if (file_format == 0) {
-        # print the first block
-        for (j = 1; j <= nex3; j++)                   # write multi-stream block
-            print newblk[j]
-    }
-
-    if (debug) {
-        print "writing rest of the file"
-    }
-
-    while (be <= n) {                                 # write rest of the file
-        print file[be++]
-    }
-}
diff --git a/scripts/branch_cut/branch_cutter.sh b/scripts/branch_cut/branch_cutter.sh
deleted file mode 100755 (executable)
index 80fc652..0000000
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/bin/bash
-# SPDX-License-Identifier: EPL-1.0
-##############################################################################
-# Copyright (c) 2017 The Linux Foundation and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Eclipse Public License v1.0
-# which accompanies this distribution, and is available at
-# http://www.eclipse.org/legal/epl-v10.html
-#
-##############################################################################
-
-# List of directories, files to exclude
-declare -a excludes=("defaults.yaml"
-                     "releng-macros.yaml"
-                     "global-jjb"
-                     "lf-infra"
-                     "-macros.yaml"
-                     "validate-autorelease"
-                     "opflex-dependencies.yaml")
-
-TEMP="/tmp/tmp.yaml"
-mod=0
-count=0
-
-function usage {
-    echo "Usage: $(basename "$0") options (-c [current release]) (-n [next release]) (-p [previous release]) -h for help";
-    echo "example:"
-    echo "branch_cutter.sh -n oxygen -c nitrogen -p carbon"
-    exit 0;
-}
-
-if ( ! getopts ":n:c:p:h" opt ); then
-    usage;
-fi
-
-while getopts ":n:c:p:h" opt; do
-    case $opt in
-        n)
-            new_reltag=$OPTARG
-            ;;
-        c)
-            curr_reltag=$OPTARG
-            ;;
-        p)
-            prev_reltag=$OPTARG
-            ;;
-        \?)
-            echo "Invalid option: -$OPTARG" >&2
-            exit 1
-            ;;
-        h)
-            usage
-            ;;
-        :)
-            echo "Option -$OPTARG requires an argument." >&2
-            exit 1
-            ;;
-    esac
-done
-
-echo "Start Branch Cutting:"
-
-while IFS="" read -r file; do
-    found=0
-    for exclude in "${excludes[@]}"; do
-        if [[ $file =~ $exclude && $found -eq 0 ]]; then
-            found=1
-            break
-        fi
-    done
-    if [[ $found -eq 1 ]]; then
-        echo "Ignore file $file found in excludes list"
-    else
-        ./branch_cut.awk -v new_reltag="$new_reltag" -v curr_reltag="$curr_reltag" -v prev_reltag="$prev_reltag" "$file" > "$TEMP"
-        [[ ! -s "$TEMP" ]] && echo "$file: excluded"
-        [[ -s "$TEMP" ]] && mv "$TEMP" "$file" && echo "$file: Done" && (( mod++ ))
-        (( count++ ))
-    fi
-done < <(find ../../jjb -name "*.yaml")
-
-echo "Modified $mod out of $count files"
-echo "Completed"
diff --git a/scripts/cut-branch-jobs.py b/scripts/cut-branch-jobs.py
new file mode 100755 (executable)
index 0000000..29cb121
--- /dev/null
@@ -0,0 +1,371 @@
+# SPDX-License-Identifier: EPL-1.0
+##############################################################################
+# Copyright (c) 2020 Thanh Ha
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Eclipse Public License v1.0
+# which accompanies this distribution, and is available at
+# http://www.eclipse.org/legal/epl-v10.html
+#
+##############################################################################
+"""Script for cutting new jobs when branching a new stable release."""
+
+import argparse
+from argparse import RawTextHelpFormatter
+import copy
+import fileinput
+import os
+import shutil
+import sys
+
+try:
+    import ruamel.yaml
+except ModuleNotFoundError:
+    print("ERROR: This script requires the package 'ruamel.yaml', please install it.")
+    print(
+        "If ruamel.yaml is not available in your system's package manager you"
+        " can install from PyPi with:"
+    )
+    print("")
+    print("    pip install --user ruamel.yaml")
+    sys.exit(1)
+
+yaml = ruamel.yaml.YAML()
+yaml.allow_duplicate_keys = True
+yaml.preserve_quotes = True
+
+default_branch = "master"  # This is the primary dev branch of the project
+
+
+def create_and_update_project_jobs(
+    release_on_stable_branch, release_on_current_branch, job_dir
+):
+    """Create and update project build jobs for the current and next dev release.
+
+    Project jobs are jobs defined in the project.yaml that have the same name
+    as the directory they are in.
+
+    Only updates projects where the top project configuration has a name that
+    is equivalent to the current release. For example project name
+    "aaa-silicon" would have a release that matches what was passed to
+    release_on_stable_branch.
+    """
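+    # Illustrative example (hypothetical project "aaa", stable codename "silicon",
+    # next codename "phosphorus"): a jjb/aaa/aaa.yaml whose first project is named
+    # "aaa-silicon" gains a copy named "aaa-phosphorus" that builds the default
+    # branch, while the original entry is repointed to stable/silicon.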
+    for directory in filter(
+        lambda x: os.path.isdir(os.path.join(job_dir, x)), os.listdir(job_dir)
+    ):
+        try:
+            with open(
+                os.path.join(job_dir, directory, "{}.yaml".format(directory)), "r"
+            ) as f:
+                data = yaml.load(f)
+
+                # Only create new jobs if the top level project name matches
+                # release_on_stable_branch variable
+                if not data[0]["project"]["name"] == "{}-{}".format(
+                    directory, release_on_stable_branch
+                ):
+                    continue
+
+                # Create a new job for the next release on the default_branch
+                new_job = copy.deepcopy(data[0])
+                new_job["project"]["name"] = "{}-{}".format(
+                    directory, release_on_current_branch
+                )
+                new_job["project"]["branch"] = default_branch
+                new_job["project"]["stream"] = "{}".format(release_on_current_branch)
+
+                # Update existing job for the new stable branch
+                data[0]["project"]["branch"] = "stable/{}".format(
+                    release_on_stable_branch
+                )
+
+                data.insert(0, new_job)
+
+            with open(
+                os.path.join(job_dir, directory, "{}.yaml".format(directory)), "w"
+            ) as f:
+                stream = ruamel.yaml.round_trip_dump(data)
+                f.write("---\n")
+                f.write(stream)
+        except FileNotFoundError:  # If project.yaml file does not exist we can skip
+            pass
+
+
+def update_job_streams(release_on_stable_branch, release_on_current_branch, job_dir):
+    """Update projects that have a stream variable that is a list.
+
+    If a stream variable is a list, that means the project likely supports
+    multiple maintenance branches.
+
+    This function also does not support {project}.yaml files, as parsing those
+    is handled by other functions in this script.
+
+    Only updates projects where the top stream in the list is equivalent to the
+    current release. For example stream "silicon" would have a release that
+    matches what was passed to release_on_stable_branch.
+    """
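+    # Illustrative example (hypothetical): a stream list starting with
+    #   - silicon:
+    #       branch: "master"
+    # becomes
+    #   - phosphorus:
+    #       branch: "master"
+    #   - silicon:
+    #       branch: "stable/silicon"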
+    for directory in filter(
+        lambda d: os.path.isdir(os.path.join(job_dir, d)), os.listdir(job_dir)
+    ):
+        for job_file in filter(
+            lambda f: os.path.isfile(os.path.join(job_dir, directory, f)),
+            os.listdir(os.path.join(job_dir, directory)),
+        ):
+
+            # Projects may have non-yaml files in their repos so ignore them.
+            if not job_file.endswith(".yaml"):
+                continue
+
+            # Ignore project.yaml files as they are not supported by this function.
+            if job_file == "{}.yaml".format(directory):
+                continue
+
+            file_changed = False
+
+            with open(os.path.join(job_dir, directory, job_file), "r") as f:
+                data = yaml.load(f)
+
+                for project in data:
+                    streams = project.get("project", {}).get("stream", None)
+
+                    if not isinstance(streams, list):  # We only support list-type streams
+                        continue
+
+                    # Skip if the stream does not match
+                    # release_on_stable_branch in the first item
+                    if not streams[0].get(release_on_stable_branch, None):
+                        continue
+
+                    # Create the next release stream
+                    new_stream = {}
+                    new_stream[release_on_current_branch] = copy.deepcopy(
+                        streams[0].get(release_on_stable_branch)
+                    )
+
+                    # Update the previous release stream branch to
+                    # stable/{stream} instead of default_branch
+                    streams[0][release_on_stable_branch]["branch"] = "stable/{}".format(
+                        release_on_stable_branch
+                    )
+
+                    streams.insert(0, new_stream)
+                    file_changed = True
+
+            # Because we are looping over every file, only save if we made changes.
+            if file_changed:
+                with open(os.path.join(job_dir, directory, job_file), "w") as f:
+                    stream = ruamel.yaml.round_trip_dump(data)
+                    f.write("---\n")
+                    f.write(stream)
+
+
+def update_integration_csit_list(
+    release_on_stable_branch, release_on_current_branch, job_dir
+):
+    """Update csit-*-list variables and files used by integration-test-jobs.yaml."""
+
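+    # Helper classes so ruamel.yaml can load and re-emit the custom "!include:"
+    # tag used by csit-list entries instead of failing on an unknown YAML tag.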
+    class Generic:
+        def __init__(self, tag, value, style=None):
+            self._value = value
+            self._tag = tag
+            self._style = style
+
+    class GenericScalar(Generic):
+        @classmethod
+        def to_yaml(self, representer, node):
+            return representer.represent_scalar(node._tag, node._value)
+
+        @staticmethod
+        def construct(constructor, node):
+            return constructor.construct_scalar(node)
+
+    def default_constructor(constructor, tag_suffix, node):
+        generic = {ruamel.yaml.ScalarNode: GenericScalar,}.get(  # noqa
+            type(node)
+        )
+        if generic is None:
+            raise NotImplementedError("Node: " + str(type(node)))
+        style = getattr(node, "style", None)
+        instance = generic.__new__(generic)
+        yield instance
+        state = generic.construct(constructor, node)
+        instance.__init__(tag_suffix, state, style=style)
+
+    ruamel.yaml.add_multi_constructor(
+        "", default_constructor, Loader=ruamel.yaml.SafeLoader
+    )
+    yaml.register_class(GenericScalar)
+
+    integration_test_jobs_yaml = os.path.join(
+        job_dir, "integration", "integration-test-jobs.yaml"
+    )
+
+    with open(integration_test_jobs_yaml, "r") as f:
+        data = yaml.load(f)
+
+        for project in data:
+            # Skip items that are not of "project" type
+            if not project.get("project"):
+                continue
+
+            streams = project.get("project", {}).get("stream", None)
+
+            # Skip projects that do not have a stream configured
+            if not isinstance(streams, list):  # We only support list-type streams
+                continue
+
+            # Skip if the stream does not match
+            # release_on_current_branch in the first item
+            if not streams[0].get(release_on_current_branch, None):
+                continue
+
+            # Update csit-list parameters for next release
+            if streams[0][release_on_current_branch].get("csit-list"):
+                update_stream = streams[0][release_on_current_branch]
+                update_stream["csit-list"] = GenericScalar(
+                    "!include:", "csit-jobs-{}.lst".format(release_on_current_branch)
+                )
+
+            # Update csit-mri-list parameters for next release
+            if streams[0][release_on_current_branch].get("csit-mri-list"):
+                update_stream = streams[0][release_on_current_branch]
+                update_stream["csit-mri-list"] = "{{csit-mri-list-{}}}".format(
+                    release_on_current_branch
+                )
+
+            # Update csit-weekly-list parameters for next release
+            if streams[0][release_on_current_branch].get("csit-weekly-list"):
+                update_stream = streams[0][release_on_current_branch]
+                update_stream["csit-weekly-list"] = "{{csit-weekly-list-{}}}".format(
+                    release_on_current_branch
+                )
+
+            # Update csit-sanity-list parameters for next release
+            if streams[0][release_on_current_branch].get("csit-sanity-list"):
+                update_stream = streams[0][release_on_current_branch]
+                update_stream["csit-sanity-list"] = "{{csit-sanity-list-{}}}".format(
+                    release_on_current_branch
+                )
+
+    with open(integration_test_jobs_yaml, "w") as f:
+        stream = ruamel.yaml.round_trip_dump(data)
+        f.write("---\n")
+        f.write(stream)
+
+    # Update the csit-*-list variables in defaults.yaml
+
+    defaults_yaml = os.path.join(job_dir, "defaults.yaml")
+
+    with open(defaults_yaml, "r") as f:
+        data = yaml.load(f)
+
+        # Add next release csit-mri-list-RELEASE
+        new_csit_mri_list = copy.deepcopy(
+            data[0]["defaults"].get("csit-mri-list-{}".format(release_on_stable_branch))
+        )
+        data[0]["defaults"][
+            "csit-mri-list-{}".format(release_on_current_branch)
+        ] = new_csit_mri_list.replace(
+            release_on_stable_branch, release_on_current_branch
+        )
+
+        # Add next release csit-weekly-list-RELEASE
+        new_csit_mri_list = copy.deepcopy(
+            data[0]["defaults"].get(
+                "csit-weekly-list-{}".format(release_on_stable_branch)
+            )
+        )
+        data[0]["defaults"][
+            "csit-weekly-list-{}".format(release_on_current_branch)
+        ] = new_csit_mri_list.replace(
+            release_on_stable_branch, release_on_current_branch
+        )
+
+        # Add next release csit-sanity-list-RELEASE
+        new_csit_mri_list = copy.deepcopy(
+            data[0]["defaults"].get(
+                "csit-sanity-list-{}".format(release_on_stable_branch)
+            )
+        )
+        data[0]["defaults"][
+            "csit-sanity-list-{}".format(release_on_current_branch)
+        ] = new_csit_mri_list.replace(
+            release_on_stable_branch, release_on_current_branch
+        )
+
+    with open(defaults_yaml, "w") as f:
+        stream = ruamel.yaml.round_trip_dump(data)
+        f.write("---\n")
+        f.write(stream)
+
+    # Handle copying and updating the csit-*.lst files
+    csit_file = "csit-jobs-{}.lst".format(release_on_stable_branch)
+    src = os.path.join(job_dir, "integration", csit_file)
+    dest = os.path.join(
+        job_dir,
+        "integration",
+        csit_file.replace(release_on_stable_branch, release_on_current_branch),
+    )
+    shutil.copyfile(src, dest)
+    with fileinput.FileInput(dest, inplace=True) as file:
+        for line in file:
+            print(
+                line.replace(release_on_stable_branch, release_on_current_branch),
+                end="",
+            )
+
+
+parser = argparse.ArgumentParser(
+    description="""Creates & updates jobs for ODL projects when branch cutting.
+
+    Example usage: python scripts/cut-branch-jobs.py Silicon Phosphorus jjb/
+
+    ** If calling from tox the JOB_DIR is auto-detected so only pass the current
+    and next release stream names. **
+    """,
+    formatter_class=RawTextHelpFormatter,
+)
+parser.add_argument(
+    "release_on_stable_branch",
+    metavar="RELEASE_ON_STABLE_BRANCH",
+    type=str,
+    help="The ODL release codename for the stable branch that was cut.",
+)
+parser.add_argument(
+    "release_on_current_branch",
+    metavar="RELEASE_ON_CURRENT_BRANCH",
+    type=str,
+    help="""The ODL release codename for the new {}
+        (e.g. Magnesium, Aluminium, Silicon).""".format(
+        default_branch
+    ),
+)
+parser.add_argument(
+    "job_dir",
+    metavar="JOB_DIR",
+    type=str,
+    help="Path to the directory containing JJB config.",
+)
+args = parser.parse_args()
+
+# We only handle lower release codenames
+release_on_stable_branch = args.release_on_stable_branch.lower()
+release_on_current_branch = args.release_on_current_branch.lower()
+
+create_and_update_project_jobs(
+    release_on_stable_branch, release_on_current_branch, args.job_dir
+)
+update_job_streams(release_on_stable_branch, release_on_current_branch, args.job_dir)
+update_integration_csit_list(
+    release_on_stable_branch, release_on_current_branch, args.job_dir
+)
diff --git a/tox.ini b/tox.ini
index 9277b269b97f4acf535415d1cc08b1db19baafb7..a432fb1bb558b8239e6a50b456b0498cd75b5d5b 100644 (file)
--- a/tox.ini
+++ b/tox.ini
@@ -30,3 +30,12 @@ commands = python {toxinidir}/check_prefix.py
 [testenv:robot]
 basepython = python3
 commands = python {toxinidir}/check_robot.py
+
+[testenv:cut-branch-jobs]
+basepython = python3
+deps =
+    pre-commit
+    ruamel.yaml
+commands =
+    python scripts/cut-branch-jobs.py {posargs:--help} {toxinidir}/jjb
+    - pre-commit run --all-files prettier
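+
+# Example invocation (assuming "silicon" is the newly cut stable branch and
+# "phosphorus" is the next release on master):
+#
+#   tox -e cut-branch-jobs -- silicon phosphorus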